Mirror of https://github.com/microsoft/TypeScript.git

Commit 5d415cac52: Merge branch 'master' into asyncGenerators

.mailmap (14 lines changed)
@ -8,11 +8,14 @@ Alexander Rusakov <a_s_rusakov@mail.ru>
Alex Eagle <alexeagle@google.com>
Anatoly Ressin <anatoly.ressin@icloud.com>
Anders Hejlsberg <andersh@microsoft.com> unknown <andersh@AndersX1.NOE.Nokia.com> unknown <andersh@andersh-yoga.redmond.corp.microsoft.com>
about-code <about-code@users.noreply.github.com> # Andreas Martin
Andrej Baran <andrej.baran@gmail.com>
Andrew Ochsner <andrew.ochsner@wipro.com>
Andrew Z Allen <me@andrewzallen.com>
Andy Hanson <anhans@microsoft.com> Andy <anhans@microsoft.com>
Anil Anar <anilanar@hotmail.com>
Anton Tolmachev <myste@mail.ru>
Anubha Mathur <anubmat@microsoft.com> anubmat <anubmat@microsoft.com>
Arnavion <arnavion@gmail.com> # Arnav Singh
Arthur Ozga <aozgaa@umich.edu> Arthur Ozga <t-arthoz@microsoft.com> Arthur Ozga <aozgaa-ms@outlook.com> Arthur Ozga <aozgaa@users.noreply.github.com> Arthur Ozga <arozga@microsoft.com>
Asad Saeeduddin <masaeedu@gmail.com>
@ -37,6 +40,7 @@ Dan Corder <dev@dancorder.com>
Dan Quirk <danquirk@microsoft.com> Dan Quirk <danquirk@users.noreply.github.com> nknown <danquirk@DANQUIRK1.redmond.corp.microsoft.com>
Daniel Rosenwasser <drosen@microsoft.com> Daniel Rosenwasser <DanielRosenwasser@users.noreply.github.com> Daniel Rosenwasser <DanielRosenwasser@gmail.com> Daniel Rosenwasser <Daniel.Rosenwasser@microsoft.com> Daniel Rosenwasser <DanielRosenwasser@microsoft.com>
David Li <jiawei.davidli@gmail.com>
David Sheldrick <david@futurice.com>
David Souther <davidsouther@gmail.com>
Denis Nedelyaev <denvned@gmail.com>
Dick van den Brink <d_vandenbrink@outlook.com> unknown <d_vandenbrink@outlook.com> unknown <d_vandenbrink@live.com>
@ -52,6 +56,7 @@ Evan Sebastian <evanlhoini@gmail.com>
Eyas <eyas.sharaiha@gmail.com> # Eyas Sharaiha
Fabian Cook <faybecook@gmail.com>
falsandtru <falsandtru@users.noreply.github.com> # @falsandtru
flowmemo <flowmemo@outlook.com> # @flowmemo
Frank Wallis <fwallis@outlook.com>
František Žiacik <fziacik@gratex.com> František Žiacik <ziacik@gmail.com>
Gabe Moothart <gmoothart@gmail.com>
@ -62,6 +67,7 @@ Graeme Wicksted <graeme.wicksted@gmail.com>
Guillaume Salles <guillaume.salles@me.com>
Guy Bedford <guybedford@gmail.com> guybedford <guybedford@gmail.com>
Harald Niesche <harald@niesche.de>
Homa Wong <homawong@gmail.com>
Iain Monro <iain.monro@softwire.com>
Ingvar Stepanyan <me@rreverser.com>
impinball <impinball@gmail.com> # Isiah Meadows
@ -81,6 +87,7 @@ Jonathan Park <jpark@daptiv.com>
Jonathan Turner <jont@microsoft.com> Jonathan Turner <probata@hotmail.com>
Jonathan Toland <toland@dnalot.com>
Jesse Schalken <me@jesseschalken.com>
Joel Day <joelday@gmail.com>
Josh Abernathy <joshaber@gmail.com> joshaber <joshaber@gmail.com>
Josh Kalderimis <josh.kalderimis@gmail.com>
Josh Soref <jsoref@users.noreply.github.com>
@ -95,10 +102,12 @@ Kanchalai Tanglertsampan <yuisu@microsoft.com> Yui T <yuisu@microsoft.com>
Kanchalai Tanglertsampan <yuisu@microsoft.com> Yui <yuit@users.noreply.github.com>
Kanchalai Tanglertsampan <yuisu@microsoft.com> Yui <yuisu@microsoft.com>
Kanchalai Tanglertsampan <yuisu@microsoft.com> yui T <yuisu@microsoft.com>
Kārlis Gaņģis <Knagis@users.noreply.github.com>
Keith Mashinter <kmashint@yahoo.com> kmashint <kmashint@yahoo.com>
Ken Howard <ken@simplicatedweb.com>
Kevin Lang <klang2012@gmail.com>
kimamula <kenji.imamula@gmail.com> # Kenji Imamula
Klaus Meinhardt <klaus.meinhardt1@gmail.com>
Kyle Kelley <rgbkrk@gmail.com>
Lorant Pinter <lorant.pinter@prezi.com>
Lucien Greathouse <me@lpghatguy.com>
@ -107,6 +116,7 @@ Martin Vseticka <vseticka.martin@gmail.com> Martin Všeticka <vseticka.martin@gm
gcnew <gcnew@abv.bg> # Marin Marinov
vvakame <vvakame+dev@gmail.com> # Masahiro Wakame
Matt McCutchen <rmccutch@mit.edu>
MANISH-GIRI <manish.giri.me@gmail.com> # Manish Giri
Max Deepfield <maxdeepfield@absolutefreakout.com>
Micah Zoltu <micah@zoltu.net>
Michael <maykelchiche@gmail.com>
@ -213,4 +223,6 @@ Tim Perry <tim.perry@softwire.com>
Vidar Tonaas Fauske <vidartf@gmail.com>
Viktor Zozulyak <zozulyakviktor@gmail.com>
rix <rix@rixs-MacBook-Pro.local> # Richard Sentino
rohitverma007 <rohitverma@live.ca> # Rohit Verma
rohitverma007 <rohitverma@live.ca> # Rohit Verma
rdosanjh <me@rajdeep.io> # Raj Dosanjh
gdh1995 <gdh1995@qq.com> # Dahan Gong
AUTHORS.md (12 lines changed)
@ -12,13 +12,16 @@ TypeScript is authored by:
* Aliaksandr Radzivanovich
* Anatoly Ressin
* Anders Hejlsberg
* Andreas Martin
* Andrej Baran
* Andrew Ochsner
* Andrew Z Allen
* András Parditka
* Andy Hanson
* Anil Anar
* Anton Khlynovskiy
* Anton Tolmachev
* Anubha Mathur
* Arnav Singh
* Arthur Ozga
* Asad Saeeduddin
@ -42,12 +45,14 @@ TypeScript is authored by:
* Cotton Hou
* Cyrus Najmabadi
* Dafrok Zhang
* Dahan Gong
* Dan Corder
* Dan Quirk
* Daniel Hollocher
* Daniel Rosenwasser
* David Kmenta
* David Li
* David Sheldrick
* David Souther
* Denis Nedelyaev
* Dick van den Brink
@ -66,6 +71,7 @@ TypeScript is authored by:
* Eyas Sharaiha
* Fabian Cook
* @falsandtru
* @flowmemo
* Frank Wallis
* Franklin Tse
* František Žiacik
@ -79,6 +85,7 @@ TypeScript is authored by:
* Guy Bedford
* Harald Niesche
* Herrington Darkholme
* Homa Wong
* Iain Monro
* Ingvar Stepanyan
* Isiah Meadows
@ -93,6 +100,7 @@ TypeScript is authored by:
* Jeffrey Morlan
* Jesse Schalken
* Jiri Tobisek
* Joel Day
* Joey Wilson
* Johannes Rieken
* John Vilk
@ -114,10 +122,13 @@ TypeScript is authored by:
* Ken Howard
* Kenji Imamula
* Kevin Lang
* Klaus Meinhardt
* Kyle Kelley
* Kārlis Gaņģis
* Lorant Pinter
* Lucien Greathouse
* Lukas Elmer
* Manish Giri
* Marin Marinov
* Marius Schulz
* Martin Vseticka
@ -155,6 +166,7 @@ TypeScript is authored by:
* @progre
* Punya Biswal
* Rado Kirov
* Raj Dosanjh
* Richard Knoll
* Richard Sentino
* Robert Coie
@ -50,7 +50,8 @@ const cmdLineOptions = minimist(process.argv.slice(2), {
r: "reporter",
color: "colors",
f: "files",
file: "files"
file: "files",
w: "workers",
},
default: {
soft: false,
@ -63,6 +64,7 @@ const cmdLineOptions = minimist(process.argv.slice(2), {
reporter: process.env.reporter || process.env.r,
lint: process.env.lint || true,
files: process.env.f || process.env.file || process.env.files || "",
workers: process.env.workerCount || os.cpus().length,
}
});

@ -337,6 +339,7 @@ const builtGeneratedDiagnosticMessagesJSON = path.join(builtLocalDirectory, "dia
// processDiagnosticMessages script
gulp.task(processDiagnosticMessagesJs, false, [], () => {
const settings: tsc.Settings = getCompilerSettings({
target: "es5",
declaration: false,
removeComments: true,
noResolve: false,
@ -612,7 +615,7 @@ function runConsoleTests(defaultReporter: string, runInParallel: boolean, done:
} while (fs.existsSync(taskConfigsFolder));
fs.mkdirSync(taskConfigsFolder);

workerCount = process.env.workerCount || os.cpus().length;
workerCount = cmdLineOptions["workers"];
}

if (tests || light || taskConfigsFolder) {
@ -1025,7 +1028,7 @@ gulp.task("lint", "Runs tslint on the compiler sources. Optional arguments are:
cb();
}, (cb) => {
files = files.filter(file => fileMatcher.test(file.path)).sort((filea, fileb) => filea.stat.size - fileb.stat.size);
const workerCount = (process.env.workerCount && +process.env.workerCount) || os.cpus().length;
const workerCount = cmdLineOptions["workers"];
for (let i = 0; i < workerCount; i++) {
spawnLintWorker(files, finished);
}
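
The new "workers" alias and default above are what runConsoleTests and the lint task now read through cmdLineOptions["workers"] instead of process.env.workerCount. A minimal standalone sketch of how that minimist wiring behaves (the argv values here are made up, not part of the actual build file):

import minimist = require("minimist");
import os = require("os");

// Same alias/default shape as the hunk above, applied to an illustrative argv.
const cmdLineOptions = minimist(["-w", "4"], {
    alias: { w: "workers" },
    default: { workers: os.cpus().length },
});
// cmdLineOptions["workers"] === 4; without -w/--workers it falls back to os.cpus().length.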
@ -269,6 +269,7 @@ var harnessSources = harnessCoreSources.concat([
"projectErrors.ts",
"matchFiles.ts",
"initializeTSConfig.ts",
"printer.ts",
].map(function (f) {
return path.join(unittestsDirectory, f);
})).concat([
@ -465,7 +466,7 @@ function compileFile(outFile, sources, prereqs, prefixes, useBuiltCompiler, opts
options += " --stripInternal";
}

options += " --target es5 --noUnusedLocals --noUnusedParameters";
options += " --target es5 --lib es5,scripthost --noUnusedLocals --noUnusedParameters";

var cmd = host + " " + compilerPath + " " + options + " ";
cmd = cmd + sources.join(" ");
@ -735,7 +736,7 @@ compileFile(

// Appending exports at the end of the server library
var tsserverLibraryDefinitionFileContents =
fs.readFileSync(tsserverLibraryDefinitionFile).toString() +
fs.readFileSync(tsserverLibraryDefinitionFile).toString() +
"\r\nexport = ts;" +
"\r\nexport as namespace ts;";
@ -27,7 +27,7 @@ function main(): void {

var inputFilePath = sys.args[0].replace(/\\/g, "/");
var inputStr = sys.readFile(inputFilePath);

var diagnosticMessages: InputDiagnosticMessageTable = JSON.parse(inputStr);

var names = Utilities.getObjectKeys(diagnosticMessages);
@ -44,7 +44,7 @@ function main(): void {
function checkForUniqueCodes(messages: string[], diagnosticTable: InputDiagnosticMessageTable) {
const originalMessageForCode: string[] = [];
let numConflicts = 0;

for (const currentMessage of messages) {
const code = diagnosticTable[currentMessage].code;

@ -74,7 +74,7 @@ function buildUniqueNameMap(names: string[]): ts.Map<string> {
var uniqueNames = NameGenerator.ensureUniqueness(names, /* isCaseSensitive */ false, /* isFixed */ undefined);

for (var i = 0; i < names.length; i++) {
nameMap[names[i]] = uniqueNames[i];
nameMap.set(names[i], uniqueNames[i]);
}

return nameMap;
@ -91,7 +91,7 @@ function buildInfoFileOutput(messageTable: InputDiagnosticMessageTable, nameMap:
for (var i = 0; i < names.length; i++) {
var name = names[i];
var diagnosticDetails = messageTable[name];
var propName = convertPropertyName(nameMap[name]);
var propName = convertPropertyName(nameMap.get(name));

result +=
' ' + propName +
@ -114,7 +114,7 @@ function buildDiagnosticMessageOutput(messageTable: InputDiagnosticMessageTable,
for (var i = 0; i < names.length; i++) {
var name = names[i];
var diagnosticDetails = messageTable[name];
var propName = convertPropertyName(nameMap[name]);
var propName = convertPropertyName(nameMap.get(name));

result += '\r\n "' + createKey(propName, diagnosticDetails.code) + '"' + ' : "' + name.replace(/[\"]/g, '\\"') + '"';
if (i !== names.length - 1) {
@ -349,17 +349,20 @@ namespace ts {
|
||||
// Otherwise, we'll be merging into a compatible existing symbol (for example when
|
||||
// you have multiple 'vars' with the same name in the same container). In this case
|
||||
// just add this node into the declarations list of the symbol.
|
||||
symbol = symbolTable[name] || (symbolTable[name] = createSymbol(SymbolFlags.None, name));
|
||||
symbol = symbolTable.get(name);
|
||||
if (!symbol) {
|
||||
symbolTable.set(name, symbol = createSymbol(SymbolFlags.None, name));
|
||||
}
|
||||
|
||||
if (name && (includes & SymbolFlags.Classifiable)) {
|
||||
classifiableNames[name] = name;
|
||||
classifiableNames.set(name, name);
|
||||
}
|
||||
|
||||
if (symbol.flags & excludes) {
|
||||
if (symbol.isReplaceableByMethod) {
|
||||
// Javascript constructor-declared symbols can be discarded in favor of
|
||||
// prototype symbols like methods.
|
||||
symbol = symbolTable[name] = createSymbol(SymbolFlags.None, name);
|
||||
symbolTable.set(name, symbol = createSymbol(SymbolFlags.None, name));
|
||||
}
|
||||
else {
|
||||
if (node.name) {
|
||||
@ -1512,7 +1515,7 @@ namespace ts {
|
||||
errorOnFirstToken(node, Diagnostics.export_modifier_cannot_be_applied_to_ambient_modules_and_module_augmentations_since_they_are_always_visible);
|
||||
}
|
||||
if (isExternalModuleAugmentation(node)) {
|
||||
declareSymbolAndAddToSymbolTable(node, SymbolFlags.NamespaceModule, SymbolFlags.NamespaceModuleExcludes);
|
||||
declareModuleSymbol(node);
|
||||
}
|
||||
else {
|
||||
let pattern: Pattern | undefined;
|
||||
@ -1534,12 +1537,8 @@ namespace ts {
|
||||
}
|
||||
}
|
||||
else {
|
||||
const state = getModuleInstanceState(node);
|
||||
if (state === ModuleInstanceState.NonInstantiated) {
|
||||
declareSymbolAndAddToSymbolTable(node, SymbolFlags.NamespaceModule, SymbolFlags.NamespaceModuleExcludes);
|
||||
}
|
||||
else {
|
||||
declareSymbolAndAddToSymbolTable(node, SymbolFlags.ValueModule, SymbolFlags.ValueModuleExcludes);
|
||||
const state = declareModuleSymbol(node);
|
||||
if (state !== ModuleInstanceState.NonInstantiated) {
|
||||
if (node.symbol.flags & (SymbolFlags.Function | SymbolFlags.Class | SymbolFlags.RegularEnum)) {
|
||||
// if module was already merged with some function, class or non-const enum
|
||||
// treat is a non-const-enum-only
|
||||
@ -1560,6 +1559,15 @@ namespace ts {
|
||||
}
|
||||
}
|
||||
|
||||
function declareModuleSymbol(node: ModuleDeclaration): ModuleInstanceState {
|
||||
const state = getModuleInstanceState(node);
|
||||
const instantiated = state !== ModuleInstanceState.NonInstantiated;
|
||||
declareSymbolAndAddToSymbolTable(node,
|
||||
instantiated ? SymbolFlags.ValueModule : SymbolFlags.NamespaceModule,
|
||||
instantiated ? SymbolFlags.ValueModuleExcludes : SymbolFlags.NamespaceModuleExcludes);
|
||||
return state;
|
||||
}
|
||||
|
||||
function bindFunctionOrConstructorType(node: SignatureDeclaration): void {
|
||||
// For a given function symbol "<...>(...) => T" we want to generate a symbol identical
|
||||
// to the one we would get for: { <...>(...): T }
|
||||
@ -1573,7 +1581,7 @@ namespace ts {
|
||||
const typeLiteralSymbol = createSymbol(SymbolFlags.TypeLiteral, "__type");
|
||||
addDeclarationToSymbol(typeLiteralSymbol, node, SymbolFlags.TypeLiteral);
|
||||
typeLiteralSymbol.members = createMap<Symbol>();
|
||||
typeLiteralSymbol.members[symbol.name] = symbol;
|
||||
typeLiteralSymbol.members.set(symbol.name, symbol);
|
||||
}
|
||||
|
||||
function bindObjectLiteralExpression(node: ObjectLiteralExpression) {
|
||||
@ -1604,9 +1612,9 @@ namespace ts {
|
||||
? ElementKind.Property
|
||||
: ElementKind.Accessor;
|
||||
|
||||
const existingKind = seen[identifier.text];
|
||||
const existingKind = seen.get(identifier.text);
|
||||
if (!existingKind) {
|
||||
seen[identifier.text] = currentKind;
|
||||
seen.set(identifier.text, currentKind);
|
||||
continue;
|
||||
}
|
||||
|
||||
@ -2211,7 +2219,7 @@ namespace ts {
|
||||
constructorFunction.parent = classPrototype;
|
||||
classPrototype.parent = leftSideOfAssignment;
|
||||
|
||||
const funcSymbol = container.locals[constructorFunction.text];
|
||||
const funcSymbol = container.locals.get(constructorFunction.text);
|
||||
if (!funcSymbol || !(funcSymbol.flags & SymbolFlags.Function || isDeclarationOfFunctionExpression(funcSymbol))) {
|
||||
return;
|
||||
}
|
||||
@ -2242,7 +2250,7 @@ namespace ts {
|
||||
bindAnonymousDeclaration(node, SymbolFlags.Class, bindingName);
|
||||
// Add name of class expression into the map for semantic classifier
|
||||
if (node.name) {
|
||||
classifiableNames[node.name.text] = node.name.text;
|
||||
classifiableNames.set(node.name.text, node.name.text);
|
||||
}
|
||||
}
|
||||
|
||||
@ -2258,14 +2266,14 @@ namespace ts {
|
||||
// module might have an exported variable called 'prototype'. We can't allow that as
|
||||
// that would clash with the built-in 'prototype' for the class.
|
||||
const prototypeSymbol = createSymbol(SymbolFlags.Property | SymbolFlags.Prototype, "prototype");
|
||||
if (symbol.exports[prototypeSymbol.name]) {
|
||||
const symbolExport = symbol.exports.get(prototypeSymbol.name);
|
||||
if (symbolExport) {
|
||||
if (node.name) {
|
||||
node.name.parent = node;
|
||||
}
|
||||
file.bindDiagnostics.push(createDiagnosticForNode(symbol.exports[prototypeSymbol.name].declarations[0],
|
||||
Diagnostics.Duplicate_identifier_0, prototypeSymbol.name));
|
||||
file.bindDiagnostics.push(createDiagnosticForNode(symbolExport.declarations[0], Diagnostics.Duplicate_identifier_0, prototypeSymbol.name));
|
||||
}
|
||||
symbol.exports[prototypeSymbol.name] = prototypeSymbol;
|
||||
symbol.exports.set(prototypeSymbol.name, prototypeSymbol);
|
||||
prototypeSymbol.parent = symbol;
|
||||
}
|
File diff suppressed because it is too large
@ -1,4 +1,4 @@
|
||||
/// <reference path="sys.ts"/>
|
||||
/// <reference path="sys.ts"/>
|
||||
/// <reference path="types.ts"/>
|
||||
/// <reference path="core.ts"/>
|
||||
/// <reference path="diagnosticInformationMap.generated.ts"/>
|
||||
@ -65,12 +65,13 @@ namespace ts {
|
||||
},
|
||||
{
|
||||
name: "jsx",
|
||||
type: createMap({
|
||||
type: createMapFromTemplate({
|
||||
"preserve": JsxEmit.Preserve,
|
||||
"react-native": JsxEmit.ReactNative,
|
||||
"react": JsxEmit.React
|
||||
}),
|
||||
paramType: Diagnostics.KIND,
|
||||
description: Diagnostics.Specify_JSX_code_generation_Colon_preserve_or_react,
|
||||
description: Diagnostics.Specify_JSX_code_generation_Colon_preserve_react_native_or_react,
|
||||
},
|
||||
{
|
||||
name: "reactNamespace",
|
||||
@ -100,7 +101,7 @@ namespace ts {
|
||||
{
|
||||
name: "module",
|
||||
shortName: "m",
|
||||
type: createMap({
|
||||
type: createMapFromTemplate({
|
||||
"none": ModuleKind.None,
|
||||
"commonjs": ModuleKind.CommonJS,
|
||||
"amd": ModuleKind.AMD,
|
||||
@ -114,7 +115,7 @@ namespace ts {
|
||||
},
|
||||
{
|
||||
name: "newLine",
|
||||
type: createMap({
|
||||
type: createMapFromTemplate({
|
||||
"crlf": NewLineKind.CarriageReturnLineFeed,
|
||||
"lf": NewLineKind.LineFeed
|
||||
}),
|
||||
@ -263,7 +264,7 @@ namespace ts {
|
||||
{
|
||||
name: "target",
|
||||
shortName: "t",
|
||||
type: createMap({
|
||||
type: createMapFromTemplate({
|
||||
"es3": ScriptTarget.ES3,
|
||||
"es5": ScriptTarget.ES5,
|
||||
"es6": ScriptTarget.ES2015,
|
||||
@ -300,7 +301,7 @@ namespace ts {
|
||||
},
|
||||
{
|
||||
name: "moduleResolution",
|
||||
type: createMap({
|
||||
type: createMapFromTemplate({
|
||||
"node": ModuleResolutionKind.NodeJs,
|
||||
"classic": ModuleResolutionKind.Classic,
|
||||
}),
|
||||
@ -414,7 +415,7 @@ namespace ts {
|
||||
type: "list",
|
||||
element: {
|
||||
name: "lib",
|
||||
type: createMap({
|
||||
type: createMapFromTemplate({
|
||||
// JavaScript only
|
||||
"es5": "lib.es5.d.ts",
|
||||
"es6": "lib.es2015.d.ts",
|
||||
@ -538,9 +539,9 @@ namespace ts {
|
||||
const optionNameMap = createMap<CommandLineOption>();
|
||||
const shortOptionNames = createMap<string>();
|
||||
forEach(optionDeclarations, option => {
|
||||
optionNameMap[option.name.toLowerCase()] = option;
|
||||
optionNameMap.set(option.name.toLowerCase(), option);
|
||||
if (option.shortName) {
|
||||
shortOptionNames[option.shortName] = option.name;
|
||||
shortOptionNames.set(option.shortName, option.name);
|
||||
}
|
||||
});
|
||||
|
||||
@ -550,7 +551,7 @@ namespace ts {
|
||||
|
||||
/* @internal */
|
||||
export function createCompilerDiagnosticForInvalidCustomType(opt: CommandLineOptionOfCustomType): Diagnostic {
|
||||
const namesOfType = Object.keys(opt.type).map(key => `'${key}'`).join(", ");
|
||||
const namesOfType = arrayFrom(opt.type.keys()).map(key => `'${key}'`).join(", ");
|
||||
return createCompilerDiagnostic(Diagnostics.Argument_for_0_option_must_be_Colon_1, `--${opt.name}`, namesOfType);
|
||||
}
|
||||
|
||||
@ -604,13 +605,13 @@ namespace ts {
|
||||
s = s.slice(s.charCodeAt(1) === CharacterCodes.minus ? 2 : 1).toLowerCase();
|
||||
|
||||
// Try to translate short option names to their full equivalents.
|
||||
if (s in shortOptionNames) {
|
||||
s = shortOptionNames[s];
|
||||
const short = shortOptionNames.get(s);
|
||||
if (short !== undefined) {
|
||||
s = short;
|
||||
}
|
||||
|
||||
if (s in optionNameMap) {
|
||||
const opt = optionNameMap[s];
|
||||
|
||||
const opt = optionNameMap.get(s);
|
||||
if (opt) {
|
||||
if (opt.isTSConfigOnly) {
|
||||
errors.push(createCompilerDiagnostic(Diagnostics.Option_0_can_only_be_specified_in_tsconfig_json_file, opt.name));
|
||||
}
|
||||
@ -733,7 +734,7 @@ namespace ts {
|
||||
* @param fileNames array of filenames to be generated into tsconfig.json
|
||||
*/
|
||||
/* @internal */
|
||||
export function generateTSConfig(options: CompilerOptions, fileNames: string[]): { compilerOptions: Map<CompilerOptionsValue> } {
|
||||
export function generateTSConfig(options: CompilerOptions, fileNames: string[]): { compilerOptions: MapLike<CompilerOptionsValue> } {
|
||||
const compilerOptions = extend(options, defaultInitCompilerOptions);
|
||||
const configurations: any = {
|
||||
compilerOptions: serializeCompilerOptions(compilerOptions)
|
||||
@ -758,18 +759,17 @@ namespace ts {
|
||||
}
|
||||
}
|
||||
|
||||
function getNameOfCompilerOptionValue(value: CompilerOptionsValue, customTypeMap: MapLike<string | number>): string | undefined {
|
||||
function getNameOfCompilerOptionValue(value: CompilerOptionsValue, customTypeMap: Map<string | number>): string | undefined {
|
||||
// There is a typeMap associated with this command-line option so use it to map value back to its name
|
||||
for (const key in customTypeMap) {
|
||||
if (customTypeMap[key] === value) {
|
||||
return forEachEntry(customTypeMap, (mapValue, key) => {
|
||||
if (mapValue === value) {
|
||||
return key;
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
});
|
||||
}
|
||||
|
||||
function serializeCompilerOptions(options: CompilerOptions): Map<CompilerOptionsValue> {
|
||||
const result = createMap<CompilerOptionsValue>();
|
||||
function serializeCompilerOptions(options: CompilerOptions): MapLike<CompilerOptionsValue> {
|
||||
const result: ts.MapLike<CompilerOptionsValue> = {};
|
||||
const optionsNameMap = getOptionNameMap().optionNameMap;
|
||||
|
||||
for (const name in options) {
|
||||
@ -785,7 +785,7 @@ namespace ts {
|
||||
break;
|
||||
default:
|
||||
const value = options[name];
|
||||
const optionDefinition = optionsNameMap[name.toLowerCase()];
|
||||
const optionDefinition = optionsNameMap.get(name.toLowerCase());
|
||||
if (optionDefinition) {
|
||||
const customTypeMap = getCustomTypeMapOfCommandLineOption(optionDefinition);
|
||||
if (!customTypeMap) {
|
||||
@ -1056,8 +1056,8 @@ namespace ts {
|
||||
const optionNameMap = arrayToMap(optionDeclarations, opt => opt.name);
|
||||
|
||||
for (const id in jsonOptions) {
|
||||
if (id in optionNameMap) {
|
||||
const opt = optionNameMap[id];
|
||||
const opt = optionNameMap.get(id);
|
||||
if (opt) {
|
||||
defaultOptions[opt.name] = convertJsonOption(opt, jsonOptions[id], basePath, errors);
|
||||
}
|
||||
else {
|
||||
@ -1093,8 +1093,9 @@ namespace ts {
|
||||
|
||||
function convertJsonOptionOfCustomType(opt: CommandLineOptionOfCustomType, value: string, errors: Diagnostic[]) {
|
||||
const key = value.toLowerCase();
|
||||
if (key in opt.type) {
|
||||
return opt.type[key];
|
||||
const val = opt.type.get(key);
|
||||
if (val !== undefined) {
|
||||
return val;
|
||||
}
|
||||
else {
|
||||
errors.push(createCompilerDiagnosticForInvalidCustomType(opt));
|
||||
@ -1222,7 +1223,7 @@ namespace ts {
|
||||
// file map that marks whether it was a regular wildcard match (with a `*` or `?` token),
|
||||
// or a recursive directory. This information is used by filesystem watchers to monitor for
|
||||
// new entries in these paths.
|
||||
const wildcardDirectories: Map<WatchDirectoryFlags> = getWildcardDirectories(include, exclude, basePath, host.useCaseSensitiveFileNames);
|
||||
const wildcardDirectories = getWildcardDirectories(include, exclude, basePath, host.useCaseSensitiveFileNames);
|
||||
|
||||
// Rather than requery this for each file and filespec, we query the supported extensions
|
||||
// once and store it on the expansion context.
|
||||
@ -1233,7 +1234,7 @@ namespace ts {
|
||||
if (fileNames) {
|
||||
for (const fileName of fileNames) {
|
||||
const file = combinePaths(basePath, fileName);
|
||||
literalFileMap[keyMapper(file)] = file;
|
||||
literalFileMap.set(keyMapper(file), file);
|
||||
}
|
||||
}
|
||||
|
||||
@ -1256,15 +1257,14 @@ namespace ts {
|
||||
removeWildcardFilesWithLowerPriorityExtension(file, wildcardFileMap, supportedExtensions, keyMapper);
|
||||
|
||||
const key = keyMapper(file);
|
||||
if (!(key in literalFileMap) && !(key in wildcardFileMap)) {
|
||||
wildcardFileMap[key] = file;
|
||||
if (!literalFileMap.has(key) && !wildcardFileMap.has(key)) {
|
||||
wildcardFileMap.set(key, file);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const literalFiles = reduceProperties(literalFileMap, addFileToOutput, []);
|
||||
const wildcardFiles = reduceProperties(wildcardFileMap, addFileToOutput, []);
|
||||
wildcardFiles.sort(host.useCaseSensitiveFileNames ? compareStrings : compareStringsCaseInsensitive);
|
||||
const literalFiles = arrayFrom(literalFileMap.values());
|
||||
const wildcardFiles = arrayFrom(wildcardFileMap.values());
|
||||
return {
|
||||
fileNames: literalFiles.concat(wildcardFiles),
|
||||
wildcardDirectories
|
||||
@ -1294,7 +1294,7 @@ namespace ts {
|
||||
/**
|
||||
* Gets directories in a set of include patterns that should be watched for changes.
|
||||
*/
|
||||
function getWildcardDirectories(include: string[], exclude: string[], path: string, useCaseSensitiveFileNames: boolean): Map<WatchDirectoryFlags> {
|
||||
function getWildcardDirectories(include: string[], exclude: string[], path: string, useCaseSensitiveFileNames: boolean): MapLike<WatchDirectoryFlags> {
|
||||
// We watch a directory recursively if it contains a wildcard anywhere in a directory segment
|
||||
// of the pattern:
|
||||
//
|
||||
@ -1309,7 +1309,7 @@ namespace ts {
|
||||
// /a/b/a?z - Watch /a/b directly to catch any new file matching a?z
|
||||
const rawExcludeRegex = getRegularExpressionForWildcard(exclude, path, "exclude");
|
||||
const excludeRegex = rawExcludeRegex && new RegExp(rawExcludeRegex, useCaseSensitiveFileNames ? "" : "i");
|
||||
const wildcardDirectories = createMap<WatchDirectoryFlags>();
|
||||
const wildcardDirectories: ts.MapLike<WatchDirectoryFlags> = {};
|
||||
if (include !== undefined) {
|
||||
const recursiveKeys: string[] = [];
|
||||
for (const file of include) {
|
||||
@ -1332,13 +1332,13 @@ namespace ts {
|
||||
}
|
||||
|
||||
// Remove any subpaths under an existing recursively watched directory.
|
||||
for (const key in wildcardDirectories) {
|
||||
for (const key in wildcardDirectories) if (hasProperty(wildcardDirectories, key)) {
|
||||
for (const recursiveKey of recursiveKeys) {
|
||||
if (key !== recursiveKey && containsPath(recursiveKey, key, path, !useCaseSensitiveFileNames)) {
|
||||
delete wildcardDirectories[key];
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
return wildcardDirectories;
|
||||
@ -1372,7 +1372,7 @@ namespace ts {
|
||||
for (let i = ExtensionPriority.Highest; i < adjustedExtensionPriority; i++) {
|
||||
const higherPriorityExtension = extensions[i];
|
||||
const higherPriorityPath = keyMapper(changeExtension(file, higherPriorityExtension));
|
||||
if (higherPriorityPath in literalFiles || higherPriorityPath in wildcardFiles) {
|
||||
if (literalFiles.has(higherPriorityPath) || wildcardFiles.has(higherPriorityPath)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
@ -1394,21 +1394,10 @@ namespace ts {
|
||||
for (let i = nextExtensionPriority; i < extensions.length; i++) {
|
||||
const lowerPriorityExtension = extensions[i];
|
||||
const lowerPriorityPath = keyMapper(changeExtension(file, lowerPriorityExtension));
|
||||
delete wildcardFiles[lowerPriorityPath];
|
||||
wildcardFiles.delete(lowerPriorityPath);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a file to an array of files.
|
||||
*
|
||||
* @param output The output array.
|
||||
* @param file The file path.
|
||||
*/
|
||||
function addFileToOutput(output: string[], file: string) {
|
||||
output.push(file);
|
||||
return output;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets a case sensitive key.
|
||||
*
|
||||
|
||||
@ -5,17 +5,16 @@ namespace ts {
|
||||
export interface CommentWriter {
|
||||
reset(): void;
|
||||
setSourceFile(sourceFile: SourceFile): void;
|
||||
emitNodeWithComments(emitContext: EmitContext, node: Node, emitCallback: (emitContext: EmitContext, node: Node) => void): void;
|
||||
setWriter(writer: EmitTextWriter): void;
|
||||
emitNodeWithComments(hint: EmitHint, node: Node, emitCallback: (hint: EmitHint, node: Node) => void): void;
|
||||
emitBodyWithDetachedComments(node: Node, detachedRange: TextRange, emitCallback: (node: Node) => void): void;
|
||||
emitTrailingCommentsOfPosition(pos: number): void;
|
||||
}
|
||||
|
||||
export function createCommentWriter(host: EmitHost, writer: EmitTextWriter, sourceMap: SourceMapWriter): CommentWriter {
|
||||
const compilerOptions = host.getCompilerOptions();
|
||||
const extendedDiagnostics = compilerOptions.extendedDiagnostics;
|
||||
const newLine = host.getNewLine();
|
||||
const { emitPos } = sourceMap;
|
||||
|
||||
export function createCommentWriter(printerOptions: PrinterOptions, emitPos: ((pos: number) => void) | undefined): CommentWriter {
|
||||
const extendedDiagnostics = printerOptions.extendedDiagnostics;
|
||||
const newLine = getNewLineCharacter(printerOptions);
|
||||
let writer: EmitTextWriter;
|
||||
let containerPos = -1;
|
||||
let containerEnd = -1;
|
||||
let declarationListContainerEnd = -1;
|
||||
@ -24,19 +23,20 @@ namespace ts {
|
||||
let currentLineMap: number[];
|
||||
let detachedCommentsInfo: { nodePos: number, detachedCommentEndPos: number}[];
|
||||
let hasWrittenComment = false;
|
||||
let disabled: boolean = compilerOptions.removeComments;
|
||||
let disabled: boolean = printerOptions.removeComments;
|
||||
|
||||
return {
|
||||
reset,
|
||||
setWriter,
|
||||
setSourceFile,
|
||||
emitNodeWithComments,
|
||||
emitBodyWithDetachedComments,
|
||||
emitTrailingCommentsOfPosition,
|
||||
};
|
||||
|
||||
function emitNodeWithComments(emitContext: EmitContext, node: Node, emitCallback: (emitContext: EmitContext, node: Node) => void) {
|
||||
function emitNodeWithComments(hint: EmitHint, node: Node, emitCallback: (hint: EmitHint, node: Node) => void) {
|
||||
if (disabled) {
|
||||
emitCallback(emitContext, node);
|
||||
emitCallback(hint, node);
|
||||
return;
|
||||
}
|
||||
|
||||
@ -47,11 +47,11 @@ namespace ts {
|
||||
// Both pos and end are synthesized, so just emit the node without comments.
|
||||
if (emitFlags & EmitFlags.NoNestedComments) {
|
||||
disabled = true;
|
||||
emitCallback(emitContext, node);
|
||||
emitCallback(hint, node);
|
||||
disabled = false;
|
||||
}
|
||||
else {
|
||||
emitCallback(emitContext, node);
|
||||
emitCallback(hint, node);
|
||||
}
|
||||
}
|
||||
else {
|
||||
@ -94,11 +94,11 @@ namespace ts {
|
||||
|
||||
if (emitFlags & EmitFlags.NoNestedComments) {
|
||||
disabled = true;
|
||||
emitCallback(emitContext, node);
|
||||
emitCallback(hint, node);
|
||||
disabled = false;
|
||||
}
|
||||
else {
|
||||
emitCallback(emitContext, node);
|
||||
emitCallback(hint, node);
|
||||
}
|
||||
|
||||
if (extendedDiagnostics) {
|
||||
@ -198,9 +198,9 @@ namespace ts {
|
||||
}
|
||||
|
||||
// Leading comments are emitted at /*leading comment1 */space/*leading comment*/space
|
||||
emitPos(commentPos);
|
||||
if (emitPos) emitPos(commentPos);
|
||||
writeCommentRange(currentText, currentLineMap, writer, commentPos, commentEnd, newLine);
|
||||
emitPos(commentEnd);
|
||||
if (emitPos) emitPos(commentEnd);
|
||||
|
||||
if (hasTrailingNewLine) {
|
||||
writer.writeLine();
|
||||
@ -220,9 +220,9 @@ namespace ts {
|
||||
writer.write(" ");
|
||||
}
|
||||
|
||||
emitPos(commentPos);
|
||||
if (emitPos) emitPos(commentPos);
|
||||
writeCommentRange(currentText, currentLineMap, writer, commentPos, commentEnd, newLine);
|
||||
emitPos(commentEnd);
|
||||
if (emitPos) emitPos(commentEnd);
|
||||
|
||||
if (hasTrailingNewLine) {
|
||||
writer.writeLine();
|
||||
@ -248,9 +248,9 @@ namespace ts {
|
||||
function emitTrailingCommentOfPosition(commentPos: number, commentEnd: number, _kind: SyntaxKind, hasTrailingNewLine: boolean) {
|
||||
// trailing comments of a position are emitted at /*trailing comment1 */space/*trailing comment*/space
|
||||
|
||||
emitPos(commentPos);
|
||||
if (emitPos) emitPos(commentPos);
|
||||
writeCommentRange(currentText, currentLineMap, writer, commentPos, commentEnd, newLine);
|
||||
emitPos(commentEnd);
|
||||
if (emitPos) emitPos(commentEnd);
|
||||
|
||||
if (hasTrailingNewLine) {
|
||||
writer.writeLine();
|
||||
@ -286,6 +286,10 @@ namespace ts {
|
||||
detachedCommentsInfo = undefined;
|
||||
}
|
||||
|
||||
function setWriter(output: EmitTextWriter): void {
|
||||
writer = output;
|
||||
}
|
||||
|
||||
function setSourceFile(sourceFile: SourceFile) {
|
||||
currentSourceFile = sourceFile;
|
||||
currentText = currentSourceFile.text;
|
||||
@ -323,9 +327,9 @@ namespace ts {
|
||||
}
|
||||
|
||||
function writeComment(text: string, lineMap: number[], writer: EmitTextWriter, commentPos: number, commentEnd: number, newLine: string) {
|
||||
emitPos(commentPos);
|
||||
if (emitPos) emitPos(commentPos);
|
||||
writeCommentRange(text, lineMap, writer, commentPos, commentEnd, newLine);
|
||||
emitPos(commentEnd);
|
||||
if (emitPos) emitPos(commentEnd);
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@ -23,13 +23,14 @@ namespace ts {
|
||||
True = -1
|
||||
}
|
||||
|
||||
const createObject = Object.create;
|
||||
|
||||
// More efficient to create a collator once and use its `compare` than to call `a.localeCompare(b)` many times.
|
||||
export const collator: { compare(a: string, b: string): number } = typeof Intl === "object" && typeof Intl.Collator === "function" ? new Intl.Collator() : undefined;
|
||||
export const collator: { compare(a: string, b: string): number } = typeof Intl === "object" && typeof Intl.Collator === "function" ? new Intl.Collator(/*locales*/ undefined, { usage: "sort", sensitivity: "accent" }) : undefined;
|
||||
// Intl is missing in Safari, and node 0.10 treats "a" as greater than "B".
|
||||
export const localeCompareIsCorrect = ts.collator && ts.collator.compare("a", "B") < 0;
|
||||
|
||||
export function createMap<T>(template?: MapLike<T>): Map<T> {
|
||||
const map: Map<T> = createObject(null); // tslint:disable-line:no-null-keyword
|
||||
/** Create a MapLike with good performance. */
|
||||
function createDictionaryObject<T>(): MapLike<T> {
|
||||
const map = Object.create(null); // tslint:disable-line:no-null-keyword
|
||||
|
||||
// Using 'delete' on an object causes V8 to put the object in dictionary mode.
|
||||
// This disables creation of hidden classes, which are expensive when an object is
|
||||
@ -37,17 +38,113 @@ namespace ts {
|
||||
map["__"] = undefined;
|
||||
delete map["__"];
|
||||
|
||||
return map;
|
||||
}
|
||||
|
||||
/** Create a new map. If a template object is provided, the map will copy entries from it. */
|
||||
export function createMap<T>(): Map<T> {
|
||||
return new MapCtr<T>();
|
||||
}
|
||||
|
||||
export function createMapFromTemplate<T>(template?: MapLike<T>): Map<T> {
|
||||
const map: Map<T> = new MapCtr<T>();
|
||||
|
||||
// Copies keys/values from template. Note that for..in will not throw if
|
||||
// template is undefined, and instead will just exit the loop.
|
||||
for (const key in template) if (hasOwnProperty.call(template, key)) {
|
||||
map[key] = template[key];
|
||||
map.set(key, template[key]);
|
||||
}
|
||||
|
||||
return map;
|
||||
}
|
||||
|
||||
// The global Map object. This may not be available, so we must test for it.
|
||||
declare const Map: { new<T>(): Map<T> } | undefined;
|
||||
// Internet Explorer's Map doesn't support iteration, so don't use it.
|
||||
// tslint:disable-next-line:no-in-operator
|
||||
const MapCtr = typeof Map !== "undefined" && "entries" in Map.prototype ? Map : shimMap();
|
||||
|
||||
// Keep the class inside a function so it doesn't get compiled if it's not used.
|
||||
function shimMap(): { new<T>(): Map<T> } {
|
||||
|
||||
class MapIterator<T, U extends (string | T | [string, T])> {
|
||||
private data: MapLike<T>;
|
||||
private keys: string[];
|
||||
private index = 0;
|
||||
private selector: (data: MapLike<T>, key: string) => U;
|
||||
constructor(data: MapLike<T>, selector: (data: MapLike<T>, key: string) => U) {
|
||||
this.data = data;
|
||||
this.selector = selector;
|
||||
this.keys = Object.keys(data);
|
||||
}
|
||||
|
||||
public next(): { value: U, done: false } | { value: never, done: true } {
|
||||
const index = this.index;
|
||||
if (index < this.keys.length) {
|
||||
this.index++;
|
||||
return { value: this.selector(this.data, this.keys[index]), done: false };
|
||||
}
|
||||
return { value: undefined as never, done: true }
|
||||
}
|
||||
}
|
||||
|
||||
return class<T> implements Map<T> {
|
||||
private data = createDictionaryObject<T>();
|
||||
public size = 0;
|
||||
|
||||
get(key: string): T {
|
||||
return this.data[key];
|
||||
}
|
||||
|
||||
set(key: string, value: T): this {
|
||||
if (!this.has(key)) {
|
||||
this.size++;
|
||||
}
|
||||
this.data[key] = value;
|
||||
return this;
|
||||
}
|
||||
|
||||
has(key: string): boolean {
|
||||
// tslint:disable-next-line:no-in-operator
|
||||
return key in this.data;
|
||||
}
|
||||
|
||||
delete(key: string): boolean {
|
||||
if (this.has(key)) {
|
||||
this.size--;
|
||||
delete this.data[key];
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
clear(): void {
|
||||
this.data = createDictionaryObject<T>();
|
||||
this.size = 0;
|
||||
}
|
||||
|
||||
keys() {
|
||||
return new MapIterator(this.data, (_data, key) => key);
|
||||
}
|
||||
|
||||
values() {
|
||||
return new MapIterator(this.data, (data, key) => data[key]);
|
||||
}
|
||||
|
||||
entries() {
|
||||
return new MapIterator(this.data, (data, key) => [key, data[key]] as [string, T]);
|
||||
}
|
||||
|
||||
forEach(action: (value: T, key: string) => void): void {
|
||||
for (const key in this.data) {
|
||||
action(this.data[key], key);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
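
createMap, createMapFromTemplate, and the shim class above replace the old Object.create(null) dictionaries with real (or shimmed) Map objects. A small usage sketch, assuming only the string-keyed ts.Map<T> interface this diff targets (the keys and values below are placeholders, not the compiler's option tables):

// Old: const kinds = createMap({ "crlf": 0, "lf": 1 }); kinds["lf"]
// New:
const kinds = createMapFromTemplate<number>({ "crlf": 0, "lf": 1 });
kinds.get("lf");       // 1
kinds.has("cr");       // false
kinds.set("cr", 2);    // behaves the same on the native Map and on the shim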
|
||||
|
||||
export function createFileMap<T>(keyMapper?: (key: string) => string): FileMap<T> {
|
||||
let files = createMap<T>();
|
||||
const files = createMap<T>();
|
||||
return {
|
||||
get,
|
||||
set,
|
||||
@ -59,39 +156,34 @@ namespace ts {
|
||||
};
|
||||
|
||||
function forEachValueInMap(f: (key: Path, value: T) => void) {
|
||||
for (const key in files) {
|
||||
f(<Path>key, files[key]);
|
||||
}
|
||||
files.forEach((file, key) => {
|
||||
f(<Path>key, file);
|
||||
});
|
||||
}
|
||||
|
||||
function getKeys() {
|
||||
const keys: Path[] = [];
|
||||
for (const key in files) {
|
||||
keys.push(<Path>key);
|
||||
}
|
||||
return keys;
|
||||
return arrayFrom(files.keys()) as Path[];
|
||||
}
|
||||
|
||||
// path should already be well-formed so it does not need to be normalized
|
||||
function get(path: Path): T {
|
||||
return files[toKey(path)];
|
||||
return files.get(toKey(path));
|
||||
}
|
||||
|
||||
function set(path: Path, value: T) {
|
||||
files[toKey(path)] = value;
|
||||
files.set(toKey(path), value);
|
||||
}
|
||||
|
||||
function contains(path: Path) {
|
||||
return toKey(path) in files;
|
||||
return files.has(toKey(path));
|
||||
}
|
||||
|
||||
function remove(path: Path) {
|
||||
const key = toKey(path);
|
||||
delete files[key];
|
||||
files.delete(toKey(path));
|
||||
}
|
||||
|
||||
function clear() {
|
||||
files = createMap<T>();
|
||||
files.clear();
|
||||
}
|
||||
|
||||
function toKey(path: Path): string {
|
||||
@ -112,6 +204,10 @@ namespace ts {
|
||||
GreaterThan = 1
|
||||
}
|
||||
|
||||
export function length(array: any[]) {
|
||||
return array ? array.length : 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Iterates through 'array' by index and performs the callback on each element of array until the callback
|
||||
* returns a truthy value, then returns that value.
|
||||
@ -164,6 +260,16 @@ namespace ts {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/** Works like Array.prototype.findIndex, returning `-1` if no element satisfying the predicate is found. */
|
||||
export function findIndex<T>(array: T[], predicate: (element: T, index: number) => boolean): number {
|
||||
for (let i = 0; i < array.length; i++) {
|
||||
if (predicate(array[i], i)) {
|
||||
return i;
|
||||
}
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the first truthy result of `callback`, or else fails.
|
||||
* This is like `forEach`, but never returns undefined.
|
||||
@ -420,17 +526,16 @@ namespace ts {
|
||||
return result;
|
||||
}
|
||||
|
||||
export function mapObject<T, U>(object: MapLike<T>, f: (key: string, x: T) => [string, U]): MapLike<U> {
|
||||
let result: MapLike<U>;
|
||||
if (object) {
|
||||
result = {};
|
||||
for (const v of getOwnKeys(object)) {
|
||||
const [key, value]: [string, U] = f(v, object[v]) || [undefined, undefined];
|
||||
if (key !== undefined) {
|
||||
result[key] = value;
|
||||
}
|
||||
}
|
||||
export function mapEntries<T, U>(map: Map<T>, f: (key: string, value: T) => [string, U]): Map<U> {
|
||||
if (!map) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const result = createMap<U>();
|
||||
map.forEach((value, key) => {
|
||||
const [newKey, newValue] = f(key, value);
|
||||
result.set(newKey, newValue);
|
||||
});
|
||||
return result;
|
||||
}
|
||||
|
||||
@ -748,9 +853,6 @@ namespace ts {
|
||||
/**
|
||||
* Indicates whether a map-like contains an own property with the specified key.
|
||||
*
|
||||
* NOTE: This is intended for use only with MapLike<T> objects. For Map<T> objects, use
|
||||
* the 'in' operator.
|
||||
*
|
||||
* @param map A map-like.
|
||||
* @param key A property key.
|
||||
*/
|
||||
@ -761,9 +863,6 @@ namespace ts {
|
||||
/**
|
||||
* Gets the value of an owned property in a map-like.
|
||||
*
|
||||
* NOTE: This is intended for use only with MapLike<T> objects. For Map<T> objects, use
|
||||
* an indexer.
|
||||
*
|
||||
* @param map A map-like.
|
||||
* @param key A property key.
|
||||
*/
|
||||
@ -787,50 +886,48 @@ namespace ts {
|
||||
return keys;
|
||||
}
|
||||
|
||||
/**
|
||||
* Enumerates the properties of a Map<T>, invoking a callback and returning the first truthy result.
|
||||
*
|
||||
* @param map A map for which properties should be enumerated.
|
||||
* @param callback A callback to invoke for each property.
|
||||
*/
|
||||
export function forEachProperty<T, U>(map: Map<T>, callback: (value: T, key: string) => U): U {
|
||||
let result: U;
|
||||
for (const key in map) {
|
||||
if (result = callback(map[key], key)) break;
|
||||
/** Shims `Array.from`. */
|
||||
export function arrayFrom<T>(iterator: Iterator<T>): T[] {
|
||||
const result: T[] = [];
|
||||
for (let { value, done } = iterator.next(); !done; { value, done } = iterator.next()) {
|
||||
result.push(value);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if a Map<T> has some matching property.
|
||||
*
|
||||
* @param map A map whose properties should be tested.
|
||||
* @param predicate An optional callback used to test each property.
|
||||
* Calls `callback` for each entry in the map, returning the first truthy result.
|
||||
* Use `map.forEach` instead for normal iteration.
|
||||
*/
|
||||
export function someProperties<T>(map: Map<T>, predicate?: (value: T, key: string) => boolean) {
|
||||
for (const key in map) {
|
||||
if (!predicate || predicate(map[key], key)) return true;
|
||||
export function forEachEntry<T, U>(map: Map<T>, callback: (value: T, key: string) => U | undefined): U | undefined {
|
||||
const iterator = map.entries();
|
||||
for (let { value: pair, done } = iterator.next(); !done; { value: pair, done } = iterator.next()) {
|
||||
const [key, value] = pair;
|
||||
const result = callback(value, key);
|
||||
if (result) {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs a shallow copy of the properties from a source Map<T> to a target MapLike<T>
|
||||
*
|
||||
* @param source A map from which properties should be copied.
|
||||
* @param target A map to which properties should be copied.
|
||||
*/
|
||||
export function copyProperties<T>(source: Map<T>, target: MapLike<T>): void {
|
||||
for (const key in source) {
|
||||
target[key] = source[key];
|
||||
/** `forEachEntry` for just keys. */
|
||||
export function forEachKey<T>(map: Map<{}>, callback: (key: string) => T | undefined): T | undefined {
|
||||
const iterator = map.keys();
|
||||
for (let { value: key, done } = iterator.next(); !done; { value: key, done } = iterator.next()) {
|
||||
const result = callback(key);
|
||||
if (result) {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
export function appendProperty<T>(map: Map<T>, key: string | number, value: T): Map<T> {
|
||||
if (key === undefined || value === undefined) return map;
|
||||
if (map === undefined) map = createMap<T>();
|
||||
map[key] = value;
|
||||
return map;
|
||||
/** Copy entries from `source` to `target`. */
|
||||
export function copyEntries<T>(source: Map<T>, target: Map<T>): void {
|
||||
source.forEach((value, key) => {
|
||||
target.set(key, value);
|
||||
});
|
||||
}
|
||||
|
||||
export function assign<T1 extends MapLike<{}>, T2, T3>(t: T1, arg1: T2, arg2: T3): T1 & T2 & T3;
|
||||
@ -845,24 +942,6 @@ namespace ts {
|
||||
return t;
|
||||
}
|
||||
|
||||
/**
|
||||
* Reduce the properties of a map.
|
||||
*
|
||||
* NOTE: This is intended for use with Map<T> objects. For MapLike<T> objects, use
|
||||
* reduceOwnProperties instead as it offers better runtime safety.
|
||||
*
|
||||
* @param map The map to reduce
|
||||
* @param callback An aggregation function that is called for each entry in the map
|
||||
* @param initial The initial value for the reduction.
|
||||
*/
|
||||
export function reduceProperties<T, U>(map: Map<T>, callback: (aggregate: U, value: T, key: string) => U, initial: U): U {
|
||||
let result = initial;
|
||||
for (const key in map) {
|
||||
result = callback(result, map[key], String(key));
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs a shallow equality comparison of the contents of two map-likes.
|
||||
*
|
||||
@ -897,23 +976,14 @@ namespace ts {
|
||||
export function arrayToMap<T, U>(array: T[], makeKey: (value: T) => string, makeValue?: (value: T) => U): Map<T | U> {
|
||||
const result = createMap<T | U>();
|
||||
for (const value of array) {
|
||||
result[makeKey(value)] = makeValue ? makeValue(value) : value;
|
||||
result.set(makeKey(value), makeValue ? makeValue(value) : value);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
export function isEmpty<T>(map: Map<T>) {
|
||||
for (const id in map) {
|
||||
if (hasProperty(map, id)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
export function cloneMap<T>(map: Map<T>) {
|
||||
const clone = createMap<T>();
|
||||
copyProperties(map, clone);
|
||||
copyEntries(map, clone);
|
||||
return clone;
|
||||
}
|
||||
|
||||
@ -938,32 +1008,43 @@ namespace ts {
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds the value to an array of values associated with the key, and returns the array.
|
||||
* Creates the array if it does not already exist.
|
||||
*/
|
||||
export function multiMapAdd<V>(map: Map<V[]>, key: string | number, value: V): V[] {
|
||||
const values = map[key];
|
||||
if (values) {
|
||||
values.push(value);
|
||||
return values;
|
||||
}
|
||||
else {
|
||||
return map[key] = [value];
|
||||
}
|
||||
export interface MultiMap<T> extends Map<T[]> {
|
||||
/**
|
||||
* Adds the value to an array of values associated with the key, and returns the array.
|
||||
* Creates the array if it does not already exist.
|
||||
*/
|
||||
add(key: string, value: T): T[];
|
||||
/**
|
||||
* Removes a value from an array of values associated with the key.
|
||||
* Does not preserve the order of those values.
|
||||
* Does nothing if `key` is not in `map`, or `value` is not in `map[key]`.
|
||||
*/
|
||||
remove(key: string, value: T): void;
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes a value from an array of values associated with the key.
|
||||
* Does not preserve the order of those values.
|
||||
* Does nothing if `key` is not in `map`, or `value` is not in `map[key]`.
|
||||
*/
|
||||
export function multiMapRemove<V>(map: Map<V[]>, key: string, value: V): void {
|
||||
const values = map[key];
|
||||
export function createMultiMap<T>(): MultiMap<T> {
|
||||
const map = createMap<T[]>() as MultiMap<T>;
|
||||
map.add = multiMapAdd;
|
||||
map.remove = multiMapRemove;
|
||||
return map;
|
||||
}
|
||||
function multiMapAdd<T>(this: MultiMap<T>, key: string, value: T) {
|
||||
let values = this.get(key);
|
||||
if (values) {
|
||||
values.push(value);
|
||||
}
|
||||
else {
|
||||
this.set(key, values = [value]);
|
||||
}
|
||||
return values;
|
||||
|
||||
}
|
||||
function multiMapRemove<T>(this: MultiMap<T>, key: string, value: T) {
|
||||
const values = this.get(key);
|
||||
if (values) {
|
||||
unorderedRemoveItem(values, value);
|
||||
if (!values.length) {
|
||||
delete map[key];
|
||||
this.delete(key);
|
||||
}
|
||||
}
|
||||
}
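
createMultiMap wraps the old free functions multiMapAdd/multiMapRemove as methods on a Map<T[]>. A short usage sketch (the key and watcher strings are illustrative):

const watchersByFile = createMultiMap<string>();
watchersByFile.add("/src/a.ts", "watcher-1");
watchersByFile.add("/src/a.ts", "watcher-2");   // -> ["watcher-1", "watcher-2"]
watchersByFile.remove("/src/a.ts", "watcher-2");
watchersByFile.remove("/src/a.ts", "watcher-1");
watchersByFile.has("/src/a.ts");                // false: the key is deleted once its array empties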
|
||||
@ -1066,7 +1147,7 @@ namespace ts {
|
||||
return text.replace(/{(\d+)}/g, (_match, index?) => args[+index + baseIndex]);
|
||||
}
|
||||
|
||||
export let localizedDiagnosticMessages: Map<string> = undefined;
|
||||
export let localizedDiagnosticMessages: MapLike<string> = undefined;
|
||||
|
||||
export function getLocaleSpecificMessage(message: DiagnosticMessage) {
|
||||
return localizedDiagnosticMessages && localizedDiagnosticMessages[message.key] || message.message;
|
||||
@ -1182,9 +1263,12 @@ namespace ts {
|
||||
if (a === undefined) return Comparison.LessThan;
|
||||
if (b === undefined) return Comparison.GreaterThan;
|
||||
if (ignoreCase) {
|
||||
if (collator && String.prototype.localeCompare) {
|
||||
// accent means a ≠ b, a ≠ á, a = A
|
||||
const result = a.localeCompare(b, /*locales*/ undefined, { usage: "sort", sensitivity: "accent" });
|
||||
// Checking if "collator exists indicates that Intl is available.
|
||||
// We still have to check if "collator.compare" is correct. If it is not, use "String.localeComapre"
|
||||
if (collator) {
|
||||
const result = localeCompareIsCorrect ?
|
||||
collator.compare(a, b) :
|
||||
a.localeCompare(b, /*locales*/ undefined, { usage: "sort", sensitivity: "accent" }); // accent means a ≠ b, a ≠ á, a = A
|
||||
return result < 0 ? Comparison.LessThan : result > 0 ? Comparison.GreaterThan : Comparison.EqualTo;
|
||||
}
|
||||
|
||||
@ -1363,7 +1447,7 @@ namespace ts {
|
||||
return /^\.\.?($|[\\/])/.test(moduleName);
|
||||
}
|
||||
|
||||
export function getEmitScriptTarget(compilerOptions: CompilerOptions) {
|
||||
export function getEmitScriptTarget(compilerOptions: CompilerOptions | PrinterOptions) {
|
||||
return compilerOptions.target || ScriptTarget.ES3;
|
||||
}
|
||||
|
||||
@ -1650,7 +1734,19 @@ namespace ts {
|
||||
const singleAsteriskRegexFragmentFiles = "([^./]|(\\.(?!min\\.js$))?)*";
|
||||
const singleAsteriskRegexFragmentOther = "[^/]*";
|
||||
|
||||
export function getRegularExpressionForWildcard(specs: string[], basePath: string, usage: "files" | "directories" | "exclude") {
|
||||
export function getRegularExpressionForWildcard(specs: string[], basePath: string, usage: "files" | "directories" | "exclude"): string | undefined {
|
||||
const patterns = getRegularExpressionsForWildcards(specs, basePath, usage);
|
||||
if (!patterns || !patterns.length) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const pattern = patterns.map(pattern => `(${pattern})`).join("|");
|
||||
// If excluding, match "foo/bar/baz...", but if including, only allow "foo".
|
||||
const terminator = usage === "exclude" ? "($|/)" : "$";
|
||||
return `^(${pattern})${terminator}`;
|
||||
}
|
||||
|
||||
function getRegularExpressionsForWildcards(specs: string[], basePath: string, usage: "files" | "directories" | "exclude"): string[] | undefined {
|
||||
if (specs === undefined || specs.length === 0) {
|
||||
return undefined;
|
||||
}
|
||||
@ -1664,33 +1760,8 @@ namespace ts {
|
||||
*/
|
||||
const doubleAsteriskRegexFragment = usage === "exclude" ? "(/.+?)?" : "(/[^/.][^/]*)*?";
|
||||
|
||||
let pattern = "";
|
||||
let hasWrittenSubpattern = false;
|
||||
for (const spec of specs) {
|
||||
if (!spec) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const subPattern = getSubPatternFromSpec(spec, basePath, usage, singleAsteriskRegexFragment, doubleAsteriskRegexFragment, replaceWildcardCharacter);
|
||||
if (subPattern === undefined) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (hasWrittenSubpattern) {
|
||||
pattern += "|";
|
||||
}
|
||||
|
||||
pattern += "(" + subPattern + ")";
|
||||
hasWrittenSubpattern = true;
|
||||
}
|
||||
|
||||
if (!pattern) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
// If excluding, match "foo/bar/baz...", but if including, only allow "foo".
|
||||
const terminator = usage === "exclude" ? "($|/)" : "$";
|
||||
return `^(${pattern})${terminator}`;
|
||||
return flatMap(specs, spec =>
|
||||
spec && getSubPatternFromSpec(spec, basePath, usage, singleAsteriskRegexFragment, doubleAsteriskRegexFragment, replaceWildcardCharacter));
|
||||
}
|
||||
|
||||
/**
|
||||
@ -1785,6 +1856,9 @@ namespace ts {
|
||||
}
|
||||
|
||||
export interface FileMatcherPatterns {
|
||||
/** One pattern for each "include" spec. */
|
||||
includeFilePatterns: string[];
|
||||
/** One pattern matching one of any of the "include" specs. */
|
||||
includeFilePattern: string;
|
||||
includeDirectoryPattern: string;
|
||||
excludePattern: string;
|
||||
@ -1797,6 +1871,7 @@ namespace ts {
|
||||
const absolutePath = combinePaths(currentDirectory, path);
|
||||
|
||||
return {
|
||||
includeFilePatterns: map(getRegularExpressionsForWildcards(includes, absolutePath, "files"), pattern => `^${pattern}$`),
|
||||
includeFilePattern: getRegularExpressionForWildcard(includes, absolutePath, "files"),
|
||||
includeDirectoryPattern: getRegularExpressionForWildcard(includes, absolutePath, "directories"),
|
||||
excludePattern: getRegularExpressionForWildcard(excludes, absolutePath, "exclude"),
|
||||
@ -1811,26 +1886,39 @@ namespace ts {
|
||||
const patterns = getFileMatcherPatterns(path, excludes, includes, useCaseSensitiveFileNames, currentDirectory);
|
||||
|
||||
const regexFlag = useCaseSensitiveFileNames ? "" : "i";
|
||||
const includeFileRegex = patterns.includeFilePattern && new RegExp(patterns.includeFilePattern, regexFlag);
|
||||
const includeFileRegexes = patterns.includeFilePatterns && patterns.includeFilePatterns.map(pattern => new RegExp(pattern, regexFlag));
|
||||
const includeDirectoryRegex = patterns.includeDirectoryPattern && new RegExp(patterns.includeDirectoryPattern, regexFlag);
|
||||
const excludeRegex = patterns.excludePattern && new RegExp(patterns.excludePattern, regexFlag);
|
||||
|
||||
const result: string[] = [];
|
||||
// Associate an array of results with each include regex. This keeps results in order of the "include" order.
|
||||
// If there are no "includes", then just put everything in results[0].
|
||||
const results: string[][] = includeFileRegexes ? includeFileRegexes.map(() => []) : [[]];
|
||||
|
||||
const comparer = useCaseSensitiveFileNames ? compareStrings : compareStringsCaseInsensitive;
|
||||
for (const basePath of patterns.basePaths) {
|
||||
visitDirectory(basePath, combinePaths(currentDirectory, basePath));
|
||||
}
|
||||
return result;
|
||||
|
||||
return flatten(results);
|
||||
|
||||
function visitDirectory(path: string, absolutePath: string) {
|
||||
const { files, directories } = getFileSystemEntries(path);
|
||||
let { files, directories } = getFileSystemEntries(path);
|
||||
files = files.slice().sort(comparer);
|
||||
directories = directories.slice().sort(comparer);
|
||||
|
||||
for (const current of files) {
|
||||
const name = combinePaths(path, current);
|
||||
const absoluteName = combinePaths(absolutePath, current);
|
||||
if ((!extensions || fileExtensionIsAny(name, extensions)) &&
|
||||
(!includeFileRegex || includeFileRegex.test(absoluteName)) &&
|
||||
(!excludeRegex || !excludeRegex.test(absoluteName))) {
|
||||
result.push(name);
|
||||
if (extensions && !fileExtensionIsAny(name, extensions)) continue;
|
||||
if (excludeRegex && excludeRegex.test(absoluteName)) continue;
|
||||
if (!includeFileRegexes) {
|
||||
results[0].push(name);
|
||||
}
|
||||
else {
|
||||
const includeIndex = findIndex(includeFileRegexes, re => re.test(absoluteName));
|
||||
if (includeIndex !== -1) {
|
||||
results[includeIndex].push(name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -32,16 +32,18 @@ namespace ts {
|
||||
|
||||
export function getDeclarationDiagnostics(host: EmitHost, resolver: EmitResolver, targetSourceFile: SourceFile): Diagnostic[] {
|
||||
const declarationDiagnostics = createDiagnosticCollection();
|
||||
forEachExpectedEmitFile(host, getDeclarationDiagnosticsFromFile, targetSourceFile);
|
||||
forEachEmittedFile(host, getDeclarationDiagnosticsFromFile, targetSourceFile);
|
||||
return declarationDiagnostics.getDiagnostics(targetSourceFile ? targetSourceFile.fileName : undefined);
|
||||
|
||||
function getDeclarationDiagnosticsFromFile({ declarationFilePath }: EmitFileNames, sources: SourceFile[], isBundledEmit: boolean) {
|
||||
emitDeclarations(host, resolver, declarationDiagnostics, declarationFilePath, sources, isBundledEmit, /*emitOnlyDtsFiles*/ false);
|
||||
function getDeclarationDiagnosticsFromFile({ declarationFilePath }: EmitFileNames, sourceFileOrBundle: SourceFile | Bundle) {
|
||||
emitDeclarations(host, resolver, declarationDiagnostics, declarationFilePath, sourceFileOrBundle, /*emitOnlyDtsFiles*/ false);
|
||||
}
|
||||
}
|
||||
|
||||
function emitDeclarations(host: EmitHost, resolver: EmitResolver, emitterDiagnostics: DiagnosticCollection, declarationFilePath: string,
|
||||
sourceFiles: SourceFile[], isBundledEmit: boolean, emitOnlyDtsFiles: boolean): DeclarationEmit {
|
||||
sourceFileOrBundle: SourceFile | Bundle, emitOnlyDtsFiles: boolean): DeclarationEmit {
|
||||
const sourceFiles = sourceFileOrBundle.kind === SyntaxKind.Bundle ? sourceFileOrBundle.sourceFiles : [sourceFileOrBundle];
|
||||
const isBundledEmit = sourceFileOrBundle.kind === SyntaxKind.Bundle;
|
||||
const newLine = host.getNewLine();
|
||||
const compilerOptions = host.getCompilerOptions();
|
||||
|
||||
@ -156,9 +158,9 @@ namespace ts {
|
||||
});
|
||||
|
||||
if (usedTypeDirectiveReferences) {
|
||||
for (const directive in usedTypeDirectiveReferences) {
|
||||
forEachKey(usedTypeDirectiveReferences, directive => {
|
||||
referencesOutput += `/// <reference types="${directive}" />${newLine}`;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
@ -271,8 +273,8 @@ namespace ts {
|
||||
usedTypeDirectiveReferences = createMap<string>();
|
||||
}
|
||||
for (const directive of typeReferenceDirectives) {
|
||||
if (!(directive in usedTypeDirectiveReferences)) {
|
||||
usedTypeDirectiveReferences[directive] = directive;
|
||||
if (!usedTypeDirectiveReferences.has(directive)) {
|
||||
usedTypeDirectiveReferences.set(directive, directive);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -390,6 +392,7 @@ namespace ts {
|
||||
case SyntaxKind.StringKeyword:
|
||||
case SyntaxKind.NumberKeyword:
|
||||
case SyntaxKind.BooleanKeyword:
|
||||
case SyntaxKind.ObjectKeyword:
|
||||
case SyntaxKind.SymbolKeyword:
|
||||
case SyntaxKind.VoidKeyword:
|
||||
case SyntaxKind.UndefinedKeyword:
|
||||
@ -581,14 +584,14 @@ namespace ts {
|
||||
// do not need to keep track of created temp names.
|
||||
function getExportDefaultTempVariableName(): string {
|
||||
const baseName = "_default";
|
||||
if (!(baseName in currentIdentifiers)) {
|
||||
if (!currentIdentifiers.has(baseName)) {
|
||||
return baseName;
|
||||
}
|
||||
let count = 0;
|
||||
while (true) {
|
||||
count++;
|
||||
const name = baseName + "_" + count;
|
||||
if (!(name in currentIdentifiers)) {
|
||||
if (!currentIdentifiers.has(name)) {
|
||||
return name;
|
||||
}
|
||||
}
|
||||
@ -1787,7 +1790,7 @@ namespace ts {
|
||||
}
|
||||
else {
|
||||
// Get the declaration file path
|
||||
forEachExpectedEmitFile(host, getDeclFileName, referencedFile, emitOnlyDtsFiles);
|
||||
forEachEmittedFile(host, getDeclFileName, referencedFile, emitOnlyDtsFiles);
|
||||
}
|
||||
|
||||
if (declFileName) {
|
||||
@ -1802,8 +1805,9 @@ namespace ts {
|
||||
}
|
||||
return addedBundledEmitReference;
|
||||
|
||||
function getDeclFileName(emitFileNames: EmitFileNames, _sourceFiles: SourceFile[], isBundledEmit: boolean) {
|
||||
function getDeclFileName(emitFileNames: EmitFileNames, sourceFileOrBundle: SourceFile | Bundle) {
|
||||
// Dont add reference path to this file if it is a bundled emit and caller asked not emit bundled file path
|
||||
const isBundledEmit = sourceFileOrBundle.kind === SyntaxKind.Bundle;
|
||||
if (isBundledEmit && !addBundledFileReference) {
|
||||
return;
|
||||
}
|
||||
@ -1816,10 +1820,11 @@ namespace ts {
|
||||
}
|
||||
|
||||
/* @internal */
|
||||
export function writeDeclarationFile(declarationFilePath: string, sourceFiles: SourceFile[], isBundledEmit: boolean, host: EmitHost, resolver: EmitResolver, emitterDiagnostics: DiagnosticCollection, emitOnlyDtsFiles: boolean) {
|
||||
const emitDeclarationResult = emitDeclarations(host, resolver, emitterDiagnostics, declarationFilePath, sourceFiles, isBundledEmit, emitOnlyDtsFiles);
|
||||
export function writeDeclarationFile(declarationFilePath: string, sourceFileOrBundle: SourceFile | Bundle, host: EmitHost, resolver: EmitResolver, emitterDiagnostics: DiagnosticCollection, emitOnlyDtsFiles: boolean) {
|
||||
const emitDeclarationResult = emitDeclarations(host, resolver, emitterDiagnostics, declarationFilePath, sourceFileOrBundle, emitOnlyDtsFiles);
|
||||
const emitSkipped = emitDeclarationResult.reportedDeclarationError || host.isEmitBlocked(declarationFilePath) || host.getCompilerOptions().noEmit;
|
||||
if (!emitSkipped) {
|
||||
const sourceFiles = sourceFileOrBundle.kind === SyntaxKind.Bundle ? sourceFileOrBundle.sourceFiles : [sourceFileOrBundle];
|
||||
const declarationOutput = emitDeclarationResult.referencesOutput
|
||||
+ getDeclarationOutput(emitDeclarationResult.synchronousDeclarationOutput, emitDeclarationResult.moduleElementDeclarationEmitInfo);
|
||||
writeFile(host, emitterDiagnostics, declarationFilePath, declarationOutput, host.getCompilerOptions().emitBOM, sourceFiles);
|
||||
|
||||
@ -1795,18 +1795,22 @@
|
||||
"category": "Error",
|
||||
"code": 2544
|
||||
},
|
||||
"The type returned by the 'next()' method of an async iterator must be a promise for a type with a 'value' property.": {
|
||||
"A mixin class must have a constructor with a single rest parameter of type 'any[]'.": {
|
||||
"category": "Error",
|
||||
"code": 2545
|
||||
},
|
||||
"Type '{0}' is not an array type or does not have a '[Symbol.iterator]()' method that returns an iterator.": {
|
||||
"The type returned by the 'next()' method of an async iterator must be a promise for a type with a 'value' property.": {
|
||||
"category": "Error",
|
||||
"code": 2546
|
||||
},
|
||||
"Type '{0}' is not an array type or a string type or does not have a '[Symbol.iterator]()' method that returns an iterator.": {
|
||||
"Type '{0}' is not an array type or does not have a '[Symbol.iterator]()' method that returns an iterator.": {
|
||||
"category": "Error",
|
||||
"code": 2547
|
||||
},
|
||||
"Type '{0}' is not an array type or a string type or does not have a '[Symbol.iterator]()' method that returns an iterator.": {
|
||||
"category": "Error",
|
||||
"code": 2548
|
||||
},
|
||||
"JSX element attributes type '{0}' may not be a union type.": {
|
||||
"category": "Error",
|
||||
"code": 2600
|
||||
@ -1847,6 +1851,10 @@
|
||||
"category": "Error",
|
||||
"code": 2609
|
||||
},
|
||||
"Cannot augment module '{0}' with value exports because it resolves to a non-module entity.": {
|
||||
"category": "Error",
|
||||
"code": 2649
|
||||
},
|
||||
"Cannot emit namespaced JSX elements in React": {
|
||||
"category": "Error",
|
||||
"code": 2650
|
||||
@ -2039,6 +2047,10 @@
|
||||
"category": "Error",
|
||||
"code": 2698
|
||||
},
|
||||
"Static property '{0}' conflicts with built-in property 'Function.{0}' of constructor function '{1}'.": {
|
||||
"category": "Error",
|
||||
"code": 2699
|
||||
},
|
||||
"Rest types may only be created from object types.": {
|
||||
"category": "Error",
|
||||
"code": 2700
|
||||
@ -2733,7 +2745,7 @@
|
||||
"category": "Message",
|
||||
"code": 6079
|
||||
},
|
||||
"Specify JSX code generation: 'preserve' or 'react'": {
|
||||
"Specify JSX code generation: 'preserve', 'react-native', or 'react'": {
|
||||
"category": "Message",
|
||||
"code": 6080
|
||||
},
|
||||
@ -3287,6 +3299,10 @@
|
||||
"category": "Message",
|
||||
"code": 90007
|
||||
},
|
||||
"Add 'this.' to unresolved variable.": {
|
||||
"category": "Message",
|
||||
"code": 90008
|
||||
},
|
||||
"Adding a tsconfig.json file will help organize projects that contain both TypeScript and JavaScript files. Learn more at https://aka.ms/tsconfig": {
|
||||
"category": "Error",
|
||||
"code": 90009
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@ -1,4 +1,4 @@
|
||||
/// <reference path="core.ts"/>
|
||||
/// <reference path="core.ts"/>
|
||||
/// <reference path="utilities.ts"/>
|
||||
|
||||
/* @internal */
|
||||
@ -1536,6 +1536,19 @@ namespace ts {
|
||||
return node;
|
||||
}
|
||||
|
||||
export function createBundle(sourceFiles: SourceFile[]) {
|
||||
const node = <Bundle>createNode(SyntaxKind.Bundle);
|
||||
node.sourceFiles = sourceFiles;
|
||||
return node;
|
||||
}
|
||||
|
||||
export function updateBundle(node: Bundle, sourceFiles: SourceFile[]) {
|
||||
if (node.sourceFiles !== sourceFiles) {
|
||||
return createBundle(sourceFiles);
|
||||
}
|
||||
return node;
|
||||
}
|
||||
|
||||
// Compound nodes
|
||||
|
||||
export function createComma(left: Expression, right: Expression) {
|
||||
@ -2779,9 +2792,11 @@ namespace ts {
|
||||
return destEmitNode;
|
||||
}
|
||||
|
||||
function mergeTokenSourceMapRanges(sourceRanges: Map<TextRange>, destRanges: Map<TextRange>) {
|
||||
if (!destRanges) destRanges = createMap<TextRange>();
|
||||
copyProperties(sourceRanges, destRanges);
|
||||
function mergeTokenSourceMapRanges(sourceRanges: TextRange[], destRanges: TextRange[]) {
|
||||
if (!destRanges) destRanges = [];
|
||||
for (const key in sourceRanges) {
|
||||
destRanges[key] = sourceRanges[key];
|
||||
}
|
||||
return destRanges;
|
||||
}
|
||||
|
||||
@ -2895,7 +2910,7 @@ namespace ts {
|
||||
*/
|
||||
export function setTokenSourceMapRange<T extends Node>(node: T, token: SyntaxKind, range: TextRange) {
|
||||
const emitNode = getOrCreateEmitNode(node);
|
||||
const tokenSourceMapRanges = emitNode.tokenSourceMapRanges || (emitNode.tokenSourceMapRanges = createMap<TextRange>());
|
||||
const tokenSourceMapRanges = emitNode.tokenSourceMapRanges || (emitNode.tokenSourceMapRanges = []);
|
||||
tokenSourceMapRanges[token] = range;
|
||||
return node;
|
||||
}
|
||||
@ -3110,10 +3125,8 @@ namespace ts {
|
||||
* Here we check if alternative name was provided for a given moduleName and return it if possible.
|
||||
*/
|
||||
function tryRenameExternalModule(moduleName: LiteralExpression, sourceFile: SourceFile) {
|
||||
if (sourceFile.renamedDependencies && hasProperty(sourceFile.renamedDependencies, moduleName.text)) {
|
||||
return createLiteral(sourceFile.renamedDependencies[moduleName.text]);
|
||||
}
|
||||
return undefined;
|
||||
const rename = sourceFile.renamedDependencies && sourceFile.renamedDependencies.get(moduleName.text);
|
||||
return rename && createLiteral(rename);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -3413,7 +3426,7 @@ namespace ts {
|
||||
externalImports: (ImportDeclaration | ImportEqualsDeclaration | ExportDeclaration)[]; // imports of other external modules
|
||||
externalHelpersImportDeclaration: ImportDeclaration | undefined; // import of external helpers
|
||||
exportSpecifiers: Map<ExportSpecifier[]>; // export specifiers by name
|
||||
exportedBindings: Map<Identifier[]>; // exported names of local declarations
|
||||
exportedBindings: Identifier[][]; // exported names of local declarations
|
||||
exportedNames: Identifier[]; // all exported names local to module
|
||||
exportEquals: ExportAssignment | undefined; // an export= declaration if one was present
|
||||
hasExportStarsToExportValues: boolean; // whether this module contains export*
|
||||
@ -3421,8 +3434,8 @@ namespace ts {
|
||||
|
||||
export function collectExternalModuleInfo(sourceFile: SourceFile, resolver: EmitResolver, compilerOptions: CompilerOptions): ExternalModuleInfo {
|
||||
const externalImports: (ImportDeclaration | ImportEqualsDeclaration | ExportDeclaration)[] = [];
|
||||
const exportSpecifiers = createMap<ExportSpecifier[]>();
|
||||
const exportedBindings = createMap<Identifier[]>();
|
||||
const exportSpecifiers = createMultiMap<ExportSpecifier>();
|
||||
const exportedBindings: Identifier[][] = [];
|
||||
const uniqueExports = createMap<boolean>();
|
||||
let exportedNames: Identifier[];
|
||||
let hasExportDefault = false;
|
||||
@ -3473,18 +3486,18 @@ namespace ts {
|
||||
else {
|
||||
// export { x, y }
|
||||
for (const specifier of (<ExportDeclaration>node).exportClause.elements) {
|
||||
if (!uniqueExports[specifier.name.text]) {
|
||||
if (!uniqueExports.get(specifier.name.text)) {
|
||||
const name = specifier.propertyName || specifier.name;
|
||||
multiMapAdd(exportSpecifiers, name.text, specifier);
|
||||
exportSpecifiers.add(name.text, specifier);
|
||||
|
||||
const decl = resolver.getReferencedImportDeclaration(name)
|
||||
|| resolver.getReferencedValueDeclaration(name);
|
||||
|
||||
if (decl) {
|
||||
multiMapAdd(exportedBindings, getOriginalNodeId(decl), specifier.name);
|
||||
multiMapSparseArrayAdd(exportedBindings, getOriginalNodeId(decl), specifier.name);
|
||||
}
|
||||
|
||||
uniqueExports[specifier.name.text] = true;
|
||||
uniqueExports.set(specifier.name.text, true);
|
||||
exportedNames = append(exportedNames, specifier.name);
|
||||
}
|
||||
}
|
||||
@ -3511,16 +3524,16 @@ namespace ts {
|
||||
if (hasModifier(node, ModifierFlags.Default)) {
|
||||
// export default function() { }
|
||||
if (!hasExportDefault) {
|
||||
multiMapAdd(exportedBindings, getOriginalNodeId(node), getDeclarationName(<FunctionDeclaration>node));
|
||||
multiMapSparseArrayAdd(exportedBindings, getOriginalNodeId(node), getDeclarationName(<FunctionDeclaration>node));
|
||||
hasExportDefault = true;
|
||||
}
|
||||
}
|
||||
else {
|
||||
// export function x() { }
|
||||
const name = (<FunctionDeclaration>node).name;
|
||||
if (!uniqueExports[name.text]) {
|
||||
multiMapAdd(exportedBindings, getOriginalNodeId(node), name);
|
||||
uniqueExports[name.text] = true;
|
||||
if (!uniqueExports.get(name.text)) {
|
||||
multiMapSparseArrayAdd(exportedBindings, getOriginalNodeId(node), name);
|
||||
uniqueExports.set(name.text, true);
|
||||
exportedNames = append(exportedNames, name);
|
||||
}
|
||||
}
|
||||
@ -3532,16 +3545,16 @@ namespace ts {
|
||||
if (hasModifier(node, ModifierFlags.Default)) {
|
||||
// export default class { }
|
||||
if (!hasExportDefault) {
|
||||
multiMapAdd(exportedBindings, getOriginalNodeId(node), getDeclarationName(<ClassDeclaration>node));
|
||||
multiMapSparseArrayAdd(exportedBindings, getOriginalNodeId(node), getDeclarationName(<ClassDeclaration>node));
|
||||
hasExportDefault = true;
|
||||
}
|
||||
}
|
||||
else {
|
||||
// export class x { }
|
||||
const name = (<ClassDeclaration>node).name;
|
||||
if (!uniqueExports[name.text]) {
|
||||
multiMapAdd(exportedBindings, getOriginalNodeId(node), name);
|
||||
uniqueExports[name.text] = true;
|
||||
if (!uniqueExports.get(name.text)) {
|
||||
multiMapSparseArrayAdd(exportedBindings, getOriginalNodeId(node), name);
|
||||
uniqueExports.set(name.text, true);
|
||||
exportedNames = append(exportedNames, name);
|
||||
}
|
||||
}
|
||||
@ -3562,11 +3575,23 @@ namespace ts {
|
||||
}
|
||||
}
|
||||
else if (!isGeneratedIdentifier(decl.name)) {
|
||||
if (!uniqueExports[decl.name.text]) {
|
||||
uniqueExports[decl.name.text] = true;
|
||||
if (!uniqueExports.get(decl.name.text)) {
|
||||
uniqueExports.set(decl.name.text, true);
|
||||
exportedNames = append(exportedNames, decl.name);
|
||||
}
|
||||
}
|
||||
return exportedNames;
|
||||
}
|
||||
|
||||
/** Use a sparse array as a multi-map. */
|
||||
function multiMapSparseArrayAdd<V>(map: V[][], key: number, value: V): V[] {
|
||||
let values = map[key];
|
||||
if (values) {
|
||||
values.push(value);
|
||||
}
|
||||
else {
|
||||
map[key] = values = [value];
|
||||
}
|
||||
return values;
|
||||
}
|
||||
}
|
||||
|
||||
@ -336,9 +336,10 @@ namespace ts {
|
||||
if (!moduleHasNonRelativeName(nonRelativeModuleName)) {
|
||||
return undefined;
|
||||
}
|
||||
let perModuleNameCache = moduleNameToDirectoryMap[nonRelativeModuleName];
|
||||
let perModuleNameCache = moduleNameToDirectoryMap.get(nonRelativeModuleName);
|
||||
if (!perModuleNameCache) {
|
||||
moduleNameToDirectoryMap[nonRelativeModuleName] = perModuleNameCache = createPerModuleNameCache();
|
||||
perModuleNameCache = createPerModuleNameCache();
|
||||
moduleNameToDirectoryMap.set(nonRelativeModuleName, perModuleNameCache);
|
||||
}
|
||||
return perModuleNameCache;
|
||||
}
|
||||
@ -422,7 +423,7 @@ namespace ts {
|
||||
}
|
||||
const containingDirectory = getDirectoryPath(containingFile);
|
||||
const perFolderCache = cache && cache.getOrCreateCacheForDirectory(containingDirectory);
|
||||
let result = perFolderCache && perFolderCache[moduleName];
|
||||
let result = perFolderCache && perFolderCache.get(moduleName);
|
||||
|
||||
if (result) {
|
||||
if (traceEnabled) {
|
||||
@ -453,7 +454,7 @@ namespace ts {
|
||||
}
|
||||
|
||||
if (perFolderCache) {
|
||||
perFolderCache[moduleName] = result;
|
||||
perFolderCache.set(moduleName, result);
|
||||
// put result in per-module name cache
|
||||
const perModuleNameCache = cache.getOrCreateCacheForModuleName(moduleName);
|
||||
if (perModuleNameCache) {
|
||||
|
||||
@ -1135,7 +1135,11 @@ namespace ts {
|
||||
|
||||
function internIdentifier(text: string): string {
|
||||
text = escapeIdentifier(text);
|
||||
return identifiers[text] || (identifiers[text] = text);
|
||||
let identifier = identifiers.get(text);
|
||||
if (identifier === undefined) {
|
||||
identifiers.set(text, identifier = text);
|
||||
}
|
||||
return identifier;
|
||||
}
|
||||
|
||||
// An identifier that starts with two underscores has an extra underscore character prepended to it to avoid issues
|
||||
@ -6692,10 +6696,15 @@ namespace ts {
|
||||
typedefTag.fullName = parseJSDocTypeNameWithNamespace(/*flags*/ 0);
|
||||
if (typedefTag.fullName) {
|
||||
let rightNode = typedefTag.fullName;
|
||||
while (rightNode.kind !== SyntaxKind.Identifier) {
|
||||
while (true) {
|
||||
if (rightNode.kind === SyntaxKind.Identifier || !rightNode.body) {
|
||||
// if node is identifier - use it as name
|
||||
// otherwise use name of the rightmost part that we were able to parse
|
||||
typedefTag.name = rightNode.kind === SyntaxKind.Identifier ? rightNode : rightNode.name;
|
||||
break;
|
||||
}
|
||||
rightNode = rightNode.body;
|
||||
}
|
||||
typedefTag.name = rightNode;
|
||||
}
|
||||
typedefTag.typeExpression = typeExpression;
|
||||
skipWhitespace();
|
||||
|
||||
@ -27,8 +27,8 @@ namespace ts.performance {
|
||||
*/
|
||||
export function mark(markName: string) {
|
||||
if (enabled) {
|
||||
marks[markName] = timestamp();
|
||||
counts[markName] = (counts[markName] || 0) + 1;
|
||||
marks.set(markName, timestamp());
|
||||
counts.set(markName, (counts.get(markName) || 0) + 1);
|
||||
profilerEvent(markName);
|
||||
}
|
||||
}
|
||||
@ -44,9 +44,9 @@ namespace ts.performance {
|
||||
*/
|
||||
export function measure(measureName: string, startMarkName?: string, endMarkName?: string) {
|
||||
if (enabled) {
|
||||
const end = endMarkName && marks[endMarkName] || timestamp();
|
||||
const start = startMarkName && marks[startMarkName] || profilerStart;
|
||||
measures[measureName] = (measures[measureName] || 0) + (end - start);
|
||||
const end = endMarkName && marks.get(endMarkName) || timestamp();
|
||||
const start = startMarkName && marks.get(startMarkName) || profilerStart;
|
||||
measures.set(measureName, (measures.get(measureName) || 0) + (end - start));
|
||||
}
|
||||
}
|
||||
|
||||
@ -56,7 +56,7 @@ namespace ts.performance {
|
||||
* @param markName The name of the mark.
|
||||
*/
|
||||
export function getCount(markName: string) {
|
||||
return counts && counts[markName] || 0;
|
||||
return counts && counts.get(markName) || 0;
|
||||
}
|
||||
|
||||
/**
|
||||
@ -65,7 +65,7 @@ namespace ts.performance {
|
||||
* @param measureName The name of the measure whose durations should be accumulated.
|
||||
*/
|
||||
export function getDuration(measureName: string) {
|
||||
return measures && measures[measureName] || 0;
|
||||
return measures && measures.get(measureName) || 0;
|
||||
}
|
||||
|
||||
/**
|
||||
@ -74,9 +74,9 @@ namespace ts.performance {
|
||||
* @param cb The action to perform for each measure
|
||||
*/
|
||||
export function forEachMeasure(cb: (measureName: string, duration: number) => void) {
|
||||
for (const key in measures) {
|
||||
cb(key, measures[key]);
|
||||
}
|
||||
measures.forEach((measure, key) => {
|
||||
cb(key, measure);
|
||||
});
|
||||
}
|
||||
|
||||
/** Enables (and resets) performance measurements for the compiler. */
|
||||
|
||||
@ -111,11 +111,11 @@ namespace ts {
|
||||
}
|
||||
|
||||
function directoryExists(directoryPath: string): boolean {
|
||||
if (directoryPath in existingDirectories) {
|
||||
if (existingDirectories.has(directoryPath)) {
|
||||
return true;
|
||||
}
|
||||
if (sys.directoryExists(directoryPath)) {
|
||||
existingDirectories[directoryPath] = true;
|
||||
existingDirectories.set(directoryPath, true);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
@ -139,11 +139,11 @@ namespace ts {
|
||||
const hash = sys.createHash(data);
|
||||
const mtimeBefore = sys.getModifiedTime(fileName);
|
||||
|
||||
if (mtimeBefore && fileName in outputFingerprints) {
|
||||
const fingerprint = outputFingerprints[fileName];
|
||||
|
||||
if (mtimeBefore) {
|
||||
const fingerprint = outputFingerprints.get(fileName);
|
||||
// If output has not been changed, and the file has no external modification
|
||||
if (fingerprint.byteOrderMark === writeByteOrderMark &&
|
||||
if (fingerprint &&
|
||||
fingerprint.byteOrderMark === writeByteOrderMark &&
|
||||
fingerprint.hash === hash &&
|
||||
fingerprint.mtime.getTime() === mtimeBefore.getTime()) {
|
||||
return;
|
||||
@ -154,11 +154,11 @@ namespace ts {
|
||||
|
||||
const mtimeAfter = sys.getModifiedTime(fileName);
|
||||
|
||||
outputFingerprints[fileName] = {
|
||||
outputFingerprints.set(fileName, {
|
||||
hash,
|
||||
byteOrderMark: writeByteOrderMark,
|
||||
mtime: mtimeAfter
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
function writeFile(fileName: string, data: string, writeByteOrderMark: boolean, onError?: (message: string) => void) {
|
||||
@ -278,9 +278,13 @@ namespace ts {
|
||||
const resolutions: T[] = [];
|
||||
const cache = createMap<T>();
|
||||
for (const name of names) {
|
||||
const result = name in cache
|
||||
? cache[name]
|
||||
: cache[name] = loader(name, containingFile);
|
||||
let result: T;
|
||||
if (cache.has(name)) {
|
||||
result = cache.get(name);
|
||||
}
|
||||
else {
|
||||
cache.set(name, result = loader(name, containingFile));
|
||||
}
|
||||
resolutions.push(result);
|
||||
}
|
||||
return resolutions;
|
||||
@ -435,7 +439,7 @@ namespace ts {
|
||||
|
||||
function getCommonSourceDirectory() {
|
||||
if (commonSourceDirectory === undefined) {
|
||||
const emittedFiles = filterSourceFilesInDirectory(files, isSourceFileFromExternalLibrary);
|
||||
const emittedFiles = filter(files, file => sourceFileMayBeEmitted(file, options, isSourceFileFromExternalLibrary));
|
||||
if (options.rootDir && checkSourceFilesBelongToPath(emittedFiles, options.rootDir)) {
|
||||
// If a rootDir is specified and is valid use it as the commonSourceDirectory
|
||||
commonSourceDirectory = getNormalizedAbsolutePath(options.rootDir, currentDirectory);
|
||||
@ -460,7 +464,7 @@ namespace ts {
|
||||
classifiableNames = createMap<string>();
|
||||
|
||||
for (const sourceFile of files) {
|
||||
copyProperties(sourceFile.classifiableNames, classifiableNames);
|
||||
copyEntries(sourceFile.classifiableNames, classifiableNames);
|
||||
}
|
||||
}
|
||||
|
||||
@ -735,7 +739,7 @@ namespace ts {
|
||||
}
|
||||
|
||||
function isSourceFileFromExternalLibrary(file: SourceFile): boolean {
|
||||
return sourceFilesFoundSearchingNodeModules[file.path];
|
||||
return sourceFilesFoundSearchingNodeModules.get(file.path);
|
||||
}
|
||||
|
||||
function getDiagnosticsProducingTypeChecker() {
|
||||
@ -1298,20 +1302,20 @@ namespace ts {
|
||||
|
||||
// If the file was previously found via a node_modules search, but is now being processed as a root file,
|
||||
// then everything it sucks in may also be marked incorrectly, and needs to be checked again.
|
||||
if (file && sourceFilesFoundSearchingNodeModules[file.path] && currentNodeModulesDepth == 0) {
|
||||
sourceFilesFoundSearchingNodeModules[file.path] = false;
|
||||
if (file && sourceFilesFoundSearchingNodeModules.get(file.path) && currentNodeModulesDepth == 0) {
|
||||
sourceFilesFoundSearchingNodeModules.set(file.path, false);
|
||||
if (!options.noResolve) {
|
||||
processReferencedFiles(file, isDefaultLib);
|
||||
processTypeReferenceDirectives(file);
|
||||
}
|
||||
|
||||
modulesWithElidedImports[file.path] = false;
|
||||
modulesWithElidedImports.set(file.path, false);
|
||||
processImportedModules(file);
|
||||
}
|
||||
// See if we need to reprocess the imports due to prior skipped imports
|
||||
else if (file && modulesWithElidedImports[file.path]) {
|
||||
else if (file && modulesWithElidedImports.get(file.path)) {
|
||||
if (currentNodeModulesDepth < maxNodeModuleJsDepth) {
|
||||
modulesWithElidedImports[file.path] = false;
|
||||
modulesWithElidedImports.set(file.path, false);
|
||||
processImportedModules(file);
|
||||
}
|
||||
}
|
||||
@ -1332,7 +1336,7 @@ namespace ts {
|
||||
|
||||
filesByName.set(path, file);
|
||||
if (file) {
|
||||
sourceFilesFoundSearchingNodeModules[path] = (currentNodeModulesDepth > 0);
|
||||
sourceFilesFoundSearchingNodeModules.set(path, currentNodeModulesDepth > 0);
|
||||
file.path = path;
|
||||
|
||||
if (host.useCaseSensitiveFileNames()) {
|
||||
@ -1393,7 +1397,7 @@ namespace ts {
|
||||
refFile?: SourceFile, refPos?: number, refEnd?: number): void {
|
||||
|
||||
// If we already found this library as a primary reference - nothing to do
|
||||
const previousResolution = resolvedTypeReferenceDirectives[typeReferenceDirective];
|
||||
const previousResolution = resolvedTypeReferenceDirectives.get(typeReferenceDirective);
|
||||
if (previousResolution && previousResolution.primary) {
|
||||
return;
|
||||
}
|
||||
@ -1433,7 +1437,7 @@ namespace ts {
|
||||
}
|
||||
|
||||
if (saveResolution) {
|
||||
resolvedTypeReferenceDirectives[typeReferenceDirective] = resolvedTypeReferenceDirective;
|
||||
resolvedTypeReferenceDirectives.set(typeReferenceDirective, resolvedTypeReferenceDirective);
|
||||
}
|
||||
}
|
||||
|
||||
@ -1486,7 +1490,7 @@ namespace ts {
|
||||
const shouldAddFile = resolvedFileName && !getResolutionDiagnostic(options, resolution) && !options.noResolve && i < file.imports.length && !elideImport;
|
||||
|
||||
if (elideImport) {
|
||||
modulesWithElidedImports[file.path] = true;
|
||||
modulesWithElidedImports.set(file.path, true);
|
||||
}
|
||||
else if (shouldAddFile) {
|
||||
const path = toPath(resolvedFileName, currentDirectory, getCanonicalFileName);
|
||||
@ -1703,7 +1707,7 @@ namespace ts {
|
||||
if (!options.noEmit && !options.suppressOutputPathCheck) {
|
||||
const emitHost = getEmitHost();
|
||||
const emitFilesSeen = createFileMap<boolean>(!host.useCaseSensitiveFileNames() ? key => key.toLocaleLowerCase() : undefined);
|
||||
forEachExpectedEmitFile(emitHost, (emitFileNames) => {
|
||||
forEachEmittedFile(emitHost, (emitFileNames) => {
|
||||
verifyEmitFilePath(emitFileNames.jsFilePath, emitFilesSeen);
|
||||
verifyEmitFilePath(emitFileNames.declarationFilePath, emitFilesSeen);
|
||||
});
|
||||
|
||||
@ -56,7 +56,7 @@ namespace ts {
|
||||
tryScan<T>(callback: () => T): T;
|
||||
}
|
||||
|
||||
const textToToken = createMap({
|
||||
const textToToken = createMapFromTemplate({
|
||||
"abstract": SyntaxKind.AbstractKeyword,
|
||||
"any": SyntaxKind.AnyKeyword,
|
||||
"as": SyntaxKind.AsKeyword,
|
||||
@ -276,9 +276,9 @@ namespace ts {
|
||||
|
||||
function makeReverseMap(source: Map<number>): string[] {
|
||||
const result: string[] = [];
|
||||
for (const name in source) {
|
||||
result[source[name]] = name;
|
||||
}
|
||||
source.forEach((value, name) => {
|
||||
result[value] = name;
|
||||
});
|
||||
return result;
|
||||
}
|
||||
|
||||
@ -290,7 +290,7 @@ namespace ts {
|
||||
|
||||
/* @internal */
|
||||
export function stringToToken(s: string): SyntaxKind {
|
||||
return textToToken[s];
|
||||
return textToToken.get(s);
|
||||
}
|
||||
|
||||
/* @internal */
|
||||
@ -364,8 +364,6 @@ namespace ts {
|
||||
return computeLineAndCharacterOfPosition(getLineStarts(sourceFile), position);
|
||||
}
|
||||
|
||||
const hasOwnProperty = Object.prototype.hasOwnProperty;
|
||||
|
||||
export function isWhiteSpace(ch: number): boolean {
|
||||
return isWhiteSpaceSingleLine(ch) || isLineBreak(ch);
|
||||
}
|
||||
@ -1184,8 +1182,11 @@ namespace ts {
|
||||
const len = tokenValue.length;
|
||||
if (len >= 2 && len <= 11) {
|
||||
const ch = tokenValue.charCodeAt(0);
|
||||
if (ch >= CharacterCodes.a && ch <= CharacterCodes.z && hasOwnProperty.call(textToToken, tokenValue)) {
|
||||
return token = textToToken[tokenValue];
|
||||
if (ch >= CharacterCodes.a && ch <= CharacterCodes.z) {
|
||||
token = textToToken.get(tokenValue);
|
||||
if (token !== undefined) {
|
||||
return token;
|
||||
}
|
||||
}
|
||||
}
|
||||
return token = SyntaxKind.Identifier;
|
||||
|
||||
@ -8,10 +8,9 @@ namespace ts {
|
||||
*
|
||||
* @param filePath The path to the generated output file.
|
||||
* @param sourceMapFilePath The path to the output source map file.
|
||||
* @param sourceFiles The input source files for the program.
|
||||
* @param isBundledEmit A value indicating whether the generated output file is a bundle.
|
||||
* @param sourceFileOrBundle The input source file or bundle for the program.
|
||||
*/
|
||||
initialize(filePath: string, sourceMapFilePath: string, sourceFiles: SourceFile[], isBundledEmit: boolean): void;
|
||||
initialize(filePath: string, sourceMapFilePath: string, sourceFileOrBundle: SourceFile | Bundle): void;
|
||||
|
||||
/**
|
||||
* Reset the SourceMapWriter to an empty state.
|
||||
@ -38,11 +37,11 @@ namespace ts {
|
||||
/**
|
||||
* Emits a node with possible leading and trailing source maps.
|
||||
*
|
||||
* @param emitContext The current emit context
|
||||
* @param hint The current emit context
|
||||
* @param node The node to emit.
|
||||
* @param emitCallback The callback used to emit the node.
|
||||
*/
|
||||
emitNodeWithSourceMap(emitContext: EmitContext, node: Node, emitCallback: (emitContext: EmitContext, node: Node) => void): void;
|
||||
emitNodeWithSourceMap(hint: EmitHint, node: Node, emitCallback: (hint: EmitHint, node: Node) => void): void;
|
||||
|
||||
/**
|
||||
* Emits a token of a node node with possible leading and trailing source maps.
|
||||
@ -115,10 +114,9 @@ namespace ts {
|
||||
*
|
||||
* @param filePath The path to the generated output file.
|
||||
* @param sourceMapFilePath The path to the output source map file.
|
||||
* @param sourceFiles The input source files for the program.
|
||||
* @param isBundledEmit A value indicating whether the generated output file is a bundle.
|
||||
* @param sourceFileOrBundle The input source file or bundle for the program.
|
||||
*/
|
||||
function initialize(filePath: string, sourceMapFilePath: string, sourceFiles: SourceFile[], isBundledEmit: boolean) {
|
||||
function initialize(filePath: string, sourceMapFilePath: string, sourceFileOrBundle: SourceFile | Bundle) {
|
||||
if (disabled) {
|
||||
return;
|
||||
}
|
||||
@ -161,11 +159,10 @@ namespace ts {
|
||||
|
||||
if (compilerOptions.mapRoot) {
|
||||
sourceMapDir = normalizeSlashes(compilerOptions.mapRoot);
|
||||
if (!isBundledEmit) { // emitting single module file
|
||||
Debug.assert(sourceFiles.length === 1);
|
||||
if (sourceFileOrBundle.kind === SyntaxKind.SourceFile) { // emitting single module file
|
||||
// For modules or multiple emit files the mapRoot will have directory structure like the sources
|
||||
// So if src\a.ts and src\lib\b.ts are compiled together user would be moving the maps into mapRoot\a.js.map and mapRoot\lib\b.js.map
|
||||
sourceMapDir = getDirectoryPath(getSourceFilePathInNewDir(sourceFiles[0], host, sourceMapDir));
|
||||
sourceMapDir = getDirectoryPath(getSourceFilePathInNewDir(sourceFileOrBundle, host, sourceMapDir));
|
||||
}
|
||||
|
||||
if (!isRootedDiskPath(sourceMapDir) && !isUrl(sourceMapDir)) {
|
||||
@ -311,12 +308,13 @@ namespace ts {
|
||||
/**
|
||||
* Emits a node with possible leading and trailing source maps.
|
||||
*
|
||||
* @param hint A hint as to the intended usage of the node.
|
||||
* @param node The node to emit.
|
||||
* @param emitCallback The callback used to emit the node.
|
||||
*/
|
||||
function emitNodeWithSourceMap(emitContext: EmitContext, node: Node, emitCallback: (emitContext: EmitContext, node: Node) => void) {
|
||||
function emitNodeWithSourceMap(hint: EmitHint, node: Node, emitCallback: (hint: EmitHint, node: Node) => void) {
|
||||
if (disabled) {
|
||||
return emitCallback(emitContext, node);
|
||||
return emitCallback(hint, node);
|
||||
}
|
||||
|
||||
if (node) {
|
||||
@ -332,11 +330,11 @@ namespace ts {
|
||||
|
||||
if (emitFlags & EmitFlags.NoNestedSourceMaps) {
|
||||
disabled = true;
|
||||
emitCallback(emitContext, node);
|
||||
emitCallback(hint, node);
|
||||
disabled = false;
|
||||
}
|
||||
else {
|
||||
emitCallback(emitContext, node);
|
||||
emitCallback(hint, node);
|
||||
}
|
||||
|
||||
if (node.kind !== SyntaxKind.NotEmittedStatement
|
||||
|
||||
@ -1,4 +1,7 @@
|
||||
/// <reference path="core.ts"/>
|
||||
/// <reference path="core.ts"/>
|
||||
|
||||
declare function setTimeout(handler: (...args: any[]) => void, timeout: number): any;
|
||||
declare function clearTimeout(handle: any): void;
|
||||
|
||||
namespace ts {
|
||||
export type FileWatcherCallback = (fileName: string, removed?: boolean) => void;
|
||||
@ -243,23 +246,23 @@ namespace ts {
|
||||
function createWatchedFileSet() {
|
||||
const dirWatchers = createMap<DirectoryWatcher>();
|
||||
// One file can have multiple watchers
|
||||
const fileWatcherCallbacks = createMap<FileWatcherCallback[]>();
|
||||
const fileWatcherCallbacks = createMultiMap<FileWatcherCallback>();
|
||||
return { addFile, removeFile };
|
||||
|
||||
function reduceDirWatcherRefCountForFile(fileName: string) {
|
||||
const dirName = getDirectoryPath(fileName);
|
||||
const watcher = dirWatchers[dirName];
|
||||
const watcher = dirWatchers.get(dirName);
|
||||
if (watcher) {
|
||||
watcher.referenceCount -= 1;
|
||||
if (watcher.referenceCount <= 0) {
|
||||
watcher.close();
|
||||
delete dirWatchers[dirName];
|
||||
dirWatchers.delete(dirName);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function addDirWatcher(dirPath: string): void {
|
||||
let watcher = dirWatchers[dirPath];
|
||||
let watcher = dirWatchers.get(dirPath);
|
||||
if (watcher) {
|
||||
watcher.referenceCount += 1;
|
||||
return;
|
||||
@ -270,12 +273,12 @@ namespace ts {
|
||||
(eventName: string, relativeFileName: string) => fileEventHandler(eventName, relativeFileName, dirPath)
|
||||
);
|
||||
watcher.referenceCount = 1;
|
||||
dirWatchers[dirPath] = watcher;
|
||||
dirWatchers.set(dirPath, watcher);
|
||||
return;
|
||||
}
|
||||
|
||||
function addFileWatcherCallback(filePath: string, callback: FileWatcherCallback): void {
|
||||
multiMapAdd(fileWatcherCallbacks, filePath, callback);
|
||||
fileWatcherCallbacks.add(filePath, callback);
|
||||
}
|
||||
|
||||
function addFile(fileName: string, callback: FileWatcherCallback): WatchedFile {
|
||||
@ -291,7 +294,7 @@ namespace ts {
|
||||
}
|
||||
|
||||
function removeFileWatcherCallback(filePath: string, callback: FileWatcherCallback) {
|
||||
multiMapRemove(fileWatcherCallbacks, filePath, callback);
|
||||
fileWatcherCallbacks.remove(filePath, callback);
|
||||
}
|
||||
|
||||
function fileEventHandler(eventName: string, relativeFileName: string, baseDirPath: string) {
|
||||
@ -300,9 +303,12 @@ namespace ts {
|
||||
? undefined
|
||||
: ts.getNormalizedAbsolutePath(relativeFileName, baseDirPath);
|
||||
// Some applications save a working file via rename operations
|
||||
if ((eventName === "change" || eventName === "rename") && fileWatcherCallbacks[fileName]) {
|
||||
for (const fileCallback of fileWatcherCallbacks[fileName]) {
|
||||
fileCallback(fileName);
|
||||
if ((eventName === "change" || eventName === "rename")) {
|
||||
const callbacks = fileWatcherCallbacks.get(fileName);
|
||||
if (callbacks) {
|
||||
for (const fileCallback of callbacks) {
|
||||
fileCallback(fileName);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
/// <reference path="visitor.ts" />
|
||||
/// <reference path="visitor.ts" />
|
||||
/// <reference path="transformers/ts.ts" />
|
||||
/// <reference path="transformers/jsx.ts" />
|
||||
/// <reference path="transformers/esnext.ts" />
|
||||
@ -13,14 +13,16 @@
|
||||
|
||||
/* @internal */
|
||||
namespace ts {
|
||||
const moduleTransformerMap = createMap<Transformer>({
|
||||
[ModuleKind.ES2015]: transformES2015Module,
|
||||
[ModuleKind.System]: transformSystemModule,
|
||||
[ModuleKind.AMD]: transformModule,
|
||||
[ModuleKind.CommonJS]: transformModule,
|
||||
[ModuleKind.UMD]: transformModule,
|
||||
[ModuleKind.None]: transformModule,
|
||||
});
|
||||
function getModuleTransformer(moduleKind: ModuleKind): Transformer {
|
||||
switch (moduleKind) {
|
||||
case ModuleKind.ES2015:
|
||||
return transformES2015Module;
|
||||
case ModuleKind.System:
|
||||
return transformSystemModule;
|
||||
default:
|
||||
return transformModule;
|
||||
}
|
||||
}
|
||||
|
||||
const enum SyntaxKindFeatureFlags {
|
||||
Substitution = 1 << 0,
|
||||
@ -56,7 +58,7 @@ namespace ts {
|
||||
transformers.push(transformGenerators);
|
||||
}
|
||||
|
||||
transformers.push(moduleTransformerMap[moduleKind] || moduleTransformerMap[ModuleKind.None]);
|
||||
transformers.push(getModuleTransformer(moduleKind));
|
||||
|
||||
// The ES5 transformer is last so that it can substitute expressions like `exports.default`
|
||||
// for ES3.
|
||||
@ -103,14 +105,16 @@ namespace ts {
|
||||
hoistFunctionDeclaration,
|
||||
requestEmitHelper,
|
||||
readEmitHelpers,
|
||||
onSubstituteNode: (_emitContext, node) => node,
|
||||
onSubstituteNode: (_, node) => node,
|
||||
enableSubstitution,
|
||||
isSubstitutionEnabled,
|
||||
onEmitNode: (node, emitContext, emitCallback) => emitCallback(node, emitContext),
|
||||
onEmitNode: (hint, node, callback) => callback(hint, node),
|
||||
enableEmitNotification,
|
||||
isEmitNotificationEnabled
|
||||
};
|
||||
|
||||
performance.mark("beforeTransform");
|
||||
|
||||
// Chain together and initialize each transformer.
|
||||
const transformation = chain(...transformers)(context);
|
||||
|
||||
@ -120,6 +124,9 @@ namespace ts {
|
||||
// Disable modification of the lexical environment.
|
||||
lexicalEnvironmentDisabled = true;
|
||||
|
||||
performance.mark("afterTransform");
|
||||
performance.measure("transformTime", "beforeTransform", "afterTransform");
|
||||
|
||||
return {
|
||||
transformed,
|
||||
emitNodeWithSubstitution,
|
||||
@ -157,21 +164,16 @@ namespace ts {
|
||||
/**
|
||||
* Emits a node with possible substitution.
|
||||
*
|
||||
* @param emitContext The current emit context.
|
||||
* @param hint A hint as to the intended usage of the node.
|
||||
* @param node The node to emit.
|
||||
* @param emitCallback The callback used to emit the node or its substitute.
|
||||
*/
|
||||
function emitNodeWithSubstitution(emitContext: EmitContext, node: Node, emitCallback: (emitContext: EmitContext, node: Node) => void) {
|
||||
function emitNodeWithSubstitution(hint: EmitHint, node: Node, emitCallback: (hint: EmitHint, node: Node) => void) {
|
||||
if (node) {
|
||||
if (isSubstitutionEnabled(node)) {
|
||||
const substitute = context.onSubstituteNode(emitContext, node);
|
||||
if (substitute && substitute !== node) {
|
||||
emitCallback(emitContext, substitute);
|
||||
return;
|
||||
}
|
||||
node = context.onSubstituteNode(hint, node) || node;
|
||||
}
|
||||
|
||||
emitCallback(emitContext, node);
|
||||
emitCallback(hint, node);
|
||||
}
|
||||
}
|
||||
|
||||
@ -194,17 +196,17 @@ namespace ts {
|
||||
/**
|
||||
* Emits a node with possible emit notification.
|
||||
*
|
||||
* @param emitContext The current emit context.
|
||||
* @param hint A hint as to the intended usage of the node.
|
||||
* @param node The node to emit.
|
||||
* @param emitCallback The callback used to emit the node.
|
||||
*/
|
||||
function emitNodeWithNotification(emitContext: EmitContext, node: Node, emitCallback: (emitContext: EmitContext, node: Node) => void) {
|
||||
function emitNodeWithNotification(hint: EmitHint, node: Node, emitCallback: (hint: EmitHint, node: Node) => void) {
|
||||
if (node) {
|
||||
if (isEmitNotificationEnabled(node)) {
|
||||
context.onEmitNode(emitContext, node, emitCallback);
|
||||
context.onEmitNode(hint, node, emitCallback);
|
||||
}
|
||||
else {
|
||||
emitCallback(emitContext, node);
|
||||
emitCallback(hint, node);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -587,7 +587,7 @@ namespace ts {
|
||||
if (isGeneratedIdentifier(node)) {
|
||||
return node;
|
||||
}
|
||||
if (node.text !== "arguments" && !resolver.isArgumentsLocalBinding(node)) {
|
||||
if (node.text !== "arguments" || !resolver.isArgumentsLocalBinding(node)) {
|
||||
return node;
|
||||
}
|
||||
return convertedLoopState.argumentsName || (convertedLoopState.argumentsName = createUniqueName("arguments"));
|
||||
@ -601,7 +601,7 @@ namespace ts {
|
||||
// - break/continue is non-labeled and located in non-converted loop/switch statement
|
||||
const jump = node.kind === SyntaxKind.BreakStatement ? Jump.Break : Jump.Continue;
|
||||
const canUseBreakOrContinue =
|
||||
(node.label && convertedLoopState.labels && convertedLoopState.labels[node.label.text]) ||
|
||||
(node.label && convertedLoopState.labels && convertedLoopState.labels.get(node.label.text)) ||
|
||||
(!node.label && (convertedLoopState.allowedNonLabeledJumps & jump));
|
||||
|
||||
if (!canUseBreakOrContinue) {
|
||||
@ -2144,11 +2144,11 @@ namespace ts {
|
||||
}
|
||||
|
||||
function recordLabel(node: LabeledStatement) {
|
||||
convertedLoopState.labels[node.label.text] = node.label.text;
|
||||
convertedLoopState.labels.set(node.label.text, node.label.text);
|
||||
}
|
||||
|
||||
function resetLabel(node: LabeledStatement) {
|
||||
convertedLoopState.labels[node.label.text] = undefined;
|
||||
convertedLoopState.labels.set(node.label.text, undefined);
|
||||
}
|
||||
|
||||
function visitLabeledStatement(node: LabeledStatement): VisitResult<Statement> {
|
||||
@ -2294,7 +2294,7 @@ namespace ts {
|
||||
addRange(statements, convertedLoopBodyStatements);
|
||||
}
|
||||
else {
|
||||
const statement = visitNode(node.statement, visitor, isStatement);
|
||||
const statement = visitNode(node.statement, visitor, isStatement, /*optional*/ false, liftToBlock);
|
||||
if (isBlock(statement)) {
|
||||
addRange(statements, statement.statements);
|
||||
bodyLocation = statement;
|
||||
@ -2903,13 +2903,13 @@ namespace ts {
|
||||
if (!state.labeledNonLocalBreaks) {
|
||||
state.labeledNonLocalBreaks = createMap<string>();
|
||||
}
|
||||
state.labeledNonLocalBreaks[labelText] = labelMarker;
|
||||
state.labeledNonLocalBreaks.set(labelText, labelMarker);
|
||||
}
|
||||
else {
|
||||
if (!state.labeledNonLocalContinues) {
|
||||
state.labeledNonLocalContinues = createMap<string>();
|
||||
}
|
||||
state.labeledNonLocalContinues[labelText] = labelMarker;
|
||||
state.labeledNonLocalContinues.set(labelText, labelMarker);
|
||||
}
|
||||
}
|
||||
|
||||
@ -2917,13 +2917,12 @@ namespace ts {
|
||||
if (!table) {
|
||||
return;
|
||||
}
|
||||
for (const labelText in table) {
|
||||
const labelMarker = table[labelText];
|
||||
table.forEach((labelMarker, labelText) => {
|
||||
const statements: Statement[] = [];
|
||||
// if there are no outer converted loop or outer label in question is located inside outer converted loop
|
||||
// then emit labeled break\continue
|
||||
// otherwise propagate pair 'label -> marker' to outer converted loop and emit 'return labelMarker' so outer loop can later decide what to do
|
||||
if (!outerLoop || (outerLoop.labels && outerLoop.labels[labelText])) {
|
||||
if (!outerLoop || (outerLoop.labels && outerLoop.labels.get(labelText))) {
|
||||
const label = createIdentifier(labelText);
|
||||
statements.push(isBreak ? createBreak(label) : createContinue(label));
|
||||
}
|
||||
@ -2932,7 +2931,7 @@ namespace ts {
|
||||
statements.push(createReturn(loopResultName));
|
||||
}
|
||||
caseClauses.push(createCaseClause(createLiteral(labelMarker), statements));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function processLoopVariableDeclaration(decl: VariableDeclaration | BindingElement, loopParameters: ParameterDeclaration[], loopOutParameters: LoopOutParameter[]) {
|
||||
@ -3556,9 +3555,11 @@ namespace ts {
|
||||
/**
|
||||
* Called by the printer just before a node is printed.
|
||||
*
|
||||
* @param hint A hint as to the intended usage of the node.
|
||||
* @param node The node to be printed.
|
||||
* @param emitCallback The callback used to emit the node.
|
||||
*/
|
||||
function onEmitNode(emitContext: EmitContext, node: Node, emitCallback: (emitContext: EmitContext, node: Node) => void) {
|
||||
function onEmitNode(hint: EmitHint, node: Node, emitCallback: (hint: EmitHint, node: Node) => void) {
|
||||
if (enabledSubstitutions & ES2015SubstitutionFlags.CapturedThis && isFunctionLike(node)) {
|
||||
// If we are tracking a captured `this`, keep track of the enclosing function.
|
||||
const ancestorFacts = enterSubtree(
|
||||
@ -3566,11 +3567,11 @@ namespace ts {
|
||||
getEmitFlags(node) & EmitFlags.CapturesThis
|
||||
? HierarchyFacts.FunctionIncludes | HierarchyFacts.CapturesThis
|
||||
: HierarchyFacts.FunctionIncludes);
|
||||
previousOnEmitNode(emitContext, node, emitCallback);
|
||||
previousOnEmitNode(hint, node, emitCallback);
|
||||
exitSubtree(ancestorFacts, HierarchyFacts.None, HierarchyFacts.None);
|
||||
return;
|
||||
}
|
||||
previousOnEmitNode(emitContext, node, emitCallback);
|
||||
previousOnEmitNode(hint, node, emitCallback);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -3605,13 +3606,13 @@ namespace ts {
|
||||
/**
|
||||
* Hooks node substitutions.
|
||||
*
|
||||
* @param emitContext The context for the emitter.
|
||||
* @param hint The context for the emitter.
|
||||
* @param node The node to substitute.
|
||||
*/
|
||||
function onSubstituteNode(emitContext: EmitContext, node: Node) {
|
||||
node = previousOnSubstituteNode(emitContext, node);
|
||||
function onSubstituteNode(hint: EmitHint, node: Node) {
|
||||
node = previousOnSubstituteNode(hint, node);
|
||||
|
||||
if (emitContext === EmitContext.Expression) {
|
||||
if (hint === EmitHint.Expression) {
|
||||
return substituteExpression(node);
|
||||
}
|
||||
|
||||
|
||||
@ -315,10 +315,11 @@ namespace ts {
|
||||
/**
|
||||
* Hook for node emit.
|
||||
*
|
||||
* @param hint A hint as to the intended usage of the node.
|
||||
* @param node The node to emit.
|
||||
* @param emit A callback used to emit the node in the printer.
|
||||
*/
|
||||
function onEmitNode(emitContext: EmitContext, node: Node, emitCallback: (emitContext: EmitContext, node: Node) => void): void {
|
||||
function onEmitNode(hint: EmitHint, node: Node, emitCallback: (hint: EmitHint, node: Node) => void): void {
|
||||
// If we need to support substitutions for `super` in an async method,
|
||||
// we should track it here.
|
||||
if (enabledSubstitutions & ES2017SubstitutionFlags.AsyncMethodsWithSuper && isSuperContainer(node)) {
|
||||
@ -326,24 +327,23 @@ namespace ts {
|
||||
if (superContainerFlags !== enclosingSuperContainerFlags) {
|
||||
const savedEnclosingSuperContainerFlags = enclosingSuperContainerFlags;
|
||||
enclosingSuperContainerFlags = superContainerFlags;
|
||||
previousOnEmitNode(emitContext, node, emitCallback);
|
||||
previousOnEmitNode(hint, node, emitCallback);
|
||||
enclosingSuperContainerFlags = savedEnclosingSuperContainerFlags;
|
||||
return;
|
||||
}
|
||||
}
|
||||
previousOnEmitNode(emitContext, node, emitCallback);
|
||||
previousOnEmitNode(hint, node, emitCallback);
|
||||
}
|
||||
|
||||
/**
|
||||
* Hooks node substitutions.
|
||||
*
|
||||
* @param hint A hint as to the intended usage of the node.
|
||||
* @param node The node to substitute.
|
||||
* @param isExpression A value indicating whether the node is to be used in an expression
|
||||
* position.
|
||||
*/
|
||||
function onSubstituteNode(emitContext: EmitContext, node: Node) {
|
||||
node = previousOnSubstituteNode(emitContext, node);
|
||||
if (emitContext === EmitContext.Expression && enclosingSuperContainerFlags) {
|
||||
function onSubstituteNode(hint: EmitHint, node: Node) {
|
||||
node = previousOnSubstituteNode(hint, node);
|
||||
if (hint === EmitHint.Expression && enclosingSuperContainerFlags) {
|
||||
return substituteExpression(<Expression>node);
|
||||
}
|
||||
|
||||
|
||||
@ -11,10 +11,10 @@ namespace ts {
|
||||
export function transformES5(context: TransformationContext) {
|
||||
const compilerOptions = context.getCompilerOptions();
|
||||
|
||||
// enable emit notification only if using --jsx preserve
|
||||
let previousOnEmitNode: (emitContext: EmitContext, node: Node, emitCallback: (emitContext: EmitContext, node: Node) => void) => void;
|
||||
// enable emit notification only if using --jsx preserve or react-native
|
||||
let previousOnEmitNode: (hint: EmitHint, node: Node, emitCallback: (hint: EmitHint, node: Node) => void) => void;
|
||||
let noSubstitution: boolean[];
|
||||
if (compilerOptions.jsx === JsxEmit.Preserve) {
|
||||
if (compilerOptions.jsx === JsxEmit.Preserve || compilerOptions.jsx === JsxEmit.ReactNative) {
|
||||
previousOnEmitNode = context.onEmitNode;
|
||||
context.onEmitNode = onEmitNode;
|
||||
context.enableEmitNotification(SyntaxKind.JsxOpeningElement);
|
||||
@ -41,9 +41,11 @@ namespace ts {
|
||||
/**
|
||||
* Called by the printer just before a node is printed.
|
||||
*
|
||||
* @param node The node to be printed.
|
||||
* @param hint A hint as to the intended usage of the node.
|
||||
* @param node The node to emit.
|
||||
* @param emitCallback A callback used to emit the node.
|
||||
*/
|
||||
function onEmitNode(emitContext: EmitContext, node: Node, emitCallback: (emitContext: EmitContext, node: Node) => void) {
|
||||
function onEmitNode(hint: EmitHint, node: Node, emitCallback: (emitContext: EmitHint, node: Node) => void) {
|
||||
switch (node.kind) {
|
||||
case SyntaxKind.JsxOpeningElement:
|
||||
case SyntaxKind.JsxClosingElement:
|
||||
@ -53,21 +55,21 @@ namespace ts {
|
||||
break;
|
||||
}
|
||||
|
||||
previousOnEmitNode(emitContext, node, emitCallback);
|
||||
previousOnEmitNode(hint, node, emitCallback);
|
||||
}
|
||||
|
||||
/**
|
||||
* Hooks node substitutions.
|
||||
*
|
||||
* @param emitContext The context for the emitter.
|
||||
* @param hint A hint as to the intended usage of the node.
|
||||
* @param node The node to substitute.
|
||||
*/
|
||||
function onSubstituteNode(emitContext: EmitContext, node: Node) {
|
||||
function onSubstituteNode(hint: EmitHint, node: Node) {
|
||||
if (node.id && noSubstitution && noSubstitution[node.id]) {
|
||||
return previousOnSubstituteNode(emitContext, node);
|
||||
return previousOnSubstituteNode(hint, node);
|
||||
}
|
||||
|
||||
node = previousOnSubstituteNode(emitContext, node);
|
||||
node = previousOnSubstituteNode(hint, node);
|
||||
if (isPropertyAccessExpression(node)) {
|
||||
return substitutePropertyAccessExpression(node);
|
||||
}
|
||||
@ -116,4 +118,4 @@ namespace ts {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -700,12 +700,13 @@ namespace ts {
|
||||
}
|
||||
|
||||
/**
|
||||
* Hook for node emit.
|
||||
* Called by the printer just before a node is printed.
|
||||
*
|
||||
* @param node The node to emit.
|
||||
* @param emit A callback used to emit the node in the printer.
|
||||
* @param hint A hint as to the intended usage of the node.
|
||||
* @param node The node to be printed.
|
||||
* @param emitCallback The callback used to emit the node.
|
||||
*/
|
||||
function onEmitNode(emitContext: EmitContext, node: Node, emitCallback: (emitContext: EmitContext, node: Node) => void): void {
|
||||
function onEmitNode(hint: EmitHint, node: Node, emitCallback: (hint: EmitHint, node: Node) => void) {
|
||||
// If we need to support substitutions for `super` in an async method,
|
||||
// we should track it here.
|
||||
if (enabledSubstitutions & ESNextSubstitutionFlags.AsyncMethodsWithSuper && isSuperContainer(node)) {
|
||||
@ -713,25 +714,24 @@ namespace ts {
|
||||
if (superContainerFlags !== enclosingSuperContainerFlags) {
|
||||
const savedEnclosingSuperContainerFlags = enclosingSuperContainerFlags;
|
||||
enclosingSuperContainerFlags = superContainerFlags;
|
||||
previousOnEmitNode(emitContext, node, emitCallback);
|
||||
previousOnEmitNode(hint, node, emitCallback);
|
||||
enclosingSuperContainerFlags = savedEnclosingSuperContainerFlags;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
previousOnEmitNode(emitContext, node, emitCallback);
|
||||
previousOnEmitNode(hint, node, emitCallback);
|
||||
}
|
||||
|
||||
/**
|
||||
* Hooks node substitutions.
|
||||
*
|
||||
* @param hint The context for the emitter.
|
||||
* @param node The node to substitute.
|
||||
* @param isExpression A value indicating whether the node is to be used in an expression
|
||||
* position.
|
||||
*/
|
||||
function onSubstituteNode(emitContext: EmitContext, node: Node) {
|
||||
node = previousOnSubstituteNode(emitContext, node);
|
||||
if (emitContext === EmitContext.Expression && enclosingSuperContainerFlags) {
|
||||
function onSubstituteNode(hint: EmitHint, node: Node) {
|
||||
node = previousOnSubstituteNode(hint, node);
|
||||
if (hint === EmitHint.Expression && enclosingSuperContainerFlags) {
|
||||
return substituteExpression(<Expression>node);
|
||||
}
|
||||
return node;
|
||||
@ -835,6 +835,11 @@ namespace ts {
|
||||
};
|
||||
|
||||
export function createAssignHelper(context: TransformationContext, attributesSegments: Expression[]) {
|
||||
if (context.getCompilerOptions().target >= ScriptTarget.ES2015) {
|
||||
return createCall(createPropertyAccess(createIdentifier("Object"), "assign"),
|
||||
/*typeArguments*/ undefined,
|
||||
attributesSegments);
|
||||
}
|
||||
context.requestEmitHelper(assignHelper);
|
||||
return createCall(
|
||||
getHelperName("__assign"),
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
/// <reference path="../factory.ts" />
|
||||
/// <reference path="../factory.ts" />
|
||||
/// <reference path="../visitor.ts" />
|
||||
|
||||
// Transforms generator functions into a compatible ES5 representation with similar runtime
|
||||
@ -217,13 +217,15 @@ namespace ts {
|
||||
Endfinally = 7,
|
||||
}
|
||||
|
||||
const instructionNames = createMap<string>({
[Instruction.Return]: "return",
[Instruction.Break]: "break",
[Instruction.Yield]: "yield",
[Instruction.YieldStar]: "yield*",
[Instruction.Endfinally]: "endfinally",
});
function getInstructionName(instruction: Instruction): string {
switch (instruction) {
case Instruction.Return: return "return";
case Instruction.Break: return "break";
case Instruction.Yield: return "yield";
case Instruction.YieldStar: return "yield*";
case Instruction.Endfinally: return "endfinally";
}
}
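For context, the new helper feeds createInstruction further down in this file, which tags the emitted numeric literal with a readable trailing comment. A minimal sketch, using only names that appear in the later hunk (trailingComment is an internal annotation on the literal):

const literal = createLiteral(Instruction.Yield);                 // numeric literal for the instruction code
literal.trailingComment = getInstructionName(Instruction.Yield);  // "yield"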
|
||||
export function transformGenerators(context: TransformationContext) {
|
||||
const {
|
||||
@ -241,7 +243,7 @@ namespace ts {
|
||||
|
||||
let currentSourceFile: SourceFile;
|
||||
let renamedCatchVariables: Map<boolean>;
|
||||
let renamedCatchVariableDeclarations: Map<Identifier>;
|
||||
let renamedCatchVariableDeclarations: Identifier[];
|
||||
|
||||
let inGeneratorFunctionBody: boolean;
|
||||
let inStatementContainingYield: boolean;
|
||||
@ -1911,9 +1913,9 @@ namespace ts {
|
||||
return -1;
|
||||
}
|
||||
|
||||
function onSubstituteNode(emitContext: EmitContext, node: Node): Node {
|
||||
node = previousOnSubstituteNode(emitContext, node);
|
||||
if (emitContext === EmitContext.Expression) {
|
||||
function onSubstituteNode(hint: EmitHint, node: Node): Node {
|
||||
node = previousOnSubstituteNode(hint, node);
|
||||
if (hint === EmitHint.Expression) {
|
||||
return substituteExpression(<Expression>node);
|
||||
}
|
||||
return node;
|
||||
@ -1927,12 +1929,12 @@ namespace ts {
|
||||
}
|
||||
|
||||
function substituteExpressionIdentifier(node: Identifier) {
|
||||
if (renamedCatchVariables && hasProperty(renamedCatchVariables, node.text)) {
|
||||
if (renamedCatchVariables && renamedCatchVariables.has(node.text)) {
|
||||
const original = getOriginalNode(node);
|
||||
if (isIdentifier(original) && original.parent) {
|
||||
const declaration = resolver.getReferencedValueDeclaration(original);
|
||||
if (declaration) {
|
||||
const name = getProperty(renamedCatchVariableDeclarations, String(getOriginalNodeId(declaration)));
|
||||
const name = renamedCatchVariableDeclarations[getOriginalNodeId(declaration)];
|
||||
if (name) {
|
||||
const clone = getMutableClone(name);
|
||||
setSourceMapRange(clone, node);
|
||||
@ -2098,11 +2100,11 @@ namespace ts {
|
||||
|
||||
if (!renamedCatchVariables) {
|
||||
renamedCatchVariables = createMap<boolean>();
|
||||
renamedCatchVariableDeclarations = createMap<Identifier>();
|
||||
renamedCatchVariableDeclarations = [];
|
||||
context.enableSubstitution(SyntaxKind.Identifier);
|
||||
}
|
||||
|
||||
renamedCatchVariables[text] = true;
|
||||
renamedCatchVariables.set(text, true);
|
||||
renamedCatchVariableDeclarations[getOriginalNodeId(variable)] = name;
|
||||
|
||||
const exception = <ExceptionBlock>peekBlock();
|
||||
@ -2407,7 +2409,7 @@ namespace ts {
|
||||
*/
|
||||
function createInstruction(instruction: Instruction): NumericLiteral {
|
||||
const literal = createLiteral(instruction);
|
||||
literal.trailingComment = instructionNames[instruction];
|
||||
literal.trailingComment = getInstructionName(instruction);
|
||||
return literal;
|
||||
}
|
||||
|
||||
|
||||
@ -233,7 +233,7 @@ namespace ts {
|
||||
return String.fromCharCode(parseInt(hex, 16));
|
||||
}
|
||||
else {
|
||||
const ch = entities[word];
|
||||
const ch = entities.get(word);
|
||||
// If this is not a valid entity, then just use `match` (replace it with itself, i.e. don't replace)
|
||||
return ch ? String.fromCharCode(ch) : match;
|
||||
}
|
||||
@ -281,7 +281,7 @@ namespace ts {
|
||||
}
|
||||
}
|
||||
|
||||
const entities = createMap<number>({
const entities = createMapFromTemplate<number>({
"quot": 0x0022,
"amp": 0x0026,
"apos": 0x0027,
|
||||
@ -71,18 +71,18 @@ namespace ts {
|
||||
/**
|
||||
* Hook for node emit.
|
||||
*
|
||||
* @param emitContext A context hint for the emitter.
|
||||
* @param hint A hint as to the intended usage of the node.
|
||||
* @param node The node to emit.
|
||||
* @param emit A callback used to emit the node in the printer.
|
||||
*/
|
||||
function onEmitNode(emitContext: EmitContext, node: Node, emitCallback: (emitContext: EmitContext, node: Node) => void): void {
|
||||
function onEmitNode(hint: EmitHint, node: Node, emitCallback: (hint: EmitHint, node: Node) => void): void {
|
||||
if (isSourceFile(node)) {
|
||||
currentSourceFile = node;
|
||||
previousOnEmitNode(emitContext, node, emitCallback);
|
||||
previousOnEmitNode(hint, node, emitCallback);
|
||||
currentSourceFile = undefined;
|
||||
}
|
||||
else {
|
||||
previousOnEmitNode(emitContext, node, emitCallback);
|
||||
previousOnEmitNode(hint, node, emitCallback);
|
||||
}
|
||||
}
|
||||
|
||||
@ -93,12 +93,12 @@ namespace ts {
|
||||
/**
|
||||
* Hooks node substitutions.
|
||||
*
|
||||
* @param emitContext A context hint for the emitter.
|
||||
* @param hint A hint as to the intended usage of the node.
|
||||
* @param node The node to substitute.
|
||||
*/
|
||||
function onSubstituteNode(emitContext: EmitContext, node: Node) {
|
||||
node = previousOnSubstituteNode(emitContext, node);
|
||||
if (isIdentifier(node) && emitContext === EmitContext.Expression) {
|
||||
function onSubstituteNode(hint: EmitHint, node: Node) {
|
||||
node = previousOnSubstituteNode(hint, node);
|
||||
if (isIdentifier(node) && hint === EmitHint.Expression) {
|
||||
return substituteExpressionIdentifier(node);
|
||||
}
|
||||
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
/// <reference path="../../factory.ts" />
|
||||
/// <reference path="../../factory.ts" />
|
||||
/// <reference path="../../visitor.ts" />
|
||||
/// <reference path="../destructuring.ts" />
|
||||
|
||||
@ -11,12 +11,13 @@ namespace ts {
|
||||
importAliasNames: ParameterDeclaration[];
|
||||
}
|
||||
|
||||
const transformModuleDelegates = createMap<(node: SourceFile) => SourceFile>({
[ModuleKind.None]: transformCommonJSModule,
[ModuleKind.CommonJS]: transformCommonJSModule,
[ModuleKind.AMD]: transformAMDModule,
[ModuleKind.UMD]: transformUMDModule,
});
function getTransformModuleDelegate(moduleKind: ModuleKind): (node: SourceFile) => SourceFile {
switch (moduleKind) {
case ModuleKind.AMD: return transformAMDModule;
case ModuleKind.UMD: return transformUMDModule;
default: return transformCommonJSModule;
}
}
|
||||
const {
|
||||
startLexicalEnvironment,
|
||||
@ -39,12 +40,12 @@ namespace ts {
|
||||
context.enableSubstitution(SyntaxKind.ShorthandPropertyAssignment); // Substitutes shorthand property assignments for imported/exported symbols.
|
||||
context.enableEmitNotification(SyntaxKind.SourceFile); // Restore state when substituting nodes in a file.
|
||||
|
||||
const moduleInfoMap = createMap<ExternalModuleInfo>(); // The ExternalModuleInfo for each file.
|
||||
const deferredExports = createMap<Statement[]>(); // Exports to defer until an EndOfDeclarationMarker is found.
|
||||
const moduleInfoMap: ExternalModuleInfo[] = []; // The ExternalModuleInfo for each file.
|
||||
const deferredExports: Statement[][] = []; // Exports to defer until an EndOfDeclarationMarker is found.
|
||||
|
||||
let currentSourceFile: SourceFile; // The current file.
|
||||
let currentModuleInfo: ExternalModuleInfo; // The ExternalModuleInfo for the current file.
|
||||
let noSubstitution: Map<boolean>; // Set of nodes for which substitution rules should be ignored.
|
||||
let noSubstitution: boolean[]; // Set of nodes for which substitution rules should be ignored.
|
||||
|
||||
return transformSourceFile;
|
||||
|
||||
@ -61,10 +62,11 @@ namespace ts {
|
||||
}
|
||||
|
||||
currentSourceFile = node;
|
||||
currentModuleInfo = moduleInfoMap[getOriginalNodeId(node)] = collectExternalModuleInfo(node, resolver, compilerOptions);
|
||||
currentModuleInfo = collectExternalModuleInfo(node, resolver, compilerOptions);
|
||||
moduleInfoMap[getOriginalNodeId(node)] = currentModuleInfo;
|
||||
|
||||
// Perform the transformation.
|
||||
const transformModule = transformModuleDelegates[moduleKind] || transformModuleDelegates[ModuleKind.None];
|
||||
const transformModule = getTransformModuleDelegate(moduleKind);
|
||||
const updated = transformModule(node);
|
||||
|
||||
currentSourceFile = undefined;
|
||||
@ -1086,7 +1088,7 @@ namespace ts {
|
||||
*/
|
||||
function appendExportsOfDeclaration(statements: Statement[] | undefined, decl: Declaration): Statement[] | undefined {
|
||||
const name = getDeclarationName(decl);
|
||||
const exportSpecifiers = currentModuleInfo.exportSpecifiers[name.text];
|
||||
const exportSpecifiers = currentModuleInfo.exportSpecifiers.get(name.text);
|
||||
if (exportSpecifiers) {
|
||||
for (const exportSpecifier of exportSpecifiers) {
|
||||
statements = appendExportStatement(statements, exportSpecifier.name, name, /*location*/ exportSpecifier.name);
|
||||
@ -1110,7 +1112,7 @@ namespace ts {
|
||||
function appendExportStatement(statements: Statement[] | undefined, exportName: Identifier, expression: Expression, location?: TextRange, allowComments?: boolean): Statement[] | undefined {
|
||||
if (exportName.text === "default") {
|
||||
const sourceFile = getOriginalNode(currentSourceFile, isSourceFile);
|
||||
if (sourceFile && !sourceFile.symbol.exports["___esModule"]) {
|
||||
if (sourceFile && !sourceFile.symbol.exports.get("___esModule")) {
|
||||
if (languageVersion === ScriptTarget.ES3) {
|
||||
statements = append(statements,
|
||||
createStatement(
|
||||
@ -1208,24 +1210,24 @@ namespace ts {
|
||||
/**
|
||||
* Hook for node emit notifications.
|
||||
*
|
||||
* @param emitContext A context hint for the emitter.
|
||||
* @param hint A hint as to the intended usage of the node.
|
||||
* @param node The node to emit.
|
||||
* @param emit A callback used to emit the node in the printer.
|
||||
*/
|
||||
function onEmitNode(emitContext: EmitContext, node: Node, emitCallback: (emitContext: EmitContext, node: Node) => void): void {
|
||||
function onEmitNode(hint: EmitHint, node: Node, emitCallback: (hint: EmitHint, node: Node) => void): void {
|
||||
if (node.kind === SyntaxKind.SourceFile) {
|
||||
currentSourceFile = <SourceFile>node;
|
||||
currentModuleInfo = moduleInfoMap[getOriginalNodeId(currentSourceFile)];
|
||||
noSubstitution = createMap<boolean>();
|
||||
noSubstitution = [];
|
||||
|
||||
previousOnEmitNode(emitContext, node, emitCallback);
|
||||
previousOnEmitNode(hint, node, emitCallback);
|
||||
|
||||
currentSourceFile = undefined;
|
||||
currentModuleInfo = undefined;
|
||||
noSubstitution = undefined;
|
||||
}
|
||||
else {
|
||||
previousOnEmitNode(emitContext, node, emitCallback);
|
||||
previousOnEmitNode(hint, node, emitCallback);
|
||||
}
|
||||
}
|
||||
|
||||
@ -1236,16 +1238,16 @@ namespace ts {
|
||||
/**
|
||||
* Hooks node substitutions.
|
||||
*
|
||||
* @param emitContext A context hint for the emitter.
|
||||
* @param hint A hint as to the intended usage of the node.
|
||||
* @param node The node to substitute.
|
||||
*/
|
||||
function onSubstituteNode(emitContext: EmitContext, node: Node) {
|
||||
node = previousOnSubstituteNode(emitContext, node);
|
||||
function onSubstituteNode(hint: EmitHint, node: Node) {
|
||||
node = previousOnSubstituteNode(hint, node);
|
||||
if (node.id && noSubstitution[node.id]) {
|
||||
return node;
|
||||
}
|
||||
|
||||
if (emitContext === EmitContext.Expression) {
|
||||
if (hint === EmitHint.Expression) {
|
||||
return substituteExpression(<Expression>node);
|
||||
}
|
||||
else if (isShorthandPropertyAssignment(node)) {
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
/// <reference path="../../factory.ts" />
|
||||
/// <reference path="../../factory.ts" />
|
||||
/// <reference path="../../visitor.ts" />
|
||||
/// <reference path="../destructuring.ts" />
|
||||
|
||||
@ -29,10 +29,10 @@ namespace ts {
|
||||
context.enableSubstitution(SyntaxKind.PostfixUnaryExpression); // Substitutes updates to exported symbols.
|
||||
context.enableEmitNotification(SyntaxKind.SourceFile); // Restore state when substituting nodes in a file.
|
||||
|
||||
const moduleInfoMap = createMap<ExternalModuleInfo>(); // The ExternalModuleInfo for each file.
|
||||
const deferredExports = createMap<Statement[]>(); // Exports to defer until an EndOfDeclarationMarker is found.
|
||||
const exportFunctionsMap = createMap<Identifier>(); // The export function associated with a source file.
|
||||
const noSubstitutionMap = createMap<Map<boolean>>(); // Set of nodes for which substitution rules should be ignored for each file.
|
||||
const moduleInfoMap: ExternalModuleInfo[] = []; // The ExternalModuleInfo for each file.
|
||||
const deferredExports: Statement[][] = []; // Exports to defer until an EndOfDeclarationMarker is found.
|
||||
const exportFunctionsMap: Identifier[] = []; // The export function associated with a source file.
|
||||
const noSubstitutionMap: boolean[][] = []; // Set of nodes for which substitution rules should be ignored for each file.
|
||||
|
||||
let currentSourceFile: SourceFile; // The current file.
|
||||
let moduleInfo: ExternalModuleInfo; // ExternalModuleInfo for the current file.
|
||||
@ -40,7 +40,7 @@ namespace ts {
|
||||
let contextObject: Identifier; // The context object for the current file.
|
||||
let hoistedStatements: Statement[];
|
||||
let enclosingBlockScopedContainer: Node;
|
||||
let noSubstitution: Map<boolean>; // Set of nodes for which substitution rules should be ignored.
|
||||
let noSubstitution: boolean[]; // Set of nodes for which substitution rules should be ignored.
|
||||
|
||||
return transformSourceFile;
|
||||
|
||||
@ -78,7 +78,8 @@ namespace ts {
|
||||
|
||||
// Make sure that the name of the 'exports' function does not conflict with
|
||||
// existing identifiers.
|
||||
exportFunction = exportFunctionsMap[id] = createUniqueName("exports");
|
||||
exportFunction = createUniqueName("exports");
|
||||
exportFunctionsMap[id] = exportFunction;
|
||||
contextObject = createUniqueName("context");
|
||||
|
||||
// Add the body of the module.
|
||||
@ -148,13 +149,13 @@ namespace ts {
|
||||
const externalImport = externalImports[i];
|
||||
const externalModuleName = getExternalModuleNameLiteral(externalImport, currentSourceFile, host, resolver, compilerOptions);
|
||||
const text = externalModuleName.text;
|
||||
if (hasProperty(groupIndices, text)) {
|
||||
const groupIndex = groupIndices.get(text);
|
||||
if (groupIndex !== undefined) {
|
||||
// deduplicate/group entries in dependency list by the dependency name
|
||||
const groupIndex = groupIndices[text];
|
||||
dependencyGroups[groupIndex].externalImports.push(externalImport);
|
||||
}
|
||||
else {
|
||||
groupIndices[text] = dependencyGroups.length;
|
||||
groupIndices.set(text, dependencyGroups.length);
|
||||
dependencyGroups.push({
|
||||
name: externalModuleName,
|
||||
externalImports: [externalImport]
|
||||
@ -306,7 +307,7 @@ namespace ts {
|
||||
// this set is used to filter names brought by star exports.
|
||||
|
||||
// local names set should only be added if we have anything exported
|
||||
if (!moduleInfo.exportedNames && isEmpty(moduleInfo.exportSpecifiers)) {
|
||||
if (!moduleInfo.exportedNames && moduleInfo.exportSpecifiers.size === 0) {
|
||||
// no exported declarations (export var ...) or export specifiers (export {x})
|
||||
// check if we have any non star export declarations.
|
||||
let hasExportDeclarationWithExportClause = false;
|
||||
@ -1082,7 +1083,7 @@ namespace ts {
|
||||
}
|
||||
|
||||
const name = getDeclarationName(decl);
|
||||
const exportSpecifiers = moduleInfo.exportSpecifiers[name.text];
|
||||
const exportSpecifiers = moduleInfo.exportSpecifiers.get(name.text);
|
||||
if (exportSpecifiers) {
|
||||
for (const exportSpecifier of exportSpecifiers) {
|
||||
if (exportSpecifier.name.text !== excludeName) {
|
||||
@ -1549,11 +1550,11 @@ namespace ts {
|
||||
/**
|
||||
* Hook for node emit notifications.
|
||||
*
|
||||
* @param emitContext A context hint for the emitter.
|
||||
* @param hint A hint as to the intended usage of the node.
|
||||
* @param node The node to emit.
|
||||
* @param emit A callback used to emit the node in the printer.
|
||||
* @param emitCallback A callback used to emit the node in the printer.
|
||||
*/
|
||||
function onEmitNode(emitContext: EmitContext, node: Node, emitCallback: (emitContext: EmitContext, node: Node) => void): void {
|
||||
function onEmitNode(hint: EmitHint, node: Node, emitCallback: (hint: EmitHint, node: Node) => void): void {
|
||||
if (node.kind === SyntaxKind.SourceFile) {
|
||||
const id = getOriginalNodeId(node);
|
||||
currentSourceFile = <SourceFile>node;
|
||||
@ -1565,7 +1566,7 @@ namespace ts {
|
||||
delete noSubstitutionMap[id];
|
||||
}
|
||||
|
||||
previousOnEmitNode(emitContext, node, emitCallback);
|
||||
previousOnEmitNode(hint, node, emitCallback);
|
||||
|
||||
currentSourceFile = undefined;
|
||||
moduleInfo = undefined;
|
||||
@ -1573,7 +1574,7 @@ namespace ts {
|
||||
noSubstitution = undefined;
|
||||
}
|
||||
else {
|
||||
previousOnEmitNode(emitContext, node, emitCallback);
|
||||
previousOnEmitNode(hint, node, emitCallback);
|
||||
}
|
||||
}
|
||||
|
||||
@ -1584,16 +1585,16 @@ namespace ts {
|
||||
/**
|
||||
* Hooks node substitutions.
|
||||
*
|
||||
* @param emitContext A context hint for the emitter.
|
||||
* @param hint A hint as to the intended usage of the node.
|
||||
* @param node The node to substitute.
|
||||
*/
|
||||
function onSubstituteNode(emitContext: EmitContext, node: Node) {
|
||||
node = previousOnSubstituteNode(emitContext, node);
|
||||
function onSubstituteNode(hint: EmitHint, node: Node) {
|
||||
node = previousOnSubstituteNode(hint, node);
|
||||
if (isSubstitutionPrevented(node)) {
|
||||
return node;
|
||||
}
|
||||
|
||||
if (emitContext === EmitContext.Expression) {
|
||||
if (hint === EmitHint.Expression) {
|
||||
return substituteExpression(<Expression>node);
|
||||
}
|
||||
|
||||
@ -1773,7 +1774,7 @@ namespace ts {
|
||||
* @param node The node which should not be substituted.
|
||||
*/
|
||||
function preventSubstitution<T extends Node>(node: T): T {
|
||||
if (noSubstitution === undefined) noSubstitution = createMap<boolean>();
|
||||
if (noSubstitution === undefined) noSubstitution = [];
|
||||
noSubstitution[getNodeId(node)] = true;
|
||||
return node;
|
||||
}
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
/// <reference path="../factory.ts" />
|
||||
/// <reference path="../factory.ts" />
|
||||
/// <reference path="../visitor.ts" />
|
||||
/// <reference path="./destructuring.ts" />
|
||||
|
||||
@ -60,7 +60,7 @@ namespace ts {
|
||||
* A map that keeps track of aliases created for classes with decorators to avoid issues
|
||||
* with the double-binding behavior of classes.
|
||||
*/
|
||||
let classAliases: Map<Identifier>;
|
||||
let classAliases: Identifier[];
|
||||
|
||||
/**
|
||||
* Keeps track of whether we are within any containing namespaces when performing
|
||||
@ -2547,8 +2547,8 @@ namespace ts {
|
||||
currentScopeFirstDeclarationsOfName = createMap<Node>();
|
||||
}
|
||||
|
||||
if (!(name in currentScopeFirstDeclarationsOfName)) {
|
||||
currentScopeFirstDeclarationsOfName[name] = node;
|
||||
if (!currentScopeFirstDeclarationsOfName.has(name)) {
|
||||
currentScopeFirstDeclarationsOfName.set(name, node);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -2561,7 +2561,7 @@ namespace ts {
|
||||
if (currentScopeFirstDeclarationsOfName) {
|
||||
const name = node.symbol && node.symbol.name;
|
||||
if (name) {
|
||||
return currentScopeFirstDeclarationsOfName[name] === node;
|
||||
return currentScopeFirstDeclarationsOfName.get(name) === node;
|
||||
}
|
||||
}
|
||||
|
||||
@ -3134,7 +3134,7 @@ namespace ts {
|
||||
context.enableSubstitution(SyntaxKind.Identifier);
|
||||
|
||||
// Keep track of class aliases.
|
||||
classAliases = createMap<Identifier>();
|
||||
classAliases = [];
|
||||
}
|
||||
}
|
||||
|
||||
@ -3163,11 +3163,11 @@ namespace ts {
|
||||
/**
|
||||
* Hook for node emit.
|
||||
*
|
||||
* @param emitContext A context hint for the emitter.
|
||||
* @param hint A hint as to the intended usage of the node.
|
||||
* @param node The node to emit.
|
||||
* @param emit A callback used to emit the node in the printer.
|
||||
*/
|
||||
function onEmitNode(emitContext: EmitContext, node: Node, emitCallback: (emitContext: EmitContext, node: Node) => void): void {
|
||||
function onEmitNode(hint: EmitHint, node: Node, emitCallback: (hint: EmitHint, node: Node) => void): void {
|
||||
const savedApplicableSubstitutions = applicableSubstitutions;
|
||||
const savedCurrentSourceFile = currentSourceFile;
|
||||
|
||||
@ -3183,7 +3183,7 @@ namespace ts {
|
||||
applicableSubstitutions |= TypeScriptSubstitutionFlags.NonQualifiedEnumMembers;
|
||||
}
|
||||
|
||||
previousOnEmitNode(emitContext, node, emitCallback);
|
||||
previousOnEmitNode(hint, node, emitCallback);
|
||||
|
||||
applicableSubstitutions = savedApplicableSubstitutions;
|
||||
currentSourceFile = savedCurrentSourceFile;
|
||||
@ -3192,12 +3192,12 @@ namespace ts {
|
||||
/**
|
||||
* Hooks node substitutions.
|
||||
*
|
||||
* @param emitContext A context hint for the emitter.
|
||||
* @param hint A hint as to the intended usage of the node.
|
||||
* @param node The node to substitute.
|
||||
*/
|
||||
function onSubstituteNode(emitContext: EmitContext, node: Node) {
|
||||
node = previousOnSubstituteNode(emitContext, node);
|
||||
if (emitContext === EmitContext.Expression) {
|
||||
function onSubstituteNode(hint: EmitHint, node: Node) {
|
||||
node = previousOnSubstituteNode(hint, node);
|
||||
if (hint === EmitHint.Expression) {
|
||||
return substituteExpression(<Expression>node);
|
||||
}
|
||||
else if (isShorthandPropertyAssignment(node)) {
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
/// <reference path="program.ts"/>
|
||||
/// <reference path="program.ts"/>
|
||||
/// <reference path="commandLineParser.ts"/>
|
||||
|
||||
namespace ts {
|
||||
@ -67,11 +67,13 @@ namespace ts {
|
||||
const gutterSeparator = " ";
|
||||
const resetEscapeSequence = "\u001b[0m";
|
||||
const ellipsis = "...";
|
||||
const categoryFormatMap = createMap<string>({
[DiagnosticCategory.Warning]: yellowForegroundEscapeSequence,
[DiagnosticCategory.Error]: redForegroundEscapeSequence,
[DiagnosticCategory.Message]: blueForegroundEscapeSequence,
});
function getCategoryFormat(category: DiagnosticCategory): string {
switch (category) {
case DiagnosticCategory.Warning: return yellowForegroundEscapeSequence;
case DiagnosticCategory.Error: return redForegroundEscapeSequence;
case DiagnosticCategory.Message: return blueForegroundEscapeSequence;
}
}
|
||||
function formatAndReset(text: string, formatStyle: string) {
|
||||
return formatStyle + text + resetEscapeSequence;
|
||||
@ -139,7 +141,7 @@ namespace ts {
|
||||
output += `${ relativeFileName }(${ firstLine + 1 },${ firstLineChar + 1 }): `;
|
||||
}
|
||||
|
||||
const categoryColor = categoryFormatMap[diagnostic.category];
|
||||
const categoryColor = getCategoryFormat(diagnostic.category);
|
||||
const category = DiagnosticCategory[diagnostic.category].toLowerCase();
|
||||
output += `${ formatAndReset(category, categoryColor) } TS${ diagnostic.code }: ${ flattenDiagnosticMessageText(diagnostic.messageText, sys.newLine) }`;
|
||||
output += sys.newLine + sys.newLine;
|
||||
@ -155,7 +157,7 @@ namespace ts {
|
||||
output += `${ diagnostic.file.fileName }(${ loc.line + 1 },${ loc.character + 1 }): `;
|
||||
}
|
||||
|
||||
output += `${ flattenDiagnosticMessageText(diagnostic.messageText, sys.newLine) }${ sys.newLine }`;
|
||||
output += `${ flattenDiagnosticMessageText(diagnostic.messageText, sys.newLine) }${ sys.newLine + sys.newLine + sys.newLine }`;
|
||||
|
||||
sys.write(output);
|
||||
}
|
||||
@ -378,9 +380,11 @@ namespace ts {
|
||||
}
|
||||
|
||||
function cachedFileExists(fileName: string): boolean {
|
||||
return fileName in cachedExistingFiles
|
||||
? cachedExistingFiles[fileName]
|
||||
: cachedExistingFiles[fileName] = hostFileExists(fileName);
|
||||
let fileExists = cachedExistingFiles.get(fileName);
|
||||
if (fileExists === undefined) {
|
||||
cachedExistingFiles.set(fileName, fileExists = hostFileExists(fileName));
|
||||
}
|
||||
return fileExists;
|
||||
}
|
||||
|
||||
function getSourceFile(fileName: string, languageVersion: ScriptTarget, onError?: (message: string) => void) {
|
||||
@ -674,13 +678,9 @@ namespace ts {
|
||||
|
||||
if (option.name === "lib") {
|
||||
description = getDiagnosticText(option.description);
|
||||
const options: string[] = [];
|
||||
const element = (<CommandLineOptionOfListType>option).element;
|
||||
const typeMap = <Map<number | string>>element.type;
|
||||
for (const key in typeMap) {
|
||||
options.push(`'${key}'`);
|
||||
}
|
||||
optionsDescriptionMap[description] = options;
|
||||
optionsDescriptionMap.set(description, arrayFrom(typeMap.keys()).map(key => `'${key}'`));
|
||||
}
|
||||
else {
|
||||
description = getDiagnosticText(option.description);
|
||||
@ -702,7 +702,7 @@ namespace ts {
|
||||
for (let i = 0; i < usageColumn.length; i++) {
|
||||
const usage = usageColumn[i];
|
||||
const description = descriptionColumn[i];
|
||||
const kindsList = optionsDescriptionMap[description];
|
||||
const kindsList = optionsDescriptionMap.get(description);
|
||||
output.push(usage + makePadding(marginLength - usage.length + 2) + description + sys.newLine);
|
||||
|
||||
if (kindsList) {
|
||||
|
||||
@ -3,8 +3,7 @@
|
||||
"compilerOptions": {
|
||||
"removeComments": true,
|
||||
"outFile": "../../built/local/tsc.js",
|
||||
"declaration": true,
|
||||
"types": [ ]
|
||||
"declaration": true
|
||||
},
|
||||
"files": [
|
||||
"core.ts",
|
||||
|
||||
@ -1,11 +1,30 @@
|
||||
namespace ts {

namespace ts {
/**
* Type of objects whose values are all of the same type.
* The `in` and `for-in` operators can *not* be safely used,
* since `Object.prototype` may be modified by outside code.
*/
export interface MapLike<T> {
[index: string]: T;
}

export interface Map<T> extends MapLike<T> {
__mapBrand: any;
/** ES6 Map interface. */
export interface Map<T> {
get(key: string): T;
has(key: string): boolean;
set(key: string, value: T): this;
delete(key: string): boolean;
clear(): void;
forEach(action: (value: T, key: string) => void): void;
readonly size: number;
keys(): Iterator<string>;
values(): Iterator<T>;
entries(): Iterator<[string, T]>;
}

/** ES6 Iterator type. */
export interface Iterator<T> {
next(): { value: T, done: false } | { value: never, done: true };
}

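The call-site hunks elsewhere in this commit follow directly from this interface change: bracket indexing on the old MapLike-based Map gives way to the method-based API. A minimal sketch of the migration pattern, assuming createMap<T>() now returns an instance of the Map<T> interface above; fileName, diagnostics, and the other local names here are illustrative, while createMap, Diagnostic, and sortAndDeduplicateDiagnostics come from the diagnostics hunks in this diff:

const fileName = "example.ts";
const diagnostics: Diagnostic[] = [];
const fileDiagnostics = createMap<Diagnostic[]>();
// before this change: fileDiagnostics[fileName] = diagnostics; const existing = fileDiagnostics[fileName] || [];
// after this change, keyed access goes through the Map methods instead of object indexing:
fileDiagnostics.set(fileName, diagnostics);
const existing = fileDiagnostics.get(fileName) || [];
if (!fileDiagnostics.has(fileName)) fileDiagnostics.set(fileName, []);
fileDiagnostics.forEach((diags, key) => fileDiagnostics.set(key, sortAndDeduplicateDiagnostics(diags)));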
// branded string type used to store absolute, normalized and canonicalized paths
|
||||
@ -329,6 +348,7 @@ namespace ts {
|
||||
EnumMember,
|
||||
// Top-level nodes
|
||||
SourceFile,
|
||||
Bundle,
|
||||
|
||||
// JSDoc nodes
|
||||
JSDocTypeExpression,
|
||||
@ -402,7 +422,7 @@ namespace ts {
|
||||
LastBinaryOperator = CaretEqualsToken,
|
||||
FirstNode = QualifiedName,
|
||||
FirstJSDocNode = JSDocTypeExpression,
|
||||
LastJSDocNode = JSDocLiteralType,
|
||||
LastJSDocNode = JSDocNeverKeyword,
|
||||
FirstJSDocTagNode = JSDocComment,
|
||||
LastJSDocTagNode = JSDocNeverKeyword
|
||||
}
|
||||
@ -2190,6 +2210,11 @@ namespace ts {
|
||||
/* @internal */ ambientModuleNames: string[];
|
||||
}
|
||||
|
||||
export interface Bundle extends Node {
|
||||
kind: SyntaxKind.Bundle;
|
||||
sourceFiles: SourceFile[];
|
||||
}
|
||||
|
||||
export interface ScriptReferenceHost {
|
||||
getCompilerOptions(): CompilerOptions;
|
||||
getSourceFile(fileName: string): SourceFile;
|
||||
@ -2347,8 +2372,13 @@ namespace ts {
|
||||
getIndexInfoOfType(type: Type, kind: IndexKind): IndexInfo;
|
||||
getSignaturesOfType(type: Type, kind: SignatureKind): Signature[];
|
||||
getIndexTypeOfType(type: Type, kind: IndexKind): Type;
|
||||
getBaseTypes(type: InterfaceType): ObjectType[];
|
||||
getBaseTypes(type: InterfaceType): BaseType[];
|
||||
getReturnTypeOfSignature(signature: Signature): Type;
|
||||
/**
|
||||
* Gets the type of a parameter at a given position in a signature.
|
||||
* Returns `any` if the index is not valid.
|
||||
*/
|
||||
/* @internal */ getParameterType(signature: Signature, parameterIndex: number): Type;
|
||||
getNonNullableType(type: Type): Type;
|
||||
|
||||
getSymbolsInScope(location: Node, meaning: SymbolFlags): Symbol[];
|
||||
@ -2696,6 +2726,7 @@ namespace ts {
|
||||
containingType?: UnionOrIntersectionType; // Containing union or intersection type for synthetic property
|
||||
leftSpread?: Symbol; // Left source for synthetic spread property
|
||||
rightSpread?: Symbol; // Right source for synthetic spread property
|
||||
mappedTypeOrigin?: Symbol; // For a property on a mapped type, points back to the original 'T' from 'keyof T'.
|
||||
hasNonUniformType?: boolean; // True if constituents have non-uniform types
|
||||
isPartial?: boolean; // True if synthetic property of union type occurs in some but not all constituents
|
||||
isDiscriminantProperty?: boolean; // True if discriminant synthetic property
|
||||
@ -2857,7 +2888,7 @@ namespace ts {
|
||||
|
||||
// Enum types (TypeFlags.Enum)
|
||||
export interface EnumType extends Type {
|
||||
memberTypes: Map<EnumLiteralType>;
|
||||
memberTypes: EnumLiteralType[];
|
||||
}
|
||||
|
||||
// Enum types (TypeFlags.EnumLiteral)
|
||||
@ -2894,9 +2925,12 @@ namespace ts {
|
||||
/* @internal */
|
||||
resolvedBaseConstructorType?: Type; // Resolved base constructor type of class
|
||||
/* @internal */
|
||||
resolvedBaseTypes: ObjectType[]; // Resolved base types
|
||||
resolvedBaseTypes: BaseType[]; // Resolved base types
|
||||
}
|
||||
|
||||
// Object type or intersection of object types
|
||||
export type BaseType = ObjectType | IntersectionType;
|
||||
|
||||
export interface InterfaceTypeWithDeclaredMembers extends InterfaceType {
|
||||
declaredProperties: Symbol[]; // Declared members
|
||||
declaredCallSignatures: Signature[]; // Declared call signatures
|
||||
@ -2929,7 +2963,9 @@ namespace ts {
|
||||
export interface UnionOrIntersectionType extends Type {
|
||||
types: Type[]; // Constituent types
|
||||
/* @internal */
|
||||
resolvedProperties: SymbolTable; // Cache of resolved properties
|
||||
propertyCache: SymbolTable; // Cache of resolved properties
|
||||
/* @internal */
|
||||
resolvedProperties: Symbol[];
|
||||
/* @internal */
|
||||
resolvedIndexType: IndexType;
|
||||
/* @internal */
|
||||
@ -2940,7 +2976,10 @@ namespace ts {
|
||||
|
||||
export interface UnionType extends UnionOrIntersectionType { }
|
||||
|
||||
export interface IntersectionType extends UnionOrIntersectionType { }
|
||||
export interface IntersectionType extends UnionOrIntersectionType {
|
||||
/* @internal */
|
||||
resolvedApparentType: Type;
|
||||
}
|
||||
|
||||
export type StructuredType = ObjectType | UnionType | IntersectionType;
|
||||
|
||||
@ -3284,7 +3323,8 @@ namespace ts {
|
||||
export const enum JsxEmit {
|
||||
None = 0,
|
||||
Preserve = 1,
|
||||
React = 2
|
||||
React = 2,
|
||||
ReactNative = 3
|
||||
}
|
||||
|
||||
export const enum NewLineKind {
|
||||
@ -3702,7 +3742,7 @@ namespace ts {
|
||||
flags?: EmitFlags; // Flags that customize emit
|
||||
commentRange?: TextRange; // The text range to use when emitting leading or trailing comments
|
||||
sourceMapRange?: TextRange; // The text range to use when emitting leading or trailing source mappings
|
||||
tokenSourceMapRanges?: Map<TextRange>; // The text range to use when emitting source mappings for tokens
|
||||
tokenSourceMapRanges?: TextRange[]; // The text range to use when emitting source mappings for tokens
|
||||
constantValue?: number; // The constant value of an expression
|
||||
externalHelpersModuleName?: Identifier; // The local name for an imported helpers module
|
||||
helpers?: EmitHelper[]; // Emit helpers for the node
|
||||
@ -3779,8 +3819,7 @@ namespace ts {
|
||||
LastEmitHelper = AsyncValues
|
||||
}
|
||||
|
||||
/* @internal */
|
||||
export const enum EmitContext {
|
||||
export const enum EmitHint {
|
||||
SourceFile, // Emitting a SourceFile
|
||||
Expression, // Emitting an Expression
|
||||
IdentifierName, // Emitting an IdentifierName
|
||||
@ -3855,7 +3894,7 @@ namespace ts {
|
||||
* Hook used by transformers to substitute expressions just before they
|
||||
* are emitted by the pretty printer.
|
||||
*/
|
||||
onSubstituteNode?: (emitContext: EmitContext, node: Node) => Node;
|
||||
onSubstituteNode?: (hint: EmitHint, node: Node) => Node;
|
||||
|
||||
/**
|
||||
* Enables before/after emit notifications in the pretty printer for the provided
|
||||
@ -3873,7 +3912,7 @@ namespace ts {
|
||||
* Hook used to allow transformers to capture state before or after
|
||||
* the printer emits a node.
|
||||
*/
|
||||
onEmitNode?: (emitContext: EmitContext, node: Node, emitCallback: (emitContext: EmitContext, node: Node) => void) => void;
|
||||
onEmitNode?: (hint: EmitHint, node: Node, emitCallback: (hint: EmitHint, node: Node) => void) => void;
|
||||
}
|
||||
|
||||
/* @internal */
|
||||
@ -3886,25 +3925,132 @@ namespace ts {
|
||||
/**
|
||||
* Emits the substitute for a node, if one is available; otherwise, emits the node.
|
||||
*
|
||||
* @param emitContext The current emit context.
|
||||
* @param hint A hint as to the intended usage of the node.
|
||||
* @param node The node to substitute.
|
||||
* @param emitCallback A callback used to emit the node or its substitute.
|
||||
*/
|
||||
emitNodeWithSubstitution(emitContext: EmitContext, node: Node, emitCallback: (emitContext: EmitContext, node: Node) => void): void;
|
||||
emitNodeWithSubstitution(hint: EmitHint, node: Node, emitCallback: (hint: EmitHint, node: Node) => void): void;
|
||||
|
||||
/**
|
||||
* Emits a node with possible notification.
|
||||
*
|
||||
* @param emitContext The current emit context.
|
||||
* @param hint A hint as to the intended usage of the node.
|
||||
* @param node The node to emit.
|
||||
* @param emitCallback A callback used to emit the node.
|
||||
*/
|
||||
emitNodeWithNotification(emitContext: EmitContext, node: Node, emitCallback: (emitContext: EmitContext, node: Node) => void): void;
|
||||
emitNodeWithNotification(hint: EmitHint, node: Node, emitCallback: (hint: EmitHint, node: Node) => void): void;
|
||||
}
|
||||
|
||||
/* @internal */
|
||||
export type Transformer = (context: TransformationContext) => (node: SourceFile) => SourceFile;
|
||||
|
||||
export interface Printer {
/**
* Print a node and its subtree as-is, without any emit transformations.
* @param hint A value indicating the purpose of a node. This is primarily used to
* distinguish between an `Identifier` used in an expression position, versus an
* `Identifier` used as an `IdentifierName` as part of a declaration. For most nodes you
* should just pass `Unspecified`.
* @param node The node to print. The node and its subtree are printed as-is, without any
* emit transformations.
* @param sourceFile A source file that provides context for the node. The source text of
* the file is used to emit the original source content for literals and identifiers, while
* the identifiers of the source file are used when generating unique names to avoid
* collisions.
*/
printNode(hint: EmitHint, node: Node, sourceFile: SourceFile): string;
/**
* Prints a source file as-is, without any emit transformations.
*/
printFile(sourceFile: SourceFile): string;
/**
* Prints a bundle of source files as-is, without any emit transformations.
*/
printBundle(bundle: Bundle): string;
/*@internal*/ writeNode(hint: EmitHint, node: Node, sourceFile: SourceFile, writer: EmitTextWriter): void;
/*@internal*/ writeFile(sourceFile: SourceFile, writer: EmitTextWriter): void;
/*@internal*/ writeBundle(bundle: Bundle, writer: EmitTextWriter): void;
}
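A rough usage sketch of the Printer surface above. The createPrinter factory, EmitHint.Unspecified, createSourceFile, and ScriptTarget are assumed from the surrounding API (createPrinter and Unspecified are referenced in the printNode documentation and the @example blocks below) rather than defined in this hunk:

const sourceFile = createSourceFile("example.ts", "const answer = 42;", ScriptTarget.ES2015);
const printer = createPrinter({ removeComments: false }, /*handlers*/ {});
// Print a single statement; Unspecified lets the printer pick the emit path itself.
const statementText = printer.printNode(EmitHint.Unspecified, sourceFile.statements[0], sourceFile);
// Or print the whole file without transformations.
const fileText = printer.printFile(sourceFile);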
|
||||
export interface PrintHandlers {
|
||||
/**
|
||||
* A hook used by the Printer when generating unique names to avoid collisions with
|
||||
* globally defined names that exist outside of the current source file.
|
||||
*/
|
||||
hasGlobalName?(name: string): boolean;
|
||||
/**
|
||||
* A hook used by the Printer to provide notifications prior to emitting a node. A
|
||||
* compatible implementation **must** invoke `emitCallback` with the provided `hint` and
|
||||
* `node` values.
|
||||
* @param hint A hint indicating the intended purpose of the node.
|
||||
* @param node The node to emit.
|
||||
* @param emitCallback A callback that, when invoked, will emit the node.
|
||||
* @example
|
||||
* ```ts
|
||||
* var printer = createPrinter(printerOptions, {
|
||||
* onEmitNode(hint, node, emitCallback) {
|
||||
* // set up or track state prior to emitting the node...
|
||||
* emitCallback(hint, node);
|
||||
* // restore state after emitting the node...
|
||||
* }
|
||||
* });
|
||||
* ```
|
||||
*/
|
||||
onEmitNode?(hint: EmitHint, node: Node, emitCallback: (hint: EmitHint, node: Node) => void): void;
|
||||
/**
|
||||
* A hook used by the Printer to perform just-in-time substitution of a node. This is
|
||||
* primarily used by node transformations that need to substitute one node for another,
|
||||
* such as replacing `myExportedVar` with `exports.myExportedVar`. A compatible
|
||||
* implementation **must** invoke `emitCallback` with the provided `hint` and either
|
||||
* the provided `node`, or its substitute.
|
||||
* @param hint A hint indicating the intended purpose of the node.
|
||||
* @param node The node to emit.
|
||||
* @param emitCallback A callback that, when invoked, will emit the node.
|
||||
* @example
|
||||
* ```ts
|
||||
* var printer = createPrinter(printerOptions, {
|
||||
* onSubstituteNode(hint, node, emitCallback) {
|
||||
* // perform substitution if necessary...
|
||||
* emitCallback(hint, node);
|
||||
* }
|
||||
* });
|
||||
* ```
|
||||
*/
|
||||
onSubstituteNode?(hint: EmitHint, node: Node, emitCallback: (hint: EmitHint, node: Node) => void): void;
|
||||
/*@internal*/ onEmitSourceMapOfNode?: (hint: EmitHint, node: Node, emitCallback: (hint: EmitHint, node: Node) => void) => void;
|
||||
/*@internal*/ onEmitSourceMapOfToken?: (node: Node, token: SyntaxKind, pos: number, emitCallback: (token: SyntaxKind, pos: number) => number) => number;
|
||||
/*@internal*/ onEmitSourceMapOfPosition?: (pos: number) => void;
|
||||
/*@internal*/ onEmitHelpers?: (node: Node, writeLines: (text: string) => void) => void;
|
||||
/*@internal*/ onSetSourceFile?: (node: SourceFile) => void;
|
||||
}
|
||||
|
||||
export interface PrinterOptions {
|
||||
target?: ScriptTarget;
|
||||
removeComments?: boolean;
|
||||
newLine?: NewLineKind;
|
||||
/*@internal*/ sourceMap?: boolean;
|
||||
/*@internal*/ inlineSourceMap?: boolean;
|
||||
/*@internal*/ extendedDiagnostics?: boolean;
|
||||
}
|
||||
|
||||
/*@internal*/
|
||||
export interface EmitTextWriter {
|
||||
write(s: string): void;
|
||||
writeTextOfNode(text: string, node: Node): void;
|
||||
writeLine(): void;
|
||||
increaseIndent(): void;
|
||||
decreaseIndent(): void;
|
||||
getText(): string;
|
||||
rawWrite(s: string): void;
|
||||
writeLiteral(s: string): void;
|
||||
getTextPos(): number;
|
||||
getLine(): number;
|
||||
getColumn(): number;
|
||||
getIndent(): number;
|
||||
isAtStartOfLine(): boolean;
|
||||
reset(): void;
|
||||
}
|
||||
|
||||
export interface TextSpan {
|
||||
start: number;
|
||||
length: number;
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
/// <reference path="sys.ts" />
|
||||
/// <reference path="sys.ts" />
|
||||
|
||||
/* @internal */
|
||||
namespace ts {
|
||||
@ -70,11 +70,11 @@ namespace ts {
|
||||
}
|
||||
|
||||
export function hasResolvedModule(sourceFile: SourceFile, moduleNameText: string): boolean {
|
||||
return !!(sourceFile && sourceFile.resolvedModules && sourceFile.resolvedModules[moduleNameText]);
|
||||
return !!(sourceFile && sourceFile.resolvedModules && sourceFile.resolvedModules.get(moduleNameText));
|
||||
}
|
||||
|
||||
export function getResolvedModule(sourceFile: SourceFile, moduleNameText: string): ResolvedModuleFull {
|
||||
return hasResolvedModule(sourceFile, moduleNameText) ? sourceFile.resolvedModules[moduleNameText] : undefined;
|
||||
return hasResolvedModule(sourceFile, moduleNameText) ? sourceFile.resolvedModules.get(moduleNameText) : undefined;
|
||||
}
|
||||
|
||||
export function setResolvedModule(sourceFile: SourceFile, moduleNameText: string, resolvedModule: ResolvedModuleFull): void {
|
||||
@ -82,7 +82,7 @@ namespace ts {
|
||||
sourceFile.resolvedModules = createMap<ResolvedModuleFull>();
|
||||
}
|
||||
|
||||
sourceFile.resolvedModules[moduleNameText] = resolvedModule;
|
||||
sourceFile.resolvedModules.set(moduleNameText, resolvedModule);
|
||||
}
|
||||
|
||||
export function setResolvedTypeReferenceDirective(sourceFile: SourceFile, typeReferenceDirectiveName: string, resolvedTypeReferenceDirective: ResolvedTypeReferenceDirective): void {
|
||||
@ -90,7 +90,7 @@ namespace ts {
|
||||
sourceFile.resolvedTypeReferenceDirectiveNames = createMap<ResolvedTypeReferenceDirective>();
|
||||
}
|
||||
|
||||
sourceFile.resolvedTypeReferenceDirectiveNames[typeReferenceDirectiveName] = resolvedTypeReferenceDirective;
|
||||
sourceFile.resolvedTypeReferenceDirectiveNames.set(typeReferenceDirectiveName, resolvedTypeReferenceDirective);
|
||||
}
|
||||
|
||||
/* @internal */
|
||||
@ -112,7 +112,7 @@ namespace ts {
|
||||
}
|
||||
for (let i = 0; i < names.length; i++) {
|
||||
const newResolution = newResolutions[i];
|
||||
const oldResolution = oldResolutions && oldResolutions[names[i]];
|
||||
const oldResolution = oldResolutions && oldResolutions.get(names[i]);
|
||||
const changed =
|
||||
oldResolution
|
||||
? !newResolution || !comparer(oldResolution, newResolution)
|
||||
@ -341,16 +341,52 @@ namespace ts {
|
||||
}
|
||||
|
||||
export function isBinaryOrOctalIntegerLiteral(node: LiteralLikeNode, text: string) {
if (node.kind === SyntaxKind.NumericLiteral && text.length > 1) {
return node.kind === SyntaxKind.NumericLiteral
&& (getNumericLiteralFlags(text, /*hint*/ NumericLiteralFlags.BinaryOrOctal) & NumericLiteralFlags.BinaryOrOctal) !== 0;
}

export const enum NumericLiteralFlags {
None = 0,
Hexadecimal = 1 << 0,
Binary = 1 << 1,
Octal = 1 << 2,
Scientific = 1 << 3,

BinaryOrOctal = Binary | Octal,
BinaryOrOctalOrHexadecimal = BinaryOrOctal | Hexadecimal,
All = Hexadecimal | Binary | Octal | Scientific,
}

/**
* Scans a numeric literal string to determine the form of the number.
* @param text Numeric literal text
* @param hint If `Scientific` or `All` is specified, performs a more expensive check to scan for scientific notation.
*/
export function getNumericLiteralFlags(text: string, hint?: NumericLiteralFlags) {
if (text.length > 1) {
switch (text.charCodeAt(1)) {
case CharacterCodes.b:
case CharacterCodes.B:
return NumericLiteralFlags.Binary;
case CharacterCodes.o:
case CharacterCodes.O:
return true;
return NumericLiteralFlags.Octal;
case CharacterCodes.x:
case CharacterCodes.X:
return NumericLiteralFlags.Hexadecimal;
}

if (hint & NumericLiteralFlags.Scientific) {
for (let i = text.length - 1; i >= 0; i--) {
switch (text.charCodeAt(i)) {
case CharacterCodes.e:
case CharacterCodes.E:
return NumericLiteralFlags.Scientific;
}
}
}
}
return false;
return NumericLiteralFlags.None;
}
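For reference, a small sketch of how the new flags-based helper is meant to be queried, mirroring the isBinaryOrOctalIntegerLiteral rewrite above; the literal strings are illustrative inputs only:

const binaryFlags = getNumericLiteralFlags("0b1010", /*hint*/ NumericLiteralFlags.BinaryOrOctal);
const isBinaryOrOctal = (binaryFlags & NumericLiteralFlags.BinaryOrOctal) !== 0;   // true: "0b..." scans as Binary
const sciFlags = getNumericLiteralFlags("1e10", /*hint*/ NumericLiteralFlags.All);
const isScientific = (sciFlags & NumericLiteralFlags.Scientific) !== 0;            // true: the hint enables the trailing "e" scan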
|
||||
function getQuotedEscapedLiteralText(leftQuote: string, text: string, rightQuote: string) {
|
||||
@ -395,7 +431,7 @@ namespace ts {
|
||||
|
||||
function isShorthandAmbientModule(node: Node): boolean {
|
||||
// The only kind of module that can be missing a body is a shorthand ambient module.
|
||||
return node.kind === SyntaxKind.ModuleDeclaration && (!(<ModuleDeclaration>node).body);
|
||||
return node && node.kind === SyntaxKind.ModuleDeclaration && (!(<ModuleDeclaration>node).body);
|
||||
}
|
||||
|
||||
export function isBlockScopedContainerTopLevel(node: Node): boolean {
|
||||
@ -1662,11 +1698,15 @@ namespace ts {
|
||||
node = parent;
|
||||
break;
|
||||
case SyntaxKind.ShorthandPropertyAssignment:
|
||||
if ((<ShorthandPropertyAssignment>parent).name !== node) {
|
||||
if ((parent as ShorthandPropertyAssignment).name !== node) {
|
||||
return AssignmentKind.None;
|
||||
}
|
||||
// Fall through
|
||||
node = parent.parent;
|
||||
break;
|
||||
case SyntaxKind.PropertyAssignment:
|
||||
if ((parent as ShorthandPropertyAssignment).name === node) {
|
||||
return AssignmentKind.None;
|
||||
}
|
||||
node = parent.parent;
|
||||
break;
|
||||
default:
|
||||
@ -1678,7 +1718,8 @@ namespace ts {
|
||||
|
||||
// A node is an assignment target if it is on the left hand side of an '=' token, if it is parented by a property
|
||||
// assignment in an object literal that is an assignment target, or if it is parented by an array literal that is
|
||||
// an assignment target. Examples include 'a = xxx', '{ p: a } = xxx', '[{ p: a}] = xxx'.
|
||||
// an assignment target. Examples include 'a = xxx', '{ p: a } = xxx', '[{ a }] = xxx'.
|
||||
// (Note that `p` is not a target in the above examples, only `a`.)
|
||||
export function isAssignmentTarget(node: Node): boolean {
|
||||
return getAssignmentTargetKind(node) !== AssignmentKind.None;
|
||||
}
|
||||
@ -2095,16 +2136,19 @@ namespace ts {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
export function getOriginalSourceFiles(sourceFiles: SourceFile[]) {
|
||||
const originalSourceFiles: SourceFile[] = [];
|
||||
for (const sourceFile of sourceFiles) {
|
||||
const originalSourceFile = getParseTreeNode(sourceFile, isSourceFile);
|
||||
if (originalSourceFile) {
|
||||
originalSourceFiles.push(originalSourceFile);
|
||||
}
|
||||
export function getOriginalSourceFileOrBundle(sourceFileOrBundle: SourceFile | Bundle) {
|
||||
if (sourceFileOrBundle.kind === SyntaxKind.Bundle) {
|
||||
return updateBundle(sourceFileOrBundle, sameMap(sourceFileOrBundle.sourceFiles, getOriginalSourceFile));
|
||||
}
|
||||
return getOriginalSourceFile(sourceFileOrBundle);
|
||||
}
|
||||
|
||||
return originalSourceFiles;
|
||||
function getOriginalSourceFile(sourceFile: SourceFile) {
|
||||
return getParseTreeNode(sourceFile, isSourceFile) || sourceFile;
|
||||
}
|
||||
|
||||
export function getOriginalSourceFiles(sourceFiles: SourceFile[]) {
|
||||
return sameMap(sourceFiles, getOriginalSourceFile);
|
||||
}
|
||||
|
||||
export function getOriginalNodeId(node: Node) {
|
||||
@ -2328,22 +2372,16 @@ namespace ts {
|
||||
}
|
||||
|
||||
function reattachFileDiagnostics(newFile: SourceFile): void {
|
||||
if (!hasProperty(fileDiagnostics, newFile.fileName)) {
|
||||
return;
|
||||
}
|
||||
|
||||
for (const diagnostic of fileDiagnostics[newFile.fileName]) {
|
||||
diagnostic.file = newFile;
|
||||
}
|
||||
forEach(fileDiagnostics.get(newFile.fileName), diagnostic => diagnostic.file = newFile);
|
||||
}
|
||||
|
||||
function add(diagnostic: Diagnostic): void {
|
||||
let diagnostics: Diagnostic[];
|
||||
if (diagnostic.file) {
|
||||
diagnostics = fileDiagnostics[diagnostic.file.fileName];
|
||||
diagnostics = fileDiagnostics.get(diagnostic.file.fileName);
|
||||
if (!diagnostics) {
|
||||
diagnostics = [];
|
||||
fileDiagnostics[diagnostic.file.fileName] = diagnostics;
|
||||
fileDiagnostics.set(diagnostic.file.fileName, diagnostics);
|
||||
}
|
||||
}
|
||||
else {
|
||||
@ -2363,7 +2401,7 @@ namespace ts {
|
||||
function getDiagnostics(fileName?: string): Diagnostic[] {
|
||||
sortAndDeduplicate();
|
||||
if (fileName) {
|
||||
return fileDiagnostics[fileName] || [];
|
||||
return fileDiagnostics.get(fileName) || [];
|
||||
}
|
||||
|
||||
const allDiagnostics: Diagnostic[] = [];
|
||||
@ -2373,9 +2411,9 @@ namespace ts {
|
||||
|
||||
forEach(nonFileDiagnostics, pushDiagnostic);
|
||||
|
||||
for (const key in fileDiagnostics) {
|
||||
forEach(fileDiagnostics[key], pushDiagnostic);
|
||||
}
|
||||
fileDiagnostics.forEach(diagnostics => {
|
||||
forEach(diagnostics, pushDiagnostic);
|
||||
});
|
||||
|
||||
return sortAndDeduplicateDiagnostics(allDiagnostics);
|
||||
}
|
||||
@ -2388,9 +2426,9 @@ namespace ts {
|
||||
diagnosticsModified = false;
|
||||
nonFileDiagnostics = sortAndDeduplicateDiagnostics(nonFileDiagnostics);
|
||||
|
||||
for (const key in fileDiagnostics) {
|
||||
fileDiagnostics[key] = sortAndDeduplicateDiagnostics(fileDiagnostics[key]);
|
||||
}
|
||||
fileDiagnostics.forEach((diagnostics, key) => {
|
||||
fileDiagnostics.set(key, sortAndDeduplicateDiagnostics(diagnostics));
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@ -2400,7 +2438,7 @@ namespace ts {
|
||||
// the map below must be updated. Note that this regexp *does not* include the 'delete' character.
|
||||
// There is no reason for this other than that JSON.stringify does not handle it either.
|
||||
const escapedCharsRegExp = /[\\\"\u0000-\u001f\t\v\f\b\r\n\u2028\u2029\u0085]/g;
|
||||
const escapedCharsMap = createMap({
|
||||
const escapedCharsMap = createMapFromTemplate({
|
||||
"\0": "\\0",
|
||||
"\t": "\\t",
|
||||
"\v": "\\v",
|
||||
@ -2426,7 +2464,7 @@ namespace ts {
|
||||
}
|
||||
|
||||
function getReplacement(c: string) {
|
||||
return escapedCharsMap[c] || get16BitUnicodeEscapeSequence(c.charCodeAt(0));
|
||||
return escapedCharsMap.get(c) || get16BitUnicodeEscapeSequence(c.charCodeAt(0));
|
||||
}
|
||||
|
||||
export function isIntrinsicJsxName(name: string) {
|
||||
@ -2449,23 +2487,6 @@ namespace ts {
|
||||
s;
|
||||
}
|
||||
|
||||
export interface EmitTextWriter {
|
||||
write(s: string): void;
|
||||
writeTextOfNode(text: string, node: Node): void;
|
||||
writeLine(): void;
|
||||
increaseIndent(): void;
|
||||
decreaseIndent(): void;
|
||||
getText(): string;
|
||||
rawWrite(s: string): void;
|
||||
writeLiteral(s: string): void;
|
||||
getTextPos(): number;
|
||||
getLine(): number;
|
||||
getColumn(): number;
|
||||
getIndent(): number;
|
||||
isAtStartOfLine(): boolean;
|
||||
reset(): void;
|
||||
}
|
||||
|
||||
const indentStrings: string[] = ["", " "];
|
||||
export function getIndentString(level: number) {
|
||||
if (indentStrings[level] === undefined) {
|
||||
@ -2617,102 +2638,57 @@ namespace ts {
|
||||
* @param host An EmitHost.
|
||||
* @param targetSourceFile An optional target source file to emit.
|
||||
*/
|
||||
export function getSourceFilesToEmit(host: EmitHost, targetSourceFile?: SourceFile) {
|
||||
export function getSourceFilesToEmit(host: EmitHost, targetSourceFile?: SourceFile): SourceFile[] {
|
||||
const options = host.getCompilerOptions();
|
||||
const isSourceFileFromExternalLibrary = (file: SourceFile) => host.isSourceFileFromExternalLibrary(file);
|
||||
if (options.outFile || options.out) {
|
||||
const moduleKind = getEmitModuleKind(options);
|
||||
const moduleEmitEnabled = moduleKind === ModuleKind.AMD || moduleKind === ModuleKind.System;
|
||||
const sourceFiles = getAllEmittableSourceFiles();
|
||||
// Can emit only sources that are not declaration file and are either non module code or module with --module or --target es6 specified
|
||||
return filter(sourceFiles, moduleEmitEnabled ? isNonDeclarationFile : isBundleEmitNonExternalModule);
|
||||
return filter(host.getSourceFiles(), sourceFile =>
|
||||
(moduleEmitEnabled || !isExternalModule(sourceFile)) && sourceFileMayBeEmitted(sourceFile, options, isSourceFileFromExternalLibrary));
|
||||
}
|
||||
else {
|
||||
const sourceFiles = targetSourceFile === undefined ? getAllEmittableSourceFiles() : [targetSourceFile];
|
||||
return filterSourceFilesInDirectory(sourceFiles, file => host.isSourceFileFromExternalLibrary(file));
|
||||
}
|
||||
|
||||
function getAllEmittableSourceFiles() {
|
||||
return options.noEmitForJsFiles ? filter(host.getSourceFiles(), sourceFile => !isSourceFileJavaScript(sourceFile)) : host.getSourceFiles();
|
||||
const sourceFiles = targetSourceFile === undefined ? host.getSourceFiles() : [targetSourceFile];
|
||||
return filter(sourceFiles, sourceFile => sourceFileMayBeEmitted(sourceFile, options, isSourceFileFromExternalLibrary));
|
||||
}
|
||||
}
|
||||
|
||||
/** Don't call this for `--outFile`, just for `--outDir` or plain emit. */
|
||||
export function filterSourceFilesInDirectory(sourceFiles: SourceFile[], isSourceFileFromExternalLibrary: (file: SourceFile) => boolean): SourceFile[] {
|
||||
return filter(sourceFiles, file => shouldEmitInDirectory(file, isSourceFileFromExternalLibrary));
|
||||
}
|
||||
|
||||
function isNonDeclarationFile(sourceFile: SourceFile) {
|
||||
return !isDeclarationFile(sourceFile);
|
||||
/** Don't call this for `--outFile`, just for `--outDir` or plain emit. `--outFile` needs additional checks. */
|
||||
export function sourceFileMayBeEmitted(sourceFile: SourceFile, options: CompilerOptions, isSourceFileFromExternalLibrary: (file: SourceFile) => boolean) {
    return !(options.noEmitForJsFiles && isSourceFileJavaScript(sourceFile)) && !isDeclarationFile(sourceFile) && !isSourceFileFromExternalLibrary(sourceFile);
}

/**
 * Whether a file should be emitted in a non-`--outFile` case.
 * Don't emit if the source file is a declaration file or was located under node_modules.
 */
function shouldEmitInDirectory(sourceFile: SourceFile, isSourceFileFromExternalLibrary: (file: SourceFile) => boolean): boolean {
    return isNonDeclarationFile(sourceFile) && !isSourceFileFromExternalLibrary(sourceFile);
}

function isBundleEmitNonExternalModule(sourceFile: SourceFile) {
    return isNonDeclarationFile(sourceFile) && !isExternalModule(sourceFile);
}

/**
 * Iterates over the source files that are expected to have an emit output.
 *
 * @param host An EmitHost.
 * @param action The action to execute.
 * @param sourceFilesOrTargetSourceFile
 *   If an array, the full list of source files to emit.
 *   Else, calls `getSourceFilesToEmit` with the (optional) target source file to determine the list of source files to emit.
 */
export function forEachEmittedFile(
    host: EmitHost, action: (emitFileNames: EmitFileNames, sourceFileOrBundle: SourceFile | Bundle, emitOnlyDtsFiles: boolean) => void,
    sourceFilesOrTargetSourceFile?: SourceFile[] | SourceFile,
    emitOnlyDtsFiles?: boolean) {

    const sourceFiles = isArray(sourceFilesOrTargetSourceFile) ? sourceFilesOrTargetSourceFile : getSourceFilesToEmit(host, sourceFilesOrTargetSourceFile);
    const options = host.getCompilerOptions();
    // Emit on each source file
    if (options.outFile || options.out) {
        if (sourceFiles.length) {
            const jsFilePath = options.outFile || options.out;
            const sourceMapFilePath = getSourceMapFilePath(jsFilePath, options);
            const declarationFilePath = options.declaration ? removeFileExtension(jsFilePath) + ".d.ts" : undefined;
            action({ jsFilePath, sourceMapFilePath, declarationFilePath }, createBundle(sourceFiles), emitOnlyDtsFiles);
        }
    }
    else {
        for (const sourceFile of sourceFiles) {
            const jsFilePath = getOwnEmitOutputFilePath(sourceFile, host, getOutputExtension(sourceFile, options));
            const sourceMapFilePath = getSourceMapFilePath(jsFilePath, options);
            const declarationFilePath = !isSourceFileJavaScript(sourceFile) && (emitOnlyDtsFiles || options.declaration) ? getDeclarationEmitOutputFilePath(sourceFile, host) : undefined;
            action({ jsFilePath, sourceMapFilePath, declarationFilePath }, sourceFile, emitOnlyDtsFiles);
        }
    }
}
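
The bundled-emit branch above derives all three output names from the single --outFile/--out path, while the per-file branch derives them per source file. A minimal standalone sketch of that name derivation, outside the compiler (the option values and the helper below are hypothetical, not the compiler's own removeFileExtension/getSourceMapFilePath):

// Standalone illustration of the output-name derivation used above (hypothetical inputs).
function bundleOutputNames(outFile: string, sourceMap: boolean, declaration: boolean) {
    const jsFilePath = outFile;                                            // e.g. "bundle.js"
    const sourceMapFilePath = sourceMap ? jsFilePath + ".map" : undefined; // "bundle.js.map"
    const declarationFilePath = declaration
        ? jsFilePath.replace(/\.[^/.]+$/, "") + ".d.ts"                    // "bundle.d.ts"
        : undefined;
    return { jsFilePath, sourceMapFilePath, declarationFilePath };
}

// bundleOutputNames("bundle.js", /*sourceMap*/ true, /*declaration*/ true)
// => { jsFilePath: "bundle.js", sourceMapFilePath: "bundle.js.map", declarationFilePath: "bundle.d.ts" }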
@ -2721,77 +2697,22 @@ namespace ts {
    return options.sourceMap ? jsFilePath + ".map" : undefined;
}

// JavaScript files are always LanguageVariant.JSX, as JSX syntax is allowed in .js files also.
// So for JavaScript files, '.jsx' is only emitted if the input was '.jsx', and JsxEmit.Preserve.
// For TypeScript, the only time to emit with a '.jsx' extension is on JSX input with JsxEmit.Preserve.
function getOutputExtension(sourceFile: SourceFile, options: CompilerOptions): string {
    if (options.jsx === JsxEmit.Preserve) {
        if (isSourceFileJavaScript(sourceFile)) {
            if (fileExtensionIs(sourceFile.fileName, ".jsx")) {
                return ".jsx";
            }
        }
        else if (sourceFile.languageVariant === LanguageVariant.JSX) {
            // TypeScript source file preserving JSX syntax
            return ".jsx";
        }
    }
    return ".js";
}
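
A standalone sketch of the extension rule implemented by getOutputExtension above; the flags and file names are hypothetical stand-ins for SourceFile and CompilerOptions:

// Simplified: booleans stand in for the source file's script kind and language variant.
function outputExtension(fileName: string, isJavaScript: boolean, hasJsxSyntax: boolean, jsxPreserve: boolean): string {
    if (jsxPreserve) {
        if (isJavaScript) {
            if (fileName.endsWith(".jsx")) return ".jsx"; // .jsx in, .jsx out
        }
        else if (hasJsxSyntax) {
            return ".jsx"; // .tsx with preserved JSX
        }
    }
    return ".js";
}

// outputExtension("app.tsx", false, true, true)  => ".jsx"
// outputExtension("app.tsx", false, true, false) => ".js"   (jsx: "react" compiles the JSX away)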
export function getSourceFilePathInNewDir(sourceFile: SourceFile, host: EmitHost, newDirPath: string) {
|
||||
@ -3253,7 +3174,11 @@ namespace ts {
|
||||
}
|
||||
|
||||
export function getLocalSymbolForExportDefault(symbol: Symbol) {
|
||||
return symbol && symbol.valueDeclaration && hasModifier(symbol.valueDeclaration, ModifierFlags.Default) ? symbol.valueDeclaration.localSymbol : undefined;
|
||||
return isExportDefaultSymbol(symbol) ? symbol.valueDeclaration.localSymbol : undefined;
|
||||
}
|
||||
|
||||
export function isExportDefaultSymbol(symbol: Symbol): boolean {
|
||||
return symbol && symbol.valueDeclaration && hasModifier(symbol.valueDeclaration, ModifierFlags.Default);
|
||||
}
|
||||
|
||||
/** Return ".ts", ".d.ts", or ".tsx", if that is the extension. */
|
||||
@ -3338,7 +3263,7 @@ namespace ts {
|
||||
|
||||
const carriageReturnLineFeed = "\r\n";
|
||||
const lineFeed = "\n";
|
||||
export function getNewLineCharacter(options: CompilerOptions): string {
|
||||
export function getNewLineCharacter(options: CompilerOptions | PrinterOptions): string {
|
||||
if (options.newLine === NewLineKind.CarriageReturnLineFeed) {
|
||||
return carriageReturnLineFeed;
|
||||
}
|
||||
@ -3426,18 +3351,21 @@ namespace ts {
|
||||
return false;
|
||||
}

const syntaxKindCache: string[] = [];

export function formatSyntaxKind(kind: SyntaxKind): string {
    const syntaxKindEnum = (<any>ts).SyntaxKind;
    if (syntaxKindEnum) {
        const cached = syntaxKindCache[kind];
        if (cached !== undefined) {
            return cached;
        }

        for (const name in syntaxKindEnum) {
            if (syntaxKindEnum[name] === kind) {
                const result = `${kind} (${name})`;
                syntaxKindCache[kind] = result;
                return result;
            }
        }
    }
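
The cache above is a plain array indexed by the numeric enum value, filled lazily by a reverse lookup over the enum object. The same memoization pattern in isolation, using a stand-in enum rather than ts.SyntaxKind:

// Generic sketch of the array-indexed memoization used by formatSyntaxKind above.
enum Color { Red, Green, Blue }

const colorNameCache: string[] = [];

function formatColor(color: Color): string {
    const cached = colorNameCache[color];
    if (cached !== undefined) {
        return cached;
    }
    const result = `${color} (${Color[color]})`; // e.g. "1 (Green)"
    colorNameCache[color] = result;
    return result;
}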
@ -1,4 +1,4 @@
|
||||
/// <reference path="checker.ts" />
|
||||
/// <reference path="checker.ts" />
|
||||
/// <reference path="factory.ts" />
|
||||
/// <reference path="utilities.ts" />
|
||||
|
||||
@ -46,54 +46,56 @@ namespace ts {
|
||||
* supplant the existing `forEachChild` implementation if performance is not
|
||||
* significantly impacted.
|
||||
*/
|
||||
const nodeEdgeTraversalMap = createMap<NodeTraversalPath>({
|
||||
[SyntaxKind.QualifiedName]: [
|
||||
{ name: "left", test: isEntityName },
|
||||
{ name: "right", test: isIdentifier }
|
||||
],
|
||||
[SyntaxKind.Decorator]: [
|
||||
{ name: "expression", test: isLeftHandSideExpression }
|
||||
],
|
||||
[SyntaxKind.TypeAssertionExpression]: [
|
||||
{ name: "type", test: isTypeNode },
|
||||
{ name: "expression", test: isUnaryExpression }
|
||||
],
|
||||
[SyntaxKind.AsExpression]: [
|
||||
{ name: "expression", test: isExpression },
|
||||
{ name: "type", test: isTypeNode }
|
||||
],
|
||||
[SyntaxKind.NonNullExpression]: [
|
||||
{ name: "expression", test: isLeftHandSideExpression }
|
||||
],
|
||||
[SyntaxKind.EnumDeclaration]: [
|
||||
{ name: "decorators", test: isDecorator },
|
||||
{ name: "modifiers", test: isModifier },
|
||||
{ name: "name", test: isIdentifier },
|
||||
{ name: "members", test: isEnumMember }
|
||||
],
|
||||
[SyntaxKind.ModuleDeclaration]: [
|
||||
{ name: "decorators", test: isDecorator },
|
||||
{ name: "modifiers", test: isModifier },
|
||||
{ name: "name", test: isModuleName },
|
||||
{ name: "body", test: isModuleBody }
|
||||
],
|
||||
[SyntaxKind.ModuleBlock]: [
|
||||
{ name: "statements", test: isStatement }
|
||||
],
|
||||
[SyntaxKind.ImportEqualsDeclaration]: [
|
||||
{ name: "decorators", test: isDecorator },
|
||||
{ name: "modifiers", test: isModifier },
|
||||
{ name: "name", test: isIdentifier },
|
||||
{ name: "moduleReference", test: isModuleReference }
|
||||
],
|
||||
[SyntaxKind.ExternalModuleReference]: [
|
||||
{ name: "expression", test: isExpression, optional: true }
|
||||
],
|
||||
[SyntaxKind.EnumMember]: [
|
||||
{ name: "name", test: isPropertyName },
|
||||
{ name: "initializer", test: isExpression, optional: true, parenthesize: parenthesizeExpressionForList }
|
||||
]
|
||||
});
|
||||
function getNodeEdgeTraversal(kind: SyntaxKind): NodeTraversalPath {
|
||||
switch (kind) {
|
||||
case SyntaxKind.QualifiedName: return [
|
||||
{ name: "left", test: isEntityName },
|
||||
{ name: "right", test: isIdentifier }
|
||||
];
|
||||
case SyntaxKind.Decorator: return [
|
||||
{ name: "expression", test: isLeftHandSideExpression }
|
||||
];
|
||||
case SyntaxKind.TypeAssertionExpression: return [
|
||||
{ name: "type", test: isTypeNode },
|
||||
{ name: "expression", test: isUnaryExpression }
|
||||
];
|
||||
case SyntaxKind.AsExpression: return [
|
||||
{ name: "expression", test: isExpression },
|
||||
{ name: "type", test: isTypeNode }
|
||||
];
|
||||
case SyntaxKind.NonNullExpression: return [
|
||||
{ name: "expression", test: isLeftHandSideExpression }
|
||||
];
|
||||
case SyntaxKind.EnumDeclaration: return [
|
||||
{ name: "decorators", test: isDecorator },
|
||||
{ name: "modifiers", test: isModifier },
|
||||
{ name: "name", test: isIdentifier },
|
||||
{ name: "members", test: isEnumMember }
|
||||
];
|
||||
case SyntaxKind.ModuleDeclaration: return [
|
||||
{ name: "decorators", test: isDecorator },
|
||||
{ name: "modifiers", test: isModifier },
|
||||
{ name: "name", test: isModuleName },
|
||||
{ name: "body", test: isModuleBody }
|
||||
];
|
||||
case SyntaxKind.ModuleBlock: return [
|
||||
{ name: "statements", test: isStatement }
|
||||
];
|
||||
case SyntaxKind.ImportEqualsDeclaration: return [
|
||||
{ name: "decorators", test: isDecorator },
|
||||
{ name: "modifiers", test: isModifier },
|
||||
{ name: "name", test: isIdentifier },
|
||||
{ name: "moduleReference", test: isModuleReference }
|
||||
];
|
||||
case SyntaxKind.ExternalModuleReference: return [
|
||||
{ name: "expression", test: isExpression, optional: true }
|
||||
];
|
||||
case SyntaxKind.EnumMember: return [
|
||||
{ name: "name", test: isPropertyName },
|
||||
{ name: "initializer", test: isExpression, optional: true, parenthesize: parenthesizeExpressionForList }
|
||||
];
|
||||
}
|
||||
}
|
||||
|
||||
function reduceNode<T>(node: Node, f: (memo: T, node: Node) => T, initial: T) {
|
||||
return node ? f(initial, node) : initial;
|
||||
@ -530,7 +532,7 @@ namespace ts {
|
||||
break;
|
||||
|
||||
default:
|
||||
const edgeTraversalPath = nodeEdgeTraversalMap[kind];
|
||||
const edgeTraversalPath = getNodeEdgeTraversal(kind);
|
||||
if (edgeTraversalPath) {
|
||||
for (const edge of edgeTraversalPath) {
|
||||
const value = (<MapLike<any>>node)[edge.name];
|
||||
@ -1192,10 +1194,10 @@ namespace ts {
|
||||
|
||||
default:
|
||||
let updated: Node & MapLike<any>;
|
||||
const edgeTraversalPath = nodeEdgeTraversalMap[kind];
|
||||
const edgeTraversalPath = getNodeEdgeTraversal(kind);
|
||||
if (edgeTraversalPath) {
|
||||
for (const edge of edgeTraversalPath) {
|
||||
const value = <Node | NodeArray<Node>>(<Node & Map<any>>node)[edge.name];
|
||||
const value = <Node | NodeArray<Node>>(<Node & MapLike<any>>node)[edge.name];
|
||||
if (value !== undefined) {
|
||||
const visited = isArray(value)
|
||||
? visitNodes(value, visitor, edge.test, 0, value.length, edge.parenthesize, node)
|
||||
|
||||
@ -40,7 +40,7 @@ namespace FourSlash {
|
||||
files: FourSlashFile[];
|
||||
|
||||
// A mapping from marker names to name/position pairs
|
||||
markerPositions: { [index: string]: Marker; };
|
||||
markerPositions: ts.Map<Marker>;
|
||||
|
||||
markers: Marker[];
|
||||
|
||||
@ -61,10 +61,6 @@ namespace FourSlash {
|
||||
data?: any;
|
||||
}
|
||||
|
||||
interface MarkerMap {
|
||||
[index: string]: Marker;
|
||||
}
|
||||
|
||||
export interface Range {
|
||||
fileName: string;
|
||||
start: number;
|
||||
@ -94,7 +90,7 @@ namespace FourSlash {
|
||||
|
||||
export import IndentStyle = ts.IndentStyle;
|
||||
|
||||
const entityMap = ts.createMap({
|
||||
const entityMap = ts.createMapFromTemplate({
|
||||
"&": "&",
|
||||
"\"": """,
|
||||
"'": "'",
|
||||
@ -104,7 +100,7 @@ namespace FourSlash {
|
||||
});
|
||||
|
||||
export function escapeXmlAttributeValue(s: string) {
|
||||
return s.replace(/[&<>"'\/]/g, ch => entityMap[ch]);
|
||||
return s.replace(/[&<>"'\/]/g, ch => entityMap.get(ch));
|
||||
}
|
||||
|
||||
// Name of testcase metadata including ts.CompilerOptions properties that will be used by globalOptions
|
||||
@ -230,7 +226,7 @@ namespace FourSlash {
|
||||
}
|
||||
|
||||
function tryAdd(path: string) {
|
||||
const inputFile = inputFiles[path];
|
||||
const inputFile = inputFiles.get(path);
|
||||
if (inputFile && !Harness.isDefaultLibraryFile(path)) {
|
||||
languageServiceAdapterHost.addScript(path, inputFile, /*isRootFile*/ true);
|
||||
return true;
|
||||
@ -265,7 +261,7 @@ namespace FourSlash {
|
||||
let configFileName: string;
|
||||
ts.forEach(testData.files, file => {
|
||||
// Create map between fileName and its content for easily looking up when resolveReference flag is specified
|
||||
this.inputFiles[file.fileName] = file.content;
|
||||
this.inputFiles.set(file.fileName, file.content);
|
||||
if (ts.getBaseFileName(file.fileName).toLowerCase() === "tsconfig.json") {
|
||||
configFileName = file.fileName;
|
||||
}
|
||||
@ -283,7 +279,7 @@ namespace FourSlash {
|
||||
const baseDir = ts.normalizePath(ts.getDirectoryPath(configFileName));
|
||||
const host = new Utils.MockParseConfigHost(baseDir, /*ignoreCase*/ false, this.inputFiles);
|
||||
|
||||
const configJsonObj = ts.parseConfigFileTextToJson(configFileName, this.inputFiles[configFileName]);
|
||||
const configJsonObj = ts.parseConfigFileTextToJson(configFileName, this.inputFiles.get(configFileName));
|
||||
assert.isTrue(configJsonObj.config !== undefined);
|
||||
|
||||
const { options, errors } = ts.parseJsonConfigFileContent(configJsonObj.config, host, baseDir);
|
||||
@ -334,11 +330,11 @@ namespace FourSlash {
|
||||
}
|
||||
else {
|
||||
// resolveReference file-option is not specified then do not resolve any files and include all inputFiles
|
||||
for (const fileName in this.inputFiles) {
|
||||
this.inputFiles.forEach((file, fileName) => {
|
||||
if (!Harness.isDefaultLibraryFile(fileName)) {
|
||||
this.languageServiceAdapterHost.addScript(fileName, this.inputFiles[fileName], /*isRootFile*/ true);
|
||||
this.languageServiceAdapterHost.addScript(fileName, file, /*isRootFile*/ true);
|
||||
}
|
||||
}
|
||||
});
|
||||
this.languageServiceAdapterHost.addScript(Harness.Compiler.defaultLibFileName,
|
||||
Harness.Compiler.getDefaultLibrarySourceFile().text, /*isRootFile*/ false);
|
||||
}
|
||||
@ -376,8 +372,8 @@ namespace FourSlash {
|
||||
}
|
||||
|
||||
// Entry points from fourslash.ts
|
||||
public goToMarker(name = "") {
|
||||
const marker = this.getMarkerByName(name);
|
||||
public goToMarker(name: string | Marker = "") {
|
||||
const marker = typeof name === "string" ? this.getMarkerByName(name) : name;
|
||||
if (this.activeFile.fileName !== marker.fileName) {
|
||||
this.openFile(marker.fileName);
|
||||
}
|
||||
@ -386,10 +382,37 @@ namespace FourSlash {
|
||||
if (marker.position === -1 || marker.position > content.length) {
|
||||
throw new Error(`Marker "${name}" has been invalidated by unrecoverable edits to the file.`);
|
||||
}
|
||||
this.lastKnownMarker = name;
|
||||
const mName = typeof name === "string" ? name : this.markerName(marker);
|
||||
this.lastKnownMarker = mName;
|
||||
this.goToPosition(marker.position);
|
||||
}
|
||||
|
||||
public goToEachMarker(action: () => void) {
|
||||
const markers = this.getMarkers();
|
||||
assert(markers.length);
|
||||
for (const marker of markers) {
|
||||
this.goToMarker(marker);
|
||||
action();
|
||||
}
|
||||
}
|
||||
|
||||
public goToEachRange(action: () => void) {
|
||||
const ranges = this.getRanges();
|
||||
assert(ranges.length);
|
||||
for (const range of ranges) {
|
||||
this.goToRangeStart(range);
|
||||
action();
|
||||
}
|
||||
}
|
||||
|
||||
private markerName(m: Marker): string {
|
||||
return ts.forEachEntry(this.testData.markerPositions, (marker, name) => {
|
||||
if (marker === m) {
|
||||
return name;
|
||||
}
|
||||
})!;
|
||||
}
|
||||
|
||||
public goToPosition(pos: number) {
|
||||
this.currentCaretPosition = pos;
|
||||
}
|
||||
@ -684,11 +707,12 @@ namespace FourSlash {
|
||||
const completions = this.getCompletionListAtCaret();
|
||||
const uniqueItems = ts.createMap<string>();
|
||||
for (const item of completions.entries) {
|
||||
if (!(item.name in uniqueItems)) {
|
||||
uniqueItems[item.name] = item.kind;
|
||||
const uniqueItem = uniqueItems.get(item.name);
|
||||
if (!uniqueItem) {
|
||||
uniqueItems.set(item.name, item.kind);
|
||||
}
|
||||
else {
|
||||
assert.equal(item.kind, uniqueItems[item.name], `Items should have the same kind, got ${item.kind} and ${uniqueItems[item.name]}`);
|
||||
assert.equal(item.kind, uniqueItem, `Items should have the same kind, got ${item.kind} and ${uniqueItem}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -862,9 +886,8 @@ namespace FourSlash {
|
||||
}
|
||||
}
|
||||
|
||||
public verifyReferencesOf({fileName, start}: Range, references: Range[]) {
|
||||
this.openFile(fileName);
|
||||
this.goToPosition(start);
|
||||
public verifyReferencesOf(range: Range, references: Range[]) {
|
||||
this.goToRangeStart(range);
|
||||
this.verifyReferencesAre(references);
|
||||
}
|
||||
|
||||
@ -877,7 +900,7 @@ namespace FourSlash {
|
||||
}
|
||||
|
||||
public verifyRangesWithSameTextReferenceEachOther() {
|
||||
ts.forEachProperty(this.rangesByText(), ranges => this.verifyRangesReferenceEachOther(ranges));
|
||||
this.rangesByText().forEach(ranges => this.verifyRangesReferenceEachOther(ranges));
|
||||
}
|
||||
|
||||
public verifyDisplayPartsOfReferencedSymbol(expected: ts.SymbolDisplayPart[]) {
|
||||
@ -949,7 +972,8 @@ namespace FourSlash {
|
||||
}
|
||||
|
||||
public verifyQuickInfos(namesAndTexts: { [name: string]: string | [string, string] }) {
|
||||
ts.forEachProperty(ts.createMap(namesAndTexts), (text, name) => {
|
||||
for (const name in namesAndTexts) if (ts.hasProperty(namesAndTexts, name)) {
|
||||
const text = namesAndTexts[name];
|
||||
if (text instanceof Array) {
|
||||
assert(text.length === 2);
|
||||
const [expectedText, expectedDocumentation] = text;
|
||||
@ -958,7 +982,7 @@ namespace FourSlash {
|
||||
else {
|
||||
this.verifyQuickInfoAt(name, text);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
public verifyQuickInfoString(expectedText: string, expectedDocumentation?: string) {
|
||||
@ -1671,6 +1695,11 @@ namespace FourSlash {
|
||||
this.goToPosition(len);
|
||||
}
|
||||
|
||||
public goToRangeStart({fileName, start}: Range) {
|
||||
this.openFile(fileName);
|
||||
this.goToPosition(start);
|
||||
}
|
||||
|
||||
public goToTypeDefinition(definitionIndex: number) {
|
||||
const definitions = this.languageService.getTypeDefinitionAtPosition(this.activeFile.fileName, this.currentCaretPosition);
|
||||
if (!definitions || !definitions.length) {
|
||||
@ -1796,7 +1825,7 @@ namespace FourSlash {
|
||||
}
|
||||
|
||||
public getMarkerNames(): string[] {
|
||||
return Object.keys(this.testData.markerPositions);
|
||||
return ts.arrayFrom(this.testData.markerPositions.keys());
|
||||
}
|
||||
|
||||
public getRanges(): Range[] {
|
||||
@ -1804,10 +1833,10 @@ namespace FourSlash {
|
||||
}
|
||||
|
||||
public rangesByText(): ts.Map<Range[]> {
    const result = ts.createMultiMap<Range>();
    for (const range of this.getRanges()) {
        const text = this.rangeText(range);
        result.add(text, range);
    }
    return result;
}
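
rangesByText above groups every test range under its source text via ts.createMultiMap. A standalone sketch of the same grouping with a plain Map, on hypothetical ranges:

// Grouping by a string key; SimpleRange is a hypothetical stand-in for the harness Range.
interface SimpleRange { fileName: string; text: string; }

function groupByText(ranges: SimpleRange[]): Map<string, SimpleRange[]> {
    const result = new Map<string, SimpleRange[]>();
    for (const range of ranges) {
        const group = result.get(range.text);
        if (group) {
            group.push(range);
        }
        else {
            result.set(range.text, [range]);
        }
    }
    return result;
}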
@ -2163,7 +2192,7 @@ namespace FourSlash {
|
||||
|
||||
public verifyBraceCompletionAtPosition(negative: boolean, openingBrace: string) {
|
||||
|
||||
const openBraceMap = ts.createMap<ts.CharacterCodes>({
|
||||
const openBraceMap = ts.createMapFromTemplate<ts.CharacterCodes>({
|
||||
"(": ts.CharacterCodes.openParen,
|
||||
"{": ts.CharacterCodes.openBrace,
|
||||
"[": ts.CharacterCodes.openBracket,
|
||||
@ -2173,7 +2202,7 @@ namespace FourSlash {
|
||||
"<": ts.CharacterCodes.lessThan
|
||||
});
|
||||
|
||||
const charCode = openBraceMap[openingBrace];
|
||||
const charCode = openBraceMap.get(openingBrace);
|
||||
|
||||
if (!charCode) {
|
||||
this.raiseError(`Invalid openingBrace '${openingBrace}' specified.`);
|
||||
@ -2363,40 +2392,60 @@ namespace FourSlash {
|
||||
return this.languageService.getDocumentHighlights(this.activeFile.fileName, this.currentCaretPosition, filesToSearch);
|
||||
}
|
||||
|
||||
public verifyDocumentHighlightsAtPositionListContains(fileName: string, start: number, end: number, fileNamesToSearch: string[], kind?: string) {
|
||||
const documentHighlights = this.getDocumentHighlightsAtCurrentPosition(fileNamesToSearch);
|
||||
|
||||
if (!documentHighlights || documentHighlights.length === 0) {
|
||||
this.raiseError("verifyDocumentHighlightsAtPositionListContains failed - found 0 highlights, expected at least one.");
|
||||
public verifyRangesAreOccurrences(isWriteAccess?: boolean) {
|
||||
const ranges = this.getRanges();
|
||||
for (const r of ranges) {
|
||||
this.goToRangeStart(r);
|
||||
this.verifyOccurrencesAtPositionListCount(ranges.length);
|
||||
for (const range of ranges) {
|
||||
this.verifyOccurrencesAtPositionListContains(range.fileName, range.start, range.end, isWriteAccess);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const documentHighlight of documentHighlights) {
|
||||
if (documentHighlight.fileName === fileName) {
|
||||
const { highlightSpans } = documentHighlight;
|
||||
public verifyRangesAreRenameLocations(findInStrings: boolean, findInComments: boolean) {
|
||||
this.goToEachRange(() => this.verifyRenameLocations(findInStrings, findInComments));
|
||||
}
|
||||
|
||||
for (const highlight of highlightSpans) {
|
||||
if (highlight && highlight.textSpan.start === start && ts.textSpanEnd(highlight.textSpan) === end) {
|
||||
if (typeof kind !== "undefined" && highlight.kind !== kind) {
|
||||
this.raiseError(`verifyDocumentHighlightsAtPositionListContains failed - item "kind" value does not match, actual: ${highlight.kind}, expected: ${kind}.`);
|
||||
}
|
||||
return;
|
||||
}
|
||||
}
|
||||
public verifyRangesWithSameTextAreDocumentHighlights() {
|
||||
this.rangesByText().forEach(ranges => this.verifyRangesAreDocumentHighlights(ranges));
|
||||
}
|
||||
|
||||
public verifyRangesAreDocumentHighlights(ranges?: Range[]) {
|
||||
ranges = ranges || this.getRanges();
|
||||
const fileNames = unique(ranges, range => range.fileName);
|
||||
for (const range of ranges) {
|
||||
this.goToRangeStart(range);
|
||||
this.verifyDocumentHighlights(ranges, fileNames);
|
||||
}
|
||||
}
|
||||
|
||||
private verifyDocumentHighlights(expectedRanges: Range[], fileNames: string[] = [this.activeFile.fileName]) {
|
||||
const documentHighlights = this.getDocumentHighlightsAtCurrentPosition(fileNames) || [];
|
||||
|
||||
for (const dh of documentHighlights) {
|
||||
if (fileNames.indexOf(dh.fileName) === -1) {
|
||||
this.raiseError(`verifyDocumentHighlights failed - got highlights in unexpected file name ${dh.fileName}`);
|
||||
}
|
||||
}
|
||||
|
||||
const missingItem = { fileName: fileName, start: start, end: end, kind: kind };
|
||||
this.raiseError(`verifyDocumentHighlightsAtPositionListContains failed - could not find the item: ${stringify(missingItem)} in the returned list: (${stringify(documentHighlights)})`);
|
||||
}
|
||||
for (const fileName of fileNames) {
|
||||
const expectedRangesInFile = expectedRanges.filter(r => r.fileName === fileName);
|
||||
const highlights = ts.find(documentHighlights, dh => dh.fileName === fileName);
|
||||
if (!highlights) {
|
||||
this.raiseError(`verifyDocumentHighlights failed - found no highlights in ${fileName}`);
|
||||
}
|
||||
const spansInFile = highlights.highlightSpans.sort((s1, s2) => s1.textSpan.start - s2.textSpan.start);
|
||||
|
||||
public verifyDocumentHighlightsAtPositionListCount(expectedCount: number, fileNamesToSearch: string[]) {
|
||||
const documentHighlights = this.getDocumentHighlightsAtCurrentPosition(fileNamesToSearch);
|
||||
const actualCount = documentHighlights
|
||||
? documentHighlights.reduce((currentCount, { highlightSpans }) => currentCount + highlightSpans.length, 0)
|
||||
: 0;
|
||||
if (expectedRangesInFile.length !== spansInFile.length) {
|
||||
this.raiseError(`verifyDocumentHighlights failed - In ${fileName}, expected ${expectedRangesInFile.length} highlights, got ${spansInFile.length}`);
|
||||
}
|
||||
|
||||
if (expectedCount !== actualCount) {
|
||||
this.raiseError("verifyDocumentHighlightsAtPositionListCount failed - actual: " + actualCount + ", expected:" + expectedCount);
|
||||
ts.zipWith(expectedRangesInFile, spansInFile, (expectedRange, span) => {
|
||||
if (span.textSpan.start !== expectedRange.start || ts.textSpanEnd(span.textSpan) !== expectedRange.end) {
|
||||
this.raiseError(`verifyDocumentHighlights failed - span does not match, actual: ${JSON.stringify(span.textSpan)}, expected: ${expectedRange.start}--${expectedRange.end}`);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@ -2529,11 +2578,9 @@ namespace FourSlash {
|
||||
}
|
||||
|
||||
public getMarkerByName(markerName: string) {
|
||||
const markerPos = this.testData.markerPositions[markerName];
|
||||
const markerPos = this.testData.markerPositions.get(markerName);
|
||||
if (markerPos === undefined) {
|
||||
const markerNames: string[] = [];
|
||||
for (const m in this.testData.markerPositions) markerNames.push(m);
|
||||
throw new Error(`Unknown marker "${markerName}" Available markers: ${markerNames.map(m => "\"" + m + "\"").join(", ")}`);
|
||||
throw new Error(`Unknown marker "${markerName}" Available markers: ${this.getMarkerNames().map(m => "\"" + m + "\"").join(", ")}`);
|
||||
}
|
||||
else {
|
||||
return markerPos;
|
||||
@ -2626,7 +2673,7 @@ ${code}
|
||||
// we have to use string-based splitting instead and try to figure out the delimiting chars
|
||||
const lines = contents.split("\n");
|
||||
|
||||
const markerPositions: MarkerMap = {};
|
||||
const markerPositions = ts.createMap<Marker>();
|
||||
const markers: Marker[] = [];
|
||||
const ranges: Range[] = [];
|
||||
|
||||
@ -2765,7 +2812,7 @@ ${code}
|
||||
throw new Error(errorMessage);
|
||||
}
|
||||
|
||||
function recordObjectMarker(fileName: string, location: LocationInformation, text: string, markerMap: MarkerMap, markers: Marker[]): Marker {
|
||||
function recordObjectMarker(fileName: string, location: LocationInformation, text: string, markerMap: ts.Map<Marker>, markers: Marker[]): Marker {
|
||||
let markerValue: any = undefined;
|
||||
try {
|
||||
// Attempt to parse the marker value as JSON
|
||||
@ -2788,7 +2835,7 @@ ${code}
|
||||
|
||||
// Object markers can be anonymous
|
||||
if (markerValue.name) {
|
||||
markerMap[markerValue.name] = marker;
|
||||
markerMap.set(markerValue.name, marker);
|
||||
}
|
||||
|
||||
markers.push(marker);
|
||||
@ -2796,26 +2843,26 @@ ${code}
|
||||
return marker;
|
||||
}
|
||||
|
||||
function recordMarker(fileName: string, location: LocationInformation, name: string, markerMap: MarkerMap, markers: Marker[]): Marker {
|
||||
function recordMarker(fileName: string, location: LocationInformation, name: string, markerMap: ts.Map<Marker>, markers: Marker[]): Marker {
|
||||
const marker: Marker = {
|
||||
fileName,
|
||||
position: location.position
|
||||
};
|
||||
|
||||
// Verify markers for uniqueness
|
||||
if (markerMap[name] !== undefined) {
|
||||
if (markerMap.has(name)) {
|
||||
const message = "Marker '" + name + "' is duplicated in the source file contents.";
|
||||
reportError(marker.fileName, location.sourceLine, location.sourceColumn, message);
|
||||
return undefined;
|
||||
}
|
||||
else {
|
||||
markerMap[name] = marker;
|
||||
markerMap.set(name, marker);
|
||||
markers.push(marker);
|
||||
return marker;
|
||||
}
|
||||
}
|
||||
|
||||
function parseFileContent(content: string, fileName: string, markerMap: MarkerMap, markers: Marker[], ranges: Range[]): FourSlashFile {
|
||||
function parseFileContent(content: string, fileName: string, markerMap: ts.Map<Marker>, markers: Marker[], ranges: Range[]): FourSlashFile {
|
||||
content = chompLeadingSpace(content);
|
||||
|
||||
// Any slash-star comment with a character not in this string is not a marker.
|
||||
@ -3025,6 +3072,16 @@ ${code}
|
||||
function stringify(data: any, replacer?: (key: string, value: any) => any): string {
|
||||
return JSON.stringify(data, replacer, 2);
|
||||
}
|
||||
|
||||
/** Collects an array of unique outputs. */
function unique<T>(inputs: T[], getOutput: (t: T) => string): string[] {
    const set = ts.createMap<true>();
    for (const input of inputs) {
        const out = getOutput(input);
        set.set(out, true);
    }
    return ts.arrayFrom(set.keys());
}
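
A hypothetical usage of the unique helper above, mirroring the unique(ranges, range => range.fileName) call in verifyRangesAreDocumentHighlights:

// Hypothetical inputs; unique dedupes by the projected string key.
//   const ranges = [{ fileName: "/a.ts" }, { fileName: "/b.ts" }, { fileName: "/a.ts" }];
//   unique(ranges, range => range.fileName); // => ["/a.ts", "/b.ts"]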
}
|
||||
|
||||
namespace FourSlashInterface {
|
||||
@ -3063,10 +3120,22 @@ namespace FourSlashInterface {
|
||||
// Moves the caret to the specified marker,
|
||||
// or the anonymous marker ('/**/') if no name
|
||||
// is given
|
||||
public marker(name?: string) {
|
||||
public marker(name?: string | FourSlash.Marker) {
|
||||
this.state.goToMarker(name);
|
||||
}
|
||||
|
||||
public eachMarker(action: () => void) {
|
||||
this.state.goToEachMarker(action);
|
||||
}
|
||||
|
||||
public rangeStart(range: FourSlash.Range) {
|
||||
this.state.goToRangeStart(range);
|
||||
}
|
||||
|
||||
public eachRange(action: () => void) {
|
||||
this.state.goToEachRange(action);
|
||||
}
|
||||
|
||||
public bof() {
|
||||
this.state.goToBOF();
|
||||
}
|
||||
@ -3417,12 +3486,20 @@ namespace FourSlashInterface {
|
||||
this.state.verifyOccurrencesAtPositionListCount(expectedCount);
|
||||
}
|
||||
|
||||
public documentHighlightsAtPositionContains(range: FourSlash.Range, fileNamesToSearch: string[], kind?: string) {
|
||||
this.state.verifyDocumentHighlightsAtPositionListContains(range.fileName, range.start, range.end, fileNamesToSearch, kind);
|
||||
public rangesAreOccurrences(isWriteAccess?: boolean) {
|
||||
this.state.verifyRangesAreOccurrences(isWriteAccess);
|
||||
}
|
||||
|
||||
public documentHighlightsAtPositionCount(expectedCount: number, fileNamesToSearch: string[]) {
|
||||
this.state.verifyDocumentHighlightsAtPositionListCount(expectedCount, fileNamesToSearch);
|
||||
public rangesAreRenameLocations(findInStrings = false, findInComments = false) {
|
||||
this.state.verifyRangesAreRenameLocations(findInStrings, findInComments);
|
||||
}
|
||||
|
||||
public rangesAreDocumentHighlights(ranges?: FourSlash.Range[]) {
|
||||
this.state.verifyRangesAreDocumentHighlights(ranges);
|
||||
}
|
||||
|
||||
public rangesWithSameTextAreDocumentHighlights() {
|
||||
this.state.verifyRangesWithSameTextAreDocumentHighlights();
|
||||
}
|
||||
|
||||
public completionEntryDetailIs(entryName: string, text: string, documentation?: string, kind?: string) {
|
||||
|
||||
@ -40,6 +40,19 @@ declare namespace NodeJS {
|
||||
ActiveXObject: typeof ActiveXObject;
|
||||
}
|
||||
}
|
||||
|
||||
declare var window: {};
|
||||
declare var XMLHttpRequest: {
|
||||
new(): XMLHttpRequest;
|
||||
}
|
||||
interface XMLHttpRequest {
|
||||
readonly readyState: number;
|
||||
readonly responseText: string;
|
||||
readonly status: number;
|
||||
open(method: string, url: string, async?: boolean, user?: string, password?: string): void;
|
||||
send(data?: string): void;
|
||||
setRequestHeader(header: string, value: string): void;
|
||||
}
|
||||
/* tslint:enable:no-var-keyword */
|
||||
|
||||
namespace Utils {
|
||||
@ -909,7 +922,7 @@ namespace Harness {
|
||||
export const defaultLibFileName = "lib.d.ts";
|
||||
export const es2015DefaultLibFileName = "lib.es2015.d.ts";
|
||||
|
||||
const libFileNameSourceFileMap = ts.createMap<ts.SourceFile>({
|
||||
const libFileNameSourceFileMap = ts.createMapFromTemplate<ts.SourceFile>({
|
||||
[defaultLibFileName]: createSourceFileAndAssertInvariants(defaultLibFileName, IO.readFile(libFolder + "lib.es5.d.ts"), /*languageVersion*/ ts.ScriptTarget.Latest)
|
||||
});
|
||||
|
||||
@ -918,10 +931,11 @@ namespace Harness {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
if (!libFileNameSourceFileMap[fileName]) {
|
||||
libFileNameSourceFileMap[fileName] = createSourceFileAndAssertInvariants(fileName, IO.readFile(libFolder + fileName), ts.ScriptTarget.Latest);
|
||||
let sourceFile = libFileNameSourceFileMap.get(fileName);
|
||||
if (!sourceFile) {
|
||||
libFileNameSourceFileMap.set(fileName, sourceFile = createSourceFileAndAssertInvariants(fileName, IO.readFile(libFolder + fileName), ts.ScriptTarget.Latest));
|
||||
}
|
||||
return libFileNameSourceFileMap[fileName];
|
||||
return sourceFile;
|
||||
}
|
||||
|
||||
export function getDefaultLibFileName(options: ts.CompilerOptions): string {
|
||||
@ -1103,10 +1117,10 @@ namespace Harness {
|
||||
optionsIndex = ts.createMap<ts.CommandLineOption>();
|
||||
const optionDeclarations = harnessOptionDeclarations.concat(ts.optionDeclarations);
|
||||
for (const option of optionDeclarations) {
|
||||
optionsIndex[option.name.toLowerCase()] = option;
|
||||
optionsIndex.set(option.name.toLowerCase(), option);
|
||||
}
|
||||
}
|
||||
return optionsIndex[name.toLowerCase()];
|
||||
return optionsIndex.get(name.toLowerCase());
|
||||
}
|
||||
|
||||
export function setCompilerOptionsFromHarnessSetting(settings: Harness.TestCaseParser.CompilerSettings, options: ts.CompilerOptions & HarnessOptions): void {
|
||||
@ -1466,7 +1480,7 @@ namespace Harness {
|
||||
const fullResults = ts.createMap<TypeWriterResult[]>();
|
||||
|
||||
for (const sourceFile of allFiles) {
|
||||
fullResults[sourceFile.unitName] = fullWalker.getTypeAndSymbols(sourceFile.unitName);
|
||||
fullResults.set(sourceFile.unitName, fullWalker.getTypeAndSymbols(sourceFile.unitName));
|
||||
}
|
||||
|
||||
// Produce baselines. The first gives the types for all expressions.
|
||||
@ -1519,7 +1533,7 @@ namespace Harness {
|
||||
|
||||
allFiles.forEach(file => {
|
||||
const codeLines = file.content.split("\n");
|
||||
typeWriterResults[file.unitName].forEach(result => {
|
||||
typeWriterResults.get(file.unitName).forEach(result => {
|
||||
if (isSymbolBaseline && !result.symbol) {
|
||||
return;
|
||||
}
|
||||
|
||||
@ -262,7 +262,7 @@ namespace Harness.LanguageService {
|
||||
this.getModuleResolutionsForFile = (fileName) => {
|
||||
const scriptInfo = this.getScriptInfo(fileName);
|
||||
const preprocessInfo = ts.preProcessFile(scriptInfo.content, /*readImportFiles*/ true);
|
||||
const imports = ts.createMap<string>();
|
||||
const imports: ts.MapLike<string> = {};
|
||||
for (const module of preprocessInfo.importedFiles) {
|
||||
const resolutionInfo = ts.resolveModuleName(module.fileName, fileName, compilerOptions, moduleResolutionHost);
|
||||
if (resolutionInfo.resolvedModule) {
|
||||
@ -275,7 +275,7 @@ namespace Harness.LanguageService {
|
||||
const scriptInfo = this.getScriptInfo(fileName);
|
||||
if (scriptInfo) {
|
||||
const preprocessInfo = ts.preProcessFile(scriptInfo.content, /*readImportFiles*/ false);
|
||||
const resolutions = ts.createMap<ts.ResolvedTypeReferenceDirective>();
|
||||
const resolutions: ts.MapLike<ts.ResolvedTypeReferenceDirective> = {};
|
||||
const settings = this.nativeHost.getCompilationSettings();
|
||||
for (const typeReferenceDirective of preprocessInfo.typeReferenceDirectives) {
|
||||
const resolutionInfo = ts.resolveTypeReferenceDirective(typeReferenceDirective.fileName, fileName, settings, moduleResolutionHost);
|
||||
|
||||
@ -256,17 +256,20 @@ class ProjectRunner extends RunnerBase {
|
||||
// Set the values specified using json
|
||||
const optionNameMap = ts.arrayToMap(ts.optionDeclarations, option => option.name);
|
||||
for (const name in testCase) {
|
||||
if (name !== "mapRoot" && name !== "sourceRoot" && name in optionNameMap) {
|
||||
const option = optionNameMap[name];
|
||||
const optType = option.type;
|
||||
let value = <any>testCase[name];
|
||||
if (typeof optType !== "string") {
|
||||
const key = value.toLowerCase();
|
||||
if (key in optType) {
|
||||
value = optType[key];
|
||||
if (name !== "mapRoot" && name !== "sourceRoot") {
|
||||
const option = optionNameMap.get(name);
|
||||
if (option) {
|
||||
const optType = option.type;
|
||||
let value = <any>testCase[name];
|
||||
if (typeof optType !== "string") {
|
||||
const key = value.toLowerCase();
|
||||
const optTypeValue = optType.get(key);
|
||||
if (optTypeValue) {
|
||||
value = optTypeValue;
|
||||
}
|
||||
}
|
||||
compilerOptions[option.name] = value;
|
||||
}
|
||||
compilerOptions[option.name] = value;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -118,6 +118,7 @@
|
||||
"./unittests/initializeTSConfig.ts",
|
||||
"./unittests/compileOnSave.ts",
|
||||
"./unittests/typingsInstaller.ts",
|
||||
"./unittests/projectErrors.ts"
|
||||
"./unittests/projectErrors.ts",
|
||||
"./unittests/printer.ts"
|
||||
]
|
||||
}
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
/// <reference path="..\harness.ts" />
|
||||
/// <reference path="..\harness.ts" />
|
||||
|
||||
namespace ts {
|
||||
interface File {
|
||||
@ -8,25 +8,28 @@ namespace ts {
|
||||
|
||||
function createDefaultServerHost(fileMap: Map<File>): server.ServerHost {
|
||||
const existingDirectories = createMap<boolean>();
|
||||
for (const name in fileMap) {
|
||||
forEachKey(fileMap, name => {
|
||||
let dir = getDirectoryPath(name);
|
||||
let previous: string;
|
||||
do {
|
||||
existingDirectories[dir] = true;
|
||||
existingDirectories.set(dir, true);
|
||||
previous = dir;
|
||||
dir = getDirectoryPath(dir);
|
||||
} while (dir !== previous);
|
||||
}
|
||||
});
|
||||
return {
|
||||
args: <string[]>[],
|
||||
newLine: "\r\n",
|
||||
useCaseSensitiveFileNames: false,
|
||||
write: noop,
|
||||
readFile: path => path in fileMap ? fileMap[path].content : undefined,
|
||||
readFile: path => {
|
||||
const file = fileMap.get(path);
|
||||
return file && file.content;
|
||||
},
|
||||
writeFile: notImplemented,
|
||||
resolvePath: notImplemented,
|
||||
fileExists: path => path in fileMap,
|
||||
directoryExists: path => existingDirectories[path] || false,
|
||||
fileExists: path => fileMap.has(path),
|
||||
directoryExists: path => existingDirectories.get(path) || false,
|
||||
createDirectory: noop,
|
||||
getExecutingFilePath: () => "",
|
||||
getCurrentDirectory: () => "",
|
||||
@ -83,7 +86,7 @@ namespace ts {
|
||||
content: `foo()`
|
||||
};
|
||||
|
||||
const serverHost = createDefaultServerHost(createMap({ [root.name]: root, [imported.name]: imported }));
|
||||
const serverHost = createDefaultServerHost(createMapFromTemplate({ [root.name]: root, [imported.name]: imported }));
|
||||
const { project, rootScriptInfo } = createProject(root.name, serverHost);
|
||||
|
||||
// ensure that imported file was found
|
||||
@ -167,7 +170,7 @@ namespace ts {
|
||||
content: `export var y = 1`
|
||||
};
|
||||
|
||||
const fileMap = createMap({ [root.name]: root });
|
||||
const fileMap = createMapFromTemplate({ [root.name]: root });
|
||||
const serverHost = createDefaultServerHost(fileMap);
|
||||
const originalFileExists = serverHost.fileExists;
|
||||
|
||||
@ -191,7 +194,7 @@ namespace ts {
|
||||
assert.isTrue(typeof diags[0].messageText === "string" && ((<string>diags[0].messageText).indexOf("Cannot find module") === 0), "should be 'cannot find module' message");
|
||||
|
||||
// assert that the import will succeed once the file appears on disk
|
||||
fileMap[imported.name] = imported;
|
||||
fileMap.set(imported.name, imported);
|
||||
fileExistsCalledForBar = false;
|
||||
rootScriptInfo.editContent(0, root.content.length, `import {y} from "bar"`);
|
||||
|
||||
|
||||
@ -87,7 +87,7 @@ namespace ts {
|
||||
start: undefined,
|
||||
length: undefined,
|
||||
}, {
|
||||
messageText: "Argument for '--jsx' option must be: 'preserve', 'react'",
|
||||
messageText: "Argument for '--jsx' option must be: 'preserve', 'react-native', 'react'",
|
||||
category: ts.Diagnostics.Argument_for_0_option_must_be_Colon_1.category,
|
||||
code: ts.Diagnostics.Argument_for_0_option_must_be_Colon_1.code,
|
||||
|
||||
|
||||
@ -2,7 +2,7 @@
|
||||
/// <reference path="..\virtualFileSystem.ts" />
|
||||
|
||||
namespace ts {
|
||||
const testContents = {
|
||||
const testContents = createMapFromTemplate({
|
||||
"/dev/tsconfig.json": `{
|
||||
"extends": "./configs/base",
|
||||
"files": [
|
||||
@ -86,10 +86,10 @@ namespace ts {
|
||||
"/dev/tests/utils.ts": "",
|
||||
"/dev/tests/scenarios/first.json": "",
|
||||
"/dev/tests/baselines/first/output.ts": ""
|
||||
};
|
||||
});
|
||||
|
||||
const caseInsensitiveBasePath = "c:/dev/";
|
||||
const caseInsensitiveHost = new Utils.MockParseConfigHost(caseInsensitiveBasePath, /*useCaseSensitiveFileNames*/ false, mapObject(testContents, (key, content) => [`c:${key}`, content]));
|
||||
const caseInsensitiveHost = new Utils.MockParseConfigHost(caseInsensitiveBasePath, /*useCaseSensitiveFileNames*/ false, mapEntries(testContents, (key, content) => [`c:${key}`, content]));
|
||||
|
||||
const caseSensitiveBasePath = "/dev/";
|
||||
const caseSensitiveHost = new Utils.MockParseConfigHost(caseSensitiveBasePath, /*useCaseSensitiveFileNames*/ true, testContents);
|
||||
|
||||
@ -94,7 +94,7 @@ namespace ts {
|
||||
file: undefined,
|
||||
start: 0,
|
||||
length: 0,
|
||||
messageText: "Argument for '--jsx' option must be: 'preserve', 'react'",
|
||||
messageText: "Argument for '--jsx' option must be: 'preserve', 'react-native', 'react'",
|
||||
code: Diagnostics.Argument_for_0_option_must_be_Colon_1.code,
|
||||
category: Diagnostics.Argument_for_0_option_must_be_Colon_1.category
|
||||
}]
|
||||
|
||||
@ -288,5 +288,24 @@ namespace ts {
|
||||
*/`);
|
||||
});
|
||||
});
|
||||
describe("getFirstToken", () => {
|
||||
it("gets jsdoc", () => {
|
||||
const root = ts.createSourceFile("foo.ts", "/** comment */var a = true;", ts.ScriptTarget.ES5, /*setParentNodes*/ true);
|
||||
assert.isDefined(root);
|
||||
assert.equal(root.kind, ts.SyntaxKind.SourceFile);
|
||||
const first = root.getFirstToken();
|
||||
assert.isDefined(first);
|
||||
assert.equal(first.kind, ts.SyntaxKind.VarKeyword);
|
||||
});
|
||||
});
|
||||
describe("getLastToken", () => {
|
||||
it("gets jsdoc", () => {
|
||||
const root = ts.createSourceFile("foo.ts", "var a = true;/** comment */", ts.ScriptTarget.ES5, /*setParentNodes*/ true);
|
||||
assert.isDefined(root);
|
||||
const last = root.getLastToken();
|
||||
assert.isDefined(last);
|
||||
assert.equal(last.kind, ts.SyntaxKind.EndOfFileToken);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
@ -346,9 +346,9 @@ namespace ts {
|
||||
fileNames: [
|
||||
"c:/dev/a.ts",
|
||||
"c:/dev/b.ts",
|
||||
"c:/dev/node_modules/a.ts",
|
||||
"c:/dev/bower_components/a.ts",
|
||||
"c:/dev/jspm_packages/a.ts",
|
||||
"c:/dev/node_modules/a.ts"
|
||||
"c:/dev/jspm_packages/a.ts"
|
||||
],
|
||||
wildcardDirectories: {},
|
||||
};
|
||||
@ -373,9 +373,9 @@ namespace ts {
|
||||
options: {},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"c:/dev/node_modules/a.ts",
|
||||
"c:/dev/bower_components/a.ts",
|
||||
"c:/dev/jspm_packages/a.ts",
|
||||
"c:/dev/node_modules/a.ts"
|
||||
"c:/dev/jspm_packages/a.ts"
|
||||
],
|
||||
wildcardDirectories: {},
|
||||
};
|
||||
@ -398,9 +398,9 @@ namespace ts {
|
||||
fileNames: [
|
||||
"c:/dev/a.ts",
|
||||
"c:/dev/b.ts",
|
||||
"c:/dev/node_modules/a.ts",
|
||||
"c:/dev/bower_components/a.ts",
|
||||
"c:/dev/jspm_packages/a.ts",
|
||||
"c:/dev/node_modules/a.ts"
|
||||
"c:/dev/jspm_packages/a.ts"
|
||||
],
|
||||
wildcardDirectories: {},
|
||||
};
|
||||
@ -410,6 +410,36 @@ namespace ts {
|
||||
});
|
||||
|
||||
describe("with wildcard include list", () => {
|
||||
it("is sorted in include order, then in alphabetical order", () => {
|
||||
const json = {
|
||||
include: [
|
||||
"z/*.ts",
|
||||
"x/*.ts"
|
||||
]
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"c:/dev/z/a.ts",
|
||||
"c:/dev/z/aba.ts",
|
||||
"c:/dev/z/abz.ts",
|
||||
"c:/dev/z/b.ts",
|
||||
"c:/dev/z/bba.ts",
|
||||
"c:/dev/z/bbz.ts",
|
||||
"c:/dev/x/a.ts",
|
||||
"c:/dev/x/aa.ts",
|
||||
"c:/dev/x/b.ts"
|
||||
],
|
||||
wildcardDirectories: {
|
||||
"c:/dev/z": ts.WatchDirectoryFlags.None,
|
||||
"c:/dev/x": ts.WatchDirectoryFlags.None
|
||||
},
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
|
||||
assertParsed(actual, expected);
|
||||
});
|
||||
|
||||
it("same named declarations are excluded", () => {
|
||||
const json = {
|
||||
include: [
|
||||
@ -506,8 +536,8 @@ namespace ts {
|
||||
options: {},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"c:/dev/x/a.ts",
|
||||
"c:/dev/x/y/a.ts",
|
||||
"c:/dev/x/a.ts",
|
||||
"c:/dev/z/a.ts"
|
||||
],
|
||||
wildcardDirectories: {
|
||||
@ -909,6 +939,31 @@ namespace ts {
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveMixedExtensionHost, caseInsensitiveBasePath);
|
||||
assertParsed(actual, expected);
|
||||
});
|
||||
it("with jsx=react-native, allowJs=false", () => {
|
||||
const json = {
|
||||
compilerOptions: {
|
||||
jsx: "react-native",
|
||||
allowJs: false
|
||||
}
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {
|
||||
jsx: ts.JsxEmit.ReactNative,
|
||||
allowJs: false
|
||||
},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"c:/dev/a.ts",
|
||||
"c:/dev/b.tsx",
|
||||
"c:/dev/c.tsx",
|
||||
],
|
||||
wildcardDirectories: {
|
||||
"c:/dev": ts.WatchDirectoryFlags.Recursive
|
||||
}
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveMixedExtensionHost, caseInsensitiveBasePath);
|
||||
assertParsed(actual, expected);
|
||||
});
|
||||
it("with jsx=none, allowJs=true", () => {
|
||||
const json = {
|
||||
compilerOptions: {
|
||||
@ -961,6 +1016,33 @@ namespace ts {
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveMixedExtensionHost, caseInsensitiveBasePath);
|
||||
assertParsed(actual, expected);
|
||||
});
|
||||
it("with jsx=react-native, allowJs=true", () => {
|
||||
const json = {
|
||||
compilerOptions: {
|
||||
jsx: "react-native",
|
||||
allowJs: true
|
||||
}
|
||||
};
|
||||
const expected: ts.ParsedCommandLine = {
|
||||
options: {
|
||||
jsx: ts.JsxEmit.ReactNative,
|
||||
allowJs: true
|
||||
},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"c:/dev/a.ts",
|
||||
"c:/dev/b.tsx",
|
||||
"c:/dev/c.tsx",
|
||||
"c:/dev/d.js",
|
||||
"c:/dev/e.jsx",
|
||||
],
|
||||
wildcardDirectories: {
|
||||
"c:/dev": ts.WatchDirectoryFlags.Recursive
|
||||
}
|
||||
};
|
||||
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveMixedExtensionHost, caseInsensitiveBasePath);
|
||||
assertParsed(actual, expected);
|
||||
});
|
||||
it("exclude .min.js files using wildcards", () => {
|
||||
const json = {
|
||||
compilerOptions: {
|
||||
@ -1230,8 +1312,8 @@ namespace ts {
|
||||
options: {},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"c:/dev/.z/.b.ts",
|
||||
"c:/dev/x/.y/a.ts"
|
||||
"c:/dev/x/.y/a.ts",
|
||||
"c:/dev/.z/.b.ts"
|
||||
],
|
||||
wildcardDirectories: {}
|
||||
};
|
||||
@ -1271,8 +1353,8 @@ namespace ts {
|
||||
options: {},
|
||||
errors: [],
|
||||
fileNames: [
|
||||
"c:/dev/.z/.b.ts",
|
||||
"c:/dev/x/.y/a.ts"
|
||||
"c:/dev/x/.y/a.ts",
|
||||
"c:/dev/.z/.b.ts"
|
||||
],
|
||||
wildcardDirectories: {
|
||||
"c:/dev/.z": ts.WatchDirectoryFlags.Recursive,
|
||||
@ -1306,4 +1388,4 @@ namespace ts {
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@ -36,7 +36,7 @@ namespace ts {
|
||||
for (const f of files) {
|
||||
let name = getDirectoryPath(f.name);
|
||||
while (true) {
|
||||
directories[name] = name;
|
||||
directories.set(name, name);
|
||||
const baseName = getDirectoryPath(name);
|
||||
if (baseName === name) {
|
||||
break;
|
||||
@ -46,20 +46,19 @@ namespace ts {
|
||||
}
|
||||
return {
|
||||
readFile,
|
||||
directoryExists: path => {
|
||||
return path in directories;
|
||||
},
|
||||
directoryExists: path => directories.has(path),
|
||||
fileExists: path => {
|
||||
assert.isTrue(getDirectoryPath(path) in directories, `'fileExists' '${path}' request in non-existing directory`);
|
||||
return path in map;
|
||||
assert.isTrue(directories.has(getDirectoryPath(path)), `'fileExists' '${path}' request in non-existing directory`);
|
||||
return map.has(path);
|
||||
}
|
||||
};
|
||||
}
|
||||
else {
|
||||
return { readFile, fileExists: path => path in map, };
|
||||
return { readFile, fileExists: path => map.has(path) };
|
||||
}
|
||||
function readFile(path: string): string {
|
||||
return path in map ? map[path].content : undefined;
|
||||
const file = map.get(path);
|
||||
return file && file.content;
|
||||
}
|
||||
}
|
||||
|
||||
@ -300,7 +299,8 @@ namespace ts {
|
||||
const host: CompilerHost = {
|
||||
getSourceFile: (fileName: string, languageVersion: ScriptTarget) => {
|
||||
const path = normalizePath(combinePaths(currentDirectory, fileName));
|
||||
return path in files ? createSourceFile(fileName, files[path], languageVersion) : undefined;
|
||||
const file = files.get(path);
|
||||
return file && createSourceFile(fileName, file, languageVersion);
|
||||
},
|
||||
getDefaultLibFileName: () => "lib.d.ts",
|
||||
writeFile: notImplemented,
|
||||
@ -311,7 +311,7 @@ namespace ts {
|
||||
useCaseSensitiveFileNames: () => false,
|
||||
fileExists: fileName => {
|
||||
const path = normalizePath(combinePaths(currentDirectory, fileName));
|
||||
return path in files;
|
||||
return files.has(path);
|
||||
},
|
||||
readFile: notImplemented
|
||||
};
|
||||
@ -331,7 +331,7 @@ namespace ts {
|
||||
}
|
||||
|
||||
it("should find all modules", () => {
|
||||
const files = createMap({
|
||||
const files = createMapFromTemplate({
|
||||
"/a/b/c/first/shared.ts": `
|
||||
class A {}
|
||||
export = A`,
|
||||
@ -350,7 +350,7 @@ export = C;
|
||||
});
|
||||
|
||||
it("should find modules in node_modules", () => {
|
||||
const files = createMap({
|
||||
const files = createMapFromTemplate({
|
||||
"/parent/node_modules/mod/index.d.ts": "export var x",
|
||||
"/parent/app/myapp.ts": `import {x} from "mod"`
|
||||
});
|
||||
@ -358,7 +358,7 @@ export = C;
|
||||
});
|
||||
|
||||
it("should find file referenced via absolute and relative names", () => {
|
||||
const files = createMap({
|
||||
const files = createMapFromTemplate({
|
||||
"/a/b/c.ts": `/// <reference path="b.ts"/>`,
|
||||
"/a/b/b.ts": "var x"
|
||||
});
|
||||
@ -371,7 +371,11 @@ export = C;
|
||||
function test(files: Map<string>, options: CompilerOptions, currentDirectory: string, useCaseSensitiveFileNames: boolean, rootFiles: string[], diagnosticCodes: number[]): void {
|
||||
const getCanonicalFileName = createGetCanonicalFileName(useCaseSensitiveFileNames);
|
||||
if (!useCaseSensitiveFileNames) {
|
||||
files = reduceProperties(files, (files, file, fileName) => (files[getCanonicalFileName(fileName)] = file, files), createMap<string>());
|
||||
const oldFiles = files;
|
||||
files = createMap<string>();
|
||||
oldFiles.forEach((file, fileName) => {
|
||||
files.set(getCanonicalFileName(fileName), file);
|
||||
});
|
||||
}
|
||||
|
||||
const host: CompilerHost = {
|
||||
@ -380,7 +384,8 @@ export = C;
|
||||
return library;
|
||||
}
|
||||
const path = getCanonicalFileName(normalizePath(combinePaths(currentDirectory, fileName)));
|
||||
return path in files ? createSourceFile(fileName, files[path], languageVersion) : undefined;
|
||||
const file = files.get(path);
|
||||
return file && createSourceFile(fileName, file, languageVersion);
|
||||
},
|
||||
getDefaultLibFileName: () => "lib.d.ts",
|
||||
writeFile: notImplemented,
|
||||
@ -391,7 +396,7 @@ export = C;
|
||||
useCaseSensitiveFileNames: () => useCaseSensitiveFileNames,
|
||||
fileExists: fileName => {
|
||||
const path = getCanonicalFileName(normalizePath(combinePaths(currentDirectory, fileName)));
|
||||
return path in files;
|
||||
return files.has(path);
|
||||
},
|
||||
readFile: notImplemented
|
||||
};
|
||||
@ -404,7 +409,7 @@ export = C;
|
||||
}
|
||||
|
||||
it("should succeed when the same file is referenced using absolute and relative names", () => {
|
||||
const files = createMap({
|
||||
const files = createMapFromTemplate({
|
||||
"/a/b/c.ts": `/// <reference path="d.ts"/>`,
|
||||
"/a/b/d.ts": "var x"
|
||||
});
|
||||
@ -412,7 +417,7 @@ export = C;
|
||||
});
|
||||
|
||||
it("should fail when two files used in program differ only in casing (tripleslash references)", () => {
|
||||
const files = createMap({
|
||||
const files = createMapFromTemplate({
|
||||
"/a/b/c.ts": `/// <reference path="D.ts"/>`,
|
||||
"/a/b/d.ts": "var x"
|
||||
});
|
||||
@ -420,7 +425,7 @@ export = C;
|
||||
});
|
||||
|
||||
it("should fail when two files used in program differ only in casing (imports)", () => {
|
||||
const files = createMap({
|
||||
const files = createMapFromTemplate({
|
||||
"/a/b/c.ts": `import {x} from "D"`,
|
||||
"/a/b/d.ts": "export var x"
|
||||
});
|
||||
@ -428,7 +433,7 @@ export = C;
|
||||
});
|
||||
|
||||
it("should fail when two files used in program differ only in casing (imports, relative module names)", () => {
|
||||
const files = createMap({
|
||||
const files = createMapFromTemplate({
|
||||
"moduleA.ts": `import {x} from "./ModuleB"`,
|
||||
"moduleB.ts": "export var x"
|
||||
});
|
||||
@ -436,7 +441,7 @@ export = C;
|
||||
});
|
||||
|
||||
it("should fail when two files exist on disk that differs only in casing", () => {
|
||||
const files = createMap({
|
||||
const files = createMapFromTemplate({
|
||||
"/a/b/c.ts": `import {x} from "D"`,
|
||||
"/a/b/D.ts": "export var x",
|
||||
"/a/b/d.ts": "export var y"
|
||||
@ -445,7 +450,7 @@ export = C;
|
||||
});
|
||||
|
||||
it("should fail when module name in 'require' calls has inconsistent casing", () => {
const files = createMap({
const files = createMapFromTemplate({
"moduleA.ts": `import a = require("./ModuleC")`,
"moduleB.ts": `import a = require("./moduleC")`,
"moduleC.ts": "export var x"
@ -454,7 +459,7 @@ export = C;
});

it("should fail when module names in 'require' calls has inconsistent casing and current directory has uppercase chars", () => {
const files = createMap({
const files = createMapFromTemplate({
"/a/B/c/moduleA.ts": `import a = require("./ModuleC")`,
"/a/B/c/moduleB.ts": `import a = require("./moduleC")`,
"/a/B/c/moduleC.ts": "export var x",
@ -466,7 +471,7 @@ import b = require("./moduleB");
test(files, { module: ts.ModuleKind.CommonJS, forceConsistentCasingInFileNames: true }, "/a/B/c", /*useCaseSensitiveFileNames*/ false, ["moduleD.ts"], [1149]);
});
it("should not fail when module names in 'require' calls has consistent casing and current directory has uppercase chars", () => {
const files = createMap({
const files = createMapFromTemplate({
"/a/B/c/moduleA.ts": `import a = require("./moduleC")`,
"/a/B/c/moduleB.ts": `import a = require("./moduleC")`,
"/a/B/c/moduleC.ts": "export var x",
@ -1020,8 +1025,8 @@ import b = require("./moduleB");
const names = map(files, f => f.name);
const sourceFiles = arrayToMap(map(files, f => createSourceFile(f.name, f.content, ScriptTarget.ES2015)), f => f.fileName);
const compilerHost: CompilerHost = {
fileExists : fileName => fileName in sourceFiles,
getSourceFile: fileName => sourceFiles[fileName],
fileExists : fileName => sourceFiles.has(fileName),
getSourceFile: fileName => sourceFiles.get(fileName),
getDefaultLibFileName: () => "lib.d.ts",
writeFile: notImplemented,
getCurrentDirectory: () => "/",
@ -1029,7 +1034,10 @@ import b = require("./moduleB");
getCanonicalFileName: f => f.toLowerCase(),
getNewLine: () => "\r\n",
useCaseSensitiveFileNames: () => false,
readFile: fileName => fileName in sourceFiles ? sourceFiles[fileName].text : undefined
readFile: fileName => {
const file = sourceFiles.get(fileName);
return file && file.text;
}
};
const program1 = createProgram(names, {}, compilerHost);
const diagnostics1 = program1.getFileProcessingDiagnostics().getDiagnostics();

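For illustration only (not from the diff): the tests above now seed their fixture maps with createMapFromTemplate instead of passing an object literal to createMap. A minimal sketch of the two helpers as this commit uses them:

    // createMap() builds an empty ts.Map; createMapFromTemplate(obj) seeds one
    // from a MapLike object literal such as the test fixtures above.
    const files = createMapFromTemplate({
        "/a/b/c.ts": `/// <reference path="d.ts"/>`,
        "/a/b/d.ts": "var x"
    });
    files.forEach((content, fileName) => {
        // value first, key second, matching the ES2015 Map callback order
    });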
97
src/harness/unittests/printer.ts
Normal file
@ -0,0 +1,97 @@
/// <reference path="..\..\compiler\emitter.ts" />
/// <reference path="..\harness.ts" />

namespace ts {
describe("PrinterAPI", () => {
function makePrintsCorrectly(prefix: string) {
return function printsCorrectly(name: string, options: PrinterOptions, printCallback: (printer: Printer) => string) {
it(name, () => {
Harness.Baseline.runBaseline(`printerApi/${prefix}.${name}.js`, () =>
printCallback(createPrinter({ newLine: NewLineKind.CarriageReturnLineFeed, ...options })));
});
}
}

describe("printFile", () => {
const printsCorrectly = makePrintsCorrectly("printsFileCorrectly");
const sourceFile = createSourceFile("source.ts", `
interface A<T> {
// comment1
readonly prop?: T;

// comment2
method(): void;

// comment3
new <T>(): A<T>;

// comment4
<T>(): A<T>;
}

// comment5
type B = number | string | object;
type C = A<number> & { x: string; }; // comment6

// comment7
enum E1 {
// comment8
first
}

const enum E2 {
second
}

// comment9
console.log(1 + 2);
`, ScriptTarget.ES2015);

printsCorrectly("default", {}, printer => printer.printFile(sourceFile));
printsCorrectly("removeComments", { removeComments: true }, printer => printer.printFile(sourceFile));
});

describe("printBundle", () => {
const printsCorrectly = makePrintsCorrectly("printsBundleCorrectly");
const bundle = createBundle([
createSourceFile("a.ts", `
/*! [a.ts] */

// comment0
const a = 1;
`, ScriptTarget.ES2015),
createSourceFile("b.ts", `
/*! [b.ts] */

// comment1
const b = 2;
`, ScriptTarget.ES2015)
]);
printsCorrectly("default", {}, printer => printer.printBundle(bundle));
printsCorrectly("removeComments", { removeComments: true }, printer => printer.printBundle(bundle));
});

describe("printNode", () => {
const printsCorrectly = makePrintsCorrectly("printsNodeCorrectly");
const sourceFile = createSourceFile("source.ts", "", ScriptTarget.ES2015);
const syntheticNode = createClassDeclaration(
undefined,
undefined,
/*name*/ createIdentifier("C"),
undefined,
undefined,
createNodeArray([
createProperty(
undefined,
createNodeArray([createToken(SyntaxKind.PublicKeyword)]),
createIdentifier("prop"),
undefined,
undefined,
undefined
)
])
);
printsCorrectly("class", {}, printer => printer.printNode(EmitHint.Unspecified, syntheticNode, sourceFile));
});
});
}
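For illustration only (not from the commit): the new unit tests above drive the printer through createPrinter. A minimal standalone sketch of the same public API, assuming the published typescript package:

    import * as ts from "typescript";

    const sourceFile = ts.createSourceFile("source.ts", "const x = 1;", ts.ScriptTarget.ES2015);
    const printer = ts.createPrinter({ newLine: ts.NewLineKind.CarriageReturnLineFeed, removeComments: false });
    const text = printer.printFile(sourceFile);
    // printer.printBundle(bundle) and printer.printNode(ts.EmitHint.Unspecified, node, sourceFile)
    // cover the other two entry points exercised by the tests.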
@ -1,4 +1,4 @@
|
||||
/// <reference path="..\harness.ts" />
|
||||
/// <reference path="..\harness.ts" />
|
||||
/// <reference path="..\..\harness\harnessLanguageService.ts" />
|
||||
|
||||
namespace ts {
|
||||
@ -122,7 +122,7 @@ namespace ts {
|
||||
trace: s => trace.push(s),
|
||||
getTrace: () => trace,
|
||||
getSourceFile(fileName): SourceFile {
|
||||
return files[fileName];
|
||||
return files.get(fileName);
|
||||
},
|
||||
getDefaultLibFileName(): string {
|
||||
return "lib.d.ts";
|
||||
@ -143,9 +143,10 @@ namespace ts {
|
||||
getNewLine(): string {
|
||||
return sys ? sys.newLine : newLine;
|
||||
},
|
||||
fileExists: fileName => fileName in files,
|
||||
fileExists: fileName => files.has(fileName),
|
||||
readFile: fileName => {
|
||||
return fileName in files ? files[fileName].text : undefined;
|
||||
const file = files.get(fileName);
|
||||
return file && file.text;
|
||||
},
|
||||
};
|
||||
}
|
||||
@ -188,10 +189,24 @@ namespace ts {
|
||||
}
|
||||
else {
|
||||
assert.isTrue(cache !== undefined, `expected ${caption} to be set`);
|
||||
assert.isTrue(equalOwnProperties(expectedContent, cache, entryChecker), `contents of ${caption} did not match the expected contents.`);
|
||||
assert.isTrue(mapsAreEqual(expectedContent, cache, entryChecker), `contents of ${caption} did not match the expected contents.`);
|
||||
}
|
||||
}
|
||||
|
||||
/** True if the maps have the same keys and values. */
|
||||
function mapsAreEqual<T>(left: Map<T>, right: Map<T>, valuesAreEqual?: (left: T, right: T) => boolean): boolean {
|
||||
if (left === right) return true;
|
||||
if (!left || !right) return false;
|
||||
const someInLeftHasNoMatch = forEachEntry(left, (leftValue, leftKey) => {
|
||||
if (!right.has(leftKey)) return true;
|
||||
const rightValue = right.get(leftKey);
|
||||
return !(valuesAreEqual ? valuesAreEqual(leftValue, rightValue) : leftValue === rightValue);
|
||||
});
|
||||
if (someInLeftHasNoMatch) return false;
|
||||
const someInRightHasNoMatch = forEachKey(right, rightKey => !left.has(rightKey));
|
||||
return !someInRightHasNoMatch;
|
||||
}
|
||||
|
||||
function checkResolvedModulesCache(program: Program, fileName: string, expectedContent: Map<ResolvedModule>): void {
|
||||
checkCache("resolved modules", program, fileName, expectedContent, f => f.resolvedModules, checkResolvedModule);
|
||||
}
|
||||
@ -307,7 +322,7 @@ namespace ts {
|
||||
const options: CompilerOptions = { target };
|
||||
|
||||
const program_1 = newProgram(files, ["a.ts"], options);
|
||||
checkResolvedModulesCache(program_1, "a.ts", createMap({ "b": createResolvedModule("b.ts") }));
|
||||
checkResolvedModulesCache(program_1, "a.ts", createMapFromTemplate({ "b": createResolvedModule("b.ts") }));
|
||||
checkResolvedModulesCache(program_1, "b.ts", undefined);
|
||||
|
||||
const program_2 = updateProgram(program_1, ["a.ts"], options, files => {
|
||||
@ -316,7 +331,7 @@ namespace ts {
|
||||
assert.isTrue(program_1.structureIsReused);
|
||||
|
||||
// content of resolution cache should not change
|
||||
checkResolvedModulesCache(program_1, "a.ts", createMap({ "b": createResolvedModule("b.ts") }));
|
||||
checkResolvedModulesCache(program_1, "a.ts", createMapFromTemplate({ "b": createResolvedModule("b.ts") }));
|
||||
checkResolvedModulesCache(program_1, "b.ts", undefined);
|
||||
|
||||
// imports has changed - program is not reused
|
||||
@ -333,7 +348,7 @@ namespace ts {
|
||||
files[0].text = files[0].text.updateImportsAndExports(newImports);
|
||||
});
|
||||
assert.isTrue(!program_3.structureIsReused);
|
||||
checkResolvedModulesCache(program_4, "a.ts", createMap({ "b": createResolvedModule("b.ts"), "c": undefined }));
|
||||
checkResolvedModulesCache(program_4, "a.ts", createMapFromTemplate({ "b": createResolvedModule("b.ts"), "c": undefined }));
|
||||
});
|
||||
|
||||
it("resolved type directives cache follows type directives", () => {
|
||||
@ -344,7 +359,7 @@ namespace ts {
|
||||
const options: CompilerOptions = { target, typeRoots: ["/types"] };
|
||||
|
||||
const program_1 = newProgram(files, ["/a.ts"], options);
|
||||
checkResolvedTypeDirectivesCache(program_1, "/a.ts", createMap({ "typedefs": { resolvedFileName: "/types/typedefs/index.d.ts", primary: true } }));
|
||||
checkResolvedTypeDirectivesCache(program_1, "/a.ts", createMapFromTemplate({ "typedefs": { resolvedFileName: "/types/typedefs/index.d.ts", primary: true } }));
|
||||
checkResolvedTypeDirectivesCache(program_1, "/types/typedefs/index.d.ts", undefined);
|
||||
|
||||
const program_2 = updateProgram(program_1, ["/a.ts"], options, files => {
|
||||
@ -353,7 +368,7 @@ namespace ts {
|
||||
assert.isTrue(program_1.structureIsReused);
|
||||
|
||||
// content of resolution cache should not change
|
||||
checkResolvedTypeDirectivesCache(program_1, "/a.ts", createMap({ "typedefs": { resolvedFileName: "/types/typedefs/index.d.ts", primary: true } }));
|
||||
checkResolvedTypeDirectivesCache(program_1, "/a.ts", createMapFromTemplate({ "typedefs": { resolvedFileName: "/types/typedefs/index.d.ts", primary: true } }));
|
||||
checkResolvedTypeDirectivesCache(program_1, "/types/typedefs/index.d.ts", undefined);
|
||||
|
||||
// type reference directives has changed - program is not reused
|
||||
@ -371,7 +386,7 @@ namespace ts {
|
||||
files[0].text = files[0].text.updateReferences(newReferences);
|
||||
});
|
||||
assert.isTrue(!program_3.structureIsReused);
|
||||
checkResolvedTypeDirectivesCache(program_1, "/a.ts", createMap({ "typedefs": { resolvedFileName: "/types/typedefs/index.d.ts", primary: true } }));
|
||||
checkResolvedTypeDirectivesCache(program_1, "/a.ts", createMapFromTemplate({ "typedefs": { resolvedFileName: "/types/typedefs/index.d.ts", primary: true } }));
|
||||
});
|
||||
|
||||
it("can reuse ambient module declarations from non-modified files", () => {
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
/// <reference path="..\harness.ts" />
|
||||
/// <reference path="..\harness.ts" />
|
||||
|
||||
const expect: typeof _chai.expect = _chai.expect;
|
||||
|
||||
@ -416,14 +416,15 @@ namespace ts.server {
|
||||
class InProcClient {
|
||||
private server: InProcSession;
|
||||
private seq = 0;
|
||||
private callbacks = createMap<(resp: protocol.Response) => void>();
|
||||
private callbacks: Array<(resp: protocol.Response) => void> = [];
|
||||
private eventHandlers = createMap<(args: any) => void>();
|
||||
|
||||
handle(msg: protocol.Message): void {
|
||||
if (msg.type === "response") {
|
||||
const response = <protocol.Response>msg;
|
||||
if (response.request_seq in this.callbacks) {
|
||||
this.callbacks[response.request_seq](response);
|
||||
const handler = this.callbacks[response.request_seq];
|
||||
if (handler) {
|
||||
handler(response);
|
||||
delete this.callbacks[response.request_seq];
|
||||
}
|
||||
}
|
||||
@ -434,13 +435,14 @@ namespace ts.server {
|
||||
}
|
||||
|
||||
emit(name: string, args: any): void {
|
||||
if (name in this.eventHandlers) {
|
||||
this.eventHandlers[name](args);
|
||||
const handler = this.eventHandlers.get(name);
|
||||
if (handler) {
|
||||
handler(args);
|
||||
}
|
||||
}
|
||||
|
||||
on(name: string, handler: (args: any) => void): void {
|
||||
this.eventHandlers[name] = handler;
|
||||
this.eventHandlers.set(name, handler);
|
||||
}
|
||||
|
||||
connect(session: InProcSession): void {
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
/// <reference path="..\harness.ts" />
|
||||
/// <reference path="..\harness.ts" />
|
||||
/// <reference path="../../server/typingsInstaller/typingsInstaller.ts" />
|
||||
|
||||
namespace ts.projectSystem {
|
||||
@ -244,9 +244,9 @@ namespace ts.projectSystem {
|
||||
}
|
||||
|
||||
export function checkMapKeys(caption: string, map: Map<any>, expectedKeys: string[]) {
|
||||
assert.equal(reduceProperties(map, count => count + 1, 0), expectedKeys.length, `${caption}: incorrect size of map`);
|
||||
assert.equal(map.size, expectedKeys.length, `${caption}: incorrect size of map`);
|
||||
for (const name of expectedKeys) {
|
||||
assert.isTrue(name in map, `${caption} is expected to contain ${name}, actual keys: ${Object.keys(map)}`);
|
||||
assert.isTrue(map.has(name), `${caption} is expected to contain ${name}, actual keys: ${arrayFrom(map.keys())}`);
|
||||
}
|
||||
}
|
||||
|
||||
@ -292,7 +292,7 @@ namespace ts.projectSystem {
|
||||
}
|
||||
|
||||
export class Callbacks {
|
||||
private map: { [n: number]: TimeOutCallback } = {};
|
||||
private map: TimeOutCallback[] = [];
|
||||
private nextId = 1;
|
||||
|
||||
register(cb: (...args: any[]) => void, args: any[]) {
|
||||
@ -310,20 +310,16 @@ namespace ts.projectSystem {
|
||||
count() {
|
||||
let n = 0;
|
||||
for (const _ in this.map) {
|
||||
// TODO: GH#11734
|
||||
_;
|
||||
n++;
|
||||
}
|
||||
return n;
|
||||
}
|
||||
|
||||
invoke() {
|
||||
for (const id in this.map) {
|
||||
if (hasProperty(this.map, id)) {
|
||||
this.map[id]();
|
||||
}
|
||||
for (const key in this.map) {
|
||||
this.map[key]();
|
||||
}
|
||||
this.map = {};
|
||||
this.map = [];
|
||||
}
|
||||
}
|
||||
|
||||
@ -338,8 +334,8 @@ namespace ts.projectSystem {
|
||||
private timeoutCallbacks = new Callbacks();
|
||||
private immediateCallbacks = new Callbacks();
|
||||
|
||||
readonly watchedDirectories = createMap<{ cb: DirectoryWatcherCallback, recursive: boolean }[]>();
|
||||
readonly watchedFiles = createMap<FileWatcherCallback[]>();
|
||||
readonly watchedDirectories = createMultiMap<{ cb: DirectoryWatcherCallback, recursive: boolean }>();
|
||||
readonly watchedFiles = createMultiMap<FileWatcherCallback>();
|
||||
|
||||
private filesOrFolders: FileOrFolder[];
|
||||
|
||||
@ -425,11 +421,11 @@ namespace ts.projectSystem {
|
||||
watchDirectory(directoryName: string, callback: DirectoryWatcherCallback, recursive: boolean): DirectoryWatcher {
|
||||
const path = this.toPath(directoryName);
|
||||
const cbWithRecursive = { cb: callback, recursive };
|
||||
multiMapAdd(this.watchedDirectories, path, cbWithRecursive);
|
||||
this.watchedDirectories.add(path, cbWithRecursive);
|
||||
return {
|
||||
referenceCount: 0,
|
||||
directoryName,
|
||||
close: () => multiMapRemove(this.watchedDirectories, path, cbWithRecursive)
|
||||
close: () => this.watchedDirectories.remove(path, cbWithRecursive)
|
||||
};
|
||||
}
|
||||
|
||||
@ -439,7 +435,7 @@ namespace ts.projectSystem {
|
||||
|
||||
triggerDirectoryWatcherCallback(directoryName: string, fileName: string): void {
|
||||
const path = this.toPath(directoryName);
|
||||
const callbacks = this.watchedDirectories[path];
|
||||
const callbacks = this.watchedDirectories.get(path);
|
||||
if (callbacks) {
|
||||
for (const callback of callbacks) {
|
||||
callback.cb(fileName);
|
||||
@ -449,7 +445,7 @@ namespace ts.projectSystem {
|
||||
|
||||
triggerFileWatcherCallback(fileName: string, removed?: boolean): void {
|
||||
const path = this.toPath(fileName);
|
||||
const callbacks = this.watchedFiles[path];
|
||||
const callbacks = this.watchedFiles.get(path);
|
||||
if (callbacks) {
|
||||
for (const callback of callbacks) {
|
||||
callback(path, removed);
|
||||
@ -459,8 +455,8 @@ namespace ts.projectSystem {
|
||||
|
||||
watchFile(fileName: string, callback: FileWatcherCallback) {
|
||||
const path = this.toPath(fileName);
|
||||
multiMapAdd(this.watchedFiles, path, callback);
|
||||
return { close: () => multiMapRemove(this.watchedFiles, path, callback) };
|
||||
this.watchedFiles.add(path, callback);
|
||||
return { close: () => this.watchedFiles.remove(path, callback) };
|
||||
}
|
||||
|
||||
// TOOD: record and invoke callbacks to simulate timer events
|
||||
|
||||
@ -14,7 +14,7 @@ namespace ts.projectSystem {
|
||||
function createTypesRegistry(...list: string[]): Map<void> {
|
||||
const map = createMap<void>();
|
||||
for (const l of list) {
|
||||
map[l] = undefined;
|
||||
map.set(l, undefined);
|
||||
}
|
||||
return map;
|
||||
}
|
||||
@ -44,6 +44,8 @@ namespace ts.projectSystem {
|
||||
});
|
||||
}
|
||||
|
||||
import typingsName = server.typingsInstaller.typingsName;
|
||||
|
||||
describe("typingsInstaller", () => {
|
||||
it("configured projects (typings installed) 1", () => {
|
||||
const file1 = {
|
||||
@ -519,32 +521,32 @@ namespace ts.projectSystem {
|
||||
const commander = {
|
||||
path: "/a/data/node_modules/@types/commander/index.d.ts",
|
||||
content: "declare const commander: { x: number }",
|
||||
typings: "@types/commander"
|
||||
typings: typingsName("commander")
|
||||
};
|
||||
const jquery = {
|
||||
path: "/a/data/node_modules/@types/jquery/index.d.ts",
|
||||
content: "declare const jquery: { x: number }",
|
||||
typings: "@types/jquery"
|
||||
typings: typingsName("jquery")
|
||||
};
|
||||
const lodash = {
|
||||
path: "/a/data/node_modules/@types/lodash/index.d.ts",
|
||||
content: "declare const lodash: { x: number }",
|
||||
typings: "@types/lodash"
|
||||
typings: typingsName("lodash")
|
||||
};
|
||||
const cordova = {
|
||||
path: "/a/data/node_modules/@types/cordova/index.d.ts",
|
||||
content: "declare const cordova: { x: number }",
|
||||
typings: "@types/cordova"
|
||||
typings: typingsName("cordova")
|
||||
};
|
||||
const grunt = {
|
||||
path: "/a/data/node_modules/@types/grunt/index.d.ts",
|
||||
content: "declare const grunt: { x: number }",
|
||||
typings: "@types/grunt"
|
||||
typings: typingsName("grunt")
|
||||
};
|
||||
const gulp = {
|
||||
path: "/a/data/node_modules/@types/gulp/index.d.ts",
|
||||
content: "declare const gulp: { x: number }",
|
||||
typings: "@types/gulp"
|
||||
typings: typingsName("gulp")
|
||||
};
|
||||
|
||||
const host = createServerHost([lodashJs, commanderJs, file3]);
|
||||
@ -554,7 +556,7 @@ namespace ts.projectSystem {
|
||||
}
|
||||
installWorker(_requestId: number, args: string[], _cwd: string, cb: TI.RequestCompletedAction): void {
|
||||
let typingFiles: (FileOrFolder & { typings: string })[] = [];
|
||||
if (args.indexOf("@types/commander") >= 0) {
|
||||
if (args.indexOf(typingsName("commander")) >= 0) {
|
||||
typingFiles = [commander, jquery, lodash, cordova];
|
||||
}
|
||||
else {
|
||||
@ -944,7 +946,7 @@ namespace ts.projectSystem {
|
||||
content: ""
|
||||
};
|
||||
const host = createServerHost([f, node]);
|
||||
const cache = createMap<string>({ "node": node.path });
|
||||
const cache = createMapFromTemplate<string>({ "node": node.path });
|
||||
const result = JsTyping.discoverTypings(host, [f.path], getDirectoryPath(<Path>f.path), /*safeListPath*/ undefined, cache, { enable: true }, ["fs", "bar"]);
|
||||
assert.deepEqual(result.cachedTypingPaths, [node.path]);
|
||||
assert.deepEqual(result.newTypingNames, ["bar"]);
|
||||
@ -982,7 +984,7 @@ namespace ts.projectSystem {
|
||||
return;
|
||||
}
|
||||
if (response.kind === server.EventEndInstallTypes) {
|
||||
assert.deepEqual(response.packagesToInstall, ["@types/commander"]);
|
||||
assert.deepEqual(response.packagesToInstall, [typingsName("commander")]);
|
||||
seenTelemetryEvent = true;
|
||||
return;
|
||||
}
|
||||
|
||||
@ -195,11 +195,17 @@ namespace Utils {
|
||||
}
|
||||
|
||||
export class MockParseConfigHost extends VirtualFileSystem implements ts.ParseConfigHost {
|
||||
constructor(currentDirectory: string, ignoreCase: boolean, files: ts.MapLike<string> | string[]) {
|
||||
constructor(currentDirectory: string, ignoreCase: boolean, files: ts.Map<string> | string[]) {
|
||||
super(currentDirectory, ignoreCase);
|
||||
const fileNames = (files instanceof Array) ? files : ts.getOwnKeys(files);
|
||||
for (const file of fileNames) {
|
||||
this.addFile(file, new Harness.LanguageService.ScriptInfo(file, (files as ts.MapLike<string>)[file], /*isRootFile*/false));
|
||||
if (files instanceof Array) {
|
||||
for (const file of files) {
|
||||
this.addFile(file, new Harness.LanguageService.ScriptInfo(file, undefined, /*isRootFile*/false));
|
||||
}
|
||||
}
|
||||
else {
|
||||
files.forEach((fileContent, fileName) => {
|
||||
this.addFile(fileName, new Harness.LanguageService.ScriptInfo(fileName, fileContent, /*isRootFile*/false));
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
8
src/lib/es2015.collection.d.ts
vendored
@ -4,7 +4,7 @@ interface Map<K, V> {
forEach(callbackfn: (value: V, key: K, map: Map<K, V>) => void, thisArg?: any): void;
get(key: K): V | undefined;
has(key: K): boolean;
set(key: K, value?: V): this;
set(key: K, value: V): this;
readonly size: number;
}

@ -22,16 +22,16 @@ interface ReadonlyMap<K, V> {
readonly size: number;
}

interface WeakMap<K, V> {
interface WeakMap<K extends object, V> {
delete(key: K): boolean;
get(key: K): V | undefined;
has(key: K): boolean;
set(key: K, value?: V): this;
set(key: K, value: V): this;
}

interface WeakMapConstructor {
new (): WeakMap<any, any>;
new <K, V>(entries?: [K, V][]): WeakMap<K, V>;
new <K extends object, V>(entries?: [K, V][]): WeakMap<K, V>;
readonly prototype: WeakMap<any, any>;
}
declare var WeakMap: WeakMapConstructor;

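For illustration only (not from the diff): the hunks above constrain WeakMap keys to object types and make the value argument of Map#set required. A sketch of the caller-side effect:

    const wm = new WeakMap<{ id: number }, string>();   // ok: the key type satisfies `K extends object`
    // new WeakMap<string, string>();                   // error after this change: string is not an object type

    const m = new Map<string, number>();
    m.set("a", 1);                                       // ok
    // m.set("a");                                       // error after this change: the value is no longer optional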
2
src/lib/es2015.core.d.ts
vendored
@ -325,7 +325,7 @@ interface ObjectConstructor {
* @param o The object to change its prototype.
* @param proto The value of the new prototype or null.
*/
setPrototypeOf(o: any, proto: any): any;
setPrototypeOf(o: any, proto: object | null): any;

/**
* Gets the own property descriptor of the specified object.

6
src/lib/es2015.iterable.d.ts
vendored
@ -99,10 +99,10 @@ interface MapConstructor {
new <K, V>(iterable: Iterable<[K, V]>): Map<K, V>;
}

interface WeakMap<K, V> { }
interface WeakMap<K extends object, V> { }

interface WeakMapConstructor {
new <K, V>(iterable: Iterable<[K, V]>): WeakMap<K, V>;
new <K extends object, V>(iterable: Iterable<[K, V]>): WeakMap<K, V>;
}

interface Set<T> {
@ -442,4 +442,4 @@ interface Float64ArrayConstructor {
* @param thisArg Value of 'this' used to invoke the mapfn.
*/
from(arrayLike: Iterable<number>, mapfn?: (v: number, k: number) => number, thisArg?: any): Float64Array;
}
}

4
src/lib/es2015.proxy.d.ts
vendored
@ -1,5 +1,5 @@
interface ProxyHandler<T> {
getPrototypeOf? (target: T): {} | null;
getPrototypeOf? (target: T): object | null;
setPrototypeOf? (target: T, v: any): boolean;
isExtensible? (target: T): boolean;
preventExtensions? (target: T): boolean;
@ -12,7 +12,7 @@ interface ProxyHandler<T> {
enumerate? (target: T): PropertyKey[];
ownKeys? (target: T): PropertyKey[];
apply? (target: T, thisArg: any, argArray?: any): any;
construct? (target: T, argArray: any, newTarget?: any): {};
construct? (target: T, argArray: any, newTarget?: any): object
}

interface ProxyConstructor {

2
src/lib/es2015.symbol.d.ts
vendored
@ -3,7 +3,7 @@ interface Symbol {
toString(): string;

/** Returns the primitive value of the specified object. */
valueOf(): Object;
valueOf(): symbol;
}

interface SymbolConstructor {

4
src/lib/es2015.symbol.wellknown.d.ts
vendored
@ -110,7 +110,7 @@ interface Map<K, V> {
readonly [Symbol.toStringTag]: "Map";
}

interface WeakMap<K, V>{
interface WeakMap<K extends object, V>{
readonly [Symbol.toStringTag]: "WeakMap";
}

@ -324,4 +324,4 @@ interface Float32Array {
*/
interface Float64Array {
readonly [Symbol.toStringTag]: "Float64Array";
}
}

4
src/lib/es5.d.ts
vendored
@ -146,14 +146,14 @@ interface ObjectConstructor {
* Creates an object that has the specified prototype, and that optionally contains specified properties.
* @param o Object to use as a prototype. May be null
*/
create<T>(o: T): T;
create<T extends object>(o: T): T;

/**
* Creates an object that has the specified prototype, and that optionally contains specified properties.
* @param o Object to use as a prototype. May be null
* @param properties JavaScript object that contains one or more property descriptors.
*/
create(o: any, properties: PropertyDescriptorMap): any;
create(o: object | null, properties: PropertyDescriptorMap): any;

/**
* Adds a property to an object, or modifies attributes of an existing property.

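For illustration only (not from the diff): with the signatures above, Object.create requires an object prototype (or null in the descriptor overload) instead of accepting any value:

    const proto = { greet() { return "hi"; } };
    const child = Object.create(proto);                                      // ok: proto satisfies `T extends object`
    // Object.create(42);                                                    // error after this change
    const bare = Object.create(null, { x: { value: 1, writable: true } });   // `object | null` overload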
@ -356,24 +356,24 @@ namespace ts.server {
|
||||
// Use slice to clone the array to avoid manipulating in place
|
||||
const queue = fileInfo.referencedBy.slice(0);
|
||||
const fileNameSet = createMap<ScriptInfo>();
|
||||
fileNameSet[scriptInfo.fileName] = scriptInfo;
|
||||
fileNameSet.set(scriptInfo.fileName, scriptInfo);
|
||||
while (queue.length > 0) {
|
||||
const processingFileInfo = queue.pop();
|
||||
if (processingFileInfo.updateShapeSignature() && processingFileInfo.referencedBy.length > 0) {
|
||||
for (const potentialFileInfo of processingFileInfo.referencedBy) {
|
||||
if (!fileNameSet[potentialFileInfo.scriptInfo.fileName]) {
|
||||
if (!fileNameSet.has(potentialFileInfo.scriptInfo.fileName)) {
|
||||
queue.push(potentialFileInfo);
|
||||
}
|
||||
}
|
||||
}
|
||||
fileNameSet[processingFileInfo.scriptInfo.fileName] = processingFileInfo.scriptInfo;
|
||||
fileNameSet.set(processingFileInfo.scriptInfo.fileName, processingFileInfo.scriptInfo);
|
||||
}
|
||||
const result: string[] = [];
|
||||
for (const fileName in fileNameSet) {
|
||||
if (shouldEmitFile(fileNameSet[fileName])) {
|
||||
fileNameSet.forEach((scriptInfo, fileName) => {
|
||||
if (shouldEmitFile(scriptInfo)) {
|
||||
result.push(fileName);
|
||||
}
|
||||
}
|
||||
});
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
@ -31,10 +31,11 @@ namespace ts.server {
|
||||
}
|
||||
|
||||
private getLineMap(fileName: string): number[] {
|
||||
let lineMap = this.lineMaps[fileName];
|
||||
let lineMap = this.lineMaps.get(fileName);
|
||||
if (!lineMap) {
|
||||
const scriptSnapshot = this.host.getScriptSnapshot(fileName);
|
||||
lineMap = this.lineMaps[fileName] = ts.computeLineStarts(scriptSnapshot.getText(0, scriptSnapshot.getLength()));
|
||||
lineMap = ts.computeLineStarts(scriptSnapshot.getText(0, scriptSnapshot.getLength()));
|
||||
this.lineMaps.set(fileName, lineMap);
|
||||
}
|
||||
return lineMap;
|
||||
}
|
||||
@ -140,7 +141,7 @@ namespace ts.server {
|
||||
|
||||
changeFile(fileName: string, start: number, end: number, newText: string): void {
|
||||
// clear the line map after an edit
|
||||
this.lineMaps[fileName] = undefined;
|
||||
this.lineMaps.set(fileName, undefined);
|
||||
|
||||
const lineOffset = this.positionToOneBasedLineOffset(fileName, start);
|
||||
const endLineOffset = this.positionToOneBasedLineOffset(fileName, end);
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
/// <reference path="..\compiler\commandLineParser.ts" />
|
||||
/// <reference path="..\compiler\commandLineParser.ts" />
|
||||
/// <reference path="..\services\services.ts" />
|
||||
/// <reference path="utilities.ts" />
|
||||
/// <reference path="session.ts" />
|
||||
@ -41,17 +41,17 @@ namespace ts.server {
|
||||
if (typeof option.type === "object") {
|
||||
const optionMap = <Map<number>>option.type;
|
||||
// verify that map contains only numbers
|
||||
for (const id in optionMap) {
|
||||
Debug.assert(typeof optionMap[id] === "number");
|
||||
}
|
||||
map[option.name] = optionMap;
|
||||
optionMap.forEach(value => {
|
||||
Debug.assert(typeof value === "number");
|
||||
});
|
||||
map.set(option.name, optionMap);
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
const compilerOptionConverters = prepareConvertersForEnumLikeCompilerOptions(optionDeclarations);
|
||||
const indentStyle = createMap({
|
||||
const indentStyle = createMapFromTemplate({
|
||||
"none": IndentStyle.None,
|
||||
"block": IndentStyle.Block,
|
||||
"smart": IndentStyle.Smart
|
||||
@ -59,20 +59,19 @@ namespace ts.server {
|
||||
|
||||
export function convertFormatOptions(protocolOptions: protocol.FormatCodeSettings): FormatCodeSettings {
|
||||
if (typeof protocolOptions.indentStyle === "string") {
|
||||
protocolOptions.indentStyle = indentStyle[protocolOptions.indentStyle.toLowerCase()];
|
||||
protocolOptions.indentStyle = indentStyle.get(protocolOptions.indentStyle.toLowerCase());
|
||||
Debug.assert(protocolOptions.indentStyle !== undefined);
|
||||
}
|
||||
return <any>protocolOptions;
|
||||
}
|
||||
|
||||
export function convertCompilerOptions(protocolOptions: protocol.ExternalProjectCompilerOptions): CompilerOptions & protocol.CompileOnSaveMixin {
|
||||
for (const id in compilerOptionConverters) {
|
||||
compilerOptionConverters.forEach((mappedValues, id) => {
|
||||
const propertyValue = protocolOptions[id];
|
||||
if (typeof propertyValue === "string") {
|
||||
const mappedValues = compilerOptionConverters[id];
|
||||
protocolOptions[id] = mappedValues[propertyValue.toLowerCase()];
|
||||
protocolOptions[id] = mappedValues.get(propertyValue.toLowerCase());
|
||||
}
|
||||
}
|
||||
});
|
||||
return <any>protocolOptions;
|
||||
}
|
||||
|
||||
@ -193,11 +192,12 @@ namespace ts.server {
|
||||
|
||||
stopWatchingDirectory(directory: string) {
|
||||
// if the ref count for this directory watcher drops to 0, it's time to close it
|
||||
this.directoryWatchersRefCount[directory]--;
|
||||
if (this.directoryWatchersRefCount[directory] === 0) {
|
||||
const refCount = this.directoryWatchersRefCount.get(directory) - 1;
|
||||
this.directoryWatchersRefCount.set(directory, refCount);
|
||||
if (refCount === 0) {
|
||||
this.projectService.logger.info(`Close directory watcher for: ${directory}`);
|
||||
this.directoryWatchersForTsconfig[directory].close();
|
||||
delete this.directoryWatchersForTsconfig[directory];
|
||||
this.directoryWatchersForTsconfig.get(directory).close();
|
||||
this.directoryWatchersForTsconfig.delete(directory);
|
||||
}
|
||||
}
|
||||
|
||||
@ -205,13 +205,13 @@ namespace ts.server {
|
||||
let currentPath = getDirectoryPath(fileName);
|
||||
let parentPath = getDirectoryPath(currentPath);
|
||||
while (currentPath != parentPath) {
|
||||
if (!this.directoryWatchersForTsconfig[currentPath]) {
|
||||
if (!this.directoryWatchersForTsconfig.has(currentPath)) {
|
||||
this.projectService.logger.info(`Add watcher for: ${currentPath}`);
|
||||
this.directoryWatchersForTsconfig[currentPath] = this.projectService.host.watchDirectory(currentPath, callback);
|
||||
this.directoryWatchersRefCount[currentPath] = 1;
|
||||
this.directoryWatchersForTsconfig.set(currentPath, this.projectService.host.watchDirectory(currentPath, callback));
|
||||
this.directoryWatchersRefCount.set(currentPath, 1);
|
||||
}
|
||||
else {
|
||||
this.directoryWatchersRefCount[currentPath] += 1;
|
||||
this.directoryWatchersRefCount.set(currentPath, this.directoryWatchersRefCount.get(currentPath) + 1);
|
||||
}
|
||||
project.directoriesWatchedForTsconfig.push(currentPath);
|
||||
currentPath = parentPath;
|
||||
@ -431,7 +431,7 @@ namespace ts.server {
|
||||
}
|
||||
else {
|
||||
if (info && (!info.isScriptOpen())) {
|
||||
// file has been changed which might affect the set of referenced files in projects that include
|
||||
// file has been changed which might affect the set of referenced files in projects that include
|
||||
// this file and set of inferred projects
|
||||
info.reloadFromFile();
|
||||
this.updateProjectGraphs(info.containingProjects);
|
||||
@ -450,7 +450,7 @@ namespace ts.server {
|
||||
this.filenameToScriptInfo.remove(info.path);
|
||||
this.lastDeletedFile = info;
|
||||
|
||||
// capture list of projects since detachAllProjects will wipe out original list
|
||||
// capture list of projects since detachAllProjects will wipe out original list
|
||||
const containingProjects = info.containingProjects.slice();
|
||||
|
||||
info.detachAllProjects();
|
||||
@ -606,7 +606,7 @@ namespace ts.server {
|
||||
const inferredProject = this.createInferredProjectWithRootFileIfNecessary(info);
|
||||
if (!this.useSingleInferredProject) {
|
||||
// if useOneInferredProject is not set then try to fixup ownership of open files
|
||||
// check 'defaultProject !== inferredProject' is necessary to handle cases
|
||||
// check 'defaultProject !== inferredProject' is necessary to handle cases
|
||||
// when creation inferred project for some file has added other open files into this project (i.e. as referenced files)
|
||||
// we definitely don't want to delete the project that was just created
|
||||
for (const f of this.openFiles) {
|
||||
@ -616,7 +616,7 @@ namespace ts.server {
|
||||
}
|
||||
const defaultProject = f.getDefaultProject();
|
||||
if (isRootFileInInferredProject(info) && defaultProject !== inferredProject && inferredProject.containsScriptInfo(f)) {
|
||||
// open file used to be root in inferred project,
|
||||
// open file used to be root in inferred project,
|
||||
// this inferred project is different from the one we've just created for current file
|
||||
// and new inferred project references this open file.
|
||||
// We should delete old inferred project and attach open file to the new one
|
||||
@ -845,7 +845,7 @@ namespace ts.server {
|
||||
files: parsedCommandLine.fileNames,
|
||||
compilerOptions: parsedCommandLine.options,
|
||||
configHasFilesProperty: config["files"] !== undefined,
|
||||
wildcardDirectories: createMap(parsedCommandLine.wildcardDirectories),
|
||||
wildcardDirectories: createMapFromTemplate(parsedCommandLine.wildcardDirectories),
|
||||
typeAcquisition: parsedCommandLine.typeAcquisition,
|
||||
compileOnSave: parsedCommandLine.compileOnSave
|
||||
};
|
||||
@ -1007,7 +1007,7 @@ namespace ts.server {
|
||||
if (toAdd) {
|
||||
for (const f of toAdd) {
|
||||
if (f.isScriptOpen() && isRootFileInInferredProject(f)) {
|
||||
// if file is already root in some inferred project
|
||||
// if file is already root in some inferred project
|
||||
// - remove the file from that project and delete the project if necessary
|
||||
const inferredProject = f.containingProjects[0];
|
||||
inferredProject.removeFile(f);
|
||||
@ -1160,7 +1160,7 @@ namespace ts.server {
|
||||
this.logger.info(`Host information ${args.hostInfo}`);
|
||||
}
|
||||
if (args.formatOptions) {
|
||||
mergeMaps(this.hostConfiguration.formatCodeOptions, convertFormatOptions(args.formatOptions));
|
||||
mergeMapLikes(this.hostConfiguration.formatCodeOptions, convertFormatOptions(args.formatOptions));
|
||||
this.logger.info("Format host information updated");
|
||||
}
|
||||
if (args.extraFileExtensions) {
|
||||
@ -1300,7 +1300,7 @@ namespace ts.server {
|
||||
for (const file of changedFiles) {
|
||||
const scriptInfo = this.getScriptInfo(file.fileName);
|
||||
Debug.assert(!!scriptInfo);
|
||||
// apply changes in reverse order
|
||||
// apply changes in reverse order
|
||||
for (let i = file.changes.length - 1; i >= 0; i--) {
|
||||
const change = file.changes[i];
|
||||
scriptInfo.editContent(change.span.start, change.span.start + change.span.length, change.newText);
|
||||
@ -1337,7 +1337,7 @@ namespace ts.server {
|
||||
|
||||
closeExternalProject(uncheckedFileName: string, suppressRefresh = false): void {
|
||||
const fileName = toNormalizedPath(uncheckedFileName);
|
||||
const configFiles = this.externalProjectToConfiguredProjectMap[fileName];
|
||||
const configFiles = this.externalProjectToConfiguredProjectMap.get(fileName);
|
||||
if (configFiles) {
|
||||
let shouldRefreshInferredProjects = false;
|
||||
for (const configFile of configFiles) {
|
||||
@ -1345,7 +1345,7 @@ namespace ts.server {
|
||||
shouldRefreshInferredProjects = true;
|
||||
}
|
||||
}
|
||||
delete this.externalProjectToConfiguredProjectMap[fileName];
|
||||
this.externalProjectToConfiguredProjectMap.delete(fileName);
|
||||
if (shouldRefreshInferredProjects && !suppressRefresh) {
|
||||
this.refreshInferredProjects();
|
||||
}
|
||||
@ -1365,20 +1365,20 @@ namespace ts.server {
|
||||
openExternalProjects(projects: protocol.ExternalProject[]): void {
|
||||
// record project list before the update
|
||||
const projectsToClose = arrayToMap(this.externalProjects, p => p.getProjectName(), _ => true);
|
||||
for (const externalProjectName in this.externalProjectToConfiguredProjectMap) {
|
||||
projectsToClose[externalProjectName] = true;
|
||||
}
|
||||
forEachKey(this.externalProjectToConfiguredProjectMap, externalProjectName => {
|
||||
projectsToClose.set(externalProjectName, true);
|
||||
});
|
||||
|
||||
for (const externalProject of projects) {
|
||||
this.openExternalProject(externalProject, /*suppressRefreshOfInferredProjects*/ true);
|
||||
// delete project that is present in input list
|
||||
delete projectsToClose[externalProject.projectFileName];
|
||||
projectsToClose.delete(externalProject.projectFileName);
|
||||
}
|
||||
|
||||
// close projects that were missing in the input list
|
||||
for (const externalProjectName in projectsToClose) {
|
||||
forEachKey(projectsToClose, externalProjectName => {
|
||||
this.closeExternalProject(externalProjectName, /*suppressRefresh*/ true)
|
||||
}
|
||||
});
|
||||
|
||||
this.refreshInferredProjects();
|
||||
}
|
||||
@ -1428,7 +1428,7 @@ namespace ts.server {
|
||||
// close existing project and later we'll open a set of configured projects for these files
|
||||
this.closeExternalProject(proj.projectFileName, /*suppressRefresh*/ true);
|
||||
}
|
||||
else if (this.externalProjectToConfiguredProjectMap[proj.projectFileName]) {
|
||||
else if (this.externalProjectToConfiguredProjectMap.get(proj.projectFileName)) {
|
||||
// this project used to include config files
|
||||
if (!tsConfigFiles) {
|
||||
// config files were removed from the project - close existing external project which in turn will close configured projects
|
||||
@ -1436,7 +1436,7 @@ namespace ts.server {
|
||||
}
|
||||
else {
|
||||
// project previously had some config files - compare them with new set of files and close all configured projects that correspond to unused files
|
||||
const oldConfigFiles = this.externalProjectToConfiguredProjectMap[proj.projectFileName];
|
||||
const oldConfigFiles = this.externalProjectToConfiguredProjectMap.get(proj.projectFileName);
|
||||
let iNew = 0;
|
||||
let iOld = 0;
|
||||
while (iNew < tsConfigFiles.length && iOld < oldConfigFiles.length) {
|
||||
@ -1464,7 +1464,7 @@ namespace ts.server {
|
||||
}
|
||||
if (tsConfigFiles) {
|
||||
// store the list of tsconfig files that belong to the external project
|
||||
this.externalProjectToConfiguredProjectMap[proj.projectFileName] = tsConfigFiles;
|
||||
this.externalProjectToConfiguredProjectMap.set(proj.projectFileName, tsConfigFiles);
|
||||
for (const tsconfigFile of tsConfigFiles) {
|
||||
let project = this.findConfiguredProjectByProjectName(tsconfigFile);
|
||||
if (!project) {
|
||||
@ -1480,7 +1480,7 @@ namespace ts.server {
|
||||
}
|
||||
else {
|
||||
// no config files - remove the item from the collection
|
||||
delete this.externalProjectToConfiguredProjectMap[proj.projectFileName];
|
||||
this.externalProjectToConfiguredProjectMap.delete(proj.projectFileName);
|
||||
this.createAndAddExternalProject(proj.projectFileName, rootFiles, proj.options, proj.typeAcquisition);
|
||||
}
|
||||
if (!suppressRefreshOfInferredProjects) {
|
||||
|
||||
@ -75,15 +75,16 @@ namespace ts.server {
|
||||
|
||||
for (const name of names) {
|
||||
// check if this is a duplicate entry in the list
|
||||
let resolution = newResolutions[name];
|
||||
let resolution = newResolutions.get(name);
|
||||
if (!resolution) {
|
||||
const existingResolution = currentResolutionsInFile && currentResolutionsInFile[name];
|
||||
const existingResolution = currentResolutionsInFile && currentResolutionsInFile.get(name);
|
||||
if (moduleResolutionIsValid(existingResolution)) {
|
||||
// ok, it is safe to use existing name resolution results
|
||||
resolution = existingResolution;
|
||||
}
|
||||
else {
|
||||
newResolutions[name] = resolution = loader(name, containingFile, compilerOptions, this);
|
||||
resolution = loader(name, containingFile, compilerOptions, this);
|
||||
newResolutions.set(name, resolution);
|
||||
}
|
||||
if (logChanges && this.filesWithChangedSetOfUnresolvedImports && !resolutionIsEqualTo(existingResolution, resolution)) {
|
||||
this.filesWithChangedSetOfUnresolvedImports.push(path);
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
/// <reference path="..\services\services.ts" />
|
||||
/// <reference path="..\services\services.ts" />
|
||||
/// <reference path="utilities.ts"/>
|
||||
/// <reference path="scriptInfo.ts"/>
|
||||
/// <reference path="lsHost.ts"/>
|
||||
@ -410,7 +410,7 @@ namespace ts.server {
|
||||
}
|
||||
|
||||
registerFileUpdate(fileName: string) {
|
||||
(this.updatedFileNames || (this.updatedFileNames = createMap<string>()))[fileName] = fileName;
|
||||
(this.updatedFileNames || (this.updatedFileNames = createMap<string>())).set(fileName, fileName);
|
||||
}
|
||||
|
||||
markAsDirty() {
|
||||
@ -428,9 +428,9 @@ namespace ts.server {
|
||||
}
|
||||
let unresolvedImports: string[];
|
||||
if (file.resolvedModules) {
|
||||
for (const name in file.resolvedModules) {
|
||||
file.resolvedModules.forEach((resolvedModule, name) => {
|
||||
// pick unresolved non-relative names
|
||||
if (!file.resolvedModules[name] && !isExternalModuleNameRelative(name)) {
|
||||
if (!resolvedModule && !isExternalModuleNameRelative(name)) {
|
||||
// for non-scoped names extract part up-to the first slash
|
||||
// for scoped names - extract up to the second slash
|
||||
let trimmed = name.trim();
|
||||
@ -444,7 +444,7 @@ namespace ts.server {
|
||||
(unresolvedImports || (unresolvedImports = [])).push(trimmed);
|
||||
result.push(trimmed);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
this.cachedUnresolvedImportsPerFile.set(file.path, unresolvedImports || emptyArray);
|
||||
}
|
||||
@ -466,7 +466,7 @@ namespace ts.server {
|
||||
}
|
||||
|
||||
// 1. no changes in structure, no changes in unresolved imports - do nothing
|
||||
// 2. no changes in structure, unresolved imports were changed - collect unresolved imports for all files
|
||||
// 2. no changes in structure, unresolved imports were changed - collect unresolved imports for all files
|
||||
// (can reuse cached imports for files that were not changed)
|
||||
// 3. new files were added/removed, but compilation settings stays the same - collect unresolved imports for all new/modified files
|
||||
// (can reuse cached imports for files that were not changed)
|
||||
@ -618,17 +618,18 @@ namespace ts.server {
|
||||
|
||||
const added: string[] = [];
|
||||
const removed: string[] = [];
|
||||
const updated: string[] = getOwnKeys(updatedFileNames);
|
||||
for (const id in currentFiles) {
|
||||
if (!hasProperty(lastReportedFileNames, id)) {
|
||||
const updated: string[] = arrayFrom(updatedFileNames.keys());
|
||||
|
||||
forEachKey(currentFiles, id => {
|
||||
if (!lastReportedFileNames.has(id)) {
|
||||
added.push(id);
|
||||
}
|
||||
}
|
||||
for (const id in lastReportedFileNames) {
|
||||
if (!hasProperty(currentFiles, id)) {
|
||||
});
|
||||
forEachKey(lastReportedFileNames, id => {
|
||||
if (!currentFiles.has(id)) {
|
||||
removed.push(id);
|
||||
}
|
||||
}
|
||||
});
|
||||
this.lastReportedFileNames = currentFiles;
|
||||
this.lastReportedVersion = this.projectStructureVersion;
|
||||
return { info, changes: { added, removed, updated }, projectErrors: this.projectErrors };
|
||||
@ -662,7 +663,7 @@ namespace ts.server {
|
||||
if (symbol && symbol.declarations && symbol.declarations[0]) {
|
||||
const declarationSourceFile = symbol.declarations[0].getSourceFile();
|
||||
if (declarationSourceFile) {
|
||||
referencedFiles[declarationSourceFile.path] = true;
|
||||
referencedFiles.set(declarationSourceFile.path, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -674,25 +675,24 @@ namespace ts.server {
|
||||
if (sourceFile.referencedFiles && sourceFile.referencedFiles.length > 0) {
|
||||
for (const referencedFile of sourceFile.referencedFiles) {
|
||||
const referencedPath = toPath(referencedFile.fileName, currentDirectory, getCanonicalFileName);
|
||||
referencedFiles[referencedPath] = true;
|
||||
referencedFiles.set(referencedPath, true);
|
||||
}
|
||||
}
|
||||
|
||||
// Handle type reference directives
|
||||
if (sourceFile.resolvedTypeReferenceDirectiveNames) {
|
||||
for (const typeName in sourceFile.resolvedTypeReferenceDirectiveNames) {
|
||||
const resolvedTypeReferenceDirective = sourceFile.resolvedTypeReferenceDirectiveNames[typeName];
|
||||
sourceFile.resolvedTypeReferenceDirectiveNames.forEach((resolvedTypeReferenceDirective) => {
|
||||
if (!resolvedTypeReferenceDirective) {
|
||||
continue;
|
||||
return;
|
||||
}
|
||||
|
||||
const fileName = resolvedTypeReferenceDirective.resolvedFileName;
|
||||
const typeFilePath = toPath(fileName, currentDirectory, getCanonicalFileName);
|
||||
referencedFiles[typeFilePath] = true;
|
||||
}
|
||||
referencedFiles.set(typeFilePath, true);
|
||||
})
|
||||
}
|
||||
|
||||
const allFileNames = map(Object.keys(referencedFiles), key => <Path>key);
|
||||
const allFileNames = arrayFrom(referencedFiles.keys()) as Path[];
|
||||
return filter(allFileNames, file => this.projectService.host.fileExists(file));
|
||||
}
|
||||
|
||||
@ -868,18 +868,19 @@ namespace ts.server {
|
||||
return;
|
||||
}
|
||||
const configDirectoryPath = getDirectoryPath(this.getConfigFilePath());
|
||||
this.directoriesWatchedForWildcards = reduceProperties(this.wildcardDirectories, (watchers, flag, directory) => {
|
||||
|
||||
this.directoriesWatchedForWildcards = createMap<FileWatcher>();
|
||||
this.wildcardDirectories.forEach((flag, directory) => {
|
||||
if (comparePaths(configDirectoryPath, directory, ".", !this.projectService.host.useCaseSensitiveFileNames) !== Comparison.EqualTo) {
|
||||
const recursive = (flag & WatchDirectoryFlags.Recursive) !== 0;
|
||||
this.projectService.logger.info(`Add ${recursive ? "recursive " : ""}watcher for: ${directory}`);
|
||||
watchers[directory] = this.projectService.host.watchDirectory(
|
||||
this.directoriesWatchedForWildcards.set(directory, this.projectService.host.watchDirectory(
|
||||
directory,
|
||||
path => callback(this, path),
|
||||
recursive
|
||||
);
|
||||
));
|
||||
}
|
||||
return watchers;
|
||||
}, <Map<FileWatcher>>{});
|
||||
});
|
||||
}
|
||||
|
||||
stopWatchingDirectory() {
|
||||
@ -903,9 +904,9 @@ namespace ts.server {
|
||||
this.typeRootsWatchers = undefined;
|
||||
}
|
||||
|
||||
for (const id in this.directoriesWatchedForWildcards) {
|
||||
this.directoriesWatchedForWildcards[id].close();
|
||||
}
|
||||
this.directoriesWatchedForWildcards.forEach(watcher => {
|
||||
watcher.close();
|
||||
});
|
||||
this.directoriesWatchedForWildcards = undefined;
|
||||
|
||||
this.stopWatchingDirectory();
|
||||
|
||||
@ -417,7 +417,7 @@ namespace ts.server.protocol {
|
||||
startOffset: number;
|
||||
|
||||
/**
|
||||
* Position (can be specified instead of line/offset pair)
|
||||
* Position (can be specified instead of line/offset pair)
|
||||
*/
|
||||
/* @internal */
|
||||
startPosition?: number;
|
||||
@ -433,7 +433,7 @@ namespace ts.server.protocol {
|
||||
endOffset: number;
|
||||
|
||||
/**
|
||||
* Position (can be specified instead of line/offset pair)
|
||||
* Position (can be specified instead of line/offset pair)
|
||||
*/
|
||||
/* @internal */
|
||||
endPosition?: number;
|
||||
@ -445,7 +445,7 @@ namespace ts.server.protocol {
|
||||
}
|
||||
|
||||
/**
|
||||
* Response for GetCodeFixes request.
|
||||
* Response for GetCodeFixes request.
|
||||
*/
|
||||
export interface GetCodeFixesResponse extends Response {
|
||||
body?: CodeAction[];
|
||||
@ -2272,10 +2272,11 @@ namespace ts.server.protocol {
|
||||
export namespace JsxEmit {
|
||||
export type None = "None";
|
||||
export type Preserve = "Preserve";
|
||||
export type ReactNative = "ReactNative";
|
||||
export type React = "React";
|
||||
}
|
||||
|
||||
export type JsxEmit = JsxEmit.None | JsxEmit.Preserve | JsxEmit.React;
|
||||
export type JsxEmit = JsxEmit.None | JsxEmit.Preserve | JsxEmit.React | JsxEmit.ReactNative;
|
||||
|
||||
export namespace ModuleKind {
|
||||
export type None = "None";
|
||||
|
||||
@ -278,7 +278,7 @@ namespace ts.server {
|
||||
if (!this.formatCodeSettings) {
|
||||
this.formatCodeSettings = getDefaultFormatCodeSettings(this.host);
|
||||
}
|
||||
mergeMaps(this.formatCodeSettings, formatSettings);
|
||||
mergeMapLikes(this.formatCodeSettings, formatSettings);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -484,18 +484,20 @@ namespace ts.server {
|
||||
|
||||
private getImplementation(args: protocol.FileLocationRequestArgs, simplifiedResult: boolean): protocol.FileSpan[] | ImplementationLocation[] {
|
||||
const { file, project } = this.getFileAndProject(args);
|
||||
const scriptInfo = project.getScriptInfoForNormalizedPath(file);
|
||||
const position = this.getPosition(args, scriptInfo);
|
||||
const position = this.getPosition(args, project.getScriptInfoForNormalizedPath(file));
|
||||
const implementations = project.getLanguageService().getImplementationAtPosition(file, position);
|
||||
if (!implementations) {
|
||||
return [];
|
||||
}
|
||||
if (simplifiedResult) {
|
||||
return implementations.map(impl => ({
|
||||
file: impl.fileName,
|
||||
start: scriptInfo.positionToLineOffset(impl.textSpan.start),
|
||||
end: scriptInfo.positionToLineOffset(ts.textSpanEnd(impl.textSpan))
|
||||
}));
|
||||
return implementations.map(({ fileName, textSpan }) => {
|
||||
const scriptInfo = project.getScriptInfo(fileName);
|
||||
return {
|
||||
file: fileName,
|
||||
start: scriptInfo.positionToLineOffset(textSpan.start),
|
||||
end: scriptInfo.positionToLineOffset(ts.textSpanEnd(textSpan))
|
||||
};
|
||||
});
|
||||
}
|
||||
else {
|
||||
return implementations;
|
||||
@ -1390,7 +1392,7 @@ namespace ts.server {
|
||||
return { response, responseRequired: true };
|
||||
}
|
||||
|
||||
private handlers = createMap<(request: protocol.Request) => { response?: any, responseRequired?: boolean }>({
|
||||
private handlers = createMapFromTemplate<(request: protocol.Request) => { response?: any, responseRequired?: boolean }>({
|
||||
[CommandNames.OpenExternalProject]: (request: protocol.OpenExternalProjectRequest) => {
|
||||
this.projectService.openExternalProject(request.arguments, /*suppressRefreshOfInferredProjects*/ false);
|
||||
// TODO: report errors
|
||||
@ -1634,14 +1636,14 @@ namespace ts.server {
|
||||
});
|
||||
|
||||
public addProtocolHandler(command: string, handler: (request: protocol.Request) => { response?: any, responseRequired: boolean }) {
|
||||
if (command in this.handlers) {
|
||||
if (this.handlers.has(command)) {
|
||||
throw new Error(`Protocol handler already exists for command "${command}"`);
|
||||
}
|
||||
this.handlers[command] = handler;
|
||||
this.handlers.set(command, handler);
|
||||
}
|
||||
|
||||
public executeCommand(request: protocol.Request): { response?: any, responseRequired?: boolean } {
|
||||
const handler = this.handlers[request.command];
|
||||
const handler = this.handlers.get(request.command);
|
||||
if (handler) {
|
||||
return handler(request);
|
||||
}
|
||||
|
||||
@ -35,17 +35,18 @@ namespace ts.server {
|
||||
let unique = 0;
|
||||
|
||||
for (const v of arr1) {
|
||||
if (set[v] !== true) {
|
||||
set[v] = true;
|
||||
if (set.get(v) !== true) {
|
||||
set.set(v, true);
|
||||
unique++;
|
||||
}
|
||||
}
|
||||
for (const v of arr2) {
|
||||
if (!hasProperty(set, v)) {
|
||||
const isSet = set.get(v);
|
||||
if (isSet === undefined) {
|
||||
return false;
|
||||
}
|
||||
if (set[v] === true) {
|
||||
set[v] = false;
|
||||
if (isSet === true) {
|
||||
set.set(v, false);
|
||||
unique--;
|
||||
}
|
||||
}
|
||||
@ -83,7 +84,7 @@ namespace ts.server {
|
||||
return <any>emptyArray;
|
||||
}
|
||||
|
||||
const entry = this.perProjectCache[project.getProjectName()];
|
||||
const entry = this.perProjectCache.get(project.getProjectName());
|
||||
const result: SortedReadonlyArray<string> = entry ? entry.typings : <any>emptyArray;
|
||||
if (forceRefresh ||
|
||||
!entry ||
|
||||
@ -92,13 +93,13 @@ namespace ts.server {
|
||||
unresolvedImportsChanged(unresolvedImports, entry.unresolvedImports)) {
|
||||
// Note: entry is now poisoned since it does not really contain typings for a given combination of compiler options\typings options.
|
||||
// instead it acts as a placeholder to prevent issuing multiple requests
|
||||
this.perProjectCache[project.getProjectName()] = {
|
||||
this.perProjectCache.set(project.getProjectName(), {
|
||||
compilerOptions: project.getCompilerOptions(),
|
||||
typeAcquisition,
|
||||
typings: result,
|
||||
unresolvedImports,
|
||||
poisoned: true
|
||||
};
|
||||
});
|
||||
// something has been changed, issue a request to update typings
|
||||
this.installer.enqueueInstallTypingsRequest(project, typeAcquisition, unresolvedImports);
|
||||
}
|
||||
@ -106,21 +107,21 @@ namespace ts.server {
|
||||
}
|
||||
|
||||
updateTypingsForProject(projectName: string, compilerOptions: CompilerOptions, typeAcquisition: TypeAcquisition, unresolvedImports: SortedReadonlyArray<string>, newTypings: string[]) {
|
||||
this.perProjectCache[projectName] = {
|
||||
this.perProjectCache.set(projectName, {
|
||||
compilerOptions,
|
||||
typeAcquisition,
|
||||
typings: toSortedReadonlyArray(newTypings),
|
||||
unresolvedImports,
|
||||
poisoned: false
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
deleteTypingsForProject(projectName: string) {
|
||||
delete this.perProjectCache[projectName];
|
||||
this.perProjectCache.delete(projectName);
|
||||
}
|
||||
|
||||
onProjectClosed(project: Project) {
|
||||
delete this.perProjectCache[project.getProjectName()];
|
||||
this.perProjectCache.delete(project.getProjectName());
|
||||
this.installer.onProjectClosed(project);
|
||||
}
|
||||
}
|
||||
|
||||
@ -46,7 +46,7 @@ namespace ts.server.typingsInstaller {
|
||||
}
|
||||
try {
|
||||
const content = <TypesRegistryFile>JSON.parse(host.readFile(typesRegistryFilePath));
|
||||
return createMap<void>(content.entries);
|
||||
return createMapFromTemplate<void>(content.entries);
|
||||
}
|
||||
catch (e) {
|
||||
if (log.isEnabled()) {
|
||||
|
||||
@ -112,7 +112,7 @@ namespace ts.server.typingsInstaller {
|
||||
if (this.log.isEnabled()) {
|
||||
this.log.writeLine(`Closing file watchers for project '${projectName}'`);
|
||||
}
|
||||
const watchers = this.projectWatchers[projectName];
|
||||
const watchers = this.projectWatchers.get(projectName);
|
||||
if (!watchers) {
|
||||
if (this.log.isEnabled()) {
|
||||
this.log.writeLine(`No watchers are registered for project '${projectName}'`);
|
||||
@ -123,7 +123,7 @@ namespace ts.server.typingsInstaller {
|
||||
w.close();
|
||||
}
|
||||
|
||||
delete this.projectWatchers[projectName];
|
||||
this.projectWatchers.delete(projectName);
|
||||
|
||||
if (this.log.isEnabled()) {
|
||||
this.log.writeLine(`Closing file watchers for project '${projectName}' - done.`);
|
||||
@ -177,7 +177,7 @@ namespace ts.server.typingsInstaller {
|
||||
if (this.log.isEnabled()) {
|
||||
this.log.writeLine(`Processing cache location '${cacheLocation}'`);
|
||||
}
|
||||
if (this.knownCachesSet[cacheLocation]) {
|
||||
if (this.knownCachesSet.get(cacheLocation)) {
|
||||
if (this.log.isEnabled()) {
|
||||
this.log.writeLine(`Cache location was already processed...`);
|
||||
}
|
||||
@ -201,10 +201,10 @@ namespace ts.server.typingsInstaller {
|
||||
}
|
||||
const typingFile = typingToFileName(cacheLocation, packageName, this.installTypingHost, this.log);
|
||||
if (!typingFile) {
|
||||
this.missingTypingsSet[packageName] = true;
|
||||
this.missingTypingsSet.set(packageName, true);
|
||||
continue;
|
||||
}
|
||||
const existingTypingFile = this.packageNameToTypingLocation[packageName];
|
||||
const existingTypingFile = this.packageNameToTypingLocation.get(packageName);
|
||||
if (existingTypingFile === typingFile) {
|
||||
continue;
|
||||
}
|
||||
@ -216,14 +216,14 @@ namespace ts.server.typingsInstaller {
|
||||
if (this.log.isEnabled()) {
|
||||
this.log.writeLine(`Adding entry into typings cache: '${packageName}' => '${typingFile}'`);
|
||||
}
|
||||
this.packageNameToTypingLocation[packageName] = typingFile;
|
||||
this.packageNameToTypingLocation.set(packageName, typingFile);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (this.log.isEnabled()) {
|
||||
this.log.writeLine(`Finished processing cache location '${cacheLocation}'`);
|
||||
}
|
||||
this.knownCachesSet[cacheLocation] = true;
|
||||
this.knownCachesSet.set(cacheLocation, true);
|
||||
}
|
||||
|
||||
private filterTypings(typingsToInstall: string[]) {
|
||||
@ -232,12 +232,12 @@ namespace ts.server.typingsInstaller {
|
||||
}
|
||||
const result: string[] = [];
|
||||
for (const typing of typingsToInstall) {
|
||||
if (this.missingTypingsSet[typing] || this.packageNameToTypingLocation[typing]) {
|
||||
if (this.missingTypingsSet.get(typing) || this.packageNameToTypingLocation.get(typing)) {
|
||||
continue;
|
||||
}
|
||||
const validationResult = validatePackageName(typing);
|
||||
if (validationResult === PackageNameValidationResult.Ok) {
|
||||
if (typing in this.typesRegistry) {
|
||||
if (this.typesRegistry.has(typing)) {
|
||||
result.push(typing);
|
||||
}
|
||||
else {
|
||||
@ -248,7 +248,7 @@ namespace ts.server.typingsInstaller {
|
||||
}
|
||||
else {
|
||||
// add typing name to missing set so we won't process it again
|
||||
this.missingTypingsSet[typing] = true;
|
||||
this.missingTypingsSet.set(typing, true);
|
||||
if (this.log.isEnabled()) {
|
||||
switch (validationResult) {
|
||||
case PackageNameValidationResult.EmptyName:
|
||||
@ -295,8 +295,7 @@ namespace ts.server.typingsInstaller {
|
||||
this.log.writeLine(`Installing typings ${JSON.stringify(typingsToInstall)}`);
|
||||
}
|
||||
const filteredTypings = this.filterTypings(typingsToInstall);
|
||||
const scopedTypings = filteredTypings.map(x => `@types/${x}`);
|
||||
if (scopedTypings.length === 0) {
|
||||
if (filteredTypings.length === 0) {
|
||||
if (this.log.isEnabled()) {
|
||||
this.log.writeLine(`All typings are known to be missing or invalid - no need to go any further`);
|
||||
}
|
||||
@ -316,6 +315,7 @@ namespace ts.server.typingsInstaller {
|
||||
projectName: req.projectName
|
||||
});
|
||||
|
||||
const scopedTypings = filteredTypings.map(typingsName);
|
||||
this.installTypingsAsync(requestId, scopedTypings, cachePath, ok => {
|
||||
try {
|
||||
if (!ok) {
|
||||
@ -323,7 +323,7 @@ namespace ts.server.typingsInstaller {
|
||||
this.log.writeLine(`install request failed, marking packages as missing to prevent repeated requests: ${JSON.stringify(filteredTypings)}`);
|
||||
}
|
||||
for (const typing of filteredTypings) {
|
||||
this.missingTypingsSet[typing] = true;
|
||||
this.missingTypingsSet.set(typing, true);
|
||||
}
|
||||
return;
|
||||
}
|
||||
@ -336,11 +336,11 @@ namespace ts.server.typingsInstaller {
|
||||
for (const packageName of filteredTypings) {
|
||||
const typingFile = typingToFileName(cachePath, packageName, this.installTypingHost, this.log);
|
||||
if (!typingFile) {
|
||||
this.missingTypingsSet[packageName] = true;
|
||||
this.missingTypingsSet.set(packageName, true);
|
||||
continue;
|
||||
}
|
||||
if (!this.packageNameToTypingLocation[packageName]) {
|
||||
this.packageNameToTypingLocation[packageName] = typingFile;
|
||||
if (!this.packageNameToTypingLocation.has(packageName)) {
|
||||
this.packageNameToTypingLocation.set(packageName, typingFile);
|
||||
}
|
||||
installedTypingFiles.push(typingFile);
|
||||
}
|
||||
@ -395,7 +395,7 @@ namespace ts.server.typingsInstaller {
|
||||
}, /*pollingInterval*/ 2000);
|
||||
watchers.push(w);
|
||||
}
|
||||
this.projectWatchers[projectName] = watchers;
|
||||
this.projectWatchers.set(projectName, watchers);
|
||||
}
|
||||
|
||||
private createSetTypings(request: DiscoverTypings, typings: string[]): SetTypings {
|
||||
@ -429,4 +429,10 @@ namespace ts.server.typingsInstaller {
protected abstract installWorker(requestId: number, args: string[], cwd: string, onRequestCompleted: RequestCompletedAction): void;
protected abstract sendResponse(response: SetTypings | InvalidateCachedTypings | BeginInstallTypes | EndInstallTypes): void;
}

/* @internal */
export function typingsName(packageName: string): string {
return `@types/${packageName}@ts${versionMajorMinor}`;
}
const versionMajorMinor = version.split(".").slice(0, 2).join(".");
}
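The new typingsName helper above pins an @types package to the editor's TypeScript major.minor version. A small self-contained sketch of what it produces; the "2.3.1" version string is an assumed stand-in for ts.version, not taken from the commit:

const version = "2.3.1"; // stand-in for ts.version
const versionMajorMinor = version.split(".").slice(0, 2).join("."); // "2.3"

function typingsName(packageName: string): string {
    // e.g. "lodash" -> "@types/lodash@ts2.3"
    return `@types/${packageName}@ts${versionMajorMinor}`;
}

console.log(typingsName("lodash")); // "@types/lodash@ts2.3"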
@ -95,7 +95,7 @@ namespace ts.server {
|
||||
};
|
||||
}
|
||||
|
||||
export function mergeMaps(target: MapLike<any>, source: MapLike <any>): void {
|
||||
export function mergeMapLikes(target: MapLike<any>, source: MapLike <any>): void {
|
||||
for (const key in source) {
|
||||
if (hasProperty(source, key)) {
|
||||
target[key] = source[key];
|
||||
@ -145,20 +145,20 @@ namespace ts.server {

export function createNormalizedPathMap<T>(): NormalizedPathMap<T> {
/* tslint:disable:no-null-keyword */
const map: Map<T> = Object.create(null);
const map = createMap<T>();
/* tslint:enable:no-null-keyword */
return {
get(path) {
return map[path];
return map.get(path);
},
set(path, value) {
map[path] = value;
map.set(path, value);
},
contains(path) {
return hasProperty(map, path);
return map.has(path);
},
remove(path) {
delete map[path];
map.delete(path);
}
};
}
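createNormalizedPathMap above now wraps the compiler's createMap instead of a bare Object.create(null) dictionary. A minimal sketch of the same wrapper shape over a standard ES2015 Map; PathMap and createPathMap are illustrative names, not from the commit:

interface PathMap<T> {
    get(path: string): T | undefined;
    set(path: string, value: T): void;
    contains(path: string): boolean;
    remove(path: string): void;
}

function createPathMap<T>(): PathMap<T> {
    const map = new Map<string, T>();
    return {
        get: path => map.get(path),                      // was: map[path]
        set: (path, value) => { map.set(path, value); }, // was: map[path] = value
        contains: path => map.has(path),                 // was: hasProperty(map, path)
        remove: path => { map.delete(path); }            // was: delete map[path]
    };
}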
@ -198,16 +198,17 @@ namespace ts.server {
}

public schedule(operationId: string, delay: number, cb: () => void) {
if (hasProperty(this.pendingTimeouts, operationId)) {
const pendingTimeout = this.pendingTimeouts.get(operationId);
if (pendingTimeout) {
// another operation was already scheduled for this id - cancel it
this.host.clearTimeout(this.pendingTimeouts[operationId]);
this.host.clearTimeout(pendingTimeout);
}
// schedule new operation, pass arguments
this.pendingTimeouts[operationId] = this.host.setTimeout(ThrottledOperations.run, delay, this, operationId, cb);
this.pendingTimeouts.set(operationId, this.host.setTimeout(ThrottledOperations.run, delay, this, operationId, cb));
}

private static run(self: ThrottledOperations, operationId: string, cb: () => void) {
delete self.pendingTimeouts[operationId];
self.pendingTimeouts.delete(operationId);
cb();
}
}

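ThrottledOperations.schedule above coalesces repeated requests for the same operation id by cancelling the pending timeout before rescheduling. A minimal sketch of that pattern using the global setTimeout/clearTimeout instead of the server host abstraction; the names are illustrative:

const pendingTimeouts = new Map<string, ReturnType<typeof setTimeout>>();

function schedule(operationId: string, delay: number, cb: () => void): void {
    const pending = pendingTimeouts.get(operationId);
    if (pending !== undefined) {
        // another operation was already scheduled for this id - cancel it
        clearTimeout(pending);
    }
    pendingTimeouts.set(operationId, setTimeout(() => {
        pendingTimeouts.delete(operationId);
        cb();
    }, delay));
}

// Only the last callback fires, roughly 250 ms after the final call:
schedule("refresh", 250, () => console.log("refresh #1"));
schedule("refresh", 250, () => console.log("refresh #2"));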
@ -557,7 +557,7 @@ namespace ts {
|
||||
// Only bother calling into the typechecker if this is an identifier that
|
||||
// could possibly resolve to a type name. This makes classification run
|
||||
// in a third of the time it would normally take.
|
||||
if (classifiableNames[identifier.text]) {
|
||||
if (classifiableNames.get(identifier.text)) {
|
||||
const symbol = typeChecker.getSymbolAtLocation(node);
|
||||
if (symbol) {
|
||||
const type = classifySymbol(symbol, getMeaningFromLocation(node));
|
||||
|
||||
@ -16,7 +16,7 @@ namespace ts {
|
||||
}
|
||||
|
||||
export namespace codefix {
|
||||
const codeFixes = createMap<CodeFix[]>();
|
||||
const codeFixes: CodeFix[][] = [];
|
||||
|
||||
export function registerCodeFix(action: CodeFix) {
|
||||
forEach(action.errorCodes, error => {
|
||||
|
||||
16
src/services/codefixes/fixForgottenThisPropertyAccess.ts
Normal file
@ -0,0 +1,16 @@
/* @internal */
namespace ts.codefix {
registerCodeFix({
errorCodes: [Diagnostics.Cannot_find_name_0_Did_you_mean_the_instance_member_this_0.code],
getCodeActions: (context: CodeFixContext) => {
const sourceFile = context.sourceFile;
const token = getTokenAtPosition(sourceFile, context.span.start);
const start = token.getStart(sourceFile);

return [{
description: getLocaleSpecificMessage(Diagnostics.Add_this_to_unresolved_variable),
changes: [{ fileName: sourceFile.fileName, textChanges: [{ newText: "this.", span: { start, length: 0 } }] }]
}];
}
});
}
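The new fixForgottenThisPropertyAccess code fix above inserts the text "this." at a zero-length span in front of an identifier that should have been an instance-member access. An illustrative before/after, not part of the commit:

class Greeter {
    greeting = "world";
    greet() {
        // Before the fix:  return "hello " + greeting;
        //   -> Cannot find name 'greeting'. Did you mean the instance member 'this.greeting'?
        // The fix inserts "this." at the start of the identifier:
        return "hello " + this.greeting;
    }
}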
@ -3,7 +3,7 @@
|
||||
/// <reference path="fixClassSuperMustPrecedeThisAccess.ts" />
|
||||
/// <reference path="fixConstructorForDerivedNeedSuperCall.ts" />
|
||||
/// <reference path="fixExtendsInterfaceBecomesImplements.ts" />
|
||||
/// <reference path="fixForgottenThisPropertyAccess.ts" />
|
||||
/// <reference path='unusedIdentifierFixes.ts' />
|
||||
/// <reference path='importFixes.ts' />
|
||||
/// <reference path='helpers.ts' />
|
||||
|
||||
|
||||
@ -9,7 +9,7 @@ namespace ts.codefix {
|
||||
*/
|
||||
export function getMissingMembersInsertion(classDeclaration: ClassLikeDeclaration, possiblyMissingSymbols: Symbol[], checker: TypeChecker, newlineChar: string): string {
|
||||
const classMembers = classDeclaration.symbol.members;
|
||||
const missingMembers = possiblyMissingSymbols.filter(symbol => !(symbol.getName() in classMembers));
|
||||
const missingMembers = possiblyMissingSymbols.filter(symbol => !classMembers.has(symbol.getName()));
|
||||
|
||||
let insertion = "";
|
||||
|
||||
|
||||
@ -14,20 +14,21 @@ namespace ts.codefix {
|
||||
}
|
||||
|
||||
class ImportCodeActionMap {
|
||||
private symbolIdToActionMap = createMap<ImportCodeAction[]>();
|
||||
private symbolIdToActionMap: ImportCodeAction[][] = [];
|
||||
|
||||
addAction(symbolId: number, newAction: ImportCodeAction) {
|
||||
if (!newAction) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!this.symbolIdToActionMap[symbolId]) {
|
||||
const actions = this.symbolIdToActionMap[symbolId];
|
||||
if (!actions) {
|
||||
this.symbolIdToActionMap[symbolId] = [newAction];
|
||||
return;
|
||||
}
|
||||
|
||||
if (newAction.kind === "CodeChange") {
|
||||
this.symbolIdToActionMap[symbolId].push(newAction);
|
||||
actions.push(newAction);
|
||||
return;
|
||||
}
|
||||
|
||||
@ -73,8 +74,8 @@ namespace ts.codefix {
|
||||
|
||||
getAllActions() {
|
||||
let result: ImportCodeAction[] = [];
|
||||
for (const symbolId in this.symbolIdToActionMap) {
|
||||
result = concatenate(result, this.symbolIdToActionMap[symbolId]);
|
||||
for (const key in this.symbolIdToActionMap) {
|
||||
result = concatenate(result, this.symbolIdToActionMap[key])
|
||||
}
|
||||
return result;
|
||||
}
|
||||
@ -127,7 +128,7 @@ namespace ts.codefix {
|
||||
const symbolIdActionMap = new ImportCodeActionMap();
|
||||
|
||||
// this is a module id -> module import declaration map
|
||||
const cachedImportDeclarations = createMap<(ImportDeclaration | ImportEqualsDeclaration)[]>();
|
||||
const cachedImportDeclarations: (ImportDeclaration | ImportEqualsDeclaration)[][] = [];
|
||||
let cachedNewImportInsertPosition: number;
|
||||
|
||||
const currentTokenMeaning = getMeaningFromLocation(token);
|
||||
@ -170,8 +171,9 @@ namespace ts.codefix {
|
||||
function getImportDeclarations(moduleSymbol: Symbol) {
|
||||
const moduleSymbolId = getUniqueSymbolId(moduleSymbol);
|
||||
|
||||
if (cachedImportDeclarations[moduleSymbolId]) {
|
||||
return cachedImportDeclarations[moduleSymbolId];
|
||||
const cached = cachedImportDeclarations[moduleSymbolId];
|
||||
if (cached) {
|
||||
return cached;
|
||||
}
|
||||
|
||||
const existingDeclarations: (ImportDeclaration | ImportEqualsDeclaration)[] = [];
|
||||
@ -414,8 +416,8 @@ namespace ts.codefix {
|
||||
);
|
||||
|
||||
function getModuleSpecifierForNewImport() {
|
||||
const fileName = sourceFile.path;
|
||||
const moduleFileName = moduleSymbol.valueDeclaration.getSourceFile().path;
|
||||
const fileName = sourceFile.fileName;
|
||||
const moduleFileName = moduleSymbol.valueDeclaration.getSourceFile().fileName;
|
||||
const sourceDirectory = getDirectoryPath(fileName);
|
||||
const options = context.program.getCompilerOptions();
|
||||
|
||||
@ -437,8 +439,7 @@ namespace ts.codefix {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const normalizedBaseUrl = toPath(options.baseUrl, getDirectoryPath(options.baseUrl), getCanonicalFileName);
|
||||
let relativeName = tryRemoveParentDirectoryName(moduleFileName, normalizedBaseUrl);
|
||||
let relativeName = getRelativePathIfInDirectory(moduleFileName, options.baseUrl);
|
||||
if (!relativeName) {
|
||||
return undefined;
|
||||
}
|
||||
@ -475,9 +476,8 @@ namespace ts.codefix {
|
||||
|
||||
function tryGetModuleNameFromRootDirs() {
|
||||
if (options.rootDirs) {
|
||||
const normalizedRootDirs = map(options.rootDirs, rootDir => toPath(rootDir, /*basePath*/ undefined, getCanonicalFileName));
|
||||
const normalizedTargetPath = getPathRelativeToRootDirs(moduleFileName, normalizedRootDirs);
|
||||
const normalizedSourcePath = getPathRelativeToRootDirs(sourceDirectory, normalizedRootDirs);
|
||||
const normalizedTargetPath = getPathRelativeToRootDirs(moduleFileName, options.rootDirs);
|
||||
const normalizedSourcePath = getPathRelativeToRootDirs(sourceDirectory, options.rootDirs);
|
||||
if (normalizedTargetPath !== undefined) {
|
||||
const relativePath = normalizedSourcePath !== undefined ? getRelativePath(normalizedTargetPath, normalizedSourcePath) : normalizedTargetPath;
|
||||
return removeFileExtension(relativePath);
|
||||
@ -544,9 +544,9 @@ namespace ts.codefix {
|
||||
}
|
||||
}
|
||||
|
||||
function getPathRelativeToRootDirs(path: Path, rootDirs: Path[]) {
|
||||
function getPathRelativeToRootDirs(path: string, rootDirs: string[]) {
|
||||
for (const rootDir of rootDirs) {
|
||||
const relativeName = tryRemoveParentDirectoryName(path, rootDir);
|
||||
const relativeName = getRelativePathIfInDirectory(path, rootDir);
|
||||
if (relativeName !== undefined) {
|
||||
return relativeName;
|
||||
}
|
||||
@ -562,20 +562,15 @@ namespace ts.codefix {
|
||||
return fileName;
|
||||
}
|
||||
|
||||
function getRelativePathIfInDirectory(path: string, directoryPath: string) {
|
||||
const relativePath = getRelativePathToDirectoryOrUrl(directoryPath, path, directoryPath, getCanonicalFileName, false);
|
||||
return isRootedDiskPath(relativePath) || startsWith(relativePath, "..") ? undefined : relativePath;
|
||||
}
|
||||
|
||||
function getRelativePath(path: string, directoryPath: string) {
|
||||
const relativePath = getRelativePathToDirectoryOrUrl(directoryPath, path, directoryPath, getCanonicalFileName, false);
|
||||
return moduleHasNonRelativeName(relativePath) ? "./" + relativePath : relativePath;
|
||||
}
|
||||
|
||||
function tryRemoveParentDirectoryName(path: Path, parentDirectory: Path) {
|
||||
const index = path.indexOf(parentDirectory);
|
||||
if (index === 0) {
|
||||
return endsWith(parentDirectory, directorySeparator)
|
||||
? path.substring(parentDirectory.length)
|
||||
: path.substring(parentDirectory.length + 1);
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
File diff suppressed because it is too large
@ -1,4 +1,4 @@
|
||||
namespace ts {
|
||||
namespace ts {
|
||||
/**
|
||||
* The document registry represents a store of SourceFile objects that can be shared between
|
||||
* multiple LanguageService instances. A LanguageService instance holds on the SourceFile (AST)
|
||||
@ -113,16 +113,16 @@ namespace ts {
|
||||
}
|
||||
|
||||
function getBucketForCompilationSettings(key: DocumentRegistryBucketKey, createIfMissing: boolean): FileMap<DocumentRegistryEntry> {
|
||||
let bucket = buckets[key];
|
||||
let bucket = buckets.get(key);
|
||||
if (!bucket && createIfMissing) {
|
||||
buckets[key] = bucket = createFileMap<DocumentRegistryEntry>();
|
||||
buckets.set(key, bucket = createFileMap<DocumentRegistryEntry>());
|
||||
}
|
||||
return bucket;
|
||||
}
|
||||
|
||||
function reportStats() {
|
||||
const bucketInfoArray = Object.keys(buckets).filter(name => name && name.charAt(0) === "_").map(name => {
|
||||
const entries = buckets[name];
|
||||
const bucketInfoArray = arrayFrom(buckets.keys()).filter(name => name && name.charAt(0) === "_").map(name => {
|
||||
const entries = buckets.get(name);
|
||||
const sourceFiles: { name: string; refCount: number; references: string[]; }[] = [];
|
||||
entries.forEachValue((key, entry) => {
|
||||
sourceFiles.push({
|
||||
|
||||
File diff suppressed because it is too large
@ -4,7 +4,7 @@
|
||||
namespace ts.formatting {
|
||||
export class Rules {
|
||||
public getRuleName(rule: Rule) {
|
||||
const o: ts.Map<any> = <any>this;
|
||||
const o: ts.MapLike<any> = <any>this;
|
||||
for (const name in o) {
|
||||
if (o[name] === rule) {
|
||||
return name;
|
||||
|
||||
@ -14,7 +14,7 @@ namespace ts.GoToDefinition {
|
||||
// Type reference directives
|
||||
const typeReferenceDirective = findReferenceInPosition(sourceFile.typeReferenceDirectives, position);
|
||||
if (typeReferenceDirective) {
|
||||
const referenceFile = program.getResolvedTypeReferenceDirectives()[typeReferenceDirective.fileName];
|
||||
const referenceFile = program.getResolvedTypeReferenceDirectives().get(typeReferenceDirective.fileName);
|
||||
return referenceFile && referenceFile.resolvedFileName &&
|
||||
[getDefinitionInfoForFileReference(typeReferenceDirective.fileName, referenceFile.resolvedFileName)];
|
||||
}
|
||||
@ -187,7 +187,15 @@ namespace ts.GoToDefinition {
|
||||
}
|
||||
|
||||
function isSignatureDeclaration(node: Node): boolean {
|
||||
return node.kind === SyntaxKind.FunctionDeclaration || node.kind === SyntaxKind.MethodDeclaration || node.kind === SyntaxKind.MethodSignature
|
||||
switch (node.kind) {
|
||||
case ts.SyntaxKind.Constructor:
|
||||
case ts.SyntaxKind.FunctionDeclaration:
|
||||
case ts.SyntaxKind.MethodDeclaration:
|
||||
case ts.SyntaxKind.MethodSignature:
|
||||
return true;
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/** Creates a DefinitionInfo from a Declaration, using the declaration's name if possible. */
|
||||
@ -254,6 +262,11 @@ namespace ts.GoToDefinition {

function tryGetSignatureDeclaration(typeChecker: TypeChecker, node: Node): SignatureDeclaration | undefined {
const callLike = getAncestorCallLikeExpression(node);
return callLike && typeChecker.getResolvedSignature(callLike).declaration;
const decl = callLike && typeChecker.getResolvedSignature(callLike).declaration;
if (decl && isSignatureDeclaration(decl)) {
return decl;
}
// Don't go to a function type, go to the value having that type.
return undefined;
}
}

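The guard added above keeps go-to-definition from jumping into a bare function type: tryGetSignatureDeclaration now returns the resolved signature's declaration only when it is a constructor, function, or method declaration/signature, and otherwise yields undefined so the caller falls back to the value. An illustrative scenario, not from the commit:

type Callback = (x: number) => void;

const onTick: Callback = x => console.log(x);

// Go-to-definition on `onTick` in this call should land on the `onTick`
// variable above, not on the `Callback` function type that supplies the
// resolved signature.
onTick(42);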
@ -17,7 +17,7 @@ namespace ts.GoToImplementation {
|
||||
else {
|
||||
// Perform "Find all References" and retrieve only those that are implementations
|
||||
const referencedSymbols = FindAllReferences.getReferencedSymbolsForNode(typeChecker, cancellationToken,
|
||||
node, sourceFiles, /*findInStrings*/false, /*findInComments*/false, /*implementations*/true);
|
||||
node, sourceFiles, /*findInStrings*/false, /*findInComments*/false, /*isForRename*/false, /*implementations*/true);
|
||||
const result = flatMap(referencedSymbols, symbol =>
|
||||
map(symbol.references, ({ textSpan, fileName }) => ({ textSpan, fileName })));
|
||||
|
||||
|
||||
@ -17,11 +17,11 @@ namespace ts.JsTyping {
|
||||
|
||||
interface PackageJson {
|
||||
_requiredBy?: string[];
|
||||
dependencies?: Map<string>;
|
||||
devDependencies?: Map<string>;
|
||||
dependencies?: MapLike<string>;
|
||||
devDependencies?: MapLike<string>;
|
||||
name?: string;
|
||||
optionalDependencies?: Map<string>;
|
||||
peerDependencies?: Map<string>;
|
||||
optionalDependencies?: MapLike<string>;
|
||||
peerDependencies?: MapLike<string>;
|
||||
typings?: string;
|
||||
};
|
||||
|
||||
@ -76,7 +76,7 @@ namespace ts.JsTyping {
|
||||
|
||||
if (!safeList) {
|
||||
const result = readConfigFile(safeListPath, (path: string) => host.readFile(path));
|
||||
safeList = result.config ? createMap<string>(result.config) : EmptySafeList;
|
||||
safeList = result.config ? createMapFromTemplate<string>(result.config) : EmptySafeList;
|
||||
}
|
||||
|
||||
const filesToWatch: string[] = [];
|
||||
@ -107,34 +107,34 @@ namespace ts.JsTyping {
|
||||
// add typings for unresolved imports
|
||||
if (unresolvedImports) {
|
||||
for (const moduleId of unresolvedImports) {
|
||||
const typingName = moduleId in nodeCoreModules ? "node" : moduleId;
|
||||
if (!(typingName in inferredTypings)) {
|
||||
inferredTypings[typingName] = undefined;
|
||||
const typingName = nodeCoreModules.has(moduleId) ? "node" : moduleId;
|
||||
if (!inferredTypings.has(typingName)) {
|
||||
inferredTypings.set(typingName, undefined);
|
||||
}
|
||||
}
|
||||
}
|
||||
// Add the cached typing locations for inferred typings that are already installed
|
||||
for (const name in packageNameToTypingLocation) {
|
||||
if (name in inferredTypings && !inferredTypings[name]) {
|
||||
inferredTypings[name] = packageNameToTypingLocation[name];
|
||||
packageNameToTypingLocation.forEach((typingLocation, name) => {
|
||||
if (inferredTypings.has(name) && inferredTypings.get(name) === undefined) {
|
||||
inferredTypings.set(name, typingLocation);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Remove typings that the user has added to the exclude list
|
||||
for (const excludeTypingName of exclude) {
|
||||
delete inferredTypings[excludeTypingName];
|
||||
inferredTypings.delete(excludeTypingName);
|
||||
}
|
||||
|
||||
const newTypingNames: string[] = [];
|
||||
const cachedTypingPaths: string[] = [];
|
||||
for (const typing in inferredTypings) {
|
||||
if (inferredTypings[typing] !== undefined) {
|
||||
cachedTypingPaths.push(inferredTypings[typing]);
|
||||
inferredTypings.forEach((inferred, typing) => {
|
||||
if (inferred !== undefined) {
|
||||
cachedTypingPaths.push(inferred);
|
||||
}
|
||||
else {
|
||||
newTypingNames.push(typing);
|
||||
}
|
||||
}
|
||||
});
|
||||
return { cachedTypingPaths, newTypingNames, filesToWatch };
|
||||
|
||||
/**
|
||||
@ -146,8 +146,8 @@ namespace ts.JsTyping {
|
||||
}
|
||||
|
||||
for (const typing of typingNames) {
|
||||
if (!(typing in inferredTypings)) {
|
||||
inferredTypings[typing] = undefined;
|
||||
if (!inferredTypings.has(typing)) {
|
||||
inferredTypings.set(typing, undefined);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -189,7 +189,7 @@ namespace ts.JsTyping {
|
||||
const cleanedTypingNames = map(inferredTypingNames, f => f.replace(/((?:\.|-)min(?=\.|$))|((?:-|\.)\d+)/g, ""));
|
||||
|
||||
if (safeList !== EmptySafeList) {
|
||||
mergeTypings(filter(cleanedTypingNames, f => f in safeList));
|
||||
mergeTypings(filter(cleanedTypingNames, f => safeList.has(f)));
|
||||
}
|
||||
|
||||
const hasJsxFile = forEach(fileNames, f => ensureScriptKind(f, getScriptKindFromFileName(f)) === ScriptKind.JSX);
|
||||
@ -236,7 +236,7 @@ namespace ts.JsTyping {
|
||||
}
|
||||
if (packageJson.typings) {
|
||||
const absolutePath = getNormalizedAbsolutePath(packageJson.typings, getDirectoryPath(normalizedFileName));
|
||||
inferredTypings[packageJson.name] = absolutePath;
|
||||
inferredTypings.set(packageJson.name, absolutePath);
|
||||
}
|
||||
else {
|
||||
typingNames.push(packageJson.name);
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
/* @internal */
|
||||
/* @internal */
|
||||
namespace ts.NavigateTo {
|
||||
type RawNavigateToItem = { name: string; fileName: string; matchKind: PatternMatchKind; isCaseSensitive: boolean; declaration: Declaration };
|
||||
|
||||
@ -7,23 +7,21 @@ namespace ts.NavigateTo {
|
||||
let rawItems: RawNavigateToItem[] = [];
|
||||
|
||||
// Search the declarations in all files and output matched NavigateToItem into array of NavigateToItem[]
|
||||
forEach(sourceFiles, sourceFile => {
|
||||
for (const sourceFile of sourceFiles) {
|
||||
cancellationToken.throwIfCancellationRequested();
|
||||
|
||||
if (excludeDtsFiles && fileExtensionIs(sourceFile.fileName, ".d.ts")) {
|
||||
return;
|
||||
continue;
|
||||
}
|
||||
|
||||
const nameToDeclarations = sourceFile.getNamedDeclarations();
|
||||
for (const name in nameToDeclarations) {
|
||||
const declarations = nameToDeclarations[name];
|
||||
forEachEntry(sourceFile.getNamedDeclarations(), (declarations, name) => {
|
||||
if (declarations) {
|
||||
// First do a quick check to see if the name of the declaration matches the
|
||||
// last portion of the (possibly) dotted name they're searching for.
|
||||
let matches = patternMatcher.getMatchesForLastSegmentOfPattern(name);
|
||||
|
||||
if (!matches) {
|
||||
continue;
|
||||
return; // continue to next named declarations
|
||||
}
|
||||
|
||||
for (const declaration of declarations) {
|
||||
@ -32,13 +30,13 @@ namespace ts.NavigateTo {
|
||||
if (patternMatcher.patternContainsDots) {
|
||||
const containers = getContainers(declaration);
|
||||
if (!containers) {
|
||||
return undefined;
|
||||
return true; // Break out of named declarations and go to the next source file.
|
||||
}
|
||||
|
||||
matches = patternMatcher.getMatches(containers, name);
|
||||
|
||||
if (!matches) {
|
||||
continue;
|
||||
return; // continue to next named declarations
|
||||
}
|
||||
}
|
||||
|
||||
@ -47,8 +45,8 @@ namespace ts.NavigateTo {
|
||||
rawItems.push({ name, fileName, matchKind, isCaseSensitive: allMatchesAreCaseSensitive(matches), declaration });
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
// Remove imports when the imported declaration is already in the list and has the same name.
|
||||
rawItems = filter(rawItems, item => {
|
||||
|
||||
@ -248,9 +248,9 @@ namespace ts.NavigationBar {
|
||||
return true;
|
||||
}
|
||||
|
||||
const itemsWithSameName = nameToItems[name];
|
||||
const itemsWithSameName = nameToItems.get(name);
|
||||
if (!itemsWithSameName) {
|
||||
nameToItems[name] = child;
|
||||
nameToItems.set(name, child);
|
||||
return true;
|
||||
}
|
||||
|
||||
@ -268,7 +268,7 @@ namespace ts.NavigationBar {
|
||||
if (tryMerge(itemWithSameName, child)) {
|
||||
return false;
|
||||
}
|
||||
nameToItems[name] = [itemWithSameName, child];
|
||||
nameToItems.set(name, [itemWithSameName, child]);
|
||||
return true;
|
||||
}
|
||||
|
||||
@ -322,7 +322,7 @@ namespace ts.NavigationBar {
|
||||
function compareChildren(child1: NavigationBarNode, child2: NavigationBarNode): number {
|
||||
const name1 = tryGetName(child1.node), name2 = tryGetName(child2.node);
|
||||
if (name1 && name2) {
|
||||
const cmp = localeCompareFix(name1, name2);
|
||||
const cmp = ts.compareStringsCaseInsensitive(name1, name2);
|
||||
return cmp !== 0 ? cmp : navigationBarNodeKind(child1) - navigationBarNodeKind(child2);
|
||||
}
|
||||
else {
|
||||
@ -330,26 +330,6 @@ namespace ts.NavigationBar {
|
||||
}
|
||||
}
|
||||
|
||||
// Intl is missing in Safari, and node 0.10 treats "a" as greater than "B".
|
||||
const localeCompareIsCorrect = ts.collator && ts.collator.compare("a", "B") < 0;
|
||||
const localeCompareFix: (a: string, b: string) => number = localeCompareIsCorrect ? collator.compare : function(a, b) {
|
||||
// This isn't perfect, but it passes all of our tests.
|
||||
for (let i = 0; i < Math.min(a.length, b.length); i++) {
|
||||
const chA = a.charAt(i), chB = b.charAt(i);
|
||||
if (chA === "\"" && chB === "'") {
|
||||
return 1;
|
||||
}
|
||||
if (chA === "'" && chB === "\"") {
|
||||
return -1;
|
||||
}
|
||||
const cmp = ts.compareStrings(chA.toLocaleLowerCase(), chB.toLocaleLowerCase());
|
||||
if (cmp !== 0) {
|
||||
return cmp;
|
||||
}
|
||||
}
|
||||
return a.length - b.length;
|
||||
};
|
||||
|
||||
/**
|
||||
* This differs from getItemName because this is just used for sorting.
|
||||
* We only sort nodes by name that have a more-or-less "direct" name, as opposed to `new()` and the like.
|
||||
|
||||
@ -188,11 +188,11 @@ namespace ts {
}

function getWordSpans(word: string): TextSpan[] {
if (!(word in stringToWordSpans)) {
stringToWordSpans[word] = breakIntoWordSpans(word);
let spans = stringToWordSpans.get(word);
if (!spans) {
stringToWordSpans.set(word, spans = breakIntoWordSpans(word));
}

return stringToWordSpans[word];
return spans;
}

function matchTextChunk(candidate: string, chunk: TextChunk, punctuationStripped: boolean): PatternMatch {

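getWordSpans above switches to a lazy Map-backed memo: compute the spans once per word, then serve the cached value on later calls. A minimal sketch of the same memoization shape; splitIntoWords merely stands in for breakIntoWordSpans and is not from the commit:

const cache = new Map<string, string[]>();

function splitIntoWords(text: string): string[] {
    let words = cache.get(text);
    if (!words) {
        // compute once, then cache for later lookups
        cache.set(text, words = text.split(/[^A-Za-z0-9]+/).filter(w => w.length > 0));
    }
    return words;
}

splitIntoWords("get-word-spans"); // computed: ["get", "word", "spans"]
splitIntoWords("get-word-spans"); // served from the cache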
@ -1,98 +1,92 @@
|
||||
/* @internal */
|
||||
namespace ts.Rename {
|
||||
export function getRenameInfo(typeChecker: TypeChecker, defaultLibFileName: string, getCanonicalFileName: (fileName: string) => string, sourceFile: SourceFile, position: number): RenameInfo {
|
||||
const canonicalDefaultLibName = getCanonicalFileName(ts.normalizePath(defaultLibFileName));
|
||||
|
||||
const getCanonicalDefaultLibName = memoize(() => getCanonicalFileName(ts.normalizePath(defaultLibFileName)));
|
||||
const node = getTouchingWord(sourceFile, position, /*includeJsDocComment*/ true);
|
||||
|
||||
if (node) {
|
||||
if (node.kind === SyntaxKind.Identifier ||
|
||||
node.kind === SyntaxKind.StringLiteral ||
|
||||
isLiteralNameOfPropertyDeclarationOrIndexAccess(node) ||
|
||||
isThis(node)) {
|
||||
const symbol = typeChecker.getSymbolAtLocation(node);
|
||||
|
||||
// Only allow a symbol to be renamed if it actually has at least one declaration.
|
||||
if (symbol) {
|
||||
const declarations = symbol.getDeclarations();
|
||||
if (declarations && declarations.length > 0) {
|
||||
// Disallow rename for elements that are defined in the standard TypeScript library.
|
||||
if (forEach(declarations, isDefinedInLibraryFile)) {
|
||||
return getRenameInfoError(getLocaleSpecificMessage(Diagnostics.You_cannot_rename_elements_that_are_defined_in_the_standard_TypeScript_library));
|
||||
}
|
||||
|
||||
const displayName = stripQuotes(getDeclaredName(typeChecker, symbol, node));
|
||||
const kind = SymbolDisplay.getSymbolKind(typeChecker, symbol, node);
|
||||
if (kind) {
|
||||
return {
|
||||
canRename: true,
|
||||
kind,
|
||||
displayName,
|
||||
localizedErrorMessage: undefined,
|
||||
fullDisplayName: typeChecker.getFullyQualifiedName(symbol),
|
||||
kindModifiers: SymbolDisplay.getSymbolModifiers(symbol),
|
||||
triggerSpan: createTriggerSpanForNode(node, sourceFile)
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
else if (node.kind === SyntaxKind.StringLiteral) {
|
||||
const type = getStringLiteralTypeForNode(<StringLiteral>node, typeChecker);
|
||||
if (type) {
|
||||
if (isDefinedInLibraryFile(node)) {
|
||||
return getRenameInfoError(getLocaleSpecificMessage(Diagnostics.You_cannot_rename_elements_that_are_defined_in_the_standard_TypeScript_library));
|
||||
}
|
||||
else {
|
||||
const displayName = stripQuotes(type.text);
|
||||
return {
|
||||
canRename: true,
|
||||
kind: ScriptElementKind.variableElement,
|
||||
displayName,
|
||||
localizedErrorMessage: undefined,
|
||||
fullDisplayName: displayName,
|
||||
kindModifiers: ScriptElementKindModifier.none,
|
||||
triggerSpan: createTriggerSpanForNode(node, sourceFile)
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return getRenameInfoError(getLocaleSpecificMessage(Diagnostics.You_cannot_rename_this_element));
|
||||
|
||||
function getRenameInfoError(localizedErrorMessage: string): RenameInfo {
|
||||
return {
|
||||
canRename: false,
|
||||
localizedErrorMessage: localizedErrorMessage,
|
||||
displayName: undefined,
|
||||
fullDisplayName: undefined,
|
||||
kind: undefined,
|
||||
kindModifiers: undefined,
|
||||
triggerSpan: undefined
|
||||
};
|
||||
}
|
||||
const renameInfo = node && nodeIsEligibleForRename(node)
|
||||
? getRenameInfoForNode(node, typeChecker, sourceFile, isDefinedInLibraryFile)
|
||||
: undefined;
|
||||
return renameInfo || getRenameInfoError(Diagnostics.You_cannot_rename_this_element);
|
||||
|
||||
function isDefinedInLibraryFile(declaration: Node) {
|
||||
if (defaultLibFileName) {
|
||||
const sourceFile = declaration.getSourceFile();
|
||||
const canonicalName = getCanonicalFileName(ts.normalizePath(sourceFile.fileName));
|
||||
if (canonicalName === canonicalDefaultLibName) {
|
||||
return true;
|
||||
}
|
||||
if (!defaultLibFileName) {
|
||||
return false;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
function createTriggerSpanForNode(node: Node, sourceFile: SourceFile) {
|
||||
let start = node.getStart(sourceFile);
|
||||
let width = node.getWidth(sourceFile);
|
||||
if (node.kind === SyntaxKind.StringLiteral) {
|
||||
// Exclude the quotes
|
||||
start += 1;
|
||||
width -= 2;
|
||||
}
|
||||
return createTextSpan(start, width);
|
||||
const sourceFile = declaration.getSourceFile();
|
||||
const canonicalName = getCanonicalFileName(ts.normalizePath(sourceFile.fileName));
|
||||
return canonicalName === getCanonicalDefaultLibName();
|
||||
}
|
||||
}
|
||||
|
||||
function getRenameInfoForNode(node: Node, typeChecker: TypeChecker, sourceFile: SourceFile, isDefinedInLibraryFile: (declaration: Node) => boolean): RenameInfo | undefined {
|
||||
const symbol = typeChecker.getSymbolAtLocation(node);
|
||||
|
||||
// Only allow a symbol to be renamed if it actually has at least one declaration.
|
||||
if (symbol) {
|
||||
const declarations = symbol.getDeclarations();
|
||||
if (declarations && declarations.length > 0) {
|
||||
// Disallow rename for elements that are defined in the standard TypeScript library.
|
||||
if (some(declarations, isDefinedInLibraryFile)) {
|
||||
return getRenameInfoError(Diagnostics.You_cannot_rename_elements_that_are_defined_in_the_standard_TypeScript_library);
|
||||
}
|
||||
|
||||
const displayName = stripQuotes(getDeclaredName(typeChecker, symbol, node));
|
||||
const kind = SymbolDisplay.getSymbolKind(typeChecker, symbol, node);
|
||||
return kind ? getRenameInfoSuccess(displayName, typeChecker.getFullyQualifiedName(symbol), kind, SymbolDisplay.getSymbolModifiers(symbol), node, sourceFile) : undefined;
|
||||
}
|
||||
}
|
||||
else if (node.kind === SyntaxKind.StringLiteral) {
|
||||
const type = getStringLiteralTypeForNode(<StringLiteral>node, typeChecker);
|
||||
if (type) {
|
||||
if (isDefinedInLibraryFile(node)) {
|
||||
return getRenameInfoError(Diagnostics.You_cannot_rename_elements_that_are_defined_in_the_standard_TypeScript_library);
|
||||
}
|
||||
|
||||
const displayName = stripQuotes(type.text);
|
||||
return getRenameInfoSuccess(displayName, displayName, ScriptElementKind.variableElement, ScriptElementKindModifier.none, node, sourceFile);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function getRenameInfoSuccess(displayName: string, fullDisplayName: string, kind: string, kindModifiers: string, node: Node, sourceFile: SourceFile): RenameInfo {
|
||||
return {
|
||||
canRename: true,
|
||||
kind,
|
||||
displayName,
|
||||
localizedErrorMessage: undefined,
|
||||
fullDisplayName,
|
||||
kindModifiers,
|
||||
triggerSpan: createTriggerSpanForNode(node, sourceFile)
|
||||
};
|
||||
}
|
||||
|
||||
function getRenameInfoError(diagnostic: DiagnosticMessage): RenameInfo {
|
||||
return {
|
||||
canRename: false,
|
||||
localizedErrorMessage: getLocaleSpecificMessage(diagnostic),
|
||||
displayName: undefined,
|
||||
fullDisplayName: undefined,
|
||||
kind: undefined,
|
||||
kindModifiers: undefined,
|
||||
triggerSpan: undefined
|
||||
};
|
||||
}
|
||||
|
||||
function createTriggerSpanForNode(node: Node, sourceFile: SourceFile) {
|
||||
let start = node.getStart(sourceFile);
|
||||
let width = node.getWidth(sourceFile);
|
||||
if (node.kind === SyntaxKind.StringLiteral) {
|
||||
// Exclude the quotes
|
||||
start += 1;
|
||||
width -= 2;
|
||||
}
|
||||
return createTextSpan(start, width);
|
||||
}
|
||||
|
||||
function nodeIsEligibleForRename(node: Node) {
|
||||
return node.kind === SyntaxKind.Identifier || node.kind === SyntaxKind.StringLiteral ||
|
||||
isLiteralNameOfPropertyDeclarationOrIndexAccess(node) ||
|
||||
isThis(node);
|
||||
}
|
||||
}
|
||||
|
||||
@ -193,9 +193,10 @@ namespace ts {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const child = children[0];
|
||||
|
||||
return child.kind < SyntaxKind.FirstNode ? child : child.getFirstToken(sourceFile);
|
||||
const child = ts.find(children, kid => kid.kind < SyntaxKind.FirstJSDocNode || kid.kind > SyntaxKind.LastJSDocNode);
|
||||
return child.kind < SyntaxKind.FirstNode ?
|
||||
child :
|
||||
child.getFirstToken(sourceFile);
|
||||
}
|
||||
|
||||
public getLastToken(sourceFile?: SourceFile): Node {
|
||||
@ -379,7 +380,7 @@ namespace ts {
|
||||
getNumberIndexType(): Type {
|
||||
return this.checker.getIndexTypeOfType(this, IndexKind.Number);
|
||||
}
|
||||
getBaseTypes(): ObjectType[] {
|
||||
getBaseTypes(): BaseType[] {
|
||||
return this.flags & TypeFlags.Object && this.objectFlags & (ObjectFlags.Class | ObjectFlags.Interface)
|
||||
? this.checker.getBaseTypes(<InterfaceType><Type>this)
|
||||
: undefined;
|
||||
@ -518,7 +519,7 @@ namespace ts {
|
||||
}
|
||||
|
||||
private computeNamedDeclarations(): Map<Declaration[]> {
|
||||
const result = createMap<Declaration[]>();
|
||||
const result = createMultiMap<Declaration>();
|
||||
|
||||
forEachChild(this, visit);
|
||||
|
||||
@ -527,12 +528,16 @@ namespace ts {
|
||||
function addDeclaration(declaration: Declaration) {
|
||||
const name = getDeclarationName(declaration);
|
||||
if (name) {
|
||||
multiMapAdd(result, name, declaration);
|
||||
result.add(name, declaration);
|
||||
}
|
||||
}
|
||||
|
||||
function getDeclarations(name: string) {
|
||||
return result[name] || (result[name] = []);
|
||||
let declarations = result.get(name);
|
||||
if (!declarations) {
|
||||
result.set(name, declarations = []);
|
||||
}
|
||||
return declarations;
|
||||
}
|
||||
|
||||
function getDeclarationName(declaration: Declaration) {
|
||||
@ -1399,25 +1404,25 @@ namespace ts {
|
||||
}
|
||||
|
||||
function findRenameLocations(fileName: string, position: number, findInStrings: boolean, findInComments: boolean): RenameLocation[] {
|
||||
const referencedSymbols = findReferencedSymbols(fileName, position, findInStrings, findInComments);
|
||||
const referencedSymbols = findReferencedSymbols(fileName, position, findInStrings, findInComments, /*isForRename*/true);
|
||||
return FindAllReferences.convertReferences(referencedSymbols);
|
||||
}
|
||||
|
||||
function getReferencesAtPosition(fileName: string, position: number): ReferenceEntry[] {
|
||||
const referencedSymbols = findReferencedSymbols(fileName, position, /*findInStrings*/ false, /*findInComments*/ false);
|
||||
const referencedSymbols = findReferencedSymbols(fileName, position, /*findInStrings*/ false, /*findInComments*/ false, /*isForRename*/false);
|
||||
return FindAllReferences.convertReferences(referencedSymbols);
|
||||
}
|
||||
|
||||
function findReferences(fileName: string, position: number): ReferencedSymbol[] {
|
||||
const referencedSymbols = findReferencedSymbols(fileName, position, /*findInStrings*/ false, /*findInComments*/ false);
|
||||
const referencedSymbols = findReferencedSymbols(fileName, position, /*findInStrings*/ false, /*findInComments*/ false, /*isForRename*/false);
|
||||
|
||||
// Only include referenced symbols that have a valid definition.
|
||||
return filter(referencedSymbols, rs => !!rs.definition);
|
||||
}
|
||||
|
||||
function findReferencedSymbols(fileName: string, position: number, findInStrings: boolean, findInComments: boolean): ReferencedSymbol[] {
|
||||
function findReferencedSymbols(fileName: string, position: number, findInStrings: boolean, findInComments: boolean, isForRename: boolean): ReferencedSymbol[] {
|
||||
synchronizeHostData();
|
||||
return FindAllReferences.findReferencedSymbols(program.getTypeChecker(), cancellationToken, program.getSourceFiles(), getValidSourceFile(fileName), position, findInStrings, findInComments);
|
||||
return FindAllReferences.findReferencedSymbols(program.getTypeChecker(), cancellationToken, program.getSourceFiles(), getValidSourceFile(fileName), position, findInStrings, findInComments, isForRename);
|
||||
}
|
||||
|
||||
/// NavigateTo
|
||||
@ -1956,7 +1961,7 @@ namespace ts {
|
||||
function walk(node: Node) {
|
||||
switch (node.kind) {
|
||||
case SyntaxKind.Identifier:
|
||||
nameTable[(<Identifier>node).text] = nameTable[(<Identifier>node).text] === undefined ? node.pos : -1;
|
||||
setNameTable((<Identifier>node).text, node);
|
||||
break;
|
||||
case SyntaxKind.StringLiteral:
|
||||
case SyntaxKind.NumericLiteral:
|
||||
@ -1968,8 +1973,7 @@ namespace ts {
|
||||
node.parent.kind === SyntaxKind.ExternalModuleReference ||
|
||||
isArgumentOfElementAccessExpression(node) ||
|
||||
isLiteralComputedPropertyDeclarationName(node)) {
|
||||
|
||||
nameTable[(<LiteralExpression>node).text] = nameTable[(<LiteralExpression>node).text] === undefined ? node.pos : -1;
|
||||
setNameTable((<LiteralExpression>node).text, node);
|
||||
}
|
||||
break;
|
||||
default:
|
||||
@ -1981,6 +1985,10 @@ namespace ts {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function setNameTable(text: string, node: ts.Node): void {
|
||||
nameTable.set(text, nameTable.get(text) === undefined ? node.pos : -1);
|
||||
}
|
||||
}
|
||||
|
||||
function isArgumentOfElementAccessExpression(node: Node) {
|
||||
|
||||
@ -385,7 +385,10 @@ namespace ts {
|
||||
if (settingsJson == null || settingsJson == "") {
|
||||
throw Error("LanguageServiceShimHostAdapter.getCompilationSettings: empty compilationSettings");
|
||||
}
|
||||
return <CompilerOptions>JSON.parse(settingsJson);
|
||||
const compilerOptions = <CompilerOptions>JSON.parse(settingsJson);
|
||||
// permit language service to handle all files (filtering should be performed on the host side)
|
||||
compilerOptions.allowNonTsExtensions = true;
|
||||
return compilerOptions;
|
||||
}
|
||||
|
||||
public getScriptFileNames(): string[] {
|
||||
@ -1061,12 +1064,6 @@ namespace ts {
|
||||
const compilerOptions = <CompilerOptions>JSON.parse(compilerOptionsJson);
|
||||
const result = resolveModuleName(moduleName, normalizeSlashes(fileName), compilerOptions, this.host);
|
||||
const resolvedFileName = result.resolvedModule ? result.resolvedModule.resolvedFileName : undefined;
|
||||
if (resolvedFileName && !compilerOptions.allowJs && fileExtensionIs(resolvedFileName, ".js")) {
|
||||
return {
|
||||
resolvedFileName: undefined,
|
||||
failedLookupLocations: []
|
||||
};
|
||||
}
|
||||
return {
|
||||
resolvedFileName,
|
||||
failedLookupLocations: result.failedLookupLocations
|
||||
|
||||
@ -237,7 +237,7 @@ namespace ts.SignatureHelp {
|
||||
const typeChecker = program.getTypeChecker();
|
||||
for (const sourceFile of program.getSourceFiles()) {
|
||||
const nameToDeclarations = sourceFile.getNamedDeclarations();
|
||||
const declarations = nameToDeclarations[name.text];
|
||||
const declarations = nameToDeclarations.get(name.text);
|
||||
|
||||
if (declarations) {
|
||||
for (const declaration of declarations) {
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
namespace ts {
|
||||
namespace ts {
|
||||
export interface TranspileOptions {
|
||||
compilerOptions?: CompilerOptions;
|
||||
fileName?: string;
|
||||
@ -63,7 +63,7 @@ namespace ts {
|
||||
}
|
||||
|
||||
if (transpileOptions.renamedDependencies) {
|
||||
sourceFile.renamedDependencies = createMap(transpileOptions.renamedDependencies);
|
||||
sourceFile.renamedDependencies = createMapFromTemplate(transpileOptions.renamedDependencies);
|
||||
}
|
||||
|
||||
const newLine = getNewLineCharacter(options);
|
||||
@ -126,7 +126,7 @@ namespace ts {
|
||||
function fixupCompilerOptions(options: CompilerOptions, diagnostics: Diagnostic[]): CompilerOptions {
|
||||
// Lazily create this value to fix module loading errors.
|
||||
commandLineOptionsStringToEnum = commandLineOptionsStringToEnum || <CommandLineOptionOfCustomType[]>filter(optionDeclarations, o =>
|
||||
typeof o.type === "object" && !forEachProperty(o.type, v => typeof v !== "number"));
|
||||
typeof o.type === "object" && !forEachEntry(o.type, v => typeof v !== "number"));
|
||||
|
||||
options = clone(options);
|
||||
|
||||
@ -142,7 +142,7 @@ namespace ts {
|
||||
options[opt.name] = parseCustomTypeOption(opt, value, diagnostics);
|
||||
}
|
||||
else {
|
||||
if (!forEachProperty(opt.type, v => v === value)) {
|
||||
if (!forEachEntry(opt.type, v => v === value)) {
|
||||
// Supplied value isn't a valid enum value.
|
||||
diagnostics.push(createCompilerDiagnosticForInvalidCustomType(opt));
|
||||
}
|
||||
|
||||
@ -3,8 +3,7 @@
|
||||
"compilerOptions": {
|
||||
"removeComments": false,
|
||||
"outFile": "../../built/local/typescriptServices.js",
|
||||
"declaration": true,
|
||||
"types": []
|
||||
"declaration": true
|
||||
},
|
||||
"files": [
|
||||
"../compiler/core.ts",
|
||||
@ -84,6 +83,7 @@
|
||||
"codefixes/fixClassDoesntImplementInheritedAbstractMember.ts",
|
||||
"codefixes/fixClassSuperMustPrecedeThisAccess.ts",
|
||||
"codefixes/fixConstructorForDerivedNeedSuperCall.ts",
|
||||
"codefixes/fixForgottenThisPropertyAccess.ts",
|
||||
"codefixes/fixes.ts",
|
||||
"codefixes/helpers.ts",
|
||||
"codefixes/importFixes.ts",
|
||||
|
||||
@ -33,7 +33,7 @@ namespace ts {
|
||||
getConstructSignatures(): Signature[];
|
||||
getStringIndexType(): Type;
|
||||
getNumberIndexType(): Type;
|
||||
getBaseTypes(): ObjectType[];
|
||||
getBaseTypes(): BaseType[];
|
||||
getNonNullableType(): Type;
|
||||
}
|
||||
|
||||
@ -495,7 +495,7 @@ namespace ts {
|
||||
|
||||
export interface SymbolDisplayPart {
|
||||
text: string;
|
||||
kind: string;
|
||||
kind: string; // A ScriptElementKind
|
||||
}
|
||||
|
||||
export interface QuickInfo {
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
// These utilities are common to multiple language service features.
|
||||
// These utilities are common to multiple language service features.
|
||||
/* @internal */
|
||||
namespace ts {
|
||||
export const scanner: Scanner = createScanner(ScriptTarget.Latest, /*skipTrivia*/ true);
|
||||
@ -71,7 +71,10 @@ namespace ts {
|
||||
}
|
||||
|
||||
export function getMeaningFromLocation(node: Node): SemanticMeaning {
|
||||
if (node.parent.kind === SyntaxKind.ExportAssignment) {
|
||||
if (node.kind === SyntaxKind.SourceFile) {
|
||||
return SemanticMeaning.Value;
|
||||
}
|
||||
else if (node.parent.kind === SyntaxKind.ExportAssignment) {
|
||||
return SemanticMeaning.Value | SemanticMeaning.Type | SemanticMeaning.Namespace;
|
||||
}
|
||||
else if (isInRightSideOfImport(node)) {
|
||||
@ -1116,6 +1119,22 @@ namespace ts {
|
||||
export function createTextSpanFromNode(node: Node, sourceFile?: SourceFile): TextSpan {
|
||||
return createTextSpanFromBounds(node.getStart(sourceFile), node.getEnd());
|
||||
}
|
||||
|
||||
export function isTypeKeyword(kind: SyntaxKind): boolean {
|
||||
switch (kind) {
|
||||
case SyntaxKind.AnyKeyword:
|
||||
case SyntaxKind.BooleanKeyword:
|
||||
case SyntaxKind.NeverKeyword:
|
||||
case SyntaxKind.NumberKeyword:
|
||||
case SyntaxKind.ObjectKeyword:
|
||||
case SyntaxKind.StringKeyword:
|
||||
case SyntaxKind.SymbolKeyword:
|
||||
case SyntaxKind.VoidKeyword:
|
||||
return true;
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Display-part writer helpers
|
||||
@ -1300,7 +1319,7 @@ namespace ts {
|
||||
return name;
|
||||
}
|
||||
|
||||
export function isImportOrExportSpecifierName(location: Node): boolean {
|
||||
export function isImportOrExportSpecifierName(location: Node): location is Identifier {
|
||||
return location.parent &&
|
||||
(location.parent.kind === SyntaxKind.ImportSpecifier || location.parent.kind === SyntaxKind.ExportSpecifier) &&
|
||||
(<ImportOrExportSpecifier>location.parent).propertyName === location;
|
||||
|
||||
@ -1,5 +1,6 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"lib": ["es5", "scripthost"],
|
||||
"noEmitOnError": true,
|
||||
"noImplicitAny": true,
|
||||
"noImplicitThis": true,
|
||||
@ -9,6 +10,7 @@
|
||||
"preserveConstEnums": true,
|
||||
"stripInternal": true,
|
||||
"sourceMap": true,
|
||||
"target": "es5"
|
||||
"target": "es5",
|
||||
"types": []
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff