Merge branch 'master' into fixRunParallel

Yui T 2017-06-16 11:09:34 -07:00
commit 4b21358582
517 changed files with 15859 additions and 4137 deletions

View File

@ -248,4 +248,23 @@ rdosanjh <me@rajdeep.io> # Raj Dosanjh
gdh1995 <gdh1995@qq.com> # Dahan Gong
cedvdb <cedvandenbosch@gmail.com> # @cedvdb
kpreisser <kpreisser@users.noreply.github.com> # K. Preißer
e-cloud <saintscott119@gmail.com> # @e-cloud
e-cloud <saintscott119@gmail.com> # @e-cloud
Andrew Casey <amcasey@users.noreply.github.com> Andrew Casey <andrew.casey@microsoft.com>
Andrew Stegmaier <andrew.stegmaier@gmail.com>
Benny Neugebauer <bn@bennyn.de>
Blaine Bublitz <blaine.bublitz@gmail.com>
Charles Pierce <cpierce.grad@gmail.com>
Daniel Król <daniel@krol.me>
Diogo Franco (Kovensky) <diogomfranco@gmail.com>
Donald Pipowitch <pipo@senaeh.de>
Halasi Tamás <trusted.tomato@gmail.com>
Ika <ikatyang@gmail.com>
Joe Chung <joechung@microsoft.com>
Kate Miháliková <kate@katemihalikova.cz>
Mohsen Azimi <mazimi@lyft.com>
Noel Varanda <ncwvaranda@gmail.com>
Reiner Dolp <reiner-dolp@users.noreply.github.com>
t_ <t-mrt@users.noreply.github.com> # @t_
TravCav <xurrux@gmail.com> # @TravCav
Vladimir Kurchatkin <vladimir.kurchatkin@gmail.com>
William Orr <will@worrbase.com>

View File

@ -16,9 +16,6 @@ matrix:
branches:
only:
- master
- release-2.1
- release-2.2
- release-2.3
install:
- npm uninstall typescript --no-save

View File

@ -15,7 +15,9 @@ TypeScript is authored by:
* Anders Hejlsberg
* Andreas Martin
* Andrej Baran
* Andrew Casey
* Andrew Ochsner
* Andrew Stegmaier
* Andrew Z Allen
* András Parditka
* Andy Hanson
@ -31,7 +33,9 @@ TypeScript is authored by:
* Ben Duffield
* Ben Mosher
* Benjamin Bock
* Benny Neugebauer
* Bill Ticehurst
* Blaine Bublitz
* Blake Embrey
* @bootstraponline
* Bowden Kelly
@ -39,6 +43,7 @@ TypeScript is authored by:
* Bryan Forbes
* Caitlin Potter
* @cedvdb
* Charles Pierce
* Charly POLY
* Chris Bubernak
* Christophe Vidal
@ -52,6 +57,7 @@ TypeScript is authored by:
* Dan Corder
* Dan Quirk
* Daniel Hollocher
* Daniel Król
* Daniel Lehenbauer
* Daniel Rosenwasser
* David Kmenta
@ -60,9 +66,11 @@ TypeScript is authored by:
* David Souther
* Denis Nedelyaev
* Dick van den Brink
* Diogo Franco (Kovensky)
* Dirk Bäumer
* Dirk Holtwick
* Dom Chen
* Donald Pipowitch
* Doug Ilijev
* @e-cloud
* Elisée Maurer
@ -89,12 +97,14 @@ TypeScript is authored by:
* Guilherme Oenning
* Guillaume Salles
* Guy Bedford
* Halasi Tamás
* Harald Niesche
* Hendrik Liebau
* Herrington Darkholme
* Homa Wong
* Iain Monro
* Igor Novozhilov
* Ika
* Ingvar Stepanyan
* Isiah Meadows
* Ivo Gabe de Wolff
@ -111,6 +121,7 @@ TypeScript is authored by:
* Jeffrey Morlan
* Jesse Schalken
* Jiri Tobisek
* Joe Chung
* Joel Day
* Joey Wilson
* Johannes Rieken
@ -131,6 +142,7 @@ TypeScript is authored by:
* K. Preißer
* Kagami Sascha Rosylight
* Kanchalai Tanglertsampan
* Kate Miháliková
* Keith Mashinter
* Ken Howard
* Kenji Imamula
@ -159,6 +171,7 @@ TypeScript is authored by:
* Mike Busyrev
* Mine Starks
* Mohamed Hegazy
* Mohsen Azimi
* Myles Megyesi
* Natalie Coley
* Nathan Shively-Sanders
@ -166,6 +179,7 @@ TypeScript is authored by:
* Nicolas Henry
* Nima Zahedi
* Noah Chen
* Noel Varanda
* Noj Vek
* Oleg Mihailik
* Oleksandr Chekhovskyi
@ -186,6 +200,7 @@ TypeScript is authored by:
* Punya Biswal
* Rado Kirov
* Raj Dosanjh
* Reiner Dolp
* Richard Karmazín
* Richard Knoll
* Richard Sentino
@ -213,6 +228,7 @@ TypeScript is authored by:
* Sudheesh Singanamalla
* Sébastien Arod
* @T18970237136
* @t_
* Tarik Ozket
* Tetsuharu Ohzeki
* Thomas Loubiou
@ -225,13 +241,16 @@ TypeScript is authored by:
* togru
* Tomas Grubliauskas
* Torben Fitschen
* @TravCav
* TruongSinh Tran-Nguyen
* Vadi Taslim
* Vidar Tonaas Fauske
* Viktor Zozulyak
* Vilic Vane
* Vladimir Kurchatkin
* Vladimir Matveev
* Wesley Wigham
* William Orr
* York Yao
* @yortus
* Yuichi Nukiyama

View File

@ -39,25 +39,25 @@ Error.stackTraceLimit = 1000;
const cmdLineOptions = minimist(process.argv.slice(2), {
boolean: ["debug", "inspect", "light", "colors", "lint", "soft"],
string: ["browser", "tests", "host", "reporter", "stackTraceLimit"],
string: ["browser", "tests", "host", "reporter", "stackTraceLimit", "timeout"],
alias: {
b: "browser",
d: "debug",
t: "tests",
test: "tests",
d: "debug", "debug-brk": "debug",
i: "inspect", "inspect-brk": "inspect",
t: "tests", test: "tests",
r: "reporter",
color: "colors",
f: "files",
file: "files",
c: "colors", color: "colors",
f: "files", file: "files",
w: "workers",
},
default: {
soft: false,
colors: process.env.colors || process.env.color || true,
debug: process.env.debug || process.env.d,
inspect: process.env.inspect,
debug: process.env.debug || process.env["debug-brk"] || process.env.d,
inspect: process.env.inspect || process.env["inspect-brk"] || process.env.i,
host: process.env.TYPESCRIPT_HOST || process.env.host || "node",
browser: process.env.browser || process.env.b || "IE",
timeout: process.env.timeout || 40000,
tests: process.env.test || process.env.tests || process.env.t,
light: process.env.light || false,
reporter: process.env.reporter || process.env.r,
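As an aside, a minimal sketch of how these minimist options resolve for a hypothetical invocation; the flag names are taken from the block above, while the sample argv and the require style are assumptions:

import minimist = require("minimist");

// Hypothetical argv, e.g. from `gulp runtests --inspect-brk -t Symbol --timeout 100000`
const argv = ["--inspect-brk", "-t", "Symbol", "--timeout", "100000"];
const opts = minimist(argv, {
    boolean: ["debug", "inspect"],
    string: ["tests", "timeout"],
    alias: { i: "inspect", "inspect-brk": "inspect", t: "tests", test: "tests" },
    default: { timeout: 40000 },
});
// opts.inspect === true, opts.tests === "Symbol", opts.timeout === "100000" (string options stay strings)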
@ -594,11 +594,11 @@ function restoreSavedNodeEnv() {
process.env.NODE_ENV = savedNodeEnv;
}
let testTimeout = 40000;
function runConsoleTests(defaultReporter: string, runInParallel: boolean, done: (e?: any) => void) {
const lintFlag = cmdLineOptions["lint"];
cleanTestDirs((err) => {
if (err) { console.error(err); failWithStatus(err, 1); }
let testTimeout = cmdLineOptions["timeout"];
const debug = cmdLineOptions["debug"];
const inspect = cmdLineOptions["inspect"];
const tests = cmdLineOptions["tests"];
@ -637,12 +637,6 @@ function runConsoleTests(defaultReporter: string, runInParallel: boolean, done:
// default timeout is 2sec which really should be enough, but maybe we just need a small amount longer
if (!runInParallel) {
const args = [];
if (inspect) {
args.push("--inspect");
}
if (inspect || debug) {
args.push("--debug-brk");
}
args.push("-R", reporter);
if (tests) {
args.push("-g", `"${tests}"`);
@ -653,7 +647,15 @@ function runConsoleTests(defaultReporter: string, runInParallel: boolean, done:
else {
args.push("--no-colors");
}
args.push("-t", testTimeout);
if (inspect) {
args.unshift("--inspect-brk");
}
else if (debug) {
args.unshift("--debug-brk");
}
else {
args.push("-t", testTimeout);
}
args.push(run);
setNodeEnvToDevelopment();
exec(mocha, args, lintThenFinish, function(e, status) {
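A distilled sketch of the argument-ordering rule above (an illustrative helper, not the Gulpfile's actual code): node debugging flags go to the front of the argument list, and the mocha timeout is only applied when no debugger is attached, so a paused session is not killed by the timeout.

function buildMochaArgs(reporter: string, testTimeout: string, inspect: boolean, debug: boolean): string[] {
    const args = ["-R", reporter];
    if (inspect) {
        args.unshift("--inspect-brk");   // modern inspector protocol, break before user code runs
    }
    else if (debug) {
        args.unshift("--debug-brk");     // legacy debugger flag
    }
    else {
        args.push("-t", testTimeout);    // enforce the timeout only when not debugging
    }
    return args;
}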
@ -745,7 +747,7 @@ declare module "convert-source-map" {
export function fromSource(source: string, largeSource?: boolean): SourceMapConverter;
}
gulp.task("browserify", "Runs browserify on run.js to produce a file suitable for running tests in the browser", [servicesFile], (done) => {
gulp.task("browserify", "Runs browserify on run.js to produce a file suitable for running tests in the browser", [servicesFile, run], (done) => {
const testProject = tsc.createProject("src/harness/tsconfig.json", getCompilerSettings({ outFile: "../../built/local/bundle.js" }, /*useBuiltCompiler*/ true));
return testProject.src()
.pipe(newer("built/local/bundle.js"))
@ -838,6 +840,7 @@ gulp.task("runtests-browser", "Runs the tests using the built run.js file like '
});
gulp.task("generate-code-coverage", "Generates code coverage data via istanbul", ["tests"], (done) => {
const testTimeout = cmdLineOptions["timeout"];
exec("istanbul", ["cover", "node_modules/mocha/bin/_mocha", "--", "-R", "min", "-t", testTimeout.toString(), run], done, done);
});

View File

@ -25,6 +25,8 @@ var LKGDirectory = "lib/";
var copyright = "CopyrightNotice.txt";
var thirdParty = "ThirdPartyNoticeText.txt";
var defaultTestTimeout = 40000;
// add node_modules to path so we don't need global modules, prefer the modules by adding them first
var nodeModulesPathPrefix = path.resolve("./node_modules/.bin/") + path.delimiter;
if (process.env.path !== undefined) {
@ -74,6 +76,10 @@ function measure(marker) {
console.log("travis_time:end:" + marker.id + ":start=" + toNs(marker.stamp) + ",finish=" + toNs(total) + ",duration=" + toNs(diff) + "\r");
}
function removeConstModifierFromEnumDeclarations(text) {
return text.replace(/^(\s*)(export )?const enum (\S+) {(\s*)$/gm, '$1$2enum $3 {$4');
}
var compilerSources = filesFromConfig("./src/compiler/tsconfig.json");
var servicesSources = filesFromConfig("./src/services/tsconfig.json");
var cancellationTokenSources = filesFromConfig(path.join(serverDirectory, "cancellationToken/tsconfig.json"));
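To illustrate the new helper above, a small self-contained sketch of what its regex does to a declaration-file line (the sample input is made up):

function removeConstModifierFromEnumDeclarations(text: string): string {
    return text.replace(/^(\s*)(export )?const enum (\S+) {(\s*)$/gm, '$1$2enum $3 {$4');
}

const sample = "    export const enum SyntaxKind {";
console.log(removeConstModifierFromEnumDeclarations(sample));
// prints "    export enum SyntaxKind {"; only the `const` modifier is dropped, everything else is preserved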
@ -551,7 +557,7 @@ compileFile(servicesFile, servicesSources, [builtLocalDirectory, copyright].conc
// Standalone/web definition file using global 'ts' namespace
jake.cpR(standaloneDefinitionsFile, nodeDefinitionsFile, { silent: true });
var definitionFileContents = fs.readFileSync(nodeDefinitionsFile).toString();
definitionFileContents = definitionFileContents.replace(/^(\s*)(export )?const enum (\S+) {(\s*)$/gm, '$1$2enum $3 {$4');
definitionFileContents = removeConstModifierFromEnumDeclarations(definitionFileContents)
fs.writeFileSync(standaloneDefinitionsFile, definitionFileContents);
// Official node package definition file, pointed to by 'typings' in package.json
@ -611,6 +617,7 @@ compileFile(
fs.readFileSync(tsserverLibraryDefinitionFile).toString() +
"\r\nexport = ts;" +
"\r\nexport as namespace ts;";
tsserverLibraryDefinitionFileContents = removeConstModifierFromEnumDeclarations(tsserverLibraryDefinitionFileContents);
fs.writeFileSync(tsserverLibraryDefinitionFile, tsserverLibraryDefinitionFileContents);
});
@ -800,9 +807,10 @@ function runConsoleTests(defaultReporter, runInParallel) {
cleanTestDirs();
}
var debug = process.env.debug || process.env.d;
var inspect = process.env.inspect;
tests = process.env.test || process.env.tests || process.env.t;
var debug = process.env.debug || process.env["debug-brk"] || process.env.d;
var inspect = process.env.inspect || process.env["inspect-brk"] || process.env.i;
var testTimeout = process.env.timeout || defaultTestTimeout;
var tests = process.env.test || process.env.tests || process.env.t;
var light = process.env.light || false;
var stackTraceLimit = process.env.stackTraceLimit;
var testConfigFile = 'test.config';
@ -820,7 +828,7 @@ function runConsoleTests(defaultReporter, runInParallel) {
} while (fs.existsSync(taskConfigsFolder));
fs.mkdirSync(taskConfigsFolder);
workerCount = process.env.workerCount || os.cpus().length;
workerCount = process.env.workerCount || process.env.p || os.cpus().length;
}
if (tests || light || taskConfigsFolder) {
@ -841,12 +849,6 @@ function runConsoleTests(defaultReporter, runInParallel) {
if (!runInParallel) {
var startTime = mark();
var args = [];
if (inspect) {
args.push("--inspect");
}
if (inspect || debug) {
args.push("--debug-brk");
}
args.push("-R", reporter);
if (tests) {
args.push("-g", `"${tests}"`);
@ -860,7 +862,15 @@ function runConsoleTests(defaultReporter, runInParallel) {
if (bail) {
args.push("--bail");
}
args.push("-t", testTimeout);
if (inspect) {
args.unshift("--inspect-brk");
}
else if (debug) {
args.unshift("--debug-brk");
}
else {
args.push("-t", testTimeout);
}
args.push(run);
var cmd = "mocha " + args.join(" ");
@ -925,7 +935,6 @@ function runConsoleTests(defaultReporter, runInParallel) {
}
}
var testTimeout = 20000;
desc("Runs all the tests in parallel using the built run.js file. Optional arguments are: t[ests]=category1|category2|... d[ebug]=true.");
task("runtests-parallel", ["build-rules", "tests", builtLocalDirectory], function () {
runConsoleTests('min', /*runInParallel*/ true);
@ -938,6 +947,7 @@ task("runtests", ["build-rules", "tests", builtLocalDirectory], function() {
desc("Generates code coverage data via instanbul");
task("generate-code-coverage", ["tests", builtLocalDirectory], function () {
var testTimeout = process.env.timeout || defaultTestTimeout;
var cmd = 'istanbul cover node_modules/mocha/bin/_mocha -- -R min -t ' + testTimeout + ' ' + run;
console.log(cmd);
exec(cmd);
@ -949,7 +959,7 @@ var nodeServerInFile = "tests/webTestServer.ts";
compileFile(nodeServerOutFile, [nodeServerInFile], [builtLocalDirectory, tscFile], [], /*useBuiltCompiler:*/ true, { noOutFile: true, lib: "es6" });
desc("Runs browserify on run.js to produce a file suitable for running tests in the browser");
task("browserify", ["tests", builtLocalDirectory, nodeServerOutFile], function() {
task("browserify", ["tests", run, builtLocalDirectory, nodeServerOutFile], function() {
var cmd = 'browserify built/local/run.js -t ./scripts/browserify-optional -d -o built/local/bundle.js';
exec(cmd);
}, { async: true });

View File

@ -2,7 +2,7 @@
<!-- QUESTIONS: This is not a general support forum! Ask Qs at http://stackoverflow.com/questions/tagged/typescript -->
<!-- SUGGESTIONS: See https://github.com/Microsoft/TypeScript-wiki/blob/master/Writing-Good-Design-Proposals.md -->
**TypeScript Version:** 2.2.1 / nightly (2.2.0-dev.201xxxxx)
**TypeScript Version:** 2.4.0 / nightly (2.5.0-dev.201xxxxx)
**Code**

View File

@ -2,7 +2,7 @@
"name": "typescript",
"author": "Microsoft Corp.",
"homepage": "http://typescriptlang.org/",
"version": "2.4.0",
"version": "2.5.0",
"license": "Apache-2.0",
"description": "TypeScript is a language for application scale JavaScript development",
"keywords": [

View File

@ -113,7 +113,7 @@ class DeclarationsWalker {
}
}
function generateProtocolFile(protocolTs: string, typeScriptServicesDts: string): string {
function writeProtocolFile(outputFile: string, protocolTs: string, typeScriptServicesDts: string) {
const options = { target: ts.ScriptTarget.ES5, declaration: true, noResolve: true, types: <string[]>[], stripInternal: true };
/**
@ -163,14 +163,17 @@ function generateProtocolFile(protocolTs: string, typeScriptServicesDts: string)
protocolDts += "\nimport protocol = ts.server.protocol;";
protocolDts += "\nexport = protocol;";
protocolDts += "\nexport as namespace protocol;";
// do sanity check and try to compile generated text as standalone program
const sanityCheckProgram = getProgramWithProtocolText(protocolDts, /*includeTypeScriptServices*/ false);
const diagnostics = [...sanityCheckProgram.getSyntacticDiagnostics(), ...sanityCheckProgram.getSemanticDiagnostics(), ...sanityCheckProgram.getGlobalDiagnostics()];
ts.sys.writeFile(outputFile, protocolDts);
if (diagnostics.length) {
const flattenedDiagnostics = diagnostics.map(d => `${ts.flattenDiagnosticMessageText(d.messageText, "\n")} at ${d.file.fileName} line ${d.start}`).join("\n");
throw new Error(`Unexpected errors during sanity check: ${flattenedDiagnostics}`);
}
return protocolDts;
}
if (process.argv.length < 5) {
@ -181,5 +184,4 @@ if (process.argv.length < 5) {
const protocolTs = process.argv[2];
const typeScriptServicesDts = process.argv[3];
const outputFile = process.argv[4];
const generatedProtocolDts = generateProtocolFile(protocolTs, typeScriptServicesDts);
ts.sys.writeFile(outputFile, generatedProtocolDts);
writeProtocolFile(outputFile, protocolTs, typeScriptServicesDts);

View File

@ -18,7 +18,7 @@ export class Rule extends Lint.Rules.AbstractRule {
function walk(ctx: Lint.WalkContext<void>, checkCatch: boolean, checkElse: boolean): void {
const { sourceFile } = ctx;
function recur(node: ts.Node): void {
ts.forEachChild(sourceFile, function recur(node) {
switch (node.kind) {
case ts.SyntaxKind.IfStatement:
checkIf(node as ts.IfStatement);
@ -28,7 +28,7 @@ function walk(ctx: Lint.WalkContext<void>, checkCatch: boolean, checkElse: boole
break;
}
ts.forEachChild(node, recur);
}
});
function checkIf(node: ts.IfStatement): void {
const { thenStatement, elseStatement } = node;

View File

@ -595,7 +595,19 @@ namespace ts {
// Binding of JsDocComment should be done before the current block scope container changes,
// because the scope of JsDocComment should not be affected by whether the current node is a
// container or not.
forEach(node.jsDoc, bind);
if (node.jsDoc) {
if (isInJavaScriptFile(node)) {
for (const j of node.jsDoc) {
bind(j);
}
}
else {
for (const j of node.jsDoc) {
setParentPointers(node, j);
}
}
}
if (checkUnreachable(node)) {
bindEachChild(node);
return;
@ -612,7 +624,7 @@ namespace ts {
break;
case SyntaxKind.ForInStatement:
case SyntaxKind.ForOfStatement:
bindForInOrForOfStatement(<ForInStatement | ForOfStatement>node);
bindForInOrForOfStatement(<ForInOrOfStatement>node);
break;
case SyntaxKind.IfStatement:
bindIfStatement(<IfStatement>node);
@ -950,7 +962,7 @@ namespace ts {
currentFlow = finishFlowLabel(postLoopLabel);
}
function bindForInOrForOfStatement(node: ForInStatement | ForOfStatement): void {
function bindForInOrForOfStatement(node: ForInOrOfStatement): void {
const preLoopLabel = createLoopLabel();
const postLoopLabel = createBranchLabel();
addAntecedent(preLoopLabel, currentFlow);
@ -1328,7 +1340,7 @@ namespace ts {
function bindVariableDeclarationFlow(node: VariableDeclaration) {
bindEachChild(node);
if (node.initializer || node.parent.parent.kind === SyntaxKind.ForInStatement || node.parent.parent.kind === SyntaxKind.ForOfStatement) {
if (node.initializer || isForInOrOfStatement(node.parent.parent)) {
bindInitializedVariableFlow(node);
}
}
@ -1521,7 +1533,7 @@ namespace ts {
// All the children of these container types are never visible through another
// symbol (i.e. through another symbol's 'exports' or 'members'). Instead,
// they're only accessed 'lexically' (i.e. from code that exists underneath
// their container in the tree. To accomplish this, we simply add their declared
// their container in the tree). To accomplish this, we simply add their declared
// symbol to the 'locals' of the container. These symbols can then be found as
// the type checker walks up the containers, checking them for matching names.
return declareSymbol(container.locals, /*parent*/ undefined, node, symbolFlags, symbolExcludes);
@ -1903,7 +1915,7 @@ namespace ts {
// Here the current node is "foo", which is a container, but the scope of "MyType" should
// not be inside "foo". Therefore we always bind @typedef before binding the parent node,
// and skip binding this tag later when binding all the other jsdoc tags.
bindJSDocTypedefTagIfAny(node);
if (isInJavaScriptFile(node)) bindJSDocTypedefTagIfAny(node);
// First we bind declaration nodes to a symbol if possible. We'll both create a symbol
// and then potentially add the symbol to an appropriate symbol table. Possible
@ -1991,7 +2003,7 @@ namespace ts {
// for typedef type names with namespaces, bind the new jsdoc type symbol here
// because it requires all containing namespaces to be in effect, namely the
// current "blockScopeContainer" needs to be set to its immediate namespace parent.
if (isInJavaScriptFile(node) && (<Identifier>node).isInJSDocNamespace) {
if ((<Identifier>node).isInJSDocNamespace) {
let parentNode = node.parent;
while (parentNode && parentNode.kind !== SyntaxKind.JSDocTypedefTag) {
parentNode = parentNode.parent;
@ -2136,7 +2148,6 @@ namespace ts {
return bindEnumDeclaration(<EnumDeclaration>node);
case SyntaxKind.ModuleDeclaration:
return bindModuleDeclaration(<ModuleDeclaration>node);
// Jsx-attributes
case SyntaxKind.JsxAttributes:
return bindJsxAttributes(<JsxAttributes>node);
@ -2168,13 +2179,6 @@ namespace ts {
case SyntaxKind.ModuleBlock:
return updateStrictModeStatementList((<Block | ModuleBlock>node).statements);
default:
if (isInJavaScriptFile(node)) return bindJSDocWorker(node);
}
}
function bindJSDocWorker(node: Node) {
switch (node.kind) {
case SyntaxKind.JSDocRecordMember:
return bindPropertyWorker(node as JSDocRecordMember);
case SyntaxKind.JSDocPropertyTag:
@ -3600,4 +3604,13 @@ namespace ts {
return TransformFlags.NodeExcludes;
}
}
/**
* "Binds" JSDoc nodes in TypeScript code.
* Since we will never create symbols for JSDoc, we just set parent pointers instead.
*/
function setParentPointers(parent: Node, child: Node): void {
child.parent = parent;
forEachChild(child, (childsChild) => setParentPointers(child, childsChild));
}
}

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -3,7 +3,7 @@
namespace ts {
/** The version of the TypeScript compiler release */
export const version = "2.4.0";
export const version = "2.5.0";
}
/* @internal */
@ -51,8 +51,10 @@ namespace ts {
// Copies keys/values from template. Note that for..in will not throw if
// template is undefined, and instead will just exit the loop.
for (const key in template) if (hasOwnProperty.call(template, key)) {
map.set(key, template[key]);
for (const key in template) {
if (hasOwnProperty.call(template, key)) {
map.set(key, template[key]);
}
}
return map;
@ -473,7 +475,7 @@ namespace ts {
* @param array The array to map.
* @param mapfn The callback used to map the result into one or more values.
*/
export function flatMap<T, U>(array: T[], mapfn: (x: T, i: number) => U | U[]): U[] {
export function flatMap<T, U>(array: T[] | undefined, mapfn: (x: T, i: number) => U | U[] | undefined): U[] | undefined {
let result: U[];
if (array) {
result = [];
@ -521,8 +523,8 @@ namespace ts {
return result || array;
}
export function mapDefined<T>(array: ReadonlyArray<T>, mapFn: (x: T, i: number) => T | undefined): ReadonlyArray<T> {
const result: T[] = [];
export function mapDefined<T, U>(array: ReadonlyArray<T>, mapFn: (x: T, i: number) => U | undefined): U[] {
const result: U[] = [];
for (let i = 0; i < array.length; i++) {
const item = array[i];
const mapped = mapFn(item, i);
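A hedged usage sketch of the widened mapDefined signature; the skip-undefined behavior is inferred from the surrounding helpers rather than shown in full in this hunk:

declare function mapDefined<T, U>(array: ReadonlyArray<T>, mapFn: (x: T, i: number) => U | undefined): U[];

const lengths: number[] = mapDefined(["a", "", "abc"], s => s.length > 0 ? s.length : undefined);
// expected: [1, 3]; the empty string maps to undefined and is dropped, and the element type changes from string to number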
@ -977,9 +979,12 @@ namespace ts {
*/
export function getOwnKeys<T>(map: MapLike<T>): string[] {
const keys: string[] = [];
for (const key in map) if (hasOwnProperty.call(map, key)) {
keys.push(key);
for (const key in map) {
if (hasOwnProperty.call(map, key)) {
keys.push(key);
}
}
return keys;
}
@ -1042,8 +1047,10 @@ namespace ts {
export function assign<T1 extends MapLike<{}>>(t: T1, ...args: any[]): any;
export function assign<T1 extends MapLike<{}>>(t: T1, ...args: any[]) {
for (const arg of args) {
for (const p in arg) if (hasProperty(arg, p)) {
t[p] = arg[p];
for (const p in arg) {
if (hasProperty(arg, p)) {
t[p] = arg[p];
}
}
}
return t;
@ -1058,13 +1065,19 @@ namespace ts {
export function equalOwnProperties<T>(left: MapLike<T>, right: MapLike<T>, equalityComparer?: (left: T, right: T) => boolean) {
if (left === right) return true;
if (!left || !right) return false;
for (const key in left) if (hasOwnProperty.call(left, key)) {
if (!hasOwnProperty.call(right, key) === undefined) return false;
if (equalityComparer ? !equalityComparer(left[key], right[key]) : left[key] !== right[key]) return false;
for (const key in left) {
if (hasOwnProperty.call(left, key)) {
if (!hasOwnProperty.call(right, key) === undefined) return false;
if (equalityComparer ? !equalityComparer(left[key], right[key]) : left[key] !== right[key]) return false;
}
}
for (const key in right) if (hasOwnProperty.call(right, key)) {
if (!hasOwnProperty.call(left, key)) return false;
for (const key in right) {
if (hasOwnProperty.call(right, key)) {
if (!hasOwnProperty.call(left, key)) return false;
}
}
return true;
}
@ -1106,12 +1119,18 @@ namespace ts {
export function extend<T1, T2>(first: T1, second: T2): T1 & T2 {
const result: T1 & T2 = <any>{};
for (const id in second) if (hasOwnProperty.call(second, id)) {
(result as any)[id] = (second as any)[id];
for (const id in second) {
if (hasOwnProperty.call(second, id)) {
(result as any)[id] = (second as any)[id];
}
}
for (const id in first) if (hasOwnProperty.call(first, id)) {
(result as any)[id] = (first as any)[id];
for (const id in first) {
if (hasOwnProperty.call(first, id)) {
(result as any)[id] = (first as any)[id];
}
}
return result;
}
@ -2112,14 +2131,16 @@ namespace ts {
export function getScriptKindFromFileName(fileName: string): ScriptKind {
const ext = fileName.substr(fileName.lastIndexOf("."));
switch (ext.toLowerCase()) {
case ".js":
case Extension.Js:
return ScriptKind.JS;
case ".jsx":
case Extension.Jsx:
return ScriptKind.JSX;
case ".ts":
case Extension.Ts:
return ScriptKind.TS;
case ".tsx":
case Extension.Tsx:
return ScriptKind.TSX;
case ".json":
return ScriptKind.JSON;
default:
return ScriptKind.Unknown;
}
@ -2128,10 +2149,10 @@ namespace ts {
/**
* List of supported extensions in order of file resolution precedence.
*/
export const supportedTypeScriptExtensions = [".ts", ".tsx", ".d.ts"];
export const supportedTypeScriptExtensions = [Extension.Ts, Extension.Tsx, Extension.Dts];
/** Must have ".d.ts" first because if ".ts" goes first, that will be detected as the extension instead of ".d.ts". */
export const supportedTypescriptExtensionsForExtractExtension = [".d.ts", ".ts", ".tsx"];
export const supportedJavascriptExtensions = [".js", ".jsx"];
export const supportedTypescriptExtensionsForExtractExtension = [Extension.Dts, Extension.Ts, Extension.Tsx];
export const supportedJavascriptExtensions = [Extension.Js, Extension.Jsx];
const allSupportedExtensions = supportedTypeScriptExtensions.concat(supportedJavascriptExtensions);
export function getSupportedExtensions(options?: CompilerOptions, extraFileExtensions?: JsFileExtensionInfo[]): string[] {
@ -2139,7 +2160,7 @@ namespace ts {
if (!extraFileExtensions || extraFileExtensions.length === 0 || !needAllExtensions) {
return needAllExtensions ? allSupportedExtensions : supportedTypeScriptExtensions;
}
const extensions = allSupportedExtensions.slice(0);
const extensions: string[] = allSupportedExtensions.slice(0);
for (const extInfo of extraFileExtensions) {
if (extensions.indexOf(extInfo.extension) === -1) {
extensions.push(extInfo.extension);
@ -2218,7 +2239,7 @@ namespace ts {
}
}
const extensionsToRemove = [".d.ts", ".ts", ".js", ".tsx", ".jsx"];
const extensionsToRemove = [Extension.Dts, Extension.Ts, Extension.Js, Extension.Tsx, Extension.Jsx];
export function removeFileExtension(path: string): string {
for (const ext of extensionsToRemove) {
const extensionless = tryRemoveExtension(path, ext);
@ -2249,6 +2270,7 @@ namespace ts {
getSymbolConstructor(): new (flags: SymbolFlags, name: string) => Symbol;
getTypeConstructor(): new (checker: TypeChecker, flags: TypeFlags) => Type;
getSignatureConstructor(): new (checker: TypeChecker) => Signature;
getSourceMapSourceConstructor(): new (fileName: string, text: string, skipTrivia?: (pos: number) => number) => SourceMapSource;
}
function Symbol(this: Symbol, flags: SymbolFlags, name: string) {
@ -2279,6 +2301,12 @@ namespace ts {
this.original = undefined;
}
function SourceMapSource(this: SourceMapSource, fileName: string, text: string, skipTrivia?: (pos: number) => number) {
this.fileName = fileName;
this.text = text;
this.skipTrivia = skipTrivia || (pos => pos);
}
export let objectAllocator: ObjectAllocator = {
getNodeConstructor: () => <any>Node,
getTokenConstructor: () => <any>Node,
@ -2286,7 +2314,8 @@ namespace ts {
getSourceFileConstructor: () => <any>Node,
getSymbolConstructor: () => <any>Symbol,
getTypeConstructor: () => <any>Type,
getSignatureConstructor: () => <any>Signature
getSignatureConstructor: () => <any>Signature,
getSourceMapSourceConstructor: () => <any>SourceMapSource,
};
export const enum AssertionLevel {
@ -2464,7 +2493,7 @@ namespace ts {
/** True if an extension is one of the supported TypeScript extensions. */
export function extensionIsTypeScript(ext: Extension): boolean {
return ext <= Extension.LastTypeScriptExtension;
return ext === Extension.Ts || ext === Extension.Tsx || ext === Extension.Dts;
}
/**
@ -2479,21 +2508,7 @@ namespace ts {
Debug.fail(`File ${path} has unknown extension.`);
}
export function tryGetExtensionFromPath(path: string): Extension | undefined {
if (fileExtensionIs(path, ".d.ts")) {
return Extension.Dts;
}
if (fileExtensionIs(path, ".ts")) {
return Extension.Ts;
}
if (fileExtensionIs(path, ".tsx")) {
return Extension.Tsx;
}
if (fileExtensionIs(path, ".js")) {
return Extension.Js;
}
if (fileExtensionIs(path, ".jsx")) {
return Extension.Jsx;
}
return find(supportedTypescriptExtensionsForExtractExtension, e => fileExtensionIs(path, e)) || find(supportedJavascriptExtensions, e => fileExtensionIs(path, e));
}
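A tiny standalone sketch of why the extension list must keep ".d.ts" ahead of ".ts" (this mirrors the ordering comment earlier in the file; the endsWith check stands in for fileExtensionIs):

const orderedExtensions = [".d.ts", ".ts", ".tsx"];    // ".d.ts" first, as the comment above requires
const filePath = "lib/typescriptServices.d.ts";
const detected = orderedExtensions.find(ext => filePath.endsWith(ext));
// ".d.ts"; had ".ts" come first, it would match and hide the ".d.ts" suffix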
export function isCheckJsEnabledForFile(sourceFile: SourceFile, compilerOptions: CompilerOptions) {

View File

@ -335,9 +335,12 @@ namespace ts {
write(": ");
// use the checker's type, not the declared type,
// for non-optional initialized parameters that aren't a parameter property
// for optional parameter properties
// and also for non-optional initialized parameters that aren't a parameter property
// these types may need to add `undefined`.
const shouldUseResolverType = declaration.kind === SyntaxKind.Parameter &&
resolver.isRequiredInitializedParameter(declaration as ParameterDeclaration);
(resolver.isRequiredInitializedParameter(declaration as ParameterDeclaration) ||
resolver.isOptionalUninitializedParameterProperty(declaration as ParameterDeclaration));
if (type && !shouldUseResolverType) {
// Write the type
emitType(type);
@ -596,7 +599,7 @@ namespace ts {
currentIdentifiers = node.identifiers;
isCurrentFileExternalModule = isExternalModule(node);
enclosingDeclaration = node;
emitDetachedComments(currentText, currentLineMap, writer, writeCommentRange, node, newLine, /*removeComents*/ true);
emitDetachedComments(currentText, currentLineMap, writer, writeCommentRange, node, newLine, /*removeComments*/ true);
emitLines(node.statements);
}

View File

@ -1,4 +1,4 @@
{
{
"Unterminated string literal.": {
"category": "Error",
"code": 1002
@ -899,6 +899,14 @@
"category": "Error",
"code": 1326
},
"String literal with double quotes expected.": {
"category": "Error",
"code": 1327
},
"Property value can only be string literal, numeric literal, 'true', 'false', 'null', object literal or array literal.": {
"category": "Error",
"code": 1328
},
"Duplicate identifier '{0}'.": {
"category": "Error",
@ -3278,6 +3286,10 @@
"category": "Message",
"code": 6184
},
"Disable strict checking of generic signatures in function types.": {
"category": "Message",
"code": 6185
},
"Variable '{0}' implicitly has an '{1}' type.": {
"category": "Error",
"code": 7005
@ -3585,11 +3597,11 @@
"category": "Message",
"code": 90015
},
"Add declaration for missing property '{0}'.": {
"Declare property '{0}'.": {
"category": "Message",
"code": 90016
},
"Add index signature for missing property '{0}'.": {
"Add index signature for property '{0}'.": {
"category": "Message",
"code": 90017
},
@ -3613,7 +3625,19 @@
"category": "Message",
"code": 90022
},
"Declare method '{0}'.": {
"category": "Message",
"code": 90023
},
"Declare static method '{0}'.": {
"category": "Message",
"code": 90024
},
"Prefix '{0}' with an underscore.": {
"category": "Message",
"code": 90025
},
"Convert function to an ES2015 class": {
"category": "Message",
"code": 95001

View File

@ -8,6 +8,70 @@ namespace ts {
const delimiters = createDelimiterMap();
const brackets = createBracketsMap();
/*@internal*/
/**
* Iterates over the source files that are expected to have an emit output.
*
* @param host An EmitHost.
* @param action The action to execute.
* @param sourceFilesOrTargetSourceFile
* If an array, the full list of source files to emit.
* Else, calls `getSourceFilesToEmit` with the (optional) target source file to determine the list of source files to emit.
*/
export function forEachEmittedFile(
host: EmitHost, action: (emitFileNames: EmitFileNames, sourceFileOrBundle: SourceFile | Bundle, emitOnlyDtsFiles: boolean) => void,
sourceFilesOrTargetSourceFile?: SourceFile[] | SourceFile,
emitOnlyDtsFiles?: boolean) {
const sourceFiles = isArray(sourceFilesOrTargetSourceFile) ? sourceFilesOrTargetSourceFile : getSourceFilesToEmit(host, sourceFilesOrTargetSourceFile);
const options = host.getCompilerOptions();
if (options.outFile || options.out) {
if (sourceFiles.length) {
const jsFilePath = options.outFile || options.out;
const sourceMapFilePath = getSourceMapFilePath(jsFilePath, options);
const declarationFilePath = options.declaration ? removeFileExtension(jsFilePath) + Extension.Dts : "";
action({ jsFilePath, sourceMapFilePath, declarationFilePath }, createBundle(sourceFiles), emitOnlyDtsFiles);
}
}
else {
for (const sourceFile of sourceFiles) {
const jsFilePath = getOwnEmitOutputFilePath(sourceFile, host, getOutputExtension(sourceFile, options));
const sourceMapFilePath = getSourceMapFilePath(jsFilePath, options);
const declarationFilePath = !isSourceFileJavaScript(sourceFile) && (emitOnlyDtsFiles || options.declaration) ? getDeclarationEmitOutputFilePath(sourceFile, host) : undefined;
action({ jsFilePath, sourceMapFilePath, declarationFilePath }, sourceFile, emitOnlyDtsFiles);
}
}
}
function getSourceMapFilePath(jsFilePath: string, options: CompilerOptions) {
return options.sourceMap ? jsFilePath + ".map" : undefined;
}
// JavaScript files are always LanguageVariant.JSX, as JSX syntax is allowed in .js files also.
// So for JavaScript files, '.jsx' is only emitted if the input was '.jsx' and JsxEmit.Preserve is set.
// For TypeScript, the only time to emit with a '.jsx' extension is on JSX input with JsxEmit.Preserve.
function getOutputExtension(sourceFile: SourceFile, options: CompilerOptions): Extension {
if (options.jsx === JsxEmit.Preserve) {
if (isSourceFileJavaScript(sourceFile)) {
if (fileExtensionIs(sourceFile.fileName, Extension.Jsx)) {
return Extension.Jsx;
}
}
else if (sourceFile.languageVariant === LanguageVariant.JSX) {
// TypeScript source file preserving JSX syntax
return Extension.Jsx;
}
}
return Extension.Js;
}
function getOriginalSourceFileOrBundle(sourceFileOrBundle: SourceFile | Bundle) {
if (sourceFileOrBundle.kind === SyntaxKind.Bundle) {
return updateBundle(sourceFileOrBundle, sameMap(sourceFileOrBundle.sourceFiles, getOriginalSourceFile));
}
return getOriginalSourceFile(sourceFileOrBundle);
}
/*@internal*/
// targetSourceFile is when users only want one file in entire project to be emitted. This is used in compileOnSave feature
export function emitFiles(resolver: EmitResolver, host: EmitHost, targetSourceFile: SourceFile, emitOnlyDtsFiles?: boolean, transformers?: TransformerFactory<SourceFile>[]): EmitResult {
@ -959,7 +1023,7 @@ namespace ts {
function emitConstructorType(node: ConstructorTypeNode) {
write("new ");
emitTypeParameters(node, node.typeParameters);
emitParametersForArrow(node, node.parameters);
emitParameters(node, node.parameters);
write(" => ");
emit(node.type);
}
@ -2283,11 +2347,25 @@ namespace ts {
emitList(parentNode, parameters, ListFormat.Parameters);
}
function emitParametersForArrow(parentNode: Node, parameters: NodeArray<ParameterDeclaration>) {
if (parameters &&
parameters.length === 1 &&
parameters[0].type === undefined &&
parameters[0].pos === parentNode.pos) {
function canEmitSimpleArrowHead(parentNode: FunctionTypeNode | ArrowFunction, parameters: NodeArray<ParameterDeclaration>) {
const parameter = singleOrUndefined(parameters);
return parameter
&& parameter.pos === parentNode.pos // may not have parsed tokens between parent and parameter
&& !(isArrowFunction(parentNode) && parentNode.type) // arrow function may not have return type annotation
&& !some(parentNode.decorators) // parent may not have decorators
&& !some(parentNode.modifiers) // parent may not have modifiers
&& !some(parentNode.typeParameters) // parent may not have type parameters
&& !some(parameter.decorators) // parameter may not have decorators
&& !some(parameter.modifiers) // parameter may not have modifiers
&& !parameter.dotDotDotToken // parameter may not be rest
&& !parameter.questionToken // parameter may not be optional
&& !parameter.type // parameter may not have a type annotation
&& !parameter.initializer // parameter may not have an initializer
&& isIdentifier(parameter.name); // parameter name must be identifier
}
function emitParametersForArrow(parentNode: FunctionTypeNode | ArrowFunction, parameters: NodeArray<ParameterDeclaration>) {
if (canEmitSimpleArrowHead(parentNode, parameters)) {
emit(parameters[0]);
}
else {

View File

@ -228,7 +228,7 @@ namespace ts {
// Signature elements
export function createTypeParameterDeclaration(name: string | Identifier, constraint: TypeNode | undefined, defaultType: TypeNode | undefined) {
export function createTypeParameterDeclaration(name: string | Identifier, constraint?: TypeNode, defaultType?: TypeNode) {
const node = createSynthesizedNode(SyntaxKind.TypeParameter) as TypeParameterDeclaration;
node.name = asName(name);
node.constraint = constraint;
@ -2287,14 +2287,6 @@ namespace ts {
return range;
}
/**
* Gets flags that control emit behavior of a node.
*/
export function getEmitFlags(node: Node): EmitFlags | undefined {
const emitNode = node.emitNode;
return emitNode && emitNode.flags;
}
/**
* Sets flags that control emit behavior of a node.
*/
@ -2314,15 +2306,24 @@ namespace ts {
/**
* Sets a custom text range to use when emitting source maps.
*/
export function setSourceMapRange<T extends Node>(node: T, range: TextRange | undefined) {
export function setSourceMapRange<T extends Node>(node: T, range: SourceMapRange | undefined) {
getOrCreateEmitNode(node).sourceMapRange = range;
return node;
}
let SourceMapSource: new (fileName: string, text: string, skipTrivia?: (pos: number) => number) => SourceMapSource;
/**
* Create an external source map source file reference
*/
export function createSourceMapSource(fileName: string, text: string, skipTrivia?: (pos: number) => number): SourceMapSource {
return new (SourceMapSource || (SourceMapSource = objectAllocator.getSourceMapSourceConstructor()))(fileName, text, skipTrivia);
}
/**
* Gets the TextRange to use for source maps for a token of a node.
*/
export function getTokenSourceMapRange(node: Node, token: SyntaxKind): TextRange | undefined {
export function getTokenSourceMapRange(node: Node, token: SyntaxKind): SourceMapRange | undefined {
const emitNode = node.emitNode;
const tokenSourceMapRanges = emitNode && emitNode.tokenSourceMapRanges;
return tokenSourceMapRanges && tokenSourceMapRanges[token];
@ -2331,7 +2332,7 @@ namespace ts {
/**
* Sets the TextRange to use for source maps for a token of a node.
*/
export function setTokenSourceMapRange<T extends Node>(node: T, token: SyntaxKind, range: TextRange | undefined) {
export function setTokenSourceMapRange<T extends Node>(node: T, token: SyntaxKind, range: SourceMapRange | undefined) {
const emitNode = getOrCreateEmitNode(node);
const tokenSourceMapRanges = emitNode.tokenSourceMapRanges || (emitNode.tokenSourceMapRanges = []);
tokenSourceMapRanges[token] = range;
@ -2503,6 +2504,7 @@ namespace ts {
helpers
} = sourceEmitNode;
if (!destEmitNode) destEmitNode = {};
// We are using `.slice()` here in case `destEmitNode.leadingComments` is pushed to later.
if (leadingComments) destEmitNode.leadingComments = addRange(leadingComments.slice(), destEmitNode.leadingComments);
if (trailingComments) destEmitNode.trailingComments = addRange(trailingComments.slice(), destEmitNode.trailingComments);
if (flags) destEmitNode.flags = flags;
@ -3799,16 +3801,6 @@ namespace ts {
return node;
}
export function skipPartiallyEmittedExpressions(node: Expression): Expression;
export function skipPartiallyEmittedExpressions(node: Node): Node;
export function skipPartiallyEmittedExpressions(node: Node) {
while (node.kind === SyntaxKind.PartiallyEmittedExpression) {
node = (<PartiallyEmittedExpression>node).expression;
}
return node;
}
function updateOuterExpression(outerExpression: OuterExpression, expression: Expression) {
switch (outerExpression.kind) {
case SyntaxKind.ParenthesizedExpression: return updateParen(outerExpression, expression);
@ -3840,23 +3832,34 @@ namespace ts {
return emitNode && emitNode.externalHelpersModuleName;
}
export function getOrCreateExternalHelpersModuleNameIfNeeded(node: SourceFile, compilerOptions: CompilerOptions) {
if (compilerOptions.importHelpers && (isExternalModule(node) || compilerOptions.isolatedModules)) {
export function getOrCreateExternalHelpersModuleNameIfNeeded(node: SourceFile, compilerOptions: CompilerOptions, hasExportStarsToExportValues?: boolean) {
if (compilerOptions.importHelpers && isEffectiveExternalModule(node, compilerOptions)) {
const externalHelpersModuleName = getExternalHelpersModuleName(node);
if (externalHelpersModuleName) {
return externalHelpersModuleName;
}
const helpers = getEmitHelpers(node);
if (helpers) {
for (const helper of helpers) {
if (!helper.scoped) {
const parseNode = getOriginalNode(node, isSourceFile);
const emitNode = getOrCreateEmitNode(parseNode);
return emitNode.externalHelpersModuleName || (emitNode.externalHelpersModuleName = createUniqueName(externalHelpersModuleNameText));
const moduleKind = getEmitModuleKind(compilerOptions);
let create = hasExportStarsToExportValues
&& moduleKind !== ModuleKind.System
&& moduleKind !== ModuleKind.ES2015;
if (!create) {
const helpers = getEmitHelpers(node);
if (helpers) {
for (const helper of helpers) {
if (!helper.scoped) {
create = true;
break;
}
}
}
}
if (create) {
const parseNode = getOriginalNode(node, isSourceFile);
const emitNode = getOrCreateEmitNode(parseNode);
return emitNode.externalHelpersModuleName || (emitNode.externalHelpersModuleName = createUniqueName(externalHelpersModuleNameText));
}
}
}
@ -4218,177 +4221,4 @@ namespace ts {
Debug.assertNode(node, isExpression);
return <Expression>node;
}
export interface ExternalModuleInfo {
externalImports: (ImportDeclaration | ImportEqualsDeclaration | ExportDeclaration)[]; // imports of other external modules
externalHelpersImportDeclaration: ImportDeclaration | undefined; // import of external helpers
exportSpecifiers: Map<ExportSpecifier[]>; // export specifiers by name
exportedBindings: Identifier[][]; // exported names of local declarations
exportedNames: Identifier[]; // all exported names local to module
exportEquals: ExportAssignment | undefined; // an export= declaration if one was present
hasExportStarsToExportValues: boolean; // whether this module contains export*
}
export function collectExternalModuleInfo(sourceFile: SourceFile, resolver: EmitResolver, compilerOptions: CompilerOptions): ExternalModuleInfo {
const externalImports: (ImportDeclaration | ImportEqualsDeclaration | ExportDeclaration)[] = [];
const exportSpecifiers = createMultiMap<ExportSpecifier>();
const exportedBindings: Identifier[][] = [];
const uniqueExports = createMap<boolean>();
let exportedNames: Identifier[];
let hasExportDefault = false;
let exportEquals: ExportAssignment = undefined;
let hasExportStarsToExportValues = false;
const externalHelpersModuleName = getOrCreateExternalHelpersModuleNameIfNeeded(sourceFile, compilerOptions);
const externalHelpersImportDeclaration = externalHelpersModuleName && createImportDeclaration(
/*decorators*/ undefined,
/*modifiers*/ undefined,
createImportClause(/*name*/ undefined, createNamespaceImport(externalHelpersModuleName)),
createLiteral(externalHelpersModuleNameText));
if (externalHelpersImportDeclaration) {
externalImports.push(externalHelpersImportDeclaration);
}
for (const node of sourceFile.statements) {
switch (node.kind) {
case SyntaxKind.ImportDeclaration:
// import "mod"
// import x from "mod"
// import * as x from "mod"
// import { x, y } from "mod"
externalImports.push(<ImportDeclaration>node);
break;
case SyntaxKind.ImportEqualsDeclaration:
if ((<ImportEqualsDeclaration>node).moduleReference.kind === SyntaxKind.ExternalModuleReference) {
// import x = require("mod")
externalImports.push(<ImportEqualsDeclaration>node);
}
break;
case SyntaxKind.ExportDeclaration:
if ((<ExportDeclaration>node).moduleSpecifier) {
if (!(<ExportDeclaration>node).exportClause) {
// export * from "mod"
externalImports.push(<ExportDeclaration>node);
hasExportStarsToExportValues = true;
}
else {
// export { x, y } from "mod"
externalImports.push(<ExportDeclaration>node);
}
}
else {
// export { x, y }
for (const specifier of (<ExportDeclaration>node).exportClause.elements) {
if (!uniqueExports.get(specifier.name.text)) {
const name = specifier.propertyName || specifier.name;
exportSpecifiers.add(name.text, specifier);
const decl = resolver.getReferencedImportDeclaration(name)
|| resolver.getReferencedValueDeclaration(name);
if (decl) {
multiMapSparseArrayAdd(exportedBindings, getOriginalNodeId(decl), specifier.name);
}
uniqueExports.set(specifier.name.text, true);
exportedNames = append(exportedNames, specifier.name);
}
}
}
break;
case SyntaxKind.ExportAssignment:
if ((<ExportAssignment>node).isExportEquals && !exportEquals) {
// export = x
exportEquals = <ExportAssignment>node;
}
break;
case SyntaxKind.VariableStatement:
if (hasModifier(node, ModifierFlags.Export)) {
for (const decl of (<VariableStatement>node).declarationList.declarations) {
exportedNames = collectExportedVariableInfo(decl, uniqueExports, exportedNames);
}
}
break;
case SyntaxKind.FunctionDeclaration:
if (hasModifier(node, ModifierFlags.Export)) {
if (hasModifier(node, ModifierFlags.Default)) {
// export default function() { }
if (!hasExportDefault) {
multiMapSparseArrayAdd(exportedBindings, getOriginalNodeId(node), getDeclarationName(<FunctionDeclaration>node));
hasExportDefault = true;
}
}
else {
// export function x() { }
const name = (<FunctionDeclaration>node).name;
if (!uniqueExports.get(name.text)) {
multiMapSparseArrayAdd(exportedBindings, getOriginalNodeId(node), name);
uniqueExports.set(name.text, true);
exportedNames = append(exportedNames, name);
}
}
}
break;
case SyntaxKind.ClassDeclaration:
if (hasModifier(node, ModifierFlags.Export)) {
if (hasModifier(node, ModifierFlags.Default)) {
// export default class { }
if (!hasExportDefault) {
multiMapSparseArrayAdd(exportedBindings, getOriginalNodeId(node), getDeclarationName(<ClassDeclaration>node));
hasExportDefault = true;
}
}
else {
// export class x { }
const name = (<ClassDeclaration>node).name;
if (!uniqueExports.get(name.text)) {
multiMapSparseArrayAdd(exportedBindings, getOriginalNodeId(node), name);
uniqueExports.set(name.text, true);
exportedNames = append(exportedNames, name);
}
}
}
break;
}
}
return { externalImports, exportSpecifiers, exportEquals, hasExportStarsToExportValues, exportedBindings, exportedNames, externalHelpersImportDeclaration };
}
function collectExportedVariableInfo(decl: VariableDeclaration | BindingElement, uniqueExports: Map<boolean>, exportedNames: Identifier[]) {
if (isBindingPattern(decl.name)) {
for (const element of decl.name.elements) {
if (!isOmittedExpression(element)) {
exportedNames = collectExportedVariableInfo(element, uniqueExports, exportedNames);
}
}
}
else if (!isGeneratedIdentifier(decl.name)) {
if (!uniqueExports.get(decl.name.text)) {
uniqueExports.set(decl.name.text, true);
exportedNames = append(exportedNames, decl.name);
}
}
return exportedNames;
}
/** Use a sparse array as a multi-map. */
function multiMapSparseArrayAdd<V>(map: V[][], key: number, value: V): V[] {
let values = map[key];
if (values) {
values.push(value);
}
else {
map[key] = values = [value];
}
return values;
}
}

View File

@ -815,15 +815,15 @@ namespace ts {
switch (extensions) {
case Extensions.DtsOnly:
return tryExtension(".d.ts", Extension.Dts);
return tryExtension(Extension.Dts);
case Extensions.TypeScript:
return tryExtension(".ts", Extension.Ts) || tryExtension(".tsx", Extension.Tsx) || tryExtension(".d.ts", Extension.Dts);
return tryExtension(Extension.Ts) || tryExtension(Extension.Tsx) || tryExtension(Extension.Dts);
case Extensions.JavaScript:
return tryExtension(".js", Extension.Js) || tryExtension(".jsx", Extension.Jsx);
return tryExtension(Extension.Js) || tryExtension(Extension.Jsx);
}
function tryExtension(ext: string, extension: Extension): Resolved | undefined {
const path = tryFile(candidate + ext, failedLookupLocations, onlyRecordFailures, state);
function tryExtension(extension: Extension): Resolved | undefined {
const path = tryFile(candidate + extension, failedLookupLocations, onlyRecordFailures, state);
return path && { path, extension };
}
}

View File

@ -1,6 +1,5 @@
/// <reference path="utilities.ts"/>
/// <reference path="scanner.ts"/>
/// <reference path="factory.ts"/>
namespace ts {
let NodeConstructor: new (kind: SyntaxKind, pos: number, end: number) => Node;
@ -15,7 +14,7 @@ namespace ts {
else if (kind === SyntaxKind.Identifier) {
return new (IdentifierConstructor || (IdentifierConstructor = objectAllocator.getIdentifierConstructor()))(kind, pos, end);
}
else if (kind < SyntaxKind.FirstNode) {
else if (!isNodeKind(kind)) {
return new (TokenConstructor || (TokenConstructor = objectAllocator.getTokenConstructor()))(kind, pos, end);
}
else {
@ -466,6 +465,15 @@ namespace ts {
return Parser.parseIsolatedEntityName(text, languageVersion);
}
/**
* Parse JSON text into a SyntaxTree and return the node and any parse errors
* @param fileName
* @param sourceText
*/
export function parseJsonText(fileName: string, sourceText: string): JsonSourceFile {
return Parser.parseJsonText(fileName, sourceText);
}
// See also `isExternalOrCommonJsModule` in utilities.ts
export function isExternalModule(file: SourceFile): boolean {
return file.externalModuleIndicator !== undefined;
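A hedged usage sketch of the new parseJsonText entry point; the file name and JSON text are made up, and the properties read below are the ones assigned in the hunks that follow (they may be marked internal in the published typings):

import * as ts from "typescript";

const result = ts.parseJsonText("tsconfig.json", `{ "compilerOptions": { "strict": true } }`);
// result.jsonObject holds the parsed object literal; result.parseDiagnostics lists any syntax errors
console.log(result.parseDiagnostics.length === 0 ? "parsed cleanly" : "had parse errors");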
@ -481,7 +489,11 @@ namespace ts {
// becoming detached from any SourceFile). It is recommended that this SourceFile not
// be used once 'update' is called on it.
export function updateSourceFile(sourceFile: SourceFile, newText: string, textChangeRange: TextChangeRange, aggressiveChecks?: boolean): SourceFile {
return IncrementalParser.updateSourceFile(sourceFile, newText, textChangeRange, aggressiveChecks);
const newSourceFile = IncrementalParser.updateSourceFile(sourceFile, newText, textChangeRange, aggressiveChecks);
// Because a new source file node is created, it may not have the PossiblyContainsDynamicImport flag. This is the case if no new edit adds a dynamic import.
// We will manually port the flag to the new source file.
newSourceFile.flags |= (sourceFile.flags & NodeFlags.PossiblyContainsDynamicImport);
return newSourceFile;
}
/* @internal */
@ -628,9 +640,34 @@ namespace ts {
return isInvalid ? entityName : undefined;
}
export function parseJsonText(fileName: string, sourceText: string): JsonSourceFile {
initializeState(sourceText, ScriptTarget.ES2015, /*syntaxCursor*/ undefined, ScriptKind.JSON);
// Set source file so that errors will be reported with this file name
sourceFile = createSourceFile(fileName, ScriptTarget.ES2015, ScriptKind.JSON);
const result = <JsonSourceFile>sourceFile;
// Prime the scanner.
nextToken();
if (token() === SyntaxKind.EndOfFileToken) {
sourceFile.endOfFileToken = <EndOfFileToken>parseTokenNode();
}
else if (token() === SyntaxKind.OpenBraceToken ||
lookAhead(() => token() === SyntaxKind.StringLiteral)) {
result.jsonObject = parseObjectLiteralExpression();
sourceFile.endOfFileToken = parseExpectedToken(SyntaxKind.EndOfFileToken, /*reportAtCurrentPosition*/ false, Diagnostics.Unexpected_token);
}
else {
parseExpected(SyntaxKind.OpenBraceToken);
}
sourceFile.parseDiagnostics = parseDiagnostics;
clearState();
return result;
}
function getLanguageVariant(scriptKind: ScriptKind) {
// .tsx and .jsx files are treated as jsx language variant.
return scriptKind === ScriptKind.TSX || scriptKind === ScriptKind.JSX || scriptKind === ScriptKind.JS ? LanguageVariant.JSX : LanguageVariant.Standard;
return scriptKind === ScriptKind.TSX || scriptKind === ScriptKind.JSX || scriptKind === ScriptKind.JS || scriptKind === ScriptKind.JSON ? LanguageVariant.JSX : LanguageVariant.Standard;
}
function initializeState(_sourceText: string, languageVersion: ScriptTarget, _syntaxCursor: IncrementalParser.SyntaxCursor, scriptKind: ScriptKind) {
@ -648,7 +685,7 @@ namespace ts {
identifierCount = 0;
nodeCount = 0;
contextFlags = scriptKind === ScriptKind.JS || scriptKind === ScriptKind.JSX ? NodeFlags.JavaScriptFile : NodeFlags.None;
contextFlags = scriptKind === ScriptKind.JS || scriptKind === ScriptKind.JSX || scriptKind === ScriptKind.JSON ? NodeFlags.JavaScriptFile : NodeFlags.None;
parseErrorBeforeNextFinishedNode = false;
// Initialize and prime the scanner before parsing the source elements.
@ -681,7 +718,7 @@ namespace ts {
sourceFile.statements = parseList(ParsingContext.SourceElements, parseStatement);
Debug.assert(token() === SyntaxKind.EndOfFileToken);
sourceFile.endOfFileToken = <EndOfFileToken>parseTokenNode();
sourceFile.endOfFileToken = addJSDocComment(parseTokenNode() as EndOfFileToken);
setExternalModuleIndicator(sourceFile);
@ -760,7 +797,7 @@ namespace ts {
sourceFile.languageVersion = languageVersion;
sourceFile.fileName = normalizePath(fileName);
sourceFile.languageVariant = getLanguageVariant(scriptKind);
sourceFile.isDeclarationFile = fileExtensionIs(sourceFile.fileName, ".d.ts");
sourceFile.isDeclarationFile = fileExtensionIs(sourceFile.fileName, Extension.Dts);
sourceFile.scriptKind = scriptKind;
return sourceFile;
@ -1099,7 +1136,7 @@ namespace ts {
pos = scanner.getStartPos();
}
return kind >= SyntaxKind.FirstNode ? new NodeConstructor(kind, pos, pos) :
return isNodeKind(kind) ? new NodeConstructor(kind, pos, pos) :
kind === SyntaxKind.Identifier ? new IdentifierConstructor(kind, pos, pos) :
new TokenConstructor(kind, pos, pos);
}
@ -2782,8 +2819,9 @@ namespace ts {
case SyntaxKind.SlashToken:
case SyntaxKind.SlashEqualsToken:
case SyntaxKind.Identifier:
case SyntaxKind.ImportKeyword:
return true;
case SyntaxKind.ImportKeyword:
return lookAhead(nextTokenIsOpenParenOrLessThan);
default:
return isIdentifier();
}
@ -2967,7 +3005,7 @@ namespace ts {
// for now we just check if the next token is an identifier. More heuristics
// can be added here later as necessary. We just need to make sure that we
// don't accidentally consume something legal.
return lookAhead(nextTokenIsIdentifierOrKeywordOrNumberOnSameLine);
return lookAhead(nextTokenIsIdentifierOrKeywordOrLiteralOnSameLine);
}
return false;
@ -3485,7 +3523,7 @@ namespace ts {
}
// here we are using similar heuristics as 'isYieldExpression'
return lookAhead(nextTokenIsIdentifierOnSameLine);
return lookAhead(nextTokenIsIdentifierOrKeywordOrLiteralOnSameLine);
}
return false;
@ -3695,12 +3733,12 @@ namespace ts {
// 3)we have a MemberExpression which either completes the LeftHandSideExpression,
// or starts the beginning of the first four CallExpression productions.
let expression: MemberExpression;
if (token() === SyntaxKind.ImportKeyword && lookAhead(nextTokenIsOpenParenOrLessThan)) {
if (token() === SyntaxKind.ImportKeyword) {
// We don't want to eagerly consume the import keyword as an import call expression, so we look ahead to find "("
// For example:
// var foo3 = require("subfolder
// import * as foo1 from "module-from-node -> we want this import to be a statement rather than import call expression
sourceFile.flags |= NodeFlags.PossiblyContainDynamicImport;
sourceFile.flags |= NodeFlags.PossiblyContainsDynamicImport;
expression = parseTokenNode<PrimaryExpression>();
}
else {
@ -4694,9 +4732,9 @@ namespace ts {
return token() === SyntaxKind.FunctionKeyword && !scanner.hasPrecedingLineBreak();
}
function nextTokenIsIdentifierOrKeywordOrNumberOnSameLine() {
function nextTokenIsIdentifierOrKeywordOrLiteralOnSameLine() {
nextToken();
return (tokenIsIdentifierOrKeyword(token()) || token() === SyntaxKind.NumericLiteral) && !scanner.hasPrecedingLineBreak();
return (tokenIsIdentifierOrKeyword(token()) || token() === SyntaxKind.NumericLiteral || token() === SyntaxKind.StringLiteral) && !scanner.hasPrecedingLineBreak();
}
function isDeclaration(): boolean {
@ -4807,9 +4845,11 @@ namespace ts {
// however, we say they are here so that we may gracefully parse them and error later.
case SyntaxKind.CatchKeyword:
case SyntaxKind.FinallyKeyword:
case SyntaxKind.ImportKeyword:
return true;
case SyntaxKind.ImportKeyword:
return isStartOfDeclaration() || lookAhead(nextTokenIsOpenParenOrLessThan);
case SyntaxKind.ConstKeyword:
case SyntaxKind.ExportKeyword:
return isStartOfDeclaration();
@ -6530,6 +6570,10 @@ namespace ts {
case "augments":
tag = parseAugmentsTag(atToken, tagName);
break;
case "class":
case "constructor":
tag = parseClassTag(atToken, tagName);
break;
case "arg":
case "argument":
case "param":
@ -6656,14 +6700,12 @@ namespace ts {
});
}
function parseBracketNameInPropertyAndParamTag() {
let name: Identifier;
let isBracketed: boolean;
function parseBracketNameInPropertyAndParamTag(): { name: Identifier, isBracketed: boolean } {
// Looking for something like '[foo]' or 'foo'
if (parseOptionalToken(SyntaxKind.OpenBracketToken)) {
name = parseJSDocIdentifierName();
const isBracketed = parseOptional(SyntaxKind.OpenBracketToken);
const name = parseJSDocIdentifierName(/*createIfMissing*/ true);
if (isBracketed) {
skipWhitespace();
isBracketed = true;
// May have an optional default, e.g. '[foo = 42]'
if (parseOptionalToken(SyntaxKind.EqualsToken)) {
@ -6672,9 +6714,7 @@ namespace ts {
parseExpected(SyntaxKind.CloseBracketToken);
}
else if (tokenIsIdentifierOrKeyword(token())) {
name = parseJSDocIdentifierName();
}
return { name, isBracketed };
}
@ -6685,20 +6725,12 @@ namespace ts {
const { name, isBracketed } = parseBracketNameInPropertyAndParamTag();
skipWhitespace();
if (!name) {
parseErrorAtPosition(scanner.getStartPos(), 0, Diagnostics.Identifier_expected);
return undefined;
}
let preName: Identifier, postName: Identifier;
if (typeExpression) {
postName = name;
}
else {
preName = name;
}
if (!typeExpression) {
typeExpression = tryParseTypeExpression();
}
@ -6749,6 +6781,13 @@ namespace ts {
return finishNode(result);
}
function parseClassTag(atToken: AtToken, tagName: Identifier): JSDocClassTag {
const tag = <JSDocClassTag>createNode(SyntaxKind.JSDocClassTag, atToken.pos);
tag.atToken = atToken;
tag.tagName = tagName;
return finishNode(tag);
}
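A hedged example of the `@class`/`@constructor` tags that parseClassTag now produces a node for; the intent is that, in a JavaScript file, a function documented this way is treated as a constructor rather than a plain call target. The file and names below are hypothetical.

// example.js
/**
 * @class
 */
function Point(x, y) {
    this.x = x;
    this.y = y;
}

const origin = new Point(0, 0);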
function parseTypedefTag(atToken: AtToken, tagName: Identifier): JSDocTypedefTag {
const typeExpression = tryParseTypeExpression();
skipWhitespace();
@ -6842,7 +6881,7 @@ namespace ts {
jsDocNamespaceNode.flags |= flags;
jsDocNamespaceNode.name = typeNameOrNamespaceName;
jsDocNamespaceNode.body = parseJSDocTypeNameWithNamespace(NodeFlags.NestedNamespace);
return jsDocNamespaceNode;
return finishNode(jsDocNamespaceNode);
}
if (typeNameOrNamespaceName && flags & NodeFlags.NestedNamespace) {
@ -6933,14 +6972,19 @@ namespace ts {
return currentToken = scanner.scanJSDocToken();
}
function parseJSDocIdentifierName(): Identifier {
return createJSDocIdentifier(tokenIsIdentifierOrKeyword(token()));
function parseJSDocIdentifierName(createIfMissing = false): Identifier {
return createJSDocIdentifier(tokenIsIdentifierOrKeyword(token()), createIfMissing);
}
function createJSDocIdentifier(isIdentifier: boolean): Identifier {
function createJSDocIdentifier(isIdentifier: boolean, createIfMissing: boolean): Identifier {
if (!isIdentifier) {
parseErrorAtCurrentToken(Diagnostics.Identifier_expected);
return undefined;
if (createIfMissing) {
return <Identifier>createMissingNode(SyntaxKind.Identifier, /*reportAtCurrentPosition*/ true, Diagnostics.Identifier_expected);
}
else {
parseErrorAtCurrentToken(Diagnostics.Identifier_expected);
return undefined;
}
}
const pos = scanner.getTokenPos();

View File

@ -442,6 +442,7 @@ namespace ts {
// Map storing if there is emit blocking diagnostics for given input
const hasEmitBlockingDiagnostics = createFileMap<boolean>(getCanonicalFileName);
let _compilerOptionsObjectLiteralSyntax: ObjectLiteralExpression;
let moduleResolutionCache: ModuleResolutionCache;
let resolveModuleNamesWorker: (moduleNames: string[], containingFile: string) => ResolvedModuleFull[];
@ -984,16 +985,12 @@ namespace ts {
if (sourceFile) {
return getDiagnostics(sourceFile, cancellationToken);
}
const allDiagnostics: Diagnostic[] = [];
forEach(program.getSourceFiles(), sourceFile => {
return sortAndDeduplicateDiagnostics(flatMap(program.getSourceFiles(), sourceFile => {
if (cancellationToken) {
cancellationToken.throwIfCancellationRequested();
}
addRange(allDiagnostics, getDiagnostics(sourceFile, cancellationToken));
});
return sortAndDeduplicateDiagnostics(allDiagnostics);
return getDiagnostics(sourceFile, cancellationToken);
}));
}
function getSyntacticDiagnostics(sourceFile: SourceFile, cancellationToken: CancellationToken): Diagnostic[] {
@ -1330,16 +1327,17 @@ namespace ts {
}
function getOptionsDiagnostics(): Diagnostic[] {
const allDiagnostics: Diagnostic[] = [];
addRange(allDiagnostics, fileProcessingDiagnostics.getGlobalDiagnostics());
addRange(allDiagnostics, programDiagnostics.getGlobalDiagnostics());
return sortAndDeduplicateDiagnostics(allDiagnostics);
return sortAndDeduplicateDiagnostics(concatenate(
fileProcessingDiagnostics.getGlobalDiagnostics(),
concatenate(
programDiagnostics.getGlobalDiagnostics(),
options.configFile ? programDiagnostics.getDiagnostics(options.configFile.fileName) : []
)
));
}
function getGlobalDiagnostics(): Diagnostic[] {
const allDiagnostics: Diagnostic[] = [];
addRange(allDiagnostics, getDiagnosticsProducingTypeChecker().getGlobalDiagnostics());
return sortAndDeduplicateDiagnostics(allDiagnostics);
return sortAndDeduplicateDiagnostics(getDiagnosticsProducingTypeChecker().getGlobalDiagnostics().slice());
}
function processRootFile(fileName: string, isDefaultLib: boolean) {
@ -1386,7 +1384,7 @@ namespace ts {
for (const node of file.statements) {
collectModuleReferences(node, /*inAmbientModule*/ false);
if ((file.flags & NodeFlags.PossiblyContainDynamicImport) || isJavaScriptFile) {
if ((file.flags & NodeFlags.PossiblyContainsDynamicImport) || isJavaScriptFile) {
collectDynamicImportOrRequireCalls(node);
}
}
@ -1491,7 +1489,8 @@ namespace ts {
}
}
return sourceFile;
} else {
}
else {
const sourceFileNoExtension = options.allowNonTsExtensions && getSourceFile(fileName);
if (sourceFileNoExtension) return sourceFileNoExtension;
@ -1501,7 +1500,7 @@ namespace ts {
}
const sourceFileWithAddedExtension = forEach(supportedExtensions, extension => getSourceFile(fileName + extension));
if (fail && !sourceFileWithAddedExtension) fail(Diagnostics.File_0_not_found, fileName + ".ts");
if (fail && !sourceFileWithAddedExtension) fail(Diagnostics.File_0_not_found, fileName + Extension.Ts);
return sourceFileWithAddedExtension;
}
}
@ -1779,33 +1778,33 @@ namespace ts {
function verifyCompilerOptions() {
if (options.isolatedModules) {
if (options.declaration) {
programDiagnostics.add(createCompilerDiagnostic(Diagnostics.Option_0_cannot_be_specified_with_option_1, "declaration", "isolatedModules"));
createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_with_option_1, "declaration", "isolatedModules");
}
if (options.noEmitOnError) {
programDiagnostics.add(createCompilerDiagnostic(Diagnostics.Option_0_cannot_be_specified_with_option_1, "noEmitOnError", "isolatedModules"));
createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_with_option_1, "noEmitOnError", "isolatedModules");
}
if (options.out) {
programDiagnostics.add(createCompilerDiagnostic(Diagnostics.Option_0_cannot_be_specified_with_option_1, "out", "isolatedModules"));
createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_with_option_1, "out", "isolatedModules");
}
if (options.outFile) {
programDiagnostics.add(createCompilerDiagnostic(Diagnostics.Option_0_cannot_be_specified_with_option_1, "outFile", "isolatedModules"));
createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_with_option_1, "outFile", "isolatedModules");
}
}
if (options.inlineSourceMap) {
if (options.sourceMap) {
programDiagnostics.add(createCompilerDiagnostic(Diagnostics.Option_0_cannot_be_specified_with_option_1, "sourceMap", "inlineSourceMap"));
createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_with_option_1, "sourceMap", "inlineSourceMap");
}
if (options.mapRoot) {
programDiagnostics.add(createCompilerDiagnostic(Diagnostics.Option_0_cannot_be_specified_with_option_1, "mapRoot", "inlineSourceMap"));
createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_with_option_1, "mapRoot", "inlineSourceMap");
}
}
if (options.paths && options.baseUrl === undefined) {
programDiagnostics.add(createCompilerDiagnostic(Diagnostics.Option_paths_cannot_be_used_without_specifying_baseUrl_option));
createDiagnosticForOptionName(Diagnostics.Option_paths_cannot_be_used_without_specifying_baseUrl_option, "paths");
}
if (options.paths) {
@ -1814,63 +1813,65 @@ namespace ts {
continue;
}
if (!hasZeroOrOneAsteriskCharacter(key)) {
programDiagnostics.add(createCompilerDiagnostic(Diagnostics.Pattern_0_can_have_at_most_one_Asterisk_character, key));
createDiagnosticForOptionPaths(/*onKey*/ true, key, Diagnostics.Pattern_0_can_have_at_most_one_Asterisk_character, key);
}
if (isArray(options.paths[key])) {
if (options.paths[key].length === 0) {
programDiagnostics.add(createCompilerDiagnostic(Diagnostics.Substitutions_for_pattern_0_shouldn_t_be_an_empty_array, key));
const len = options.paths[key].length;
if (len === 0) {
createDiagnosticForOptionPaths(/*onKey*/ false, key, Diagnostics.Substitutions_for_pattern_0_shouldn_t_be_an_empty_array, key);
}
for (const subst of options.paths[key]) {
for (let i = 0; i < len; i++) {
const subst = options.paths[key][i];
const typeOfSubst = typeof subst;
if (typeOfSubst === "string") {
if (!hasZeroOrOneAsteriskCharacter(subst)) {
programDiagnostics.add(createCompilerDiagnostic(Diagnostics.Substitution_0_in_pattern_1_in_can_have_at_most_one_Asterisk_character, subst, key));
createDiagnosticForOptionPathKeyValue(key, i, Diagnostics.Substitution_0_in_pattern_1_in_can_have_at_most_one_Asterisk_character, subst, key);
}
}
else {
programDiagnostics.add(createCompilerDiagnostic(Diagnostics.Substitution_0_for_pattern_1_has_incorrect_type_expected_string_got_2, subst, key, typeOfSubst));
createDiagnosticForOptionPathKeyValue(key, i, Diagnostics.Substitution_0_for_pattern_1_has_incorrect_type_expected_string_got_2, subst, key, typeOfSubst);
}
}
}
else {
programDiagnostics.add(createCompilerDiagnostic(Diagnostics.Substitutions_for_pattern_0_should_be_an_array, key));
createDiagnosticForOptionPaths(/*onKey*/ false, key, Diagnostics.Substitutions_for_pattern_0_should_be_an_array, key);
}
}
}
if (!options.sourceMap && !options.inlineSourceMap) {
if (options.inlineSources) {
programDiagnostics.add(createCompilerDiagnostic(Diagnostics.Option_0_can_only_be_used_when_either_option_inlineSourceMap_or_option_sourceMap_is_provided, "inlineSources"));
createDiagnosticForOptionName(Diagnostics.Option_0_can_only_be_used_when_either_option_inlineSourceMap_or_option_sourceMap_is_provided, "inlineSources");
}
if (options.sourceRoot) {
programDiagnostics.add(createCompilerDiagnostic(Diagnostics.Option_0_can_only_be_used_when_either_option_inlineSourceMap_or_option_sourceMap_is_provided, "sourceRoot"));
createDiagnosticForOptionName(Diagnostics.Option_0_can_only_be_used_when_either_option_inlineSourceMap_or_option_sourceMap_is_provided, "sourceRoot");
}
}
if (options.out && options.outFile) {
programDiagnostics.add(createCompilerDiagnostic(Diagnostics.Option_0_cannot_be_specified_with_option_1, "out", "outFile"));
createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_with_option_1, "out", "outFile");
}
if (options.mapRoot && !options.sourceMap) {
// Error to specify --mapRoot without --sourcemap
programDiagnostics.add(createCompilerDiagnostic(Diagnostics.Option_0_cannot_be_specified_without_specifying_option_1, "mapRoot", "sourceMap"));
createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_without_specifying_option_1, "mapRoot", "sourceMap");
}
if (options.declarationDir) {
if (!options.declaration) {
programDiagnostics.add(createCompilerDiagnostic(Diagnostics.Option_0_cannot_be_specified_without_specifying_option_1, "declarationDir", "declaration"));
createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_without_specifying_option_1, "declarationDir", "declaration");
}
if (options.out || options.outFile) {
programDiagnostics.add(createCompilerDiagnostic(Diagnostics.Option_0_cannot_be_specified_with_option_1, "declarationDir", options.out ? "out" : "outFile"));
createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_with_option_1, "declarationDir", options.out ? "out" : "outFile");
}
}
if (options.lib && options.noLib) {
programDiagnostics.add(createCompilerDiagnostic(Diagnostics.Option_0_cannot_be_specified_with_option_1, "lib", "noLib"));
createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_with_option_1, "lib", "noLib");
}
if (options.noImplicitUseStrict && (options.alwaysStrict === undefined ? options.strict : options.alwaysStrict)) {
programDiagnostics.add(createCompilerDiagnostic(Diagnostics.Option_0_cannot_be_specified_with_option_1, "noImplicitUseStrict", "alwaysStrict"));
createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_with_option_1, "noImplicitUseStrict", "alwaysStrict");
}
const languageVersion = options.target || ScriptTarget.ES3;
@ -1879,7 +1880,7 @@ namespace ts {
const firstNonAmbientExternalModuleSourceFile = forEach(files, f => isExternalModule(f) && !f.isDeclarationFile ? f : undefined);
if (options.isolatedModules) {
if (options.module === ModuleKind.None && languageVersion < ScriptTarget.ES2015) {
programDiagnostics.add(createCompilerDiagnostic(Diagnostics.Option_isolatedModules_can_only_be_used_when_either_option_module_is_provided_or_option_target_is_ES2015_or_higher));
createDiagnosticForOptionName(Diagnostics.Option_isolatedModules_can_only_be_used_when_either_option_module_is_provided_or_option_target_is_ES2015_or_higher, "isolatedModules", "target");
}
const firstNonExternalModuleSourceFile = forEach(files, f => !isExternalModule(f) && !f.isDeclarationFile ? f : undefined);
@ -1897,7 +1898,7 @@ namespace ts {
// Cannot specify module gen that isn't amd or system with --out
if (outFile) {
if (options.module && !(options.module === ModuleKind.AMD || options.module === ModuleKind.System)) {
programDiagnostics.add(createCompilerDiagnostic(Diagnostics.Only_amd_and_system_modules_are_supported_alongside_0, options.out ? "out" : "outFile"));
createDiagnosticForOptionName(Diagnostics.Only_amd_and_system_modules_are_supported_alongside_0, options.out ? "out" : "outFile", "module");
}
else if (options.module === undefined && firstNonAmbientExternalModuleSourceFile) {
const span = getErrorSpanForNode(firstNonAmbientExternalModuleSourceFile, firstNonAmbientExternalModuleSourceFile.externalModuleIndicator);
@ -1916,12 +1917,12 @@ namespace ts {
// If we failed to find a good common directory, but outDir is specified and at least one of our files is on a windows drive/URL/other resource, add a failure
if (options.outDir && dir === "" && forEach(files, file => getRootLength(file.fileName) > 1)) {
programDiagnostics.add(createCompilerDiagnostic(Diagnostics.Cannot_find_the_common_subdirectory_path_for_the_input_files));
createDiagnosticForOptionName(Diagnostics.Cannot_find_the_common_subdirectory_path_for_the_input_files, "outDir");
}
}
if (!options.noEmit && options.allowJs && options.declaration) {
programDiagnostics.add(createCompilerDiagnostic(Diagnostics.Option_0_cannot_be_specified_with_option_1, "allowJs", "declaration"));
createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_with_option_1, "allowJs", "declaration");
}
if (options.checkJs && !options.allowJs) {
@ -1930,19 +1931,19 @@ namespace ts {
if (options.emitDecoratorMetadata &&
!options.experimentalDecorators) {
programDiagnostics.add(createCompilerDiagnostic(Diagnostics.Option_0_cannot_be_specified_without_specifying_option_1, "emitDecoratorMetadata", "experimentalDecorators"));
createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_without_specifying_option_1, "emitDecoratorMetadata", "experimentalDecorators");
}
if (options.jsxFactory) {
if (options.reactNamespace) {
programDiagnostics.add(createCompilerDiagnostic(Diagnostics.Option_0_cannot_be_specified_with_option_1, "reactNamespace", "jsxFactory"));
createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_with_option_1, "reactNamespace", "jsxFactory");
}
if (!parseIsolatedEntityName(options.jsxFactory, languageVersion)) {
programDiagnostics.add(createCompilerDiagnostic(Diagnostics.Invalid_value_for_jsxFactory_0_is_not_a_valid_identifier_or_qualified_name, options.jsxFactory));
createOptionValueDiagnostic("jsxFactory", Diagnostics.Invalid_value_for_jsxFactory_0_is_not_a_valid_identifier_or_qualified_name, options.jsxFactory);
}
}
else if (options.reactNamespace && !isIdentifierText(options.reactNamespace, languageVersion)) {
programDiagnostics.add(createCompilerDiagnostic(Diagnostics.Invalid_value_for_reactNamespace_0_is_not_a_valid_identifier, options.reactNamespace));
createOptionValueDiagnostic("reactNamespace", Diagnostics.Invalid_value_for_reactNamespace_0_is_not_a_valid_identifier, options.reactNamespace);
}
// If the emit is enabled make sure that every output file is unique and not overwriting any of the input files
@ -1982,6 +1983,91 @@ namespace ts {
}
}
function createDiagnosticForOptionPathKeyValue(key: string, valueIndex: number, message: DiagnosticMessage, arg0: string | number, arg1: string | number, arg2?: string | number) {
let needCompilerDiagnostic = true;
const pathsSyntax = getOptionPathsSyntax();
for (const pathProp of pathsSyntax) {
if (isObjectLiteralExpression(pathProp.initializer)) {
for (const keyProps of getPropertyAssignment(pathProp.initializer, key)) {
if (isArrayLiteralExpression(keyProps.initializer) &&
keyProps.initializer.elements.length > valueIndex) {
programDiagnostics.add(createDiagnosticForNodeInSourceFile(options.configFile, keyProps.initializer.elements[valueIndex], message, arg0, arg1, arg2));
needCompilerDiagnostic = false;
}
}
}
}
if (needCompilerDiagnostic) {
programDiagnostics.add(createCompilerDiagnostic(message, arg0, arg1, arg2));
}
}
function createDiagnosticForOptionPaths(onKey: boolean, key: string, message: DiagnosticMessage, arg0: string | number) {
let needCompilerDiagnostic = true;
const pathsSyntax = getOptionPathsSyntax();
for (const pathProp of pathsSyntax) {
if (isObjectLiteralExpression(pathProp.initializer) &&
createOptionDiagnosticInObjectLiteralSyntax(
pathProp.initializer, onKey, key, /*key2*/ undefined,
message, arg0)) {
needCompilerDiagnostic = false;
}
}
if (needCompilerDiagnostic) {
programDiagnostics.add(createCompilerDiagnostic(message, arg0));
}
}
function getOptionPathsSyntax() {
const compilerOptionsObjectLiteralSyntax = getCompilerOptionsObjectLiteralSyntax();
if (compilerOptionsObjectLiteralSyntax) {
return getPropertyAssignment(compilerOptionsObjectLiteralSyntax, "paths");
}
return emptyArray;
}
function createDiagnosticForOptionName(message: DiagnosticMessage, option1: string, option2?: string) {
createDiagnosticForOption(/*onKey*/ true, option1, option2, message, option1, option2);
}
function createOptionValueDiagnostic(option1: string, message: DiagnosticMessage, arg0: string) {
createDiagnosticForOption(/*onKey*/ false, option1, /*option2*/ undefined, message, arg0);
}
function createDiagnosticForOption(onKey: boolean, option1: string, option2: string, message: DiagnosticMessage, arg0: string | number, arg1?: string | number) {
const compilerOptionsObjectLiteralSyntax = getCompilerOptionsObjectLiteralSyntax();
const needCompilerDiagnostic = !compilerOptionsObjectLiteralSyntax ||
!createOptionDiagnosticInObjectLiteralSyntax(compilerOptionsObjectLiteralSyntax, onKey, option1, option2, message, arg0, arg1);
if (needCompilerDiagnostic) {
programDiagnostics.add(createCompilerDiagnostic(message, arg0, arg1));
}
}
function getCompilerOptionsObjectLiteralSyntax() {
if (_compilerOptionsObjectLiteralSyntax === undefined) {
_compilerOptionsObjectLiteralSyntax = null; // tslint:disable-line:no-null-keyword
if (options.configFile && options.configFile.jsonObject) {
for (const prop of getPropertyAssignment(options.configFile.jsonObject, "compilerOptions")) {
if (isObjectLiteralExpression(prop.initializer)) {
_compilerOptionsObjectLiteralSyntax = prop.initializer;
break;
}
}
}
}
return _compilerOptionsObjectLiteralSyntax;
}
function createOptionDiagnosticInObjectLiteralSyntax(objectLiteral: ObjectLiteralExpression, onKey: boolean, key1: string, key2: string, message: DiagnosticMessage, arg0: string | number, arg1?: string | number): boolean {
const props = getPropertyAssignment(objectLiteral, key1, key2);
for (const prop of props) {
programDiagnostics.add(createDiagnosticForNodeInSourceFile(options.configFile, onKey ? prop.name : prop.initializer, message, arg0, arg1));
}
return !!props.length;
}
function blockEmittingOfFile(emitFileName: string, diag: Diagnostic) {
hasEmitBlockingDiagnostics.set(toPath(emitFileName, currentDirectory, getCanonicalFileName), true);
programDiagnostics.add(diag);

View File

@ -363,7 +363,7 @@ namespace ts {
};
}
export function getLineAndCharacterOfPosition(sourceFile: SourceFile, position: number): LineAndCharacter {
export function getLineAndCharacterOfPosition(sourceFile: SourceFileLike, position: number): LineAndCharacter {
return computeLineAndCharacterOfPosition(getLineStarts(sourceFile), position);
}

View File

@ -22,7 +22,7 @@ namespace ts {
*
* @param sourceFile The source file.
*/
setSourceFile(sourceFile: SourceFile): void;
setSourceFile(sourceFile: SourceMapSource): void;
/**
* Emits a mapping.
@ -81,7 +81,7 @@ namespace ts {
export function createSourceMapWriter(host: EmitHost, writer: EmitTextWriter): SourceMapWriter {
const compilerOptions = host.getCompilerOptions();
const extendedDiagnostics = compilerOptions.extendedDiagnostics;
let currentSourceFile: SourceFile;
let currentSource: SourceMapSource;
let currentSourceText: string;
let sourceMapDir: string; // The directory in which sourcemap will be
@ -109,6 +109,13 @@ namespace ts {
getSourceMappingURL,
};
/**
* Skips trivia such as comments and white-space that can optionally be overridden by the source map source
*/
function skipSourceTrivia(pos: number): number {
return currentSource.skipTrivia ? currentSource.skipTrivia(pos) : skipTrivia(currentSourceText, pos);
}
/**
* Initialize the SourceMapWriter for a new output file.
*
@ -125,7 +132,7 @@ namespace ts {
reset();
}
currentSourceFile = undefined;
currentSource = undefined;
currentSourceText = undefined;
// Current source map file and its index in the sources list
@ -192,7 +199,7 @@ namespace ts {
return;
}
currentSourceFile = undefined;
currentSource = undefined;
sourceMapDir = undefined;
sourceMapSourceIndex = undefined;
lastRecordedSourceMapSpan = undefined;
@ -263,7 +270,7 @@ namespace ts {
performance.mark("beforeSourcemap");
}
const sourceLinePos = getLineAndCharacterOfPosition(currentSourceFile, pos);
const sourceLinePos = getLineAndCharacterOfPosition(currentSource, pos);
// Convert the location to be one-based.
sourceLinePos.line++;
@ -320,14 +327,22 @@ namespace ts {
if (node) {
const emitNode = node.emitNode;
const emitFlags = emitNode && emitNode.flags;
const { pos, end } = emitNode && emitNode.sourceMapRange || node;
const range = emitNode && emitNode.sourceMapRange;
const { pos, end } = range || node;
let source = range && range.source;
const oldSource = currentSource;
if (source === oldSource) source = undefined;
if (source) setSourceFile(source);
if (node.kind !== SyntaxKind.NotEmittedStatement
&& (emitFlags & EmitFlags.NoLeadingSourceMap) === 0
&& pos >= 0) {
emitPos(skipTrivia(currentSourceText, pos));
emitPos(skipSourceTrivia(pos));
}
if (source) setSourceFile(oldSource);
if (emitFlags & EmitFlags.NoNestedSourceMaps) {
disabled = true;
emitCallback(hint, node);
@ -337,11 +352,15 @@ namespace ts {
emitCallback(hint, node);
}
if (source) setSourceFile(source);
if (node.kind !== SyntaxKind.NotEmittedStatement
&& (emitFlags & EmitFlags.NoTrailingSourceMap) === 0
&& end >= 0) {
emitPos(end);
}
if (source) setSourceFile(oldSource);
}
}
@ -362,7 +381,7 @@ namespace ts {
const emitFlags = emitNode && emitNode.flags;
const range = emitNode && emitNode.tokenSourceMapRanges && emitNode.tokenSourceMapRanges[token];
tokenPos = skipTrivia(currentSourceText, range ? range.pos : tokenPos);
tokenPos = skipSourceTrivia(range ? range.pos : tokenPos);
if ((emitFlags & EmitFlags.NoTokenLeadingSourceMaps) === 0 && tokenPos >= 0) {
emitPos(tokenPos);
}
@ -382,13 +401,13 @@ namespace ts {
*
* @param sourceFile The source file.
*/
function setSourceFile(sourceFile: SourceFile) {
function setSourceFile(sourceFile: SourceMapSource) {
if (disabled) {
return;
}
currentSourceFile = sourceFile;
currentSourceText = currentSourceFile.text;
currentSource = sourceFile;
currentSourceText = currentSource.text;
// Add the file to tsFilePaths
// If sourceroot option: Use the relative path corresponding to the common directory path
@ -396,7 +415,7 @@ namespace ts {
const sourcesDirectoryPath = compilerOptions.sourceRoot ? host.getCommonSourceDirectory() : sourceMapDir;
const source = getRelativePathToDirectoryOrUrl(sourcesDirectoryPath,
currentSourceFile.fileName,
currentSource.fileName,
host.getCurrentDirectory(),
host.getCanonicalFileName,
/*isAbsolutePathAnUrl*/ true);
@ -407,10 +426,10 @@ namespace ts {
sourceMapData.sourceMapSources.push(source);
// The one that can be used from program to get the actual source file
sourceMapData.inputSourceFileNames.push(currentSourceFile.fileName);
sourceMapData.inputSourceFileNames.push(currentSource.fileName);
if (compilerOptions.inlineSources) {
sourceMapData.sourceMapSourcesContent.push(currentSourceFile.text);
sourceMapData.sourceMapSourcesContent.push(currentSource.text);
}
}
}

View File

@ -35,6 +35,10 @@ namespace ts {
getDirectories(path: string): string[];
readDirectory(path: string, extensions?: string[], exclude?: string[], include?: string[]): string[];
getModifiedTime?(path: string): Date;
/**
* This should be cryptographically secure.
* A good implementation is node.js' `crypto.createHash`. (https://nodejs.org/api/crypto.html#crypto_crypto_createhash_algorithm)
*/
createHash?(data: string): string;
getMemoryUsage?(): number;
exit(exitCode?: number): void;
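A minimal sketch of a host implementation satisfying the optional `createHash` contract above, assuming a Node.js environment; the choice of SHA-256 and the function name are illustrative.

import { createHash } from "crypto";

// Hash the given text and return a stable hex digest.
function createHashForSys(data: string): string {
    return createHash("sha256").update(data).digest("hex");
}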

View File

@ -1,4 +1,5 @@
/// <reference path="visitor.ts" />
/// <reference path="transformers/utilities.ts" />
/// <reference path="transformers/ts.ts" />
/// <reference path="transformers/jsx.ts" />
/// <reference path="transformers/esnext.ts" />
@ -239,7 +240,7 @@ namespace ts {
function hoistVariableDeclaration(name: Identifier): void {
Debug.assert(state > TransformationState.Uninitialized, "Cannot modify the lexical environment during initialization.");
Debug.assert(state < TransformationState.Completed, "Cannot modify the lexical environment after transformation has completed.");
const decl = createVariableDeclaration(name);
const decl = setEmitFlags(createVariableDeclaration(name), EmitFlags.NoNestedSourceMaps);
if (!lexicalEnvironmentVariableDeclarations) {
lexicalEnvironmentVariableDeclarations = [decl];
}

View File

@ -2095,9 +2095,9 @@ namespace ts {
enableSubstitutionsForBlockScopedBindings();
}
const declarations = flatten(map(node.declarations, node.flags & NodeFlags.Let
const declarations = flatMap(node.declarations, node.flags & NodeFlags.Let
? visitVariableDeclarationInLetDeclarationList
: visitVariableDeclaration));
: visitVariableDeclaration);
const declarationList = createVariableDeclarationList(declarations);
setOriginalNode(declarationList, node);
@ -3401,28 +3401,19 @@ namespace ts {
classBodyStart++;
}
// We reuse the comment and source-map positions from the original variable statement
// and class alias, while converting the function declaration for the class constructor
// into an expression.
// The next statement is the function declaration.
statements.push(funcStatements[classBodyStart]);
classBodyStart++;
// Add the class alias following the declaration.
statements.push(
updateVariableStatement(
varStatement,
/*modifiers*/ undefined,
updateVariableDeclarationList(varStatement.declarationList, [
updateVariableDeclaration(variable,
variable.name,
/*type*/ undefined,
updateBinary(aliasAssignment,
aliasAssignment.left,
convertFunctionDeclarationToExpression(
cast(funcStatements[classBodyStart], isFunctionDeclaration)
)
)
)
])
createStatement(
createAssignment(
aliasAssignment.left,
cast(variable.name, isIdentifier)
)
)
);
classBodyStart++;
}
// Find the trailing 'return' statement (4)

View File

@ -351,8 +351,10 @@ namespace ts {
);
}
function awaitAsYield(expression: Expression) {
return createYield(/*asteriskToken*/ undefined, enclosingFunctionFlags & FunctionFlags.Generator ? createAwaitHelper(context, expression) : expression);
function createDownlevelAwait(expression: Expression) {
return enclosingFunctionFlags & FunctionFlags.Generator
? createYield(/*asteriskToken*/ undefined, createAwaitHelper(context, expression))
: createAwait(expression);
}
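Hedged source-level illustration of the two cases the helper now separates (assuming `--lib esnext.asynciterable` or an ES2018 target); the lowering described in the comments is approximate.

async function* relay(items: AsyncIterable<number>) {
    for await (const item of items) {   // lowered through a yield of the await helper inside the generator
        yield item * 2;
    }
}

async function consume(items: AsyncIterable<number>) {
    for await (const item of items) {   // lowered through a plain `await` expression
        console.log(item);
    }
}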
function transformForAwaitOfStatement(node: ForOfStatement, outermostLabeledStatement: LabeledStatement) {
@ -385,11 +387,11 @@ namespace ts {
EmitFlags.NoHoisting
),
/*condition*/ createComma(
createAssignment(result, awaitAsYield(callNext)),
createAssignment(result, createDownlevelAwait(callNext)),
createLogicalNot(getDone)
),
/*incrementor*/ undefined,
/*statement*/ convertForOfStatementHead(node, awaitAsYield(getValue))
/*statement*/ convertForOfStatementHead(node, createDownlevelAwait(getValue))
),
/*location*/ node
),
@ -434,7 +436,7 @@ namespace ts {
createPropertyAccess(iterator, "return")
)
),
createStatement(awaitAsYield(callReturn))
createStatement(createDownlevelAwait(callReturn))
),
EmitFlags.SingleLine
)

View File

@ -640,10 +640,13 @@ namespace ts {
return undefined;
}
return createStatement(
inlineExpressions(
map(variables, transformInitializedVariable)
)
return setSourceMapRange(
createStatement(
inlineExpressions(
map(variables, transformInitializedVariable)
)
),
node
);
}
}
@ -1281,9 +1284,12 @@ namespace ts {
}
function transformInitializedVariable(node: VariableDeclaration) {
return createAssignment(
<Identifier>getSynthesizedClone(node.name),
visitNode(node.initializer, visitor, isExpression)
return setSourceMapRange(
createAssignment(
setSourceMapRange(<Identifier>getSynthesizedClone(node.name), node.name),
visitNode(node.initializer, visitor, isExpression)
),
node
);
}

View File

@ -103,7 +103,7 @@ namespace ts {
addRange(statements, endLexicalEnvironment());
const updated = updateSourceFileNode(node, setTextRange(createNodeArray(statements), node.statements));
if (currentModuleInfo.hasExportStarsToExportValues) {
if (currentModuleInfo.hasExportStarsToExportValues && !compilerOptions.importHelpers) {
// If we have any `export * from ...` declarations
// we need to inform the emitter to add the __export helper.
addEmitHelper(updated, exportStarHelper);
@ -408,7 +408,7 @@ namespace ts {
addRange(statements, endLexicalEnvironment());
const body = createBlock(statements, /*multiLine*/ true);
if (currentModuleInfo.hasExportStarsToExportValues) {
if (currentModuleInfo.hasExportStarsToExportValues && !compilerOptions.importHelpers) {
// If we have any `export * from ...` declarations
// we need to inform the emitter to add the __export helper.
addEmitHelper(body, exportStarHelper);
@ -514,14 +514,14 @@ namespace ts {
function visitImportCallExpression(node: ImportCall): Expression {
switch (compilerOptions.module) {
case ModuleKind.CommonJS:
return transformImportCallExpressionCommonJS(node);
case ModuleKind.AMD:
return transformImportCallExpressionAMD(node);
case ModuleKind.UMD:
return transformImportCallExpressionUMD(node);
case ModuleKind.CommonJS:
default:
return transformImportCallExpressionCommonJS(node);
}
Debug.fail("All supported module kind in this transformation step should have been handled");
}
function transformImportCallExpressionUMD(node: ImportCall): Expression {
@ -833,15 +833,7 @@ namespace ts {
// export * from "mod";
return setTextRange(
createStatement(
createCall(
createIdentifier("__export"),
/*typeArguments*/ undefined,
[
moduleKind !== ModuleKind.AMD
? createRequireCall(node)
: generatedName
]
)
createExportStarHelper(context, moduleKind !== ModuleKind.AMD ? createRequireCall(node) : generatedName)
),
node
);
@ -1598,9 +1590,17 @@ namespace ts {
text: `
function __export(m) {
for (var p in m) if (!exports.hasOwnProperty(p)) exports[p] = m[p];
}`
}
`
};
function createExportStarHelper(context: TransformationContext, module: Expression) {
const compilerOptions = context.getCompilerOptions();
return compilerOptions.importHelpers
? createCall(getHelperName("__exportStar"), /*typeArguments*/ undefined, [module, createIdentifier("exports")])
: createCall(createIdentifier("__export"), /*typeArguments*/ undefined, [module]);
}
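Roughly, for a re-export such as the one below, the CommonJS transform now defers to the tslib helper when `--importHelpers` is on and otherwise keeps the inline `__export` function; the emitted shapes in the comments are approximate, and the module path is made up.

// source
export * from "./utilities";

// with --importHelpers (approximate emit):
//   var tslib_1 = require("tslib");
//   tslib_1.__exportStar(require("./utilities"), exports);

// without --importHelpers (approximate emit):
//   function __export(m) { for (var p in m) if (!exports.hasOwnProperty(p)) exports[p] = m[p]; }
//   __export(require("./utilities"));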
// emit helper for dynamic import
const dynamicImportUMDHelper: EmitHelper = {
name: "typescript:dynamicimport-sync-require",

View File

@ -0,0 +1,180 @@
/* @internal */
namespace ts {
export function getOriginalNodeId(node: Node) {
node = getOriginalNode(node);
return node ? getNodeId(node) : 0;
}
export interface ExternalModuleInfo {
externalImports: (ImportDeclaration | ImportEqualsDeclaration | ExportDeclaration)[]; // imports of other external modules
externalHelpersImportDeclaration: ImportDeclaration | undefined; // import of external helpers
exportSpecifiers: Map<ExportSpecifier[]>; // export specifiers by name
exportedBindings: Identifier[][]; // exported names of local declarations
exportedNames: Identifier[]; // all exported names local to module
exportEquals: ExportAssignment | undefined; // an export= declaration if one was present
hasExportStarsToExportValues: boolean; // whether this module contains export*
}
export function collectExternalModuleInfo(sourceFile: SourceFile, resolver: EmitResolver, compilerOptions: CompilerOptions): ExternalModuleInfo {
const externalImports: (ImportDeclaration | ImportEqualsDeclaration | ExportDeclaration)[] = [];
const exportSpecifiers = createMultiMap<ExportSpecifier>();
const exportedBindings: Identifier[][] = [];
const uniqueExports = createMap<boolean>();
let exportedNames: Identifier[];
let hasExportDefault = false;
let exportEquals: ExportAssignment = undefined;
let hasExportStarsToExportValues = false;
for (const node of sourceFile.statements) {
switch (node.kind) {
case SyntaxKind.ImportDeclaration:
// import "mod"
// import x from "mod"
// import * as x from "mod"
// import { x, y } from "mod"
externalImports.push(<ImportDeclaration>node);
break;
case SyntaxKind.ImportEqualsDeclaration:
if ((<ImportEqualsDeclaration>node).moduleReference.kind === SyntaxKind.ExternalModuleReference) {
// import x = require("mod")
externalImports.push(<ImportEqualsDeclaration>node);
}
break;
case SyntaxKind.ExportDeclaration:
if ((<ExportDeclaration>node).moduleSpecifier) {
if (!(<ExportDeclaration>node).exportClause) {
// export * from "mod"
externalImports.push(<ExportDeclaration>node);
hasExportStarsToExportValues = true;
}
else {
// export { x, y } from "mod"
externalImports.push(<ExportDeclaration>node);
}
}
else {
// export { x, y }
for (const specifier of (<ExportDeclaration>node).exportClause.elements) {
if (!uniqueExports.get(specifier.name.text)) {
const name = specifier.propertyName || specifier.name;
exportSpecifiers.add(name.text, specifier);
const decl = resolver.getReferencedImportDeclaration(name)
|| resolver.getReferencedValueDeclaration(name);
if (decl) {
multiMapSparseArrayAdd(exportedBindings, getOriginalNodeId(decl), specifier.name);
}
uniqueExports.set(specifier.name.text, true);
exportedNames = append(exportedNames, specifier.name);
}
}
}
break;
case SyntaxKind.ExportAssignment:
if ((<ExportAssignment>node).isExportEquals && !exportEquals) {
// export = x
exportEquals = <ExportAssignment>node;
}
break;
case SyntaxKind.VariableStatement:
if (hasModifier(node, ModifierFlags.Export)) {
for (const decl of (<VariableStatement>node).declarationList.declarations) {
exportedNames = collectExportedVariableInfo(decl, uniqueExports, exportedNames);
}
}
break;
case SyntaxKind.FunctionDeclaration:
if (hasModifier(node, ModifierFlags.Export)) {
if (hasModifier(node, ModifierFlags.Default)) {
// export default function() { }
if (!hasExportDefault) {
multiMapSparseArrayAdd(exportedBindings, getOriginalNodeId(node), getDeclarationName(<FunctionDeclaration>node));
hasExportDefault = true;
}
}
else {
// export function x() { }
const name = (<FunctionDeclaration>node).name;
if (!uniqueExports.get(name.text)) {
multiMapSparseArrayAdd(exportedBindings, getOriginalNodeId(node), name);
uniqueExports.set(name.text, true);
exportedNames = append(exportedNames, name);
}
}
}
break;
case SyntaxKind.ClassDeclaration:
if (hasModifier(node, ModifierFlags.Export)) {
if (hasModifier(node, ModifierFlags.Default)) {
// export default class { }
if (!hasExportDefault) {
multiMapSparseArrayAdd(exportedBindings, getOriginalNodeId(node), getDeclarationName(<ClassDeclaration>node));
hasExportDefault = true;
}
}
else {
// export class x { }
const name = (<ClassDeclaration>node).name;
if (!uniqueExports.get(name.text)) {
multiMapSparseArrayAdd(exportedBindings, getOriginalNodeId(node), name);
uniqueExports.set(name.text, true);
exportedNames = append(exportedNames, name);
}
}
}
break;
}
}
const externalHelpersModuleName = getOrCreateExternalHelpersModuleNameIfNeeded(sourceFile, compilerOptions, hasExportStarsToExportValues);
const externalHelpersImportDeclaration = externalHelpersModuleName && createImportDeclaration(
/*decorators*/ undefined,
/*modifiers*/ undefined,
createImportClause(/*name*/ undefined, createNamespaceImport(externalHelpersModuleName)),
createLiteral(externalHelpersModuleNameText));
if (externalHelpersImportDeclaration) {
externalImports.unshift(externalHelpersImportDeclaration);
}
return { externalImports, exportSpecifiers, exportEquals, hasExportStarsToExportValues, exportedBindings, exportedNames, externalHelpersImportDeclaration };
}
function collectExportedVariableInfo(decl: VariableDeclaration | BindingElement, uniqueExports: Map<boolean>, exportedNames: Identifier[]) {
if (isBindingPattern(decl.name)) {
for (const element of decl.name.elements) {
if (!isOmittedExpression(element)) {
exportedNames = collectExportedVariableInfo(element, uniqueExports, exportedNames);
}
}
}
else if (!isGeneratedIdentifier(decl.name)) {
if (!uniqueExports.get(decl.name.text)) {
uniqueExports.set(decl.name.text, true);
exportedNames = append(exportedNames, decl.name);
}
}
return exportedNames;
}
/** Use a sparse array as a multi-map. */
function multiMapSparseArrayAdd<V>(map: V[][], key: number, value: V): V[] {
let values = map[key];
if (values) {
values.push(value);
}
else {
map[key] = values = [value];
}
return values;
}
}
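A hedged usage sketch of the sparse-array multi-map above: keys are node ids, values accumulate per key, and unused indices remain holes. The ids and names below are made up, and the local helper mirrors (rather than reuses) the function in this file.

const exportedBindingsExample: string[][] = [];

function addBinding(map: string[][], nodeId: number, name: string): void {
    (map[nodeId] || (map[nodeId] = [])).push(name);
}

addBinding(exportedBindingsExample, 3, "foo");
addBinding(exportedBindingsExample, 3, "bar");
addBinding(exportedBindingsExample, 7, "baz");
// exportedBindingsExample[3] -> ["foo", "bar"], exportedBindingsExample[7] -> ["baz"];
// indices 0-2 and 4-6 stay as holes.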

View File

@ -223,20 +223,13 @@ namespace ts {
return;
}
const result = parseConfigFileTextToJson(configFileName, cachedConfigFileText);
const configObject = result.config;
if (!configObject) {
reportDiagnostics([result.error], /* compilerHost */ undefined);
sys.exit(ExitStatus.DiagnosticsPresent_OutputsSkipped);
return;
}
const result = parseJsonText(configFileName, cachedConfigFileText);
reportDiagnostics(result.parseDiagnostics, /* compilerHost */ undefined);
const cwd = sys.getCurrentDirectory();
const configParseResult = parseJsonConfigFileContent(configObject, sys, getNormalizedAbsolutePath(getDirectoryPath(configFileName), cwd), commandLine.options, getNormalizedAbsolutePath(configFileName, cwd));
if (configParseResult.errors.length > 0) {
reportDiagnostics(configParseResult.errors, /* compilerHost */ undefined);
sys.exit(ExitStatus.DiagnosticsPresent_OutputsSkipped);
return;
}
const configParseResult = parseJsonSourceFileConfigFileContent(result, sys, getNormalizedAbsolutePath(getDirectoryPath(configFileName), cwd), commandLine.options, getNormalizedAbsolutePath(configFileName, cwd));
reportDiagnostics(configParseResult.errors, /* compilerHost */ undefined);
if (isWatchSet(configParseResult.options)) {
if (!sys.watchFile) {
reportDiagnostic(createCompilerDiagnostic(Diagnostics.The_current_host_does_not_support_the_0_option, "--watch"), /* host */ undefined);
@ -662,6 +655,10 @@ namespace ts {
}
}
if (ts.Debug.isDebugging) {
ts.Debug.enableDebugInfo();
}
if (ts.sys.tryEnableSourceMapsForHost && /^development$/i.test(ts.sys.getEnvironmentVariable("NODE_ENV"))) {
ts.sys.tryEnableSourceMapsForHost();
}

View File

@ -17,6 +17,7 @@
"checker.ts",
"factory.ts",
"visitor.ts",
"transformers/utilities.ts",
"transformers/ts.ts",
"transformers/jsx.ts",
"transformers/esnext.ts",

View File

@ -374,6 +374,7 @@ namespace ts {
JSDocComment,
JSDocTag,
JSDocAugmentsTag,
JSDocClassTag,
JSDocParameterTag,
JSDocReturnTag,
JSDocTypeTag,
@ -424,7 +425,7 @@ namespace ts {
FirstNode = QualifiedName,
FirstJSDocNode = JSDocTypeExpression,
LastJSDocNode = JSDocLiteralType,
FirstJSDocTagNode = JSDocComment,
FirstJSDocTagNode = JSDocTag,
LastJSDocTagNode = JSDocLiteralType
}
@ -450,13 +451,16 @@ namespace ts {
ThisNodeOrAnySubNodesHasError = 1 << 17, // If this node or any of its children had an error
HasAggregatedChildData = 1 << 18, // If we've computed data from children and cached it in this node
// This flag will be set to true when the parse encounter dynamic import so that post-parsing process of module resolution
// will not walk the tree if the flag is not set. However, this flag is just a approximation because once it is set, the flag never get reset.
// (hence it is named "possiblyContainDynamicImport").
// During editing, if dynamic import is remove, incremental parsing will *NOT* update this flag. This will then causes walking of the tree during module resolution.
// However, the removal operation should not occur often and in the case of the removal, it is likely that users will add back the import anyway.
// The advantage of this approach is its simplicity. For the case of batch compilation, we garuntee that users won't have to pay the price of walking the tree if dynamic import isn't used.
PossiblyContainDynamicImport = 1 << 19,
// This flag will be set when the parser encounters a dynamic import expression so that module resolution
// will not have to walk the tree if the flag is not set. However, this flag is just an approximation because
// once it is set, the flag never gets cleared (hence why it's named "PossiblyContainsDynamicImport").
// During editing, if dynamic import is removed, incremental parsing will *NOT* update this flag. This means that the tree will always be traversed
// during module resolution. However, the removal operation should not occur often and in the case of the
// removal, it is likely that users will add the import back anyway.
// The advantage of this approach is its simplicity. For the case of batch compilation,
// we guarantee that users won't have to pay the price of walking the tree if a dynamic import isn't used.
/* @internal */
PossiblyContainsDynamicImport = 1 << 19,
BlockScoped = Let | Const,
@ -1704,6 +1708,8 @@ namespace ts {
incrementor?: Expression;
}
export type ForInOrOfStatement = ForInStatement | ForOfStatement;
export interface ForInStatement extends IterationStatement {
kind: SyntaxKind.ForInStatement;
initializer: ForInitializer;
@ -1793,7 +1799,7 @@ namespace ts {
block: Block;
}
export type DeclarationWithTypeParameters = SignatureDeclaration | ClassLikeDeclaration | InterfaceDeclaration | TypeAliasDeclaration;
export type DeclarationWithTypeParameters = SignatureDeclaration | ClassLikeDeclaration | InterfaceDeclaration | TypeAliasDeclaration | JSDocTemplateTag;
export interface ClassLikeDeclaration extends NamedDeclaration {
name?: Identifier;
@ -2132,6 +2138,10 @@ namespace ts {
typeExpression: JSDocTypeExpression;
}
export interface JSDocClassTag extends JSDocTag {
kind: SyntaxKind.JSDocClassTag;
}
export interface JSDocTemplateTag extends JSDocTag {
kind: SyntaxKind.JSDocTemplateTag;
typeParameters: NodeArray<TypeParameterDeclaration>;
@ -2365,6 +2375,11 @@ namespace ts {
sourceFiles: SourceFile[];
}
export interface JsonSourceFile extends SourceFile {
jsonObject?: ObjectLiteralExpression;
extendedSourceFiles?: string[];
}
export interface ScriptReferenceHost {
getCompilerOptions(): CompilerOptions;
getSourceFile(fileName: string): SourceFile;
@ -2550,7 +2565,6 @@ namespace ts {
getNonNullableType(type: Type): Type;
/** Note that the resulting nodes cannot be checked. */
typeToTypeNode(type: Type, enclosingDeclaration?: Node, flags?: NodeBuilderFlags): TypeNode;
/** Note that the resulting nodes cannot be checked. */
signatureToSignatureDeclaration(signature: Signature, kind: SyntaxKind, enclosingDeclaration?: Node, flags?: NodeBuilderFlags): SignatureDeclaration;
@ -2620,6 +2634,9 @@ namespace ts {
* Does not include properties of primitive types.
*/
/* @internal */ getAllPossiblePropertiesOfType(type: Type): Symbol[];
/* @internal */ getJsxNamespace(): string;
/* @internal */ resolveNameAtLocation(location: Node, name: string, meaning: SymbolFlags): Symbol | undefined;
}
export enum NodeBuilderFlags {
@ -2802,6 +2819,7 @@ namespace ts {
collectLinkedAliases(node: Identifier): Node[];
isImplementationOfOverload(node: FunctionLikeDeclaration): boolean | undefined;
isRequiredInitializedParameter(node: ParameterDeclaration): boolean;
isOptionalUninitializedParameterProperty(node: ParameterDeclaration): boolean;
writeTypeOfDeclaration(declaration: AccessorDeclaration | VariableLikeDeclaration, enclosingDeclaration: Node, flags: TypeFormatFlags, writer: SymbolWriter): void;
writeReturnTypeOfSignatureDeclaration(signatureDeclaration: SignatureDeclaration, enclosingDeclaration: Node, flags: TypeFormatFlags, writer: SymbolWriter): void;
writeTypeOfExpression(expr: Expression, enclosingDeclaration: Node, flags: TypeFormatFlags, writer: SymbolWriter): void;
@ -3394,8 +3412,6 @@ namespace ts {
signature: Signature; // Generic signature for which inferences are made
inferences: InferenceInfo[]; // Inferences made for each type parameter
flags: InferenceFlags; // Inference flags
failedTypeParameterIndex?: number; // Index of type parameter for which inference failed
// It is optional because in contextual signature instantiation, nothing fails
}
/* @internal */
@ -3477,6 +3493,7 @@ namespace ts {
charset?: string;
checkJs?: boolean;
/* @internal */ configFilePath?: string;
/* @internal */ readonly configFile?: JsonSourceFile;
declaration?: boolean;
declarationDir?: string;
/* @internal */ diagnostics?: boolean;
@ -3512,6 +3529,7 @@ namespace ts {
noImplicitAny?: boolean; // Always combine with strict property
noImplicitReturns?: boolean;
noImplicitThis?: boolean; // Always combine with strict property
noStrictGenericChecks?: boolean;
noUnusedLocals?: boolean;
noUnusedParameters?: boolean;
noImplicitUseStrict?: boolean;
@ -3548,7 +3566,7 @@ namespace ts {
/*@internal*/ version?: boolean;
/*@internal*/ watch?: boolean;
[option: string]: CompilerOptionsValue | undefined;
[option: string]: CompilerOptionsValue | JsonSourceFile | undefined;
}
export interface TypeAcquisition {
@ -3608,7 +3626,8 @@ namespace ts {
JSX = 2,
TS = 3,
TSX = 4,
External = 5
External = 5,
JSON = 6
}
export const enum ScriptTarget {
@ -3680,6 +3699,8 @@ namespace ts {
/* @internal */
export interface TsConfigOnlyOption extends CommandLineOptionBase {
type: "object";
elementOptions?: Map<CommandLineOption>;
extraKeyDiagnosticMessage?: DiagnosticMessage;
}
/* @internal */
@ -3871,13 +3892,12 @@ namespace ts {
extension: Extension;
}
export enum Extension {
Ts,
Tsx,
Dts,
Js,
Jsx,
LastTypeScriptExtension = Dts
export const enum Extension {
Ts = ".ts",
Tsx = ".tsx",
Dts = ".d.ts",
Js = ".js",
Jsx = ".jsx"
}
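With string-valued members, an extension can be concatenated straight into a path, which is what uses such as `fileName + Extension.Ts` and `removeFileExtension(path) + Extension.Dts` elsewhere in this diff rely on. A hedged sketch using an illustrative copy of the enum rather than the compiler's declaration:

const enum Ext {
    Ts = ".ts",
    Dts = ".d.ts",
    Js = ".js",
}

function toDeclarationFileName(outputJsPath: string): string {
    // e.g. "dist/app.js" -> "dist/app.d.ts"
    return outputJsPath.replace(/\.js$/, "") + Ext.Dts;
}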
export interface ResolvedModuleWithFailedLookupLocations {
@ -4006,18 +4026,29 @@ namespace ts {
ES2015FunctionSyntaxMask = ContainsCapturedLexicalThis | ContainsDefaultValueAssignments,
}
export interface SourceMapRange extends TextRange {
source?: SourceMapSource;
}
export interface SourceMapSource {
fileName: string;
text: string;
/* @internal */ lineMap: number[];
skipTrivia?: (pos: number) => number;
}
/* @internal */
export interface EmitNode {
annotatedNodes?: Node[]; // Tracks Parse-tree nodes with EmitNodes for eventual cleanup.
flags?: EmitFlags; // Flags that customize emit
leadingComments?: SynthesizedComment[]; // Synthesized leading comments
annotatedNodes?: Node[]; // Tracks Parse-tree nodes with EmitNodes for eventual cleanup.
flags?: EmitFlags; // Flags that customize emit
leadingComments?: SynthesizedComment[]; // Synthesized leading comments
trailingComments?: SynthesizedComment[]; // Synthesized trailing comments
commentRange?: TextRange; // The text range to use when emitting leading or trailing comments
sourceMapRange?: TextRange; // The text range to use when emitting leading or trailing source mappings
tokenSourceMapRanges?: TextRange[]; // The text range to use when emitting source mappings for tokens
constantValue?: string | number; // The constant value of an expression
externalHelpersModuleName?: Identifier; // The local name for an imported helpers module
helpers?: EmitHelper[]; // Emit helpers for the node
commentRange?: TextRange; // The text range to use when emitting leading or trailing comments
sourceMapRange?: SourceMapRange; // The text range to use when emitting leading or trailing source mappings
tokenSourceMapRanges?: SourceMapRange[]; // The text range to use when emitting source mappings for tokens
constantValue?: string | number; // The constant value of an expression
externalHelpersModuleName?: Identifier; // The local name for an imported helpers module
helpers?: EmitHelper[]; // Emit helpers for the node
}
export const enum EmitFlags {
@ -4079,6 +4110,7 @@ namespace ts {
AsyncGenerator = 1 << 12, // __asyncGenerator (used by ES2017 async generator transformation)
AsyncDelegator = 1 << 13, // __asyncDelegator (used by ES2017 async generator yield* transformation)
AsyncValues = 1 << 14, // __asyncValues (used by ES2017 for..await..of transformation)
ExportStar = 1 << 15, // __exportStar (used by CommonJS/AMD/UMD module transformation)
// Helpers included by ES2015 for..of
ForOfIncludes = Values,
@ -4096,7 +4128,7 @@ namespace ts {
SpreadIncludes = Read | Spread,
FirstEmitHelper = Extends,
LastEmitHelper = AsyncValues
LastEmitHelper = ExportStar
}
export const enum EmitHint {

View File

@ -309,6 +309,14 @@ namespace ts {
return getSourceTextOfNodeFromSourceFile(getSourceFileOfNode(node), node, includeTrivia);
}
/**
* Gets flags that control emit behavior of a node.
*/
export function getEmitFlags(node: Node): EmitFlags | undefined {
const emitNode = node.emitNode;
return emitNode && emitNode.flags;
}
export function getLiteralText(node: LiteralLikeNode, sourceFile: SourceFile) {
// If we don't need to downlevel and we can reach the original source text using
// the node's parent reference, then simply get the text as it was originally written.
@ -882,6 +890,15 @@ namespace ts {
return predicate && predicate.kind === TypePredicateKind.This;
}
export function getPropertyAssignment(objectLiteral: ObjectLiteralExpression, key: string, key2?: string) {
return <PropertyAssignment[]>filter(objectLiteral.properties, property => {
if (property.kind === SyntaxKind.PropertyAssignment) {
const propName = getTextOfPropertyName(property.name);
return key === propName || (key2 && key2 === propName);
}
});
}
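A hedged sketch of how a helper with this shape supports the config-file option diagnostics earlier in this diff: matching property assignments are pulled out of the parsed object literal so a diagnostic can point at the key or value in the user's tsconfig. The simplified types and the sample data below are hypothetical, not the compiler's AST.

interface SimplePropertyAssignment {
    name: string;
    value: unknown;
}

function getPropertyAssignmentsByName(
    properties: SimplePropertyAssignment[],
    key: string,
    key2?: string,
): SimplePropertyAssignment[] {
    return properties.filter(p => p.name === key || (key2 !== undefined && p.name === key2));
}

// Hypothetical "compilerOptions" contents:
const props: SimplePropertyAssignment[] = [
    { name: "out", value: "legacy.js" },
    { name: "outFile", value: "bundle.js" },
];

// Both assignments come back, so a diagnostic can be attached to "out" and "outFile"
// in the user's file instead of a location-less compiler diagnostic.
const conflicting = getPropertyAssignmentsByName(props, "out", "outFile");
console.log(conflicting.map(p => p.name));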
export function getContainingFunction(node: Node): FunctionLikeDeclaration {
while (true) {
node = node.parent;
@ -1242,12 +1259,6 @@ namespace ts {
return false;
}
export function isInstantiatedModule(node: ModuleDeclaration, preserveConstEnums: boolean) {
const moduleState = getModuleInstanceState(node);
return moduleState === ModuleInstanceState.Instantiated ||
(preserveConstEnums && moduleState === ModuleInstanceState.ConstEnumOnly);
}
export function isExternalModuleImportEqualsDeclaration(node: Node) {
return node.kind === SyntaxKind.ImportEqualsDeclaration && (<ImportEqualsDeclaration>node).moduleReference.kind === SyntaxKind.ExternalModuleReference;
}
@ -1438,31 +1449,17 @@ namespace ts {
}
function getJSDocTags(node: Node, kind: SyntaxKind): JSDocTag[] {
const docs = getJSDocs(node);
if (docs) {
const result: JSDocTag[] = [];
for (const doc of docs) {
if (doc.kind === SyntaxKind.JSDocParameterTag) {
if (doc.kind === kind) {
result.push(doc as JSDocTag);
}
}
else {
const tags = (doc as JSDoc).tags;
if (tags) {
result.push(...filter(tags, tag => tag.kind === kind));
}
}
}
return result;
}
return flatMap(getJSDocs(node), doc =>
doc.kind === SyntaxKind.JSDocComment
? filter((doc as JSDoc).tags, tag => tag.kind === kind)
: doc.kind === kind && doc);
}
function getFirstJSDocTag(node: Node, kind: SyntaxKind): JSDocTag {
return node && firstOrUndefined(getJSDocTags(node, kind));
}
export function getJSDocs(node: Node): (JSDoc | JSDocTag)[] {
export function getJSDocs(node: Node): (JSDoc | JSDocTag)[] {
if (isJSDocTypedefTag(node)) {
return [node.parent];
}
@ -1560,6 +1557,12 @@ namespace ts {
p.name.kind === SyntaxKind.Identifier && p.name.text === name);
}
export function getTypeParameterFromJsDoc(node: TypeParameterDeclaration & { parent: JSDocTemplateTag }): TypeParameterDeclaration | undefined {
const name = node.name.text;
const { typeParameters } = (node.parent.parent.parent as ts.SignatureDeclaration | ts.InterfaceDeclaration | ts.ClassDeclaration);
return find(typeParameters, p => p.name.text === name);
}
export function getJSDocType(node: Node): JSDocType {
let tag: JSDocTypeTag | JSDocParameterTag = getFirstJSDocTag(node, SyntaxKind.JSDocTypeTag) as JSDocTypeTag;
if (!tag && node.kind === SyntaxKind.Parameter) {
@ -1576,10 +1579,19 @@ namespace ts {
return getFirstJSDocTag(node, SyntaxKind.JSDocAugmentsTag) as JSDocAugmentsTag;
}
export function getJSDocClassTag(node: Node): JSDocClassTag {
return getFirstJSDocTag(node, SyntaxKind.JSDocClassTag) as JSDocClassTag;
}
export function getJSDocReturnTag(node: Node): JSDocReturnTag {
return getFirstJSDocTag(node, SyntaxKind.JSDocReturnTag) as JSDocReturnTag;
}
export function getJSDocReturnType(node: Node): JSDocType {
const returnTag = getJSDocReturnTag(node);
return returnTag && returnTag.typeExpression && returnTag.typeExpression.type;
}
export function getJSDocTemplateTag(node: Node): JSDocTemplateTag {
return getFirstJSDocTag(node, SyntaxKind.JSDocTemplateTag) as JSDocTemplateTag;
}
@ -1626,7 +1638,7 @@ namespace ts {
return unaryOperator === SyntaxKind.PlusPlusToken || unaryOperator === SyntaxKind.MinusMinusToken ? AssignmentKind.Compound : AssignmentKind.None;
case SyntaxKind.ForInStatement:
case SyntaxKind.ForOfStatement:
return (<ForInStatement | ForOfStatement>parent).initializer === node ? AssignmentKind.Definite : AssignmentKind.None;
return (<ForInOrOfStatement>parent).initializer === node ? AssignmentKind.Definite : AssignmentKind.None;
case SyntaxKind.ParenthesizedExpression:
case SyntaxKind.ArrayLiteralExpression:
case SyntaxKind.SpreadElement:
@ -2012,14 +2024,7 @@ namespace ts {
|| positionIsSynthesized(node.end);
}
export function getOriginalSourceFileOrBundle(sourceFileOrBundle: SourceFile | Bundle) {
if (sourceFileOrBundle.kind === SyntaxKind.Bundle) {
return updateBundle(sourceFileOrBundle, sameMap(sourceFileOrBundle.sourceFiles, getOriginalSourceFile));
}
return getOriginalSourceFile(sourceFileOrBundle);
}
function getOriginalSourceFile(sourceFile: SourceFile) {
export function getOriginalSourceFile(sourceFile: SourceFile) {
return getParseTreeNode(sourceFile, isSourceFile) || sourceFile;
}
@ -2027,11 +2032,6 @@ namespace ts {
return sameMap(sourceFiles, getOriginalSourceFile);
}
export function getOriginalNodeId(node: Node) {
node = getOriginalNode(node);
return node ? getNodeId(node) : 0;
}
export const enum Associativity {
Left,
Right
@ -2500,7 +2500,7 @@ namespace ts {
const path = outputDir
? getSourceFilePathInNewDir(sourceFile, host, outputDir)
: sourceFile.fileName;
return removeFileExtension(path) + ".d.ts";
return removeFileExtension(path) + Extension.Dts;
}
export interface EmitFileNames {
@ -2539,62 +2539,6 @@ namespace ts {
return !(options.noEmitForJsFiles && isSourceFileJavaScript(sourceFile)) && !sourceFile.isDeclarationFile && !isSourceFileFromExternalLibrary(sourceFile);
}
/**
* Iterates over the source files that are expected to have an emit output.
*
* @param host An EmitHost.
* @param action The action to execute.
* @param sourceFilesOrTargetSourceFile
* If an array, the full list of source files to emit.
* Else, calls `getSourceFilesToEmit` with the (optional) target source file to determine the list of source files to emit.
*/
export function forEachEmittedFile(
host: EmitHost, action: (emitFileNames: EmitFileNames, sourceFileOrBundle: SourceFile | Bundle, emitOnlyDtsFiles: boolean) => void,
sourceFilesOrTargetSourceFile?: SourceFile[] | SourceFile,
emitOnlyDtsFiles?: boolean) {
const sourceFiles = isArray(sourceFilesOrTargetSourceFile) ? sourceFilesOrTargetSourceFile : getSourceFilesToEmit(host, sourceFilesOrTargetSourceFile);
const options = host.getCompilerOptions();
if (options.outFile || options.out) {
if (sourceFiles.length) {
const jsFilePath = options.outFile || options.out;
const sourceMapFilePath = getSourceMapFilePath(jsFilePath, options);
const declarationFilePath = options.declaration ? removeFileExtension(jsFilePath) + ".d.ts" : "";
action({ jsFilePath, sourceMapFilePath, declarationFilePath }, createBundle(sourceFiles), emitOnlyDtsFiles);
}
}
else {
for (const sourceFile of sourceFiles) {
const jsFilePath = getOwnEmitOutputFilePath(sourceFile, host, getOutputExtension(sourceFile, options));
const sourceMapFilePath = getSourceMapFilePath(jsFilePath, options);
const declarationFilePath = !isSourceFileJavaScript(sourceFile) && (emitOnlyDtsFiles || options.declaration) ? getDeclarationEmitOutputFilePath(sourceFile, host) : undefined;
action({ jsFilePath, sourceMapFilePath, declarationFilePath }, sourceFile, emitOnlyDtsFiles);
}
}
}
function getSourceMapFilePath(jsFilePath: string, options: CompilerOptions) {
return options.sourceMap ? jsFilePath + ".map" : undefined;
}
// JavaScript files are always LanguageVariant.JSX, as JSX syntax is allowed in .js files also.
// So for JavaScript files, '.jsx' is only emitted if the input was '.jsx', and JsxEmit.Preserve.
// For TypeScript, the only time to emit with a '.jsx' extension, is on JSX input, and JsxEmit.Preserve
function getOutputExtension(sourceFile: SourceFile, options: CompilerOptions): string {
if (options.jsx === JsxEmit.Preserve) {
if (isSourceFileJavaScript(sourceFile)) {
if (fileExtensionIs(sourceFile.fileName, ".jsx")) {
return ".jsx";
}
}
else if (sourceFile.languageVariant === LanguageVariant.JSX) {
// TypeScript source file preserving JSX syntax
return ".jsx";
}
}
return ".js";
}
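To make the rules above concrete, a few hypothetical inputs and the extension they would be emitted with:

// With "jsx": "preserve":
//   foo.ts  -> .js   (no JSX language variant)
//   foo.tsx -> .jsx  (TypeScript source preserving JSX)
//   bar.jsx -> .jsx  (JavaScript input that already used .jsx)
//   bar.js  -> .js
// With any other JsxEmit mode the result is always ".js".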
export function getSourceFilePathInNewDir(sourceFile: SourceFile, host: EmitHost, newDirPath: string) {
let sourceFilePath = getNormalizedAbsolutePath(sourceFile.fileName, host.getCurrentDirectory());
const commonSourceDirectory = host.getCommonSourceDirectory();
@ -2625,14 +2569,19 @@ namespace ts {
});
}
/** Get the type annotaion for the value parameter. */
export function getSetAccessorTypeAnnotationNode(accessor: SetAccessorDeclaration): TypeNode {
function getSetAccessorValueParameter(accessor: SetAccessorDeclaration): ParameterDeclaration | undefined {
if (accessor && accessor.parameters.length > 0) {
const hasThis = accessor.parameters.length === 2 && parameterIsThisKeyword(accessor.parameters[0]);
return accessor.parameters[hasThis ? 1 : 0].type;
return accessor.parameters[hasThis ? 1 : 0];
}
}
/** Get the type annotation for the value parameter. */
export function getSetAccessorTypeAnnotationNode(accessor: SetAccessorDeclaration): TypeNode {
const parameter = getSetAccessorValueParameter(accessor);
return parameter && parameter.type;
}
export function getThisParameter(signature: SignatureDeclaration): ParameterDeclaration | undefined {
if (signature.parameters.length) {
const thisParameter = signature.parameters[0];
@ -2711,6 +2660,55 @@ namespace ts {
};
}
/**
* Gets the effective type annotation of a variable, parameter, or property. If the node was
* parsed in a JavaScript file, gets the type annotation from JSDoc.
*/
export function getEffectiveTypeAnnotationNode(node: VariableLikeDeclaration): TypeNode {
if (node.type) {
return node.type;
}
if (node.flags & NodeFlags.JavaScriptFile) {
return getJSDocType(node);
}
}
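A hypothetical JavaScript declaration that exercises the JSDoc fallback:

/** @type {string[]} */
var names = [];
// The declaration has no syntactic `type` node, so the effective annotation is
// the JSDoc `@type` tag returned by getJSDocType.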
/**
* Gets the effective return type annotation of a signature. If the node was parsed in a
* JavaScript file, gets the return type annotation from JSDoc.
*/
export function getEffectiveReturnTypeNode(node: SignatureDeclaration): TypeNode {
if (node.type) {
return node.type;
}
if (node.flags & NodeFlags.JavaScriptFile) {
return getJSDocReturnType(node);
}
}
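Likewise for return types, a hypothetical JavaScript signature annotated only through JSDoc:

/** @returns {Promise<void>} */
function flush() { return Promise.resolve(); }
// There is no syntactic return type, so the effective return type node is the
// one recovered from the `@returns` tag by getJSDocReturnType.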
/**
* Gets the effective type parameters. If the node was parsed in a
* JavaScript file, gets the type parameters from the `@template` tag from JSDoc.
*/
export function getEffectiveTypeParameterDeclarations(node: DeclarationWithTypeParameters): TypeParameterDeclaration[] {
if (node.typeParameters) {
return node.typeParameters;
}
if (node.flags & NodeFlags.JavaScriptFile) {
const templateTag = getJSDocTemplateTag(node);
return templateTag && templateTag.typeParameters;
}
}
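A hypothetical JavaScript function whose type parameters exist only in JSDoc:

/** @template T */
function wrap(value) { return { value }; }
// `wrap` has no syntactic type parameter list; the effective list comes from
// the `@template` tag found by getJSDocTemplateTag.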
/**
* Gets the effective type annotation of the value parameter of a set accessor. If the node
* was parsed in a JavaScript file, gets the type annotation from JSDoc.
*/
export function getEffectiveSetAccessorTypeAnnotationNode(node: SetAccessorDeclaration): TypeNode {
const parameter = getSetAccessorValueParameter(node);
return parameter && getEffectiveTypeAnnotationNode(parameter);
}
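And a hypothetical JavaScript setter whose value-parameter type lives only in JSDoc:

class Box {
    /** @param {number} value */
    set size(value) { this._size = value; }
}
// The value parameter carries no syntactic annotation, so the effective type
// is expected to come from the JSDoc tag via getEffectiveTypeAnnotationNode.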
export function emitNewLineBeforeLeadingComments(lineMap: number[], writer: EmitTextWriter, node: TextRange, leadingComments: CommentRange[]) {
emitNewLineBeforeLeadingCommentsOfPosition(lineMap, writer, node.pos, leadingComments);
}
@ -4057,7 +4055,7 @@ namespace ts {
// Signature elements
export function isTypeParameter(node: Node): node is TypeParameterDeclaration {
export function isTypeParameterDeclaration(node: Node): node is TypeParameterDeclaration {
return node.kind === SyntaxKind.TypeParameter;
}
@ -4229,6 +4227,16 @@ namespace ts {
return node.kind === SyntaxKind.ParenthesizedExpression;
}
export function skipPartiallyEmittedExpressions(node: Expression): Expression;
export function skipPartiallyEmittedExpressions(node: Node): Node;
export function skipPartiallyEmittedExpressions(node: Node) {
while (node.kind === SyntaxKind.PartiallyEmittedExpression) {
node = (<PartiallyEmittedExpression>node).expression;
}
return node;
}
export function isFunctionExpression(node: Node): node is FunctionExpression {
return node.kind === SyntaxKind.FunctionExpression;
}
@ -4680,6 +4688,16 @@ namespace ts {
// All node tests in the following list should *not* reference parent pointers so that
// they may be used with transformations.
namespace ts {
/* @internal */
export function isNode(node: Node) {
return isNodeKind(node.kind);
}
/* @internal */
export function isNodeKind(kind: SyntaxKind) {
return kind >= SyntaxKind.FirstNode;
}
/**
* True if node is of some token syntax kind.
* For example, this is true for an IfKeyword but not for an IfStatement.
@ -5101,6 +5119,11 @@ namespace ts {
return false;
}
/* @internal */
export function isForInOrOfStatement(node: Node): node is ForInOrOfStatement {
return node.kind === SyntaxKind.ForInStatement || node.kind === SyntaxKind.ForOfStatement;
}
// Element
/* @internal */
@ -5229,6 +5252,10 @@ namespace ts {
/* @internal */
export function isDeclaration(node: Node): node is NamedDeclaration {
if (node.kind === SyntaxKind.TypeParameter) {
return node.parent.kind !== SyntaxKind.JSDocTemplateTag || isInJavaScriptFile(node);
}
return isDeclarationKind(node.kind);
}
@ -5318,6 +5345,11 @@ namespace ts {
return node.kind >= SyntaxKind.FirstJSDocNode && node.kind <= SyntaxKind.LastJSDocNode;
}
/** True if node is of a kind that may contain comment text. */
export function isJSDocCommentContainingNode(node: Node): boolean {
return node.kind === SyntaxKind.JSDocComment || isJSDocTag(node);
}
// TODO: determine what this does before making it public.
/* @internal */
export function isJSDocTag(node: Node): boolean {

View File

@ -275,7 +275,7 @@ namespace ts {
case SyntaxKind.MethodSignature:
return updateMethodSignature(<MethodSignature>node,
nodesVisitor((<MethodSignature>node).typeParameters, visitor, isTypeParameter),
nodesVisitor((<MethodSignature>node).typeParameters, visitor, isTypeParameterDeclaration),
nodesVisitor((<MethodSignature>node).parameters, visitor, isParameterDeclaration),
visitNode((<MethodSignature>node).type, visitor, isTypeNode),
visitNode((<MethodSignature>node).name, visitor, isPropertyName),
@ -288,7 +288,7 @@ namespace ts {
visitNode((<MethodDeclaration>node).asteriskToken, tokenVisitor, isToken),
visitNode((<MethodDeclaration>node).name, visitor, isPropertyName),
visitNode((<MethodDeclaration>node).questionToken, tokenVisitor, isToken),
nodesVisitor((<MethodDeclaration>node).typeParameters, visitor, isTypeParameter),
nodesVisitor((<MethodDeclaration>node).typeParameters, visitor, isTypeParameterDeclaration),
visitParameterList((<MethodDeclaration>node).parameters, visitor, context, nodesVisitor),
visitNode((<MethodDeclaration>node).type, visitor, isTypeNode),
visitFunctionBody((<MethodDeclaration>node).body, visitor, context));
@ -319,13 +319,13 @@ namespace ts {
case SyntaxKind.CallSignature:
return updateCallSignature(<CallSignatureDeclaration>node,
nodesVisitor((<CallSignatureDeclaration>node).typeParameters, visitor, isTypeParameter),
nodesVisitor((<CallSignatureDeclaration>node).typeParameters, visitor, isTypeParameterDeclaration),
nodesVisitor((<CallSignatureDeclaration>node).parameters, visitor, isParameterDeclaration),
visitNode((<CallSignatureDeclaration>node).type, visitor, isTypeNode));
case SyntaxKind.ConstructSignature:
return updateConstructSignature(<ConstructSignatureDeclaration>node,
nodesVisitor((<ConstructSignatureDeclaration>node).typeParameters, visitor, isTypeParameter),
nodesVisitor((<ConstructSignatureDeclaration>node).typeParameters, visitor, isTypeParameterDeclaration),
nodesVisitor((<ConstructSignatureDeclaration>node).parameters, visitor, isParameterDeclaration),
visitNode((<ConstructSignatureDeclaration>node).type, visitor, isTypeNode));
@ -350,13 +350,13 @@ namespace ts {
case SyntaxKind.FunctionType:
return updateFunctionTypeNode(<FunctionTypeNode>node,
nodesVisitor((<FunctionTypeNode>node).typeParameters, visitor, isTypeParameter),
nodesVisitor((<FunctionTypeNode>node).typeParameters, visitor, isTypeParameterDeclaration),
nodesVisitor((<FunctionTypeNode>node).parameters, visitor, isParameterDeclaration),
visitNode((<FunctionTypeNode>node).type, visitor, isTypeNode));
case SyntaxKind.ConstructorType:
return updateConstructorTypeNode(<ConstructorTypeNode>node,
nodesVisitor((<ConstructorTypeNode>node).typeParameters, visitor, isTypeParameter),
nodesVisitor((<ConstructorTypeNode>node).typeParameters, visitor, isTypeParameterDeclaration),
nodesVisitor((<ConstructorTypeNode>node).parameters, visitor, isParameterDeclaration),
visitNode((<ConstructorTypeNode>node).type, visitor, isTypeNode));
@ -400,7 +400,7 @@ namespace ts {
case SyntaxKind.MappedType:
return updateMappedTypeNode((<MappedTypeNode>node),
visitNode((<MappedTypeNode>node).readonlyToken, tokenVisitor, isToken),
visitNode((<MappedTypeNode>node).typeParameter, visitor, isTypeParameter),
visitNode((<MappedTypeNode>node).typeParameter, visitor, isTypeParameterDeclaration),
visitNode((<MappedTypeNode>node).questionToken, tokenVisitor, isToken),
visitNode((<MappedTypeNode>node).type, visitor, isTypeNode));
@ -476,7 +476,7 @@ namespace ts {
nodesVisitor((<FunctionExpression>node).modifiers, visitor, isModifier),
visitNode((<FunctionExpression>node).asteriskToken, tokenVisitor, isToken),
visitNode((<FunctionExpression>node).name, visitor, isIdentifier),
nodesVisitor((<FunctionExpression>node).typeParameters, visitor, isTypeParameter),
nodesVisitor((<FunctionExpression>node).typeParameters, visitor, isTypeParameterDeclaration),
visitParameterList((<FunctionExpression>node).parameters, visitor, context, nodesVisitor),
visitNode((<FunctionExpression>node).type, visitor, isTypeNode),
visitFunctionBody((<FunctionExpression>node).body, visitor, context));
@ -484,7 +484,7 @@ namespace ts {
case SyntaxKind.ArrowFunction:
return updateArrowFunction(<ArrowFunction>node,
nodesVisitor((<ArrowFunction>node).modifiers, visitor, isModifier),
nodesVisitor((<ArrowFunction>node).typeParameters, visitor, isTypeParameter),
nodesVisitor((<ArrowFunction>node).typeParameters, visitor, isTypeParameterDeclaration),
visitParameterList((<ArrowFunction>node).parameters, visitor, context, nodesVisitor),
visitNode((<ArrowFunction>node).type, visitor, isTypeNode),
visitFunctionBody((<ArrowFunction>node).body, visitor, context));
@ -543,7 +543,7 @@ namespace ts {
return updateClassExpression(<ClassExpression>node,
nodesVisitor((<ClassExpression>node).modifiers, visitor, isModifier),
visitNode((<ClassExpression>node).name, visitor, isIdentifier),
nodesVisitor((<ClassExpression>node).typeParameters, visitor, isTypeParameter),
nodesVisitor((<ClassExpression>node).typeParameters, visitor, isTypeParameterDeclaration),
nodesVisitor((<ClassExpression>node).heritageClauses, visitor, isHeritageClause),
nodesVisitor((<ClassExpression>node).members, visitor, isClassElement));
@ -676,7 +676,7 @@ namespace ts {
nodesVisitor((<FunctionDeclaration>node).modifiers, visitor, isModifier),
visitNode((<FunctionDeclaration>node).asteriskToken, tokenVisitor, isToken),
visitNode((<FunctionDeclaration>node).name, visitor, isIdentifier),
nodesVisitor((<FunctionDeclaration>node).typeParameters, visitor, isTypeParameter),
nodesVisitor((<FunctionDeclaration>node).typeParameters, visitor, isTypeParameterDeclaration),
visitParameterList((<FunctionDeclaration>node).parameters, visitor, context, nodesVisitor),
visitNode((<FunctionDeclaration>node).type, visitor, isTypeNode),
visitFunctionBody((<FunctionExpression>node).body, visitor, context));
@ -686,7 +686,7 @@ namespace ts {
nodesVisitor((<ClassDeclaration>node).decorators, visitor, isDecorator),
nodesVisitor((<ClassDeclaration>node).modifiers, visitor, isModifier),
visitNode((<ClassDeclaration>node).name, visitor, isIdentifier),
nodesVisitor((<ClassDeclaration>node).typeParameters, visitor, isTypeParameter),
nodesVisitor((<ClassDeclaration>node).typeParameters, visitor, isTypeParameterDeclaration),
nodesVisitor((<ClassDeclaration>node).heritageClauses, visitor, isHeritageClause),
nodesVisitor((<ClassDeclaration>node).members, visitor, isClassElement));
@ -695,7 +695,7 @@ namespace ts {
nodesVisitor((<InterfaceDeclaration>node).decorators, visitor, isDecorator),
nodesVisitor((<InterfaceDeclaration>node).modifiers, visitor, isModifier),
visitNode((<InterfaceDeclaration>node).name, visitor, isIdentifier),
nodesVisitor((<InterfaceDeclaration>node).typeParameters, visitor, isTypeParameter),
nodesVisitor((<InterfaceDeclaration>node).typeParameters, visitor, isTypeParameterDeclaration),
nodesVisitor((<InterfaceDeclaration>node).heritageClauses, visitor, isHeritageClause),
nodesVisitor((<InterfaceDeclaration>node).members, visitor, isTypeElement));
@ -704,7 +704,7 @@ namespace ts {
nodesVisitor((<TypeAliasDeclaration>node).decorators, visitor, isDecorator),
nodesVisitor((<TypeAliasDeclaration>node).modifiers, visitor, isModifier),
visitNode((<TypeAliasDeclaration>node).name, visitor, isIdentifier),
nodesVisitor((<TypeAliasDeclaration>node).typeParameters, visitor, isTypeParameter),
nodesVisitor((<TypeAliasDeclaration>node).typeParameters, visitor, isTypeParameterDeclaration),
visitNode((<TypeAliasDeclaration>node).type, visitor, isTypeNode));
case SyntaxKind.EnumDeclaration:
@ -1198,9 +1198,9 @@ namespace ts {
case SyntaxKind.ForInStatement:
case SyntaxKind.ForOfStatement:
result = reduceNode((<ForInStatement | ForOfStatement>node).initializer, cbNode, result);
result = reduceNode((<ForInStatement | ForOfStatement>node).expression, cbNode, result);
result = reduceNode((<ForInStatement | ForOfStatement>node).statement, cbNode, result);
result = reduceNode((<ForInOrOfStatement>node).initializer, cbNode, result);
result = reduceNode((<ForInOrOfStatement>node).expression, cbNode, result);
result = reduceNode((<ForInOrOfStatement>node).statement, cbNode, result);
break;
case SyntaxKind.ReturnStatement:
@ -1517,35 +1517,7 @@ namespace ts {
}
export namespace Debug {
if (isDebugging) {
// Add additional properties in debug mode to assist with debugging.
Object.defineProperties(objectAllocator.getSymbolConstructor().prototype, {
"__debugFlags": { get(this: Symbol) { return formatSymbolFlags(this.flags); } }
});
Object.defineProperties(objectAllocator.getTypeConstructor().prototype, {
"__debugFlags": { get(this: Type) { return formatTypeFlags(this.flags); } },
"__debugObjectFlags": { get(this: Type) { return this.flags & TypeFlags.Object ? formatObjectFlags((<ObjectType>this).objectFlags) : ""; } },
"__debugTypeToString": { value(this: Type) { return this.checker.typeToString(this); } },
});
for (const ctor of [objectAllocator.getNodeConstructor(), objectAllocator.getIdentifierConstructor(), objectAllocator.getTokenConstructor(), objectAllocator.getSourceFileConstructor()]) {
if (!ctor.prototype.hasOwnProperty("__debugKind")) {
Object.defineProperties(ctor.prototype, {
"__debugKind": { get(this: Node) { return formatSyntaxKind(this.kind); } },
"__debugModifierFlags": { get(this: Node) { return formatModifierFlags(getModifierFlagsNoCache(this)); } },
"__debugTransformFlags": { get(this: Node) { return formatTransformFlags(this.transformFlags); } },
"__debugEmitFlags": { get(this: Node) { return formatEmitFlags(getEmitFlags(this)); } },
"__debugGetText": { value(this: Node, includeTrivia?: boolean) {
if (nodeIsSynthesized(this)) return "";
const parseNode = getParseTreeNode(this);
const sourceFile = parseNode && getSourceFileOfNode(parseNode);
return sourceFile ? getSourceTextOfNodeFromSourceFile(sourceFile, parseNode, includeTrivia) : "";
} }
});
}
}
}
let isDebugInfoEnabled = false;
export const failBadSyntaxKind = shouldAssert(AssertionLevel.Normal)
? (node: Node, message?: string): void => fail(
@ -1592,5 +1564,51 @@ namespace ts {
() => `Node ${formatSyntaxKind(node.kind)} was unexpected'.`,
assertMissingNode)
: noop;
/**
* Injects debug information into frequently used types.
*/
export function enableDebugInfo() {
if (isDebugInfoEnabled) return;
// Add additional properties in debug mode to assist with debugging.
Object.defineProperties(objectAllocator.getSymbolConstructor().prototype, {
"__debugFlags": { get(this: Symbol) { return formatSymbolFlags(this.flags); } }
});
Object.defineProperties(objectAllocator.getTypeConstructor().prototype, {
"__debugFlags": { get(this: Type) { return formatTypeFlags(this.flags); } },
"__debugObjectFlags": { get(this: Type) { return this.flags & TypeFlags.Object ? formatObjectFlags((<ObjectType>this).objectFlags) : ""; } },
"__debugTypeToString": { value(this: Type) { return this.checker.typeToString(this); } },
});
const nodeConstructors = [
objectAllocator.getNodeConstructor(),
objectAllocator.getIdentifierConstructor(),
objectAllocator.getTokenConstructor(),
objectAllocator.getSourceFileConstructor()
];
for (const ctor of nodeConstructors) {
if (!ctor.prototype.hasOwnProperty("__debugKind")) {
Object.defineProperties(ctor.prototype, {
"__debugKind": { get(this: Node) { return formatSyntaxKind(this.kind); } },
"__debugModifierFlags": { get(this: Node) { return formatModifierFlags(getModifierFlagsNoCache(this)); } },
"__debugTransformFlags": { get(this: Node) { return formatTransformFlags(this.transformFlags); } },
"__debugEmitFlags": { get(this: Node) { return formatEmitFlags(getEmitFlags(this)); } },
"__debugGetText": {
value(this: Node, includeTrivia?: boolean) {
if (nodeIsSynthesized(this)) return "";
const parseNode = getParseTreeNode(this);
const sourceFile = parseNode && getSourceFileOfNode(parseNode);
return sourceFile ? getSourceTextOfNodeFromSourceFile(sourceFile, parseNode, includeTrivia) : "";
}
}
});
}
}
isDebugInfoEnabled = true;
}
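A sketch of the intended debugging workflow (the node and type values are hypothetical):

// Opt in once, e.g. from a test runner or a debugger console:
ts.Debug.enableDebugInfo();
// Afterwards the injected accessors are available on live objects:
//   node.__debugKind            // e.g. "Identifier"
//   node.__debugGetText()       // original source text, when a parse tree node exists
//   type.__debugTypeToString()  // checker.typeToString(type)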
}
}

View File

@ -64,6 +64,7 @@ class CompilerBaselineRunner extends RunnerBase {
let result: Harness.Compiler.CompilerResult;
let options: ts.CompilerOptions;
let tsConfigFiles: Harness.Compiler.TestFile[];
// equivalent to the files that will be passed on the command line
let toBeCompiled: Harness.Compiler.TestFile[];
// equivalent to other files on the file system not directly passed to the compiler (ie things that are referenced by other files)
@ -77,10 +78,12 @@ class CompilerBaselineRunner extends RunnerBase {
const units = testCaseContent.testUnitData;
harnessSettings = testCaseContent.settings;
let tsConfigOptions: ts.CompilerOptions;
tsConfigFiles = [];
if (testCaseContent.tsConfig) {
assert.equal(testCaseContent.tsConfig.fileNames.length, 0, `list of files in tsconfig is not currently supported`);
tsConfigOptions = ts.clone(testCaseContent.tsConfig.options);
tsConfigOptions = ts.cloneCompilerOptions(testCaseContent.tsConfig.options);
tsConfigFiles.push(this.createHarnessTestFile(testCaseContent.tsConfigFileUnitData, rootDir, ts.combinePaths(rootDir, tsConfigOptions.configFilePath)));
}
else {
const baseUrl = harnessSettings["baseUrl"];
@ -90,7 +93,7 @@ class CompilerBaselineRunner extends RunnerBase {
}
lastUnit = units[units.length - 1];
hasNonDtsFiles = ts.forEach(units, unit => !ts.fileExtensionIs(unit.name, ".d.ts"));
hasNonDtsFiles = ts.forEach(units, unit => !ts.fileExtensionIs(unit.name, ts.Extension.Dts));
// We need to assemble the list of input files for the compiler and other related files on the 'filesystem' (ie in a multi-file test)
// If the last file in a test uses require or a triple slash reference we'll assume all other files will be brought in via references,
// otherwise, assume all files are just meant to be in the same compilation session without explicit references to one another.
@ -98,21 +101,22 @@ class CompilerBaselineRunner extends RunnerBase {
otherFiles = [];
if (testCaseContent.settings["noImplicitReferences"] || /require\(/.test(lastUnit.content) || /reference\spath/.test(lastUnit.content)) {
toBeCompiled.push({ unitName: this.makeUnitName(lastUnit.name, rootDir), content: lastUnit.content, fileOptions: lastUnit.fileOptions });
toBeCompiled.push(this.createHarnessTestFile(lastUnit, rootDir));
units.forEach(unit => {
if (unit.name !== lastUnit.name) {
otherFiles.push({ unitName: this.makeUnitName(unit.name, rootDir), content: unit.content, fileOptions: unit.fileOptions });
otherFiles.push(this.createHarnessTestFile(unit, rootDir));
}
});
}
else {
toBeCompiled = units.map(unit => {
return { unitName: this.makeUnitName(unit.name, rootDir), content: unit.content, fileOptions: unit.fileOptions };
return this.createHarnessTestFile(unit, rootDir);
});
}
if (tsConfigOptions && tsConfigOptions.configFilePath !== undefined) {
tsConfigOptions.configFilePath = ts.combinePaths(rootDir, tsConfigOptions.configFilePath);
tsConfigOptions.configFile.fileName = tsConfigOptions.configFilePath;
}
const output = Harness.Compiler.compileFiles(
@ -132,11 +136,12 @@ class CompilerBaselineRunner extends RunnerBase {
options = undefined;
toBeCompiled = undefined;
otherFiles = undefined;
tsConfigFiles = undefined;
});
// check errors
it("Correct errors for " + fileName, () => {
Harness.Compiler.doErrorBaseline(justName, toBeCompiled.concat(otherFiles), result.errors);
Harness.Compiler.doErrorBaseline(justName, tsConfigFiles.concat(toBeCompiled, otherFiles), result.errors);
});
it (`Correct module resolution tracing for ${fileName}`, () => {
@ -165,7 +170,7 @@ class CompilerBaselineRunner extends RunnerBase {
it("Correct JS output for " + fileName, () => {
if (hasNonDtsFiles && this.emit) {
Harness.Compiler.doJsEmitBaseline(justName, fileName, options, result, toBeCompiled, otherFiles, harnessSettings);
Harness.Compiler.doJsEmitBaseline(justName, fileName, options, result, tsConfigFiles, toBeCompiled, otherFiles, harnessSettings);
}
});
@ -183,6 +188,10 @@ class CompilerBaselineRunner extends RunnerBase {
});
}
private createHarnessTestFile(lastUnit: Harness.TestCaseParser.TestUnitData, rootDir: string, unitName?: string): Harness.Compiler.TestFile {
return { unitName: unitName || this.makeUnitName(lastUnit.name, rootDir), content: lastUnit.content, fileOptions: lastUnit.fileOptions };
}
public initializeTests() {
describe(this.testSuiteName + " tests", () => {
describe("Setup compiler for compiler baselines", () => {

View File

@ -479,24 +479,11 @@ namespace FourSlash {
}
private getDiagnostics(fileName: string): ts.Diagnostic[] {
const syntacticErrors = this.languageService.getSyntacticDiagnostics(fileName);
const semanticErrors = this.languageService.getSemanticDiagnostics(fileName);
const diagnostics: ts.Diagnostic[] = [];
diagnostics.push.apply(diagnostics, syntacticErrors);
diagnostics.push.apply(diagnostics, semanticErrors);
return diagnostics;
return this.languageService.getSyntacticDiagnostics(fileName).concat(this.languageService.getSemanticDiagnostics(fileName));
}
private getAllDiagnostics(): ts.Diagnostic[] {
const diagnostics: ts.Diagnostic[] = [];
for (const fileName of this.languageServiceAdapterHost.getFilenames()) {
diagnostics.push.apply(this.getDiagnostics(fileName));
}
return diagnostics;
return ts.flatMap(this.languageServiceAdapterHost.getFilenames(), fileName => this.getDiagnostics(fileName));
}
public verifyErrorExistsAfterMarker(markerName: string, negative: boolean, after: boolean) {
@ -549,11 +536,16 @@ namespace FourSlash {
Harness.IO.log("Unexpected error(s) found. Error list is:");
}
for (const { start, length, messageText } of errors) {
Harness.IO.log(" minChar: " + start +
", limChar: " + (start + length) +
for (const { start, length, messageText, file } of errors) {
Harness.IO.log(" from: " + showPosition(file, start) +
", to: " + showPosition(file, start + length) +
", message: " + ts.flattenDiagnosticMessageText(messageText, Harness.IO.newLine()) + "\n");
}
function showPosition(file: ts.SourceFile, pos: number) {
const { line, character } = ts.getLineAndCharacterOfPosition(file, pos);
return `${line}:${character}`;
}
}
public verifyNoErrors() {
@ -702,7 +694,7 @@ namespace FourSlash {
public verifyCompletionListItemsCountIsGreaterThan(count: number, negative: boolean) {
const completions = this.getCompletionListAtCaret();
const itemsCount = completions.entries.length;
const itemsCount = completions ? completions.entries.length : 0;
if (negative) {
if (itemsCount > count) {
@ -822,8 +814,8 @@ namespace FourSlash {
function filterByTextOrDocumentation(entry: ts.CompletionEntry) {
const details = that.getCompletionEntryDetails(entry.name);
const documentation = ts.displayPartsToString(details.documentation);
const text = ts.displayPartsToString(details.displayParts);
const documentation = details && ts.displayPartsToString(details.documentation);
const text = details && ts.displayPartsToString(details.displayParts);
// If any of the expected values are undefined, assume that users don't
// care about them.
@ -860,6 +852,9 @@ namespace FourSlash {
if (expectedKind) {
error += "Expected kind: " + expectedKind + " to equal: " + filterCompletions[0].kind + ".";
}
else {
error += "kind: " + filterCompletions[0].kind + ".";
}
if (replacementSpan) {
const spanText = filterCompletions[0].replacementSpan ? stringify(filterCompletions[0].replacementSpan) : undefined;
error += "Expected replacement span: " + stringify(replacementSpan) + " to equal: " + spanText + ".";
@ -1049,21 +1044,27 @@ namespace FourSlash {
fail(`Expected ${expected}, got ${actual}`);
}
for (const key in actual) if (ts.hasProperty(actual as any, key)) {
const ak = actual[key], ek = expected[key];
if (typeof ak === "object" && typeof ek === "object") {
recur(ak, ek, path ? path + "." + key : key);
}
else if (ak !== ek) {
fail(`Expected '${key}' to be '${ek}', got '${ak}'`);
for (const key in actual) {
if (ts.hasProperty(actual as any, key)) {
const ak = actual[key], ek = expected[key];
if (typeof ak === "object" && typeof ek === "object") {
recur(ak, ek, path ? path + "." + key : key);
}
else if (ak !== ek) {
fail(`Expected '${key}' to be '${ek}', got '${ak}'`);
}
}
}
for (const key in expected) if (ts.hasProperty(expected as any, key)) {
if (!ts.hasProperty(actual as any, key)) {
fail(`${msgPrefix}Missing property '${key}'`);
for (const key in expected) {
if (ts.hasProperty(expected as any, key)) {
if (!ts.hasProperty(actual as any, key)) {
fail(`${msgPrefix}Missing property '${key}'`);
}
}
}
};
if (fullActual === undefined || fullExpected === undefined) {
if (fullActual === fullExpected) {
return;
@ -1145,15 +1146,17 @@ namespace FourSlash {
}
public verifyQuickInfos(namesAndTexts: { [name: string]: string | [string, string] }) {
for (const name in namesAndTexts) if (ts.hasProperty(namesAndTexts, name)) {
const text = namesAndTexts[name];
if (ts.isArray(text)) {
assert(text.length === 2);
const [expectedText, expectedDocumentation] = text;
this.verifyQuickInfoAt(name, expectedText, expectedDocumentation);
}
else {
this.verifyQuickInfoAt(name, text);
for (const name in namesAndTexts) {
if (ts.hasProperty(namesAndTexts, name)) {
const text = namesAndTexts[name];
if (ts.isArray(text)) {
assert(text.length === 2);
const [expectedText, expectedDocumentation] = text;
this.verifyQuickInfoAt(name, expectedText, expectedDocumentation);
}
else {
this.verifyQuickInfoAt(name, text);
}
}
}
}
@ -1162,7 +1165,6 @@ namespace FourSlash {
if (expectedDocumentation === "") {
throw new Error("Use 'undefined' instead");
}
const actualQuickInfo = this.languageService.getQuickInfoAtPosition(this.activeFile.fileName, this.currentCaretPosition);
const actualQuickInfoText = actualQuickInfo ? ts.displayPartsToString(actualQuickInfo.displayParts) : "";
const actualQuickInfoDocumentation = actualQuickInfo ? ts.displayPartsToString(actualQuickInfo.documentation) : "";
@ -1465,7 +1467,7 @@ namespace FourSlash {
let baselineFile = this.testData.globalOptions[metadataOptionNames.baselineFile];
if (!baselineFile) {
baselineFile = this.activeFile.fileName.replace(this.basePath + "/breakpointValidation", "bpSpan");
baselineFile = baselineFile.replace(".ts", ".baseline");
baselineFile = baselineFile.replace(ts.Extension.Ts, ".baseline");
}
Harness.Baseline.runBaseline(
@ -1535,7 +1537,7 @@ namespace FourSlash {
public baselineQuickInfo() {
let baselineFile = this.testData.globalOptions[metadataOptionNames.baselineFile];
if (!baselineFile) {
baselineFile = ts.getBaseFileName(this.activeFile.fileName).replace(".ts", ".baseline");
baselineFile = ts.getBaseFileName(this.activeFile.fileName).replace(ts.Extension.Ts, ".baseline");
}
Harness.Baseline.runBaseline(
@ -1608,16 +1610,19 @@ namespace FourSlash {
}
private printMembersOrCompletions(info: ts.CompletionInfo) {
if (info === undefined) { return "No completion info."; }
const { entries } = info;
function pad(s: string, length: number) {
return s + new Array(length - s.length + 1).join(" ");
}
function max<T>(arr: T[], selector: (x: T) => number): number {
return arr.reduce((prev, x) => Math.max(prev, selector(x)), 0);
}
const longestNameLength = max(info.entries, m => m.name.length);
const longestKindLength = max(info.entries, m => m.kind.length);
info.entries.sort((m, n) => m.sortText > n.sortText ? 1 : m.sortText < n.sortText ? -1 : m.name > n.name ? 1 : m.name < n.name ? -1 : 0);
const membersString = info.entries.map(m => `${pad(m.name, longestNameLength)} ${pad(m.kind, longestKindLength)} ${m.kindModifiers}`).join("\n");
const longestNameLength = max(entries, m => m.name.length);
const longestKindLength = max(entries, m => m.kind.length);
entries.sort((m, n) => m.sortText > n.sortText ? 1 : m.sortText < n.sortText ? -1 : m.name > n.name ? 1 : m.name < n.name ? -1 : 0);
const membersString = entries.map(m => `${pad(m.name, longestNameLength)} ${pad(m.kind, longestKindLength)} ${m.kindModifiers}`).join("\n");
Harness.IO.log(membersString);
}
@ -2169,7 +2174,7 @@ namespace FourSlash {
Harness.IO.log(this.spanInfoToString(this.getNameOrDottedNameSpan(pos), "**"));
}
private verifyClassifications(expected: { classificationType: string; text: string; textSpan?: TextSpan }[], actual: ts.ClassifiedSpan[]) {
private verifyClassifications(expected: { classificationType: string; text: string; textSpan?: TextSpan }[], actual: ts.ClassifiedSpan[], sourceFileText: string) {
if (actual.length !== expected.length) {
this.raiseError("verifyClassifications failed - expected total classifications to be " + expected.length +
", but was " + actual.length +
@ -2209,9 +2214,11 @@ namespace FourSlash {
});
function jsonMismatchString() {
const showActual = actual.map(({ classificationType, textSpan }) =>
({ classificationType, text: sourceFileText.slice(textSpan.start, textSpan.start + textSpan.length) }));
return Harness.IO.newLine() +
"expected: '" + Harness.IO.newLine() + stringify(expected) + "'" + Harness.IO.newLine() +
"actual: '" + Harness.IO.newLine() + stringify(actual) + "'";
"actual: '" + Harness.IO.newLine() + stringify(showActual) + "'";
}
}
@ -2234,14 +2241,14 @@ namespace FourSlash {
const actual = this.languageService.getSemanticClassifications(this.activeFile.fileName,
ts.createTextSpan(0, this.activeFile.content.length));
this.verifyClassifications(expected, actual);
this.verifyClassifications(expected, actual, this.activeFile.content);
}
public verifySyntacticClassifications(expected: { classificationType: string; text: string }[]) {
const actual = this.languageService.getSyntacticClassifications(this.activeFile.fileName,
ts.createTextSpan(0, this.activeFile.content.length));
this.verifyClassifications(expected, actual);
this.verifyClassifications(expected, actual, this.activeFile.content);
}
public verifyOutliningSpans(spans: TextSpan[]) {
@ -2276,23 +2283,22 @@ namespace FourSlash {
}
/**
* Compares expected text to the text that would be in the sole range
* (ie: [|...|]) in the file after applying the codefix sole codefix
* in the source file.
*
* Because codefixes are only applied on the working file, it is unsafe
* to apply this more than once (consider a refactoring across files).
* Finds and applies a code action corresponding to the supplied parameters.
* If index is undefined, applies the unique code action available.
* @param errorCode The error code that generated the code action.
* @param index The nth (0-index-based) code action generated by errorCode.
*/
public verifyRangeAfterCodeFix(expectedText: string, includeWhiteSpace?: boolean, errorCode?: number, index?: number) {
public getAndApplyCodeActions(errorCode?: number, index?: number) {
const fileName = this.activeFile.fileName;
this.applyCodeActions(this.getCodeFixActions(fileName, errorCode), index);
}
public verifyRangeIs(expectedText: string, includeWhiteSpace?: boolean) {
const ranges = this.getRanges();
if (ranges.length !== 1) {
this.raiseError("Exactly one range should be specified in the testfile.");
}
const fileName = this.activeFile.fileName;
this.applyCodeAction(fileName, this.getCodeFixActions(fileName, errorCode), index);
const actualText = this.rangeText(ranges[0]);
const result = includeWhiteSpace
@ -2304,6 +2310,16 @@ namespace FourSlash {
}
}
/**
* Compares expected text to the text that would be in the sole range
* (ie: [|...|]) in the file after applying the sole codefix
* in the source file.
*/
public verifyRangeAfterCodeFix(expectedText: string, includeWhiteSpace?: boolean, errorCode?: number, index?: number) {
this.getAndApplyCodeActions(errorCode, index);
this.verifyRangeIs(expectedText, includeWhiteSpace);
}
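Illustrative only (assuming the usual `verify` binding of a fourslash test; the expected text is a placeholder): the same check can now also be expressed with the two finer-grained helpers this change introduces.

// Apply the sole codefix for the error, then inspect the marked range.
verify.getAndApplyCodeFix(/*errorCode*/ undefined, /*index*/ undefined);
verify.rangeIs("expected text after the fix", /*includeWhiteSpace*/ false);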
/**
* Applies fixes for the errors in fileName and compares the results to
* expectedContents after all fixes have been applied.
@ -2316,7 +2332,7 @@ namespace FourSlash {
public verifyFileAfterCodeFix(expectedContents: string, fileName?: string) {
fileName = fileName ? fileName : this.activeFile.fileName;
this.applyCodeAction(fileName, this.getCodeFixActions(fileName));
this.applyCodeActions(this.getCodeFixActions(fileName));
const actualContents: string = this.getFileContent(fileName);
if (this.removeWhitespace(actualContents) !== this.removeWhitespace(expectedContents)) {
@ -2354,11 +2370,10 @@ namespace FourSlash {
return actions;
}
private applyCodeAction(fileName: string, actions: ts.CodeAction[], index?: number): void {
private applyCodeActions(actions: ts.CodeAction[], index?: number): void {
if (index === undefined) {
if (!(actions && actions.length === 1)) {
const actionText = (actions && actions.length) ? JSON.stringify(actions) : "none";
this.raiseError(`Should find exactly one codefix, but found ${actionText}`);
this.raiseError(`Should find exactly one codefix, but ${actions ? actions.length : "none"} found.`);
}
index = 0;
}
@ -2368,12 +2383,11 @@ namespace FourSlash {
}
}
const fileChanges = ts.find(actions[index].changes, change => change.fileName === fileName);
if (!fileChanges) {
this.raiseError("The CodeFix found doesn't provide any changes in this file.");
}
const changes = actions[index].changes;
this.applyEdits(fileChanges.fileName, fileChanges.textChanges, /*isFormattingEdit*/ false);
for (const change of changes) {
this.applyEdits(change.fileName, change.textChanges, /*isFormattingEdit*/ false);
}
}
public verifyImportFixAtPosition(expectedTextArray: string[], errorCode?: number) {
@ -2385,7 +2399,10 @@ namespace FourSlash {
const codeFixes = this.getCodeFixActions(this.activeFile.fileName, errorCode);
if (!codeFixes || codeFixes.length === 0) {
this.raiseError("No codefixes returned.");
if (expectedTextArray.length !== 0) {
this.raiseError("No codefixes returned.");
}
return;
}
const actualTextArray: string[] = [];
@ -2662,6 +2679,13 @@ namespace FourSlash {
this.rangesByText().forEach(ranges => this.verifyRangesAreDocumentHighlights(ranges));
}
public verifyDocumentHighlightsOf(startRange: Range, ranges: Range[]) {
ts.Debug.assert(ts.contains(ranges, startRange));
const fileNames = unique(ranges, range => range.fileName);
this.goToRangeStart(startRange);
this.verifyDocumentHighlights(ranges, fileNames);
}
public verifyRangesAreDocumentHighlights(ranges?: Range[]) {
ranges = ranges || this.getRanges();
const fileNames = unique(ranges, range => range.fileName);
@ -2744,6 +2768,7 @@ namespace FourSlash {
markerName: string,
expectedContent: string,
refactorNameToApply: string,
actionName: string,
formattingOptions?: ts.FormatCodeSettings) {
formattingOptions = formattingOptions || this.formatCodeSettings;
@ -2756,9 +2781,11 @@ namespace FourSlash {
this.raiseError(`The expected refactor: ${refactorNameToApply} is not available at the marker location.`);
}
const codeActions = this.languageService.getRefactorCodeActions(this.activeFile.fileName, formattingOptions, markerPos, refactorNameToApply);
const editInfo = this.languageService.getEditsForRefactor(this.activeFile.fileName, formattingOptions, markerPos, refactorNameToApply, actionName);
this.applyCodeAction(this.activeFile.fileName, codeActions);
for (const edit of editInfo.edits) {
this.applyEdits(edit.fileName, edit.textChanges);
}
const actualContent = this.getFileContent(this.activeFile.fileName);
if (this.normalizeNewlines(actualContent) !== this.normalizeNewlines(expectedContent)) {
@ -3494,6 +3521,12 @@ namespace FourSlashInterface {
"constructor",
"async"
];
public allowedConstructorParameterKeywords = [
"public",
"private",
"protected",
"readonly",
];
constructor(protected state: FourSlash.TestState, private negative = false) {
if (!negative) {
@ -3536,6 +3569,12 @@ namespace FourSlashInterface {
}
}
public completionListContainsConstructorParameterKeywords() {
for (const keyword of this.allowedConstructorParameterKeywords) {
this.completionListContains(keyword, keyword, /*documentation*/ undefined, "keyword");
}
}
public completionListIsGlobal(expected: boolean) {
this.state.verifyCompletionListIsGlobal(expected);
}
@ -3801,8 +3840,16 @@ namespace FourSlashInterface {
this.state.verifyRangeAfterCodeFix(expectedText, includeWhiteSpace, errorCode, index);
}
public fileAfterApplyingRefactorAtMarker(markerName: string, expectedContent: string, refactorNameToApply: string, formattingOptions?: ts.FormatCodeSettings): void {
this.state.verifyFileAfterApplyingRefactorAtMarker(markerName, expectedContent, refactorNameToApply, formattingOptions);
public fileAfterApplyingRefactorAtMarker(markerName: string, expectedContent: string, refactorNameToApply: string, actionName: string, formattingOptions?: ts.FormatCodeSettings): void {
this.state.verifyFileAfterApplyingRefactorAtMarker(markerName, expectedContent, refactorNameToApply, actionName, formattingOptions);
}
public rangeIs(expectedText: string, includeWhiteSpace?: boolean): void {
this.state.verifyRangeIs(expectedText, includeWhiteSpace);
}
public getAndApplyCodeFix(errorCode?: number, index?: number): void {
this.state.getAndApplyCodeActions(errorCode, index);
}
public importFixAtPosition(expectedTextArray: string[], errorCode?: number): void {
@ -3865,6 +3912,10 @@ namespace FourSlashInterface {
this.state.verifyRangesWithSameTextAreDocumentHighlights();
}
public documentHighlightsOf(startRange: FourSlash.Range, ranges: FourSlash.Range[]) {
this.state.verifyDocumentHighlightsOf(startRange, ranges);
}
public completionEntryDetailIs(entryName: string, text: string, documentation?: string, kind?: string, tags?: ts.JSDocTagInfo[]) {
this.state.verifyCompletionEntryDetails(entryName, text, documentation, kind, tags);
}

View File

@ -259,8 +259,9 @@ namespace Utils {
return true;
}
else if ((f & v) > 0) {
if (result.length)
if (result.length) {
result += " | ";
}
result += flags[v];
return false;
}
@ -1052,7 +1053,7 @@ namespace Harness {
];
let optionsIndex: ts.Map<ts.CommandLineOption>;
function getCommandLineOption(name: string): ts.CommandLineOption {
function getCommandLineOption(name: string): ts.CommandLineOption | undefined {
if (!optionsIndex) {
optionsIndex = ts.createMap<ts.CommandLineOption>();
const optionDeclarations = harnessOptionDeclarations.concat(ts.optionDeclarations);
@ -1124,7 +1125,7 @@ namespace Harness {
compilerOptions: ts.CompilerOptions,
// Current directory is needed for rwcRunner to be able to use currentDirectory defined in json file
currentDirectory: string): CompilationOutput {
const options: ts.CompilerOptions & HarnessOptions = compilerOptions ? ts.clone(compilerOptions) : { noResolve: false };
const options: ts.CompilerOptions & HarnessOptions = compilerOptions ? ts.cloneCompilerOptions(compilerOptions) : { noResolve: false };
options.target = options.target || ts.ScriptTarget.ES3;
options.newLine = options.newLine || ts.NewLineKind.CarriageReturnLineFeed;
options.noErrorTruncation = true;
@ -1248,7 +1249,7 @@ namespace Harness {
sourceFileName = outFile;
}
const dTsFileName = ts.removeFileExtension(sourceFileName) + ".d.ts";
const dTsFileName = ts.removeFileExtension(sourceFileName) + ts.Extension.Dts;
return ts.forEach(result.declFilesCode, declFile => declFile.fileName === dTsFileName ? declFile : undefined);
}
@ -1464,7 +1465,7 @@ namespace Harness {
// When calling this function from rwc-runner, the baselinePath will have no extension.
// As the rwc test file is stored in json, the ".json" extension will get stripped off.
// When calling this function from compiler-runner, the baselinePath will have either a ".ts" or ".tsx" extension
const outputFileName = ts.endsWith(baselinePath, ".ts") || ts.endsWith(baselinePath, ".tsx") ?
const outputFileName = ts.endsWith(baselinePath, ts.Extension.Ts) || ts.endsWith(baselinePath, ts.Extension.Tsx) ?
baselinePath.replace(/\.tsx?/, fullExtension) : baselinePath.concat(fullExtension);
Harness.Baseline.runBaseline(outputFileName, () => fullBaseLine, opts);
}
@ -1556,13 +1557,13 @@ namespace Harness {
}
}
export function doJsEmitBaseline(baselinePath: string, header: string, options: ts.CompilerOptions, result: CompilerResult, toBeCompiled: Harness.Compiler.TestFile[], otherFiles: Harness.Compiler.TestFile[], harnessSettings: Harness.TestCaseParser.CompilerSettings) {
export function doJsEmitBaseline(baselinePath: string, header: string, options: ts.CompilerOptions, result: CompilerResult, tsConfigFiles: Harness.Compiler.TestFile[], toBeCompiled: Harness.Compiler.TestFile[], otherFiles: Harness.Compiler.TestFile[], harnessSettings: Harness.TestCaseParser.CompilerSettings) {
if (!options.noEmit && result.files.length === 0 && result.errors.length === 0) {
throw new Error("Expected at least one js file to be emitted or at least one error to be created.");
}
// check js output
Harness.Baseline.runBaseline(baselinePath.replace(/\.tsx?/, ".js"), () => {
Harness.Baseline.runBaseline(baselinePath.replace(/\.tsx?/, ts.Extension.Js), () => {
let tsCode = "";
const tsSources = otherFiles.concat(toBeCompiled);
if (tsSources.length > 1) {
@ -1592,7 +1593,7 @@ namespace Harness {
if (declFileCompilationResult && declFileCompilationResult.declResult.errors.length) {
jsCode += "\r\n\r\n//// [DtsFileErrors]\r\n";
jsCode += "\r\n\r\n";
jsCode += Harness.Compiler.getErrorBaseline(declFileCompilationResult.declInputFiles.concat(declFileCompilationResult.declOtherFiles), declFileCompilationResult.declResult.errors);
jsCode += Harness.Compiler.getErrorBaseline(tsConfigFiles.concat(declFileCompilationResult.declInputFiles, declFileCompilationResult.declOtherFiles), declFileCompilationResult.declResult.errors);
}
if (jsCode.length > 0) {
@ -1650,22 +1651,22 @@ namespace Harness {
}
export function isTS(fileName: string) {
return ts.endsWith(fileName, ".ts");
return ts.endsWith(fileName, ts.Extension.Ts);
}
export function isTSX(fileName: string) {
return ts.endsWith(fileName, ".tsx");
return ts.endsWith(fileName, ts.Extension.Tsx);
}
export function isDTS(fileName: string) {
return ts.endsWith(fileName, ".d.ts");
return ts.endsWith(fileName, ts.Extension.Dts);
}
export function isJS(fileName: string) {
return ts.endsWith(fileName, ".js");
return ts.endsWith(fileName, ts.Extension.Js);
}
export function isJSX(fileName: string) {
return ts.endsWith(fileName, ".jsx");
return ts.endsWith(fileName, ts.Extension.Jsx);
}
export function isJSMap(fileName: string) {
@ -1743,7 +1744,12 @@ namespace Harness {
}
/** Given a test file containing // @FileName directives, return an array of named units of code to be added to an existing compiler instance */
export function makeUnitsFromTest(code: string, fileName: string, rootDir?: string): { settings: CompilerSettings; testUnitData: TestUnitData[]; tsConfig: ts.ParsedCommandLine } {
export function makeUnitsFromTest(code: string, fileName: string, rootDir?: string): {
settings: CompilerSettings;
testUnitData: TestUnitData[];
tsConfig: ts.ParsedCommandLine;
tsConfigFileUnitData: TestUnitData;
} {
const settings = extractCompilerSettings(code);
// List of all the subfiles we've parsed out
@ -1829,17 +1835,19 @@ namespace Harness {
// check if project has tsconfig.json in the list of files
let tsConfig: ts.ParsedCommandLine;
let tsConfigFileUnitData: TestUnitData;
for (let i = 0; i < testUnitData.length; i++) {
const data = testUnitData[i];
if (ts.getBaseFileName(data.name).toLowerCase() === "tsconfig.json") {
const configJson = ts.parseConfigFileTextToJson(data.name, data.content);
assert.isTrue(configJson.config !== undefined);
const configJson = ts.parseJsonText(data.name, data.content);
assert.isTrue(configJson.endOfFileToken !== undefined);
let baseDir = ts.normalizePath(ts.getDirectoryPath(data.name));
if (rootDir) {
baseDir = ts.getNormalizedAbsolutePath(baseDir, rootDir);
}
tsConfig = ts.parseJsonConfigFileContent(configJson.config, parseConfigHost, baseDir);
tsConfig = ts.parseJsonSourceFileConfigFileContent(configJson, parseConfigHost, baseDir);
tsConfig.options.configFilePath = data.name;
tsConfigFileUnitData = data;
// delete entry from the list
ts.orderedRemoveItemAt(testUnitData, i);
@ -1847,7 +1855,7 @@ namespace Harness {
break;
}
}
return { settings, testUnitData, tsConfig };
return { settings, testUnitData, tsConfig, tsConfigFileUnitData };
}
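For reference, a hypothetical test file of the shape this parser consumes:

// @module: commonjs
// @Filename: a.ts
export const x = 1;

// @Filename: b.ts
import { x } from "./a";

// makeUnitsFromTest would return the parsed settings ({ module: "commonjs" })
// plus one TestUnitData entry per @Filename section.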
}
@ -1972,7 +1980,7 @@ namespace Harness {
export function isDefaultLibraryFile(filePath: string): boolean {
// We need to make sure that the filePath is prefixed with "lib.", not just containing "lib.", and ends with ".d.ts"
const fileName = ts.getBaseFileName(ts.normalizeSlashes(filePath));
return ts.startsWith(fileName, "lib.") && ts.endsWith(fileName, ".d.ts");
return ts.startsWith(fileName, "lib.") && ts.endsWith(fileName, ts.Extension.Dts);
}
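A few hypothetical inputs to make the rule concrete:

// isDefaultLibraryFile("/typescript/built/lib.es2015.d.ts") -> true
// isDefaultLibraryFile("/project/mylib.d.ts")               -> false (only contains "lib.")
// isDefaultLibraryFile("/typescript/built/lib.es2015.ts")   -> false (not a .d.ts file)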
export function isBuiltFile(filePath: string): boolean {

View File

@ -492,7 +492,7 @@ namespace Harness.LanguageService {
getCodeFixDiagnostics(): ts.Diagnostic[] {
throw new Error("Not supported on the shim.");
}
getRefactorCodeActions(): ts.CodeAction[] {
getEditsForRefactor(): ts.RefactorEditInfo {
throw new Error("Not supported on the shim.");
}
getApplicableRefactors(): ts.ApplicableRefactorInfo[] {
@ -731,7 +731,7 @@ namespace Harness.LanguageService {
}
createHash(s: string) {
return s;
return mockHash(s);
}
require(_initialDir: string, _moduleName: string): ts.server.RequireResult {
@ -856,4 +856,8 @@ namespace Harness.LanguageService {
getClassifier(): ts.Classifier { throw new Error("getClassifier is not available using the server interface."); }
getPreProcessedFileInfo(): ts.PreProcessedFileInfo { throw new Error("getPreProcessedFileInfo is not available using the server interface."); }
}
export function mockHash(s: string): string {
return `hash-${s}`;
}
}

View File

@ -232,14 +232,11 @@ namespace Playback {
// different entry).
// TODO (yuisu): We can certainly remove these once we recapture the RWC using new API
const normalizedPath = ts.normalizePath(path).toLowerCase();
const result: string[] = [];
for (const directory of replayLog.directoriesRead) {
return ts.flatMap(replayLog.directoriesRead, directory => {
if (ts.normalizeSlashes(directory.path).toLowerCase() === normalizedPath) {
result.push(...directory.result);
return directory.result;
}
}
return result;
});
});
wrapper.writeFile = recordReplay(wrapper.writeFile, underlying)(

View File

@ -24,6 +24,7 @@ interface BatchCompileProjectTestCaseEmittedFile extends Harness.Compiler.Genera
}
interface CompileProjectFilesResult {
configFileSourceFiles: ts.SourceFile[];
moduleKind: ts.ModuleKind;
program?: ts.Program;
compilerOptions?: ts.CompilerOptions;
@ -124,7 +125,8 @@ class ProjectRunner extends RunnerBase {
return Harness.IO.resolvePath(testCase.projectRoot);
}
function compileProjectFiles(moduleKind: ts.ModuleKind, getInputFiles: () => string[],
function compileProjectFiles(moduleKind: ts.ModuleKind, configFileSourceFiles: ts.SourceFile[],
getInputFiles: () => string[],
getSourceFileTextImpl: (fileName: string) => string,
writeFile: (fileName: string, data: string, writeByteOrderMark: boolean) => void,
compilerOptions: ts.CompilerOptions): CompileProjectFilesResult {
@ -148,6 +150,7 @@ class ProjectRunner extends RunnerBase {
}
return {
configFileSourceFiles,
moduleKind,
program,
errors,
@ -196,6 +199,7 @@ class ProjectRunner extends RunnerBase {
const outputFiles: BatchCompileProjectTestCaseEmittedFile[] = [];
let inputFiles = testCase.inputFiles;
let compilerOptions = createCompilerOptions();
const configFileSourceFiles: ts.SourceFile[] = [];
let configFileName: string;
if (compilerOptions.project) {
@ -207,41 +211,31 @@ class ProjectRunner extends RunnerBase {
configFileName = ts.findConfigFile("", fileExists);
}
let errors: ts.Diagnostic[];
if (configFileName) {
const result = ts.readConfigFile(configFileName, getSourceFileText);
if (result.error) {
return {
moduleKind,
errors: [result.error]
};
}
const configObject = result.config;
const result = ts.readJsonConfigFile(configFileName, getSourceFileText);
configFileSourceFiles.push(result);
const configParseHost: ts.ParseConfigHost = {
useCaseSensitiveFileNames: Harness.IO.useCaseSensitiveFileNames(),
fileExists,
readDirectory,
readFile
};
const configParseResult = ts.parseJsonConfigFileContent(configObject, configParseHost, ts.getDirectoryPath(configFileName), compilerOptions);
if (configParseResult.errors.length > 0) {
return {
moduleKind,
errors: configParseResult.errors
};
}
const configParseResult = ts.parseJsonSourceFileConfigFileContent(result, configParseHost, ts.getDirectoryPath(configFileName), compilerOptions);
inputFiles = configParseResult.fileNames;
compilerOptions = configParseResult.options;
errors = result.parseDiagnostics.concat(configParseResult.errors);
}
const projectCompilerResult = compileProjectFiles(moduleKind, () => inputFiles, getSourceFileText, writeFile, compilerOptions);
const projectCompilerResult = compileProjectFiles(moduleKind, configFileSourceFiles, () => inputFiles, getSourceFileText, writeFile, compilerOptions);
return {
configFileSourceFiles,
moduleKind,
program: projectCompilerResult.program,
compilerOptions,
sourceMapData: projectCompilerResult.sourceMapData,
outputFiles,
errors: projectCompilerResult.errors,
errors: errors ? errors.concat(projectCompilerResult.errors) : projectCompilerResult.errors,
};
function createCompilerOptions() {
@ -325,8 +319,8 @@ class ProjectRunner extends RunnerBase {
// we need to instead create files that can live in the project reference folder
// but make sure extension of these files matches with the fileName the compiler asked to write
diskRelativeName = "diskFile" + nonSubfolderDiskFiles +
(Harness.Compiler.isDTS(fileName) ? ".d.ts" :
Harness.Compiler.isJS(fileName) ? ".js" : ".js.map");
(Harness.Compiler.isDTS(fileName) ? ts.Extension.Dts :
Harness.Compiler.isJS(fileName) ? ts.Extension.Js : ".js.map");
nonSubfolderDiskFiles++;
}
@ -386,14 +380,14 @@ class ProjectRunner extends RunnerBase {
emitOutputFilePathWithoutExtension = ts.removeFileExtension(sourceFile.fileName);
}
const outputDtsFileName = emitOutputFilePathWithoutExtension + ".d.ts";
const outputDtsFileName = emitOutputFilePathWithoutExtension + ts.Extension.Dts;
const file = findOutputDtsFile(outputDtsFileName);
if (file) {
allInputFiles.unshift(file);
}
}
else {
const outputDtsFileName = ts.removeFileExtension(compilerOptions.outFile || compilerOptions.out) + ".d.ts";
const outputDtsFileName = ts.removeFileExtension(compilerOptions.outFile || compilerOptions.out) + ts.Extension.Dts;
const outputDtsFile = findOutputDtsFile(outputDtsFileName);
if (!ts.contains(allInputFiles, outputDtsFile)) {
allInputFiles.unshift(outputDtsFile);
@ -402,7 +396,7 @@ class ProjectRunner extends RunnerBase {
});
// Don't allow config files since we are compiling existing source options
return compileProjectFiles(compilerResult.moduleKind, getInputFiles, getSourceFileText, writeFile, compilerResult.compilerOptions);
return compileProjectFiles(compilerResult.moduleKind, compilerResult.configFileSourceFiles, getInputFiles, getSourceFileText, writeFile, compilerResult.compilerOptions);
function findOutputDtsFile(fileName: string) {
return ts.forEach(compilerResult.outputFiles, outputFile => outputFile.emittedFileName === fileName ? outputFile : undefined);
@ -428,16 +422,16 @@ class ProjectRunner extends RunnerBase {
}
function getErrorsBaseline(compilerResult: CompileProjectFilesResult) {
const inputFiles = compilerResult.program ? ts.map(ts.filter(compilerResult.program.getSourceFiles(),
sourceFile => !Harness.isDefaultLibraryFile(sourceFile.fileName)),
sourceFile => {
return {
unitName: ts.isRootedDiskPath(sourceFile.fileName) ?
RunnerBase.removeFullPaths(sourceFile.fileName) :
sourceFile.fileName,
content: sourceFile.text
};
}) : [];
const inputFiles = ts.map(compilerResult.configFileSourceFiles.concat(
compilerResult.program ?
ts.filter(compilerResult.program.getSourceFiles(), sourceFile => !Harness.isDefaultLibraryFile(sourceFile.fileName)) :
[]),
sourceFile => <Harness.Compiler.TestFile>{
unitName: ts.isRootedDiskPath(sourceFile.fileName) ?
RunnerBase.removeFullPaths(sourceFile.fileName) :
sourceFile.fileName,
content: sourceFile.text
});
return Harness.Compiler.getErrorBaseline(inputFiles, compilerResult.errors);
}

View File

@ -222,6 +222,10 @@ if (taskConfigsFolder) {
}
}
else {
if (ts.Debug.isDebugging) {
ts.Debug.enableDebugInfo();
}
runTests(runners);
}
if (!runUnitTests) {

View File

@ -30,6 +30,7 @@ namespace RWC {
describe("Testing a RWC project: " + jsonPath, () => {
let inputFiles: Harness.Compiler.TestFile[] = [];
let otherFiles: Harness.Compiler.TestFile[] = [];
let tsconfigFiles: Harness.Compiler.TestFile[] = [];
let compilerResult: Harness.Compiler.CompilerResult;
let compilerOptions: ts.CompilerOptions;
const baselineOpts: Harness.Baseline.BaselineOptions = {
@ -44,6 +45,7 @@ namespace RWC {
// Therefore we have to clean out large objects after the test is done.
inputFiles = [];
otherFiles = [];
tsconfigFiles = [];
compilerResult = undefined;
compilerOptions = undefined;
currentDirectory = undefined;
@ -74,16 +76,18 @@ namespace RWC {
const tsconfigFile = ts.forEach(ioLog.filesRead, f => isTsConfigFile(f) ? f : undefined);
if (tsconfigFile) {
const tsconfigFileContents = getHarnessCompilerInputUnit(tsconfigFile.path);
const parsedTsconfigFileContents = ts.parseConfigFileTextToJson(tsconfigFile.path, tsconfigFileContents.content);
tsconfigFiles.push({ unitName: tsconfigFile.path, content: tsconfigFileContents.content });
const parsedTsconfigFileContents = ts.parseJsonText(tsconfigFile.path, tsconfigFileContents.content);
const configParseHost: ts.ParseConfigHost = {
useCaseSensitiveFileNames: Harness.IO.useCaseSensitiveFileNames(),
fileExists: Harness.IO.fileExists,
readDirectory: Harness.IO.readDirectory,
readFile: Harness.IO.readFile
};
const configParseResult = ts.parseJsonConfigFileContent(parsedTsconfigFileContents.config, configParseHost, ts.getDirectoryPath(tsconfigFile.path));
const configParseResult = ts.parseJsonSourceFileConfigFileContent(parsedTsconfigFileContents, configParseHost, ts.getDirectoryPath(tsconfigFile.path));
fileNames = configParseResult.fileNames;
opts.options = ts.extend(opts.options, configParseResult.options);
ts.setConfigFileInOptions(opts.options, configParseResult.options.configFile);
}
// Load the files
@ -198,8 +202,8 @@ namespace RWC {
return null;
}
// Do not include the library in the baselines to avoid noise
const baselineFiles = inputFiles.concat(otherFiles).filter(f => !Harness.isDefaultLibraryFile(f.unitName));
const errors = compilerResult.errors.filter(e => e.file && !Harness.isDefaultLibraryFile(e.file.fileName));
const baselineFiles = tsconfigFiles.concat(inputFiles, otherFiles).filter(f => !Harness.isDefaultLibraryFile(f.unitName));
const errors = compilerResult.errors.filter(e => !e.file || !Harness.isDefaultLibraryFile(e.file.fileName));
return Harness.Compiler.getErrorBaseline(baselineFiles, errors);
}, baselineOpts);
});
@ -218,7 +222,7 @@ namespace RWC {
return Harness.Compiler.minimalDiagnosticsToString(declFileCompilationResult.declResult.errors) +
Harness.IO.newLine() + Harness.IO.newLine() +
Harness.Compiler.getErrorBaseline(declFileCompilationResult.declInputFiles.concat(declFileCompilationResult.declOtherFiles), declFileCompilationResult.declResult.errors);
Harness.Compiler.getErrorBaseline(tsconfigFiles.concat(declFileCompilationResult.declInputFiles, declFileCompilationResult.declOtherFiles), declFileCompilationResult.declResult.errors);
}, baselineOpts);
}
});
@ -259,4 +263,4 @@ class RWCRunner extends RunnerBase {
private runTest(jsonFileName: string) {
RWC.runRWCTest(jsonFileName);
}
}
}

View File

@ -24,6 +24,7 @@
"../compiler/checker.ts",
"../compiler/factory.ts",
"../compiler/visitor.ts",
"../compiler/transformers/utilities.ts",
"../compiler/transformers/ts.ts",
"../compiler/transformers/jsx.ts",
"../compiler/transformers/esnext.ts",
@ -80,7 +81,7 @@
"../services/codefixes/fixConstructorForDerivedNeedSuperCall.ts",
"../services/codefixes/helpers.ts",
"../services/codefixes/importFixes.ts",
"../services/codefixes/unusedIdentifierFixes.ts",
"../services/codefixes/fixUnusedIdentifier.ts",
"../services/codefixes/disableJsDiagnostics.ts",
"harness.ts",

View File

@ -47,7 +47,7 @@ namespace ts {
clearTimeout,
setImmediate: typeof setImmediate !== "undefined" ? setImmediate : action => setTimeout(action, 0),
clearImmediate: typeof clearImmediate !== "undefined" ? clearImmediate : clearTimeout,
createHash: s => s
createHash: Harness.LanguageService.mockHash,
};
}
@ -158,7 +158,7 @@ namespace ts {
// setting compiler options discards module resolution cache
fileExistsCalled = false;
const compilerOptions = ts.clone(project.getCompilerOptions());
const compilerOptions = ts.cloneCompilerOptions(project.getCompilerOptions());
compilerOptions.target = ts.ScriptTarget.ES5;
project.setCompilerOptions(compilerOptions);

View File

@ -513,7 +513,7 @@ namespace ts.projectSystem {
const lines = ["var x = 1;", "var y = 2;"];
const path = "/a/app";
const f = {
path: path + ".ts",
path: path + ts.Extension.Ts,
content: lines.join(newLine)
};
const host = createServerHost([f], { newLine });
@ -530,7 +530,7 @@ namespace ts.projectSystem {
command: "compileOnSaveEmitFile",
arguments: { file: f.path }
});
const emitOutput = host.readFile(path + ".js");
const emitOutput = host.readFile(path + ts.Extension.Js);
assert.equal(emitOutput, f.content + newLine, "content of emit output should be identical with the input + newline");
}
});

View File

@ -111,23 +111,47 @@ namespace ts {
["under a case insensitive host", caseInsensitiveBasePath, caseInsensitiveHost],
["under a case sensitive host", caseSensitiveBasePath, caseSensitiveHost]
], ([testName, basePath, host]) => {
function getParseCommandLine(entry: string) {
const {config, error} = ts.readConfigFile(entry, name => host.readFile(name));
assert(config && !error, flattenDiagnosticMessageText(error && error.messageText, "\n"));
return ts.parseJsonConfigFileContent(config, host, basePath, {}, entry);
}
function getParseCommandLineJsonSourceFile(entry: string) {
const jsonSourceFile = ts.readJsonConfigFile(entry, name => host.readFile(name));
assert(jsonSourceFile.endOfFileToken && !jsonSourceFile.parseDiagnostics.length, flattenDiagnosticMessageText(jsonSourceFile.parseDiagnostics[0] && jsonSourceFile.parseDiagnostics[0].messageText, "\n"));
return {
jsonSourceFile,
parsed: ts.parseJsonSourceFileConfigFileContent(jsonSourceFile, host, basePath, {}, entry)
};
}
function testSuccess(name: string, entry: string, expected: CompilerOptions, expectedFiles: string[]) {
expected.configFilePath = entry;
it(name, () => {
const {config, error} = ts.readConfigFile(entry, name => host.readFile(name));
assert(config && !error, flattenDiagnosticMessageText(error && error.messageText, "\n"));
const parsed = ts.parseJsonConfigFileContent(config, host, basePath, {}, entry);
const parsed = getParseCommandLine(entry);
assert(!parsed.errors.length, flattenDiagnosticMessageText(parsed.errors[0] && parsed.errors[0].messageText, "\n"));
expected.configFilePath = entry;
assert.deepEqual(parsed.options, expected);
assert.deepEqual(parsed.fileNames, expectedFiles);
});
it(name + " with jsonSourceFile", () => {
const { parsed, jsonSourceFile } = getParseCommandLineJsonSourceFile(entry);
assert(!parsed.errors.length, flattenDiagnosticMessageText(parsed.errors[0] && parsed.errors[0].messageText, "\n"));
assert.deepEqual(parsed.options, expected);
assert.equal(parsed.options.configFile, jsonSourceFile);
assert.deepEqual(parsed.fileNames, expectedFiles);
});
}
function testFailure(name: string, entry: string, expectedDiagnostics: {code: number, category: DiagnosticCategory, messageText: string}[]) {
function testFailure(name: string, entry: string, expectedDiagnostics: { code: number, category: DiagnosticCategory, messageText: string }[]) {
it(name, () => {
const {config, error} = ts.readConfigFile(entry, name => host.readFile(name));
assert(config && !error, flattenDiagnosticMessageText(error && error.messageText, "\n"));
const parsed = ts.parseJsonConfigFileContent(config, host, basePath, {}, entry);
const parsed = getParseCommandLine(entry);
verifyDiagnostics(parsed.errors, expectedDiagnostics);
});
it(name + " with jsonSourceFile", () => {
const { parsed } = getParseCommandLineJsonSourceFile(entry);
verifyDiagnostics(parsed.errors, expectedDiagnostics);
});
}
@ -185,4 +209,4 @@ namespace ts {
});
});
});
}
}

View File

@ -4,6 +4,11 @@
namespace ts {
describe("convertCompilerOptionsFromJson", () => {
function assertCompilerOptions(json: any, configFileName: string, expectedResult: { compilerOptions: CompilerOptions, errors: Diagnostic[] }) {
assertCompilerOptionsWithJson(json, configFileName, expectedResult);
assertCompilerOptionsWithJsonNode(json, configFileName, expectedResult);
}
function assertCompilerOptionsWithJson(json: any, configFileName: string, expectedResult: { compilerOptions: CompilerOptions, errors: Diagnostic[] }) {
const { options: actualCompilerOptions, errors: actualErrors} = convertCompilerOptionsFromJson(json["compilerOptions"], "/apath/", configFileName);
const parsedCompilerOptions = JSON.stringify(actualCompilerOptions);
@ -21,6 +26,34 @@ namespace ts {
}
}
function assertCompilerOptionsWithJsonNode(json: any, configFileName: string, expectedResult: { compilerOptions: CompilerOptions, errors: Diagnostic[] }) {
const fileText = JSON.stringify(json);
const result = parseJsonText(configFileName, fileText);
assert(!result.parseDiagnostics.length);
assert(!!result.endOfFileToken);
const host: ParseConfigHost = new Utils.MockParseConfigHost("/apath/", true, []);
const { options: actualCompilerOptions, errors: actualParseErrors } = parseJsonSourceFileConfigFileContent(result, host, "/apath/", /*existingOptions*/ undefined, configFileName);
expectedResult.compilerOptions["configFilePath"] = configFileName;
const parsedCompilerOptions = JSON.stringify(actualCompilerOptions);
const expectedCompilerOptions = JSON.stringify(expectedResult.compilerOptions);
assert.equal(parsedCompilerOptions, expectedCompilerOptions);
assert.equal(actualCompilerOptions.configFile, result);
const actualErrors = filter(actualParseErrors, error => error.code !== Diagnostics.No_inputs_were_found_in_config_file_0_Specified_include_paths_were_1_and_exclude_paths_were_2.code);
const expectedErrors = expectedResult.errors;
assert.isTrue(expectedResult.errors.length === actualErrors.length, `Expected error: ${JSON.stringify(expectedResult.errors)}. Actual error: ${JSON.stringify(actualErrors)}.`);
for (let i = 0; i < actualErrors.length; i++) {
const actualError = actualErrors[i];
const expectedError = expectedErrors[i];
assert.equal(actualError.code, expectedError.code, `Expected error-code: ${JSON.stringify(expectedError.code)}. Actual error-code: ${JSON.stringify(actualError.code)}.`);
assert.equal(actualError.category, expectedError.category, `Expected error-category: ${JSON.stringify(expectedError.category)}. Actual error-category: ${JSON.stringify(actualError.category)}.`);
assert(actualError.file);
assert(actualError.start);
assert(actualError.length);
}
}
// tsconfig.json tests
it("Convert correctly format tsconfig.json to compiler-options ", () => {
assertCompilerOptions(

View File

@ -2,14 +2,20 @@
/// <reference path="..\..\compiler\commandLineParser.ts" />
namespace ts {
type ExpectedResult = { typeAcquisition: TypeAcquisition, errors: Diagnostic[] };
describe("convertTypeAcquisitionFromJson", () => {
function assertTypeAcquisition(json: any, configFileName: string, expectedResult: { typeAcquisition: TypeAcquisition, errors: Diagnostic[] }) {
const jsonOptions = json["typeAcquisition"] || json["typingOptions"];
const { options: actualTypeAcquisition, errors: actualErrors } = convertTypeAcquisitionFromJson(jsonOptions, "/apath/", configFileName);
function assertTypeAcquisition(json: any, configFileName: string, expectedResult: ExpectedResult) {
assertTypeAcquisitionWithJson(json, configFileName, expectedResult);
assertTypeAcquisitionWithJsonNode(json, configFileName, expectedResult);
}
function verifyAcquisition(actualTypeAcquisition: TypeAcquisition, expectedResult: ExpectedResult) {
const parsedTypeAcquisition = JSON.stringify(actualTypeAcquisition);
const expectedTypeAcquisition = JSON.stringify(expectedResult.typeAcquisition);
assert.equal(parsedTypeAcquisition, expectedTypeAcquisition);
}
function verifyErrors(actualErrors: Diagnostic[], expectedResult: ExpectedResult, hasLocation?: boolean) {
const expectedErrors = expectedResult.errors;
assert.isTrue(expectedResult.errors.length === actualErrors.length, `Expected error: ${JSON.stringify(expectedResult.errors)}. Actual error: ${JSON.stringify(actualErrors)}.`);
for (let i = 0; i < actualErrors.length; i++) {
@ -17,9 +23,34 @@ namespace ts {
const expectedError = expectedErrors[i];
assert.equal(actualError.code, expectedError.code, `Expected error-code: ${JSON.stringify(expectedError.code)}. Actual error-code: ${JSON.stringify(actualError.code)}.`);
assert.equal(actualError.category, expectedError.category, `Expected error-category: ${JSON.stringify(expectedError.category)}. Actual error-category: ${JSON.stringify(actualError.category)}.`);
if (hasLocation) {
assert(actualError.file);
assert(actualError.start);
assert(actualError.length);
}
}
}
function assertTypeAcquisitionWithJson(json: any, configFileName: string, expectedResult: ExpectedResult) {
const jsonOptions = json["typeAcquisition"] || json["typingOptions"];
const { options: actualTypeAcquisition, errors: actualErrors } = convertTypeAcquisitionFromJson(jsonOptions, "/apath/", configFileName);
verifyAcquisition(actualTypeAcquisition, expectedResult);
verifyErrors(actualErrors, expectedResult);
}
function assertTypeAcquisitionWithJsonNode(json: any, configFileName: string, expectedResult: ExpectedResult) {
const fileText = JSON.stringify(json);
const result = parseJsonText(configFileName, fileText);
assert(!result.parseDiagnostics.length);
assert(!!result.endOfFileToken);
const host: ParseConfigHost = new Utils.MockParseConfigHost("/apath/", true, []);
const { typeAcquisition: actualTypeAcquisition, errors: actualParseErrors } = parseJsonSourceFileConfigFileContent(result, host, "/apath/", /*existingOptions*/ undefined, configFileName);
verifyAcquisition(actualTypeAcquisition, expectedResult);
const actualErrors = filter(actualParseErrors, error => error.code !== Diagnostics.No_inputs_were_found_in_config_file_0_Specified_include_paths_were_1_and_exclude_paths_were_2.code);
verifyErrors(actualErrors, expectedResult, /*hasLocation*/ true);
}
// tsconfig.json
it("Convert deprecated typingOptions.enableAutoDiscovery format tsconfig.json to typeAcquisition ", () => {
assertTypeAcquisition(
@ -177,7 +208,7 @@ namespace ts {
},
errors: [
{
category: Diagnostics.Unknown_compiler_option_0.category,
category: Diagnostics.Unknown_type_acquisition_option_0.category,
code: Diagnostics.Unknown_type_acquisition_option_0.code,
file: undefined,
start: 0,

View File

@ -95,6 +95,44 @@ namespace ts {
assert.deepEqual(actual.errors, expected.errors);
}
function validateMatches(expected: ts.ParsedCommandLine, json: any, host: ParseConfigHost, basePath: string, existingOptions?: CompilerOptions, configFileName?: string, resolutionStack?: Path[]) {
{
const jsonText = JSON.stringify(json);
const result = parseJsonText(caseInsensitiveTsconfigPath, jsonText);
const actual = ts.parseJsonSourceFileConfigFileContent(result, host, basePath, existingOptions, configFileName, resolutionStack);
for (const error of expected.errors) {
if (error.file) {
error.file = result;
}
}
assertParsed(actual, expected);
}
{
const actual = ts.parseJsonConfigFileContent(json, host, basePath, existingOptions, configFileName, resolutionStack);
expected.errors = map(expected.errors, error => {
return <Diagnostic>{
category: error.category,
code: error.code,
file: undefined,
length: undefined,
messageText: error.messageText,
start: undefined,
};
});
assertParsed(actual, expected);
}
}
function createDiagnosticForConfigFile(json: any, start: number, length: number, diagnosticMessage: DiagnosticMessage, arg0: string) {
const text = JSON.stringify(json);
const file = <SourceFile>{
fileName: caseInsensitiveTsconfigPath,
kind: SyntaxKind.SourceFile,
text
};
return ts.createFileDiagnostic(file, start, length, diagnosticMessage, arg0);
}
describe("matchFiles", () => {
it("with defaults", () => {
const json = {};
@ -109,8 +147,7 @@ namespace ts {
"c:/dev": ts.WatchDirectoryFlags.Recursive
},
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveCommonFoldersHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveCommonFoldersHost, caseInsensitiveBasePath);
});
describe("with literal file list", () => {
@ -130,8 +167,7 @@ namespace ts {
],
wildcardDirectories: {},
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath);
});
it("missing files are still present", () => {
const json = {
@ -149,8 +185,7 @@ namespace ts {
],
wildcardDirectories: {},
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath);
});
it("are not removed due to excludes", () => {
const json = {
@ -171,8 +206,7 @@ namespace ts {
],
wildcardDirectories: {},
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath);
});
});
@ -193,8 +227,7 @@ namespace ts {
],
wildcardDirectories: {},
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath);
});
it("with non .ts file extensions are excluded", () => {
const json = {
@ -212,8 +245,7 @@ namespace ts {
fileNames: [],
wildcardDirectories: {},
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath, /*existingOptions*/ undefined, caseInsensitiveTsconfigPath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath, /*existingOptions*/ undefined, caseInsensitiveTsconfigPath);
});
it("with missing files are excluded", () => {
const json = {
@ -231,8 +263,7 @@ namespace ts {
fileNames: [],
wildcardDirectories: {},
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath, /*existingOptions*/ undefined, caseInsensitiveTsconfigPath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath, /*existingOptions*/ undefined, caseInsensitiveTsconfigPath);
});
it("with literal excludes", () => {
const json = {
@ -252,8 +283,7 @@ namespace ts {
],
wildcardDirectories: {},
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath);
});
it("with wildcard excludes", () => {
const json = {
@ -280,8 +310,7 @@ namespace ts {
],
wildcardDirectories: {},
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath);
});
it("with recursive excludes", () => {
const json = {
@ -307,8 +336,7 @@ namespace ts {
],
wildcardDirectories: {},
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath);
});
it("with case sensitive exclude", () => {
const json = {
@ -327,8 +355,7 @@ namespace ts {
],
wildcardDirectories: {},
};
const actual = ts.parseJsonConfigFileContent(json, caseSensitiveHost, caseSensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseSensitiveHost, caseSensitiveBasePath);
});
it("with common package folders and no exclusions", () => {
const json = {
@ -352,8 +379,7 @@ namespace ts {
],
wildcardDirectories: {},
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveCommonFoldersHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveCommonFoldersHost, caseInsensitiveBasePath);
});
it("with common package folders and exclusions", () => {
const json = {
@ -379,8 +405,7 @@ namespace ts {
],
wildcardDirectories: {},
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveCommonFoldersHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveCommonFoldersHost, caseInsensitiveBasePath);
});
it("with common package folders and empty exclude", () => {
const json = {
@ -404,8 +429,7 @@ namespace ts {
],
wildcardDirectories: {},
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveCommonFoldersHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveCommonFoldersHost, caseInsensitiveBasePath);
});
});
@ -436,8 +460,7 @@ namespace ts {
"c:/dev/x": ts.WatchDirectoryFlags.None
},
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath);
});
it("same named declarations are excluded", () => {
@ -458,8 +481,7 @@ namespace ts {
"c:/dev": ts.WatchDirectoryFlags.None
},
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath);
});
it("`*` matches only ts files", () => {
const json = {
@ -479,8 +501,7 @@ namespace ts {
"c:/dev": ts.WatchDirectoryFlags.None
},
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath);
});
it("`?` matches only a single character", () => {
const json = {
@ -499,8 +520,7 @@ namespace ts {
"c:/dev/x": ts.WatchDirectoryFlags.None
},
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath);
});
it("with recursive directory", () => {
const json = {
@ -521,8 +541,7 @@ namespace ts {
"c:/dev": ts.WatchDirectoryFlags.Recursive
},
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath);
});
it("with multiple recursive directories", () => {
const json = {
@ -545,8 +564,7 @@ namespace ts {
"c:/dev/z": ts.WatchDirectoryFlags.Recursive
},
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath);
});
it("case sensitive", () => {
const json = {
@ -564,8 +582,7 @@ namespace ts {
"/dev": ts.WatchDirectoryFlags.Recursive
},
};
const actual = ts.parseJsonConfigFileContent(json, caseSensitiveHost, caseSensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseSensitiveHost, caseSensitiveBasePath);
});
it("with missing files are excluded", () => {
const json = {
@ -584,8 +601,7 @@ namespace ts {
"c:/dev": ts.WatchDirectoryFlags.Recursive
},
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath, /*existingOptions*/ undefined, caseInsensitiveTsconfigPath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath, /*existingOptions*/ undefined, caseInsensitiveTsconfigPath);
});
it("always include literal files", () => {
const json = {
@ -609,8 +625,7 @@ namespace ts {
"c:/dev": ts.WatchDirectoryFlags.Recursive
},
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath);
});
it("exclude folders", () => {
const json = {
@ -634,8 +649,7 @@ namespace ts {
"c:/dev": ts.WatchDirectoryFlags.Recursive
}
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath);
});
it("with common package folders and no exclusions", () => {
const json = {
@ -656,8 +670,7 @@ namespace ts {
"c:/dev": ts.WatchDirectoryFlags.Recursive
},
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveCommonFoldersHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveCommonFoldersHost, caseInsensitiveBasePath);
});
it("with common package folders and exclusions", () => {
const json = {
@ -680,8 +693,7 @@ namespace ts {
"c:/dev": ts.WatchDirectoryFlags.Recursive
},
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveCommonFoldersHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveCommonFoldersHost, caseInsensitiveBasePath);
});
it("with common package folders and empty exclude", () => {
const json = {
@ -703,8 +715,7 @@ namespace ts {
"c:/dev": ts.WatchDirectoryFlags.Recursive
},
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveCommonFoldersHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveCommonFoldersHost, caseInsensitiveBasePath);
});
it("exclude .js files when allowJs=false", () => {
const json = {
@ -728,8 +739,7 @@ namespace ts {
"c:/dev/js": ts.WatchDirectoryFlags.None
}
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath, /*existingOptions*/ undefined, caseInsensitiveTsconfigPath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath, /*existingOptions*/ undefined, caseInsensitiveTsconfigPath);
});
it("include .js files when allowJs=true", () => {
const json = {
@ -753,8 +763,7 @@ namespace ts {
"c:/dev/js": ts.WatchDirectoryFlags.None
}
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath);
});
it("include explicitly listed .min.js files when allowJs=true", () => {
const json = {
@ -778,8 +787,7 @@ namespace ts {
"c:/dev/js": ts.WatchDirectoryFlags.None
}
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath);
});
it("include paths outside of the project", () => {
const json = {
@ -802,8 +810,7 @@ namespace ts {
"c:/ext": ts.WatchDirectoryFlags.None
}
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath);
});
it("include paths outside of the project using relative paths", () => {
const json = {
@ -825,8 +832,7 @@ namespace ts {
"c:/ext": ts.WatchDirectoryFlags.None
}
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath);
});
it("exclude paths outside of the project using relative paths", () => {
const json = {
@ -846,8 +852,7 @@ namespace ts {
fileNames: [],
wildcardDirectories: {}
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath, /*existingOptions*/ undefined, caseInsensitiveTsconfigPath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath, /*existingOptions*/ undefined, caseInsensitiveTsconfigPath);
});
it("include files with .. in their name", () => {
const json = {
@ -866,8 +871,7 @@ namespace ts {
],
wildcardDirectories: {}
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath);
});
it("exclude files with .. in their name", () => {
const json = {
@ -888,8 +892,7 @@ namespace ts {
"c:/ext": ts.WatchDirectoryFlags.Recursive
}
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath);
});
it("with jsx=none, allowJs=false", () => {
const json = {
@ -911,8 +914,7 @@ namespace ts {
"c:/dev": ts.WatchDirectoryFlags.Recursive
}
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveMixedExtensionHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveMixedExtensionHost, caseInsensitiveBasePath);
});
it("with jsx=preserve, allowJs=false", () => {
const json = {
@ -936,8 +938,7 @@ namespace ts {
"c:/dev": ts.WatchDirectoryFlags.Recursive
}
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveMixedExtensionHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveMixedExtensionHost, caseInsensitiveBasePath);
});
it("with jsx=react-native, allowJs=false", () => {
const json = {
@ -961,8 +962,7 @@ namespace ts {
"c:/dev": ts.WatchDirectoryFlags.Recursive
}
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveMixedExtensionHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveMixedExtensionHost, caseInsensitiveBasePath);
});
it("with jsx=none, allowJs=true", () => {
const json = {
@ -986,8 +986,7 @@ namespace ts {
"c:/dev": ts.WatchDirectoryFlags.Recursive
}
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveMixedExtensionHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveMixedExtensionHost, caseInsensitiveBasePath);
});
it("with jsx=preserve, allowJs=true", () => {
const json = {
@ -1013,8 +1012,7 @@ namespace ts {
"c:/dev": ts.WatchDirectoryFlags.Recursive
}
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveMixedExtensionHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveMixedExtensionHost, caseInsensitiveBasePath);
});
it("with jsx=react-native, allowJs=true", () => {
const json = {
@ -1040,8 +1038,7 @@ namespace ts {
"c:/dev": ts.WatchDirectoryFlags.Recursive
}
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveMixedExtensionHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveMixedExtensionHost, caseInsensitiveBasePath);
});
it("exclude .min.js files using wildcards", () => {
const json = {
@ -1067,8 +1064,7 @@ namespace ts {
"c:/dev/js": ts.WatchDirectoryFlags.None
}
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath);
});
describe("with trailing recursive directory", () => {
it("in includes", () => {
@ -1080,15 +1076,14 @@ namespace ts {
const expected: ts.ParsedCommandLine = {
options: {},
errors: [
ts.createCompilerDiagnostic(ts.Diagnostics.File_specification_cannot_end_in_a_recursive_directory_wildcard_Asterisk_Asterisk_Colon_0, "**"),
createDiagnosticForConfigFile(json, 12, 4, ts.Diagnostics.File_specification_cannot_end_in_a_recursive_directory_wildcard_Asterisk_Asterisk_Colon_0, "**"),
ts.createCompilerDiagnostic(ts.Diagnostics.No_inputs_were_found_in_config_file_0_Specified_include_paths_were_1_and_exclude_paths_were_2,
caseInsensitiveTsconfigPath, JSON.stringify(json.include), "[]")
],
fileNames: [],
wildcardDirectories: {}
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath, /*existingOptions*/ undefined, caseInsensitiveTsconfigPath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath, /*existingOptions*/ undefined, caseInsensitiveTsconfigPath);
});
it("in excludes", () => {
const json = {
@ -1108,8 +1103,7 @@ namespace ts {
fileNames: [],
wildcardDirectories: {}
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath, /*existingOptions*/ undefined, caseInsensitiveTsconfigPath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath, /*existingOptions*/ undefined, caseInsensitiveTsconfigPath);
});
});
describe("with multiple recursive directory patterns", () => {
@ -1122,15 +1116,14 @@ namespace ts {
const expected: ts.ParsedCommandLine = {
options: {},
errors: [
ts.createCompilerDiagnostic(ts.Diagnostics.File_specification_cannot_contain_multiple_recursive_directory_wildcards_Asterisk_Asterisk_Colon_0, "**/x/**/*"),
createDiagnosticForConfigFile(json, 12, 11, ts.Diagnostics.File_specification_cannot_contain_multiple_recursive_directory_wildcards_Asterisk_Asterisk_Colon_0, "**/x/**/*"),
ts.createCompilerDiagnostic(ts.Diagnostics.No_inputs_were_found_in_config_file_0_Specified_include_paths_were_1_and_exclude_paths_were_2,
caseInsensitiveTsconfigPath, JSON.stringify(json.include), "[]")
],
fileNames: [],
wildcardDirectories: {}
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath, /*existingOptions*/ undefined, caseInsensitiveTsconfigPath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath, /*existingOptions*/ undefined, caseInsensitiveTsconfigPath);
});
it("in excludes", () => {
const json = {
@ -1144,7 +1137,7 @@ namespace ts {
const expected: ts.ParsedCommandLine = {
options: {},
errors: [
ts.createCompilerDiagnostic(ts.Diagnostics.File_specification_cannot_contain_multiple_recursive_directory_wildcards_Asterisk_Asterisk_Colon_0, "**/x/**")
createDiagnosticForConfigFile(json, 34, 9, ts.Diagnostics.File_specification_cannot_contain_multiple_recursive_directory_wildcards_Asterisk_Asterisk_Colon_0, "**/x/**")
],
fileNames: [
"c:/dev/a.ts",
@ -1156,8 +1149,7 @@ namespace ts {
"c:/dev": ts.WatchDirectoryFlags.Recursive
}
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath);
});
});
@ -1171,15 +1163,14 @@ namespace ts {
const expected: ts.ParsedCommandLine = {
options: {},
errors: [
ts.createCompilerDiagnostic(ts.Diagnostics.File_specification_cannot_contain_a_parent_directory_that_appears_after_a_recursive_directory_wildcard_Asterisk_Asterisk_Colon_0, "**/../*"),
createDiagnosticForConfigFile(json, 12, 9, ts.Diagnostics.File_specification_cannot_contain_a_parent_directory_that_appears_after_a_recursive_directory_wildcard_Asterisk_Asterisk_Colon_0, "**/../*"),
ts.createCompilerDiagnostic(ts.Diagnostics.No_inputs_were_found_in_config_file_0_Specified_include_paths_were_1_and_exclude_paths_were_2,
caseInsensitiveTsconfigPath, JSON.stringify(json.include), "[]")
],
fileNames: [],
wildcardDirectories: {}
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath, /*existingOptions*/ undefined, caseInsensitiveTsconfigPath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath, /*existingOptions*/ undefined, caseInsensitiveTsconfigPath);
});
it("in includes after a subdirectory", () => {
@ -1191,15 +1182,14 @@ namespace ts {
const expected: ts.ParsedCommandLine = {
options: {},
errors: [
ts.createCompilerDiagnostic(ts.Diagnostics.File_specification_cannot_contain_a_parent_directory_that_appears_after_a_recursive_directory_wildcard_Asterisk_Asterisk_Colon_0, "**/y/../*"),
createDiagnosticForConfigFile(json, 12, 11, ts.Diagnostics.File_specification_cannot_contain_a_parent_directory_that_appears_after_a_recursive_directory_wildcard_Asterisk_Asterisk_Colon_0, "**/y/../*"),
ts.createCompilerDiagnostic(ts.Diagnostics.No_inputs_were_found_in_config_file_0_Specified_include_paths_were_1_and_exclude_paths_were_2,
caseInsensitiveTsconfigPath, JSON.stringify(json.include), "[]")
],
fileNames: [],
wildcardDirectories: {}
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath, /*existingOptions*/ undefined, caseInsensitiveTsconfigPath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath, /*existingOptions*/ undefined, caseInsensitiveTsconfigPath);
});
it("in excludes immediately after", () => {
@ -1214,7 +1204,7 @@ namespace ts {
const expected: ts.ParsedCommandLine = {
options: {},
errors: [
ts.createCompilerDiagnostic(ts.Diagnostics.File_specification_cannot_contain_a_parent_directory_that_appears_after_a_recursive_directory_wildcard_Asterisk_Asterisk_Colon_0, "**/..")
createDiagnosticForConfigFile(json, 34, 7, ts.Diagnostics.File_specification_cannot_contain_a_parent_directory_that_appears_after_a_recursive_directory_wildcard_Asterisk_Asterisk_Colon_0, "**/..")
],
fileNames: [
"c:/dev/a.ts",
@ -1226,8 +1216,7 @@ namespace ts {
"c:/dev": ts.WatchDirectoryFlags.Recursive
}
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath);
});
it("in excludes after a subdirectory", () => {
@ -1242,7 +1231,7 @@ namespace ts {
const expected: ts.ParsedCommandLine = {
options: {},
errors: [
ts.createCompilerDiagnostic(ts.Diagnostics.File_specification_cannot_contain_a_parent_directory_that_appears_after_a_recursive_directory_wildcard_Asterisk_Asterisk_Colon_0, "**/y/..")
createDiagnosticForConfigFile(json, 34, 9, ts.Diagnostics.File_specification_cannot_contain_a_parent_directory_that_appears_after_a_recursive_directory_wildcard_Asterisk_Asterisk_Colon_0, "**/y/..")
],
fileNames: [
"c:/dev/a.ts",
@ -1254,8 +1243,7 @@ namespace ts {
"c:/dev": ts.WatchDirectoryFlags.Recursive
}
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath);
});
});
@ -1272,8 +1260,7 @@ namespace ts {
"c:/dev/z": ts.WatchDirectoryFlags.Recursive
}
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveHost, caseInsensitiveBasePath);
});
});
});
@ -1297,8 +1284,7 @@ namespace ts {
"c:/dev/w": ts.WatchDirectoryFlags.Recursive
}
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveDottedFoldersHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveDottedFoldersHost, caseInsensitiveBasePath);
});
describe("that are explicitly included", () => {
it("without wildcards", () => {
@ -1317,8 +1303,7 @@ namespace ts {
],
wildcardDirectories: {}
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveDottedFoldersHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveDottedFoldersHost, caseInsensitiveBasePath);
});
it("with recursive wildcards that match directories", () => {
const json = {
@ -1339,8 +1324,7 @@ namespace ts {
"c:/dev": ts.WatchDirectoryFlags.Recursive
}
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveDottedFoldersHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveDottedFoldersHost, caseInsensitiveBasePath);
});
it("with recursive wildcards that match nothing", () => {
const json = {
@ -1361,8 +1345,7 @@ namespace ts {
"c:/dev/x": ts.WatchDirectoryFlags.Recursive
}
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveDottedFoldersHost, caseInsensitiveBasePath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveDottedFoldersHost, caseInsensitiveBasePath);
});
it("with wildcard excludes that implicitly exclude dotted files", () => {
const json = {
@ -1382,8 +1365,7 @@ namespace ts {
fileNames: [],
wildcardDirectories: {}
};
const actual = ts.parseJsonConfigFileContent(json, caseInsensitiveDottedFoldersHost, caseInsensitiveBasePath, /*existingOptions*/ undefined, caseInsensitiveTsconfigPath);
assertParsed(actual, expected);
validateMatches(expected, json, caseInsensitiveDottedFoldersHost, caseInsensitiveBasePath, /*existingOptions*/ undefined, caseInsensitiveTsconfigPath);
});
});
});

View File

@ -5,7 +5,7 @@ namespace ts {
if (!expected === !actual) {
if (expected) {
assert.isTrue(expected.resolvedFileName === actual.resolvedFileName, `'resolvedFileName': expected '${expected.resolvedFileName}' to be equal to '${actual.resolvedFileName}'`);
assert.isTrue(expected.extension === actual.extension, `'ext': expected '${Extension[expected.extension]}' to be equal to '${Extension[actual.extension]}'`);
assert.isTrue(expected.extension === actual.extension, `'ext': expected '${expected.extension}' to be equal to '${actual.extension}'`);
assert.isTrue(expected.isExternalLibraryImport === actual.isExternalLibraryImport, `'isExternalLibraryImport': expected '${expected.isExternalLibraryImport}' to be equal to '${actual.isExternalLibraryImport}'`);
}
return true;

View File

@ -81,63 +81,136 @@ namespace ts {
describe("printNode", () => {
const printsCorrectly = makePrintsCorrectly("printsNodeCorrectly");
let sourceFile: SourceFile;
before(() => sourceFile = createSourceFile("source.ts", "", ScriptTarget.ES2015));
// tslint:disable boolean-trivia
const syntheticNode = createClassDeclaration(
undefined,
undefined,
/*name*/ createIdentifier("C"),
undefined,
undefined,
createNodeArray([
createProperty(
undefined,
printsCorrectly("class", {}, printer => printer.printNode(
EmitHint.Unspecified,
createClassDeclaration(
/*decorators*/ undefined,
/*modifiers*/ undefined,
/*name*/ createIdentifier("C"),
/*typeParameters*/ undefined,
/*heritageClauses*/ undefined,
[createProperty(
/*decorators*/ undefined,
createNodeArray([createToken(SyntaxKind.PublicKeyword)]),
createIdentifier("prop"),
undefined,
undefined,
undefined
)
])
);
/*questionToken*/ undefined,
/*type*/ undefined,
/*initializer*/ undefined
)]
),
createSourceFile("source.ts", "", ScriptTarget.ES2015)
));
printsCorrectly("namespaceExportDeclaration", {}, printer => printer.printNode(
EmitHint.Unspecified,
createNamespaceExportDeclaration("B"),
createSourceFile("source.ts", "", ScriptTarget.ES2015)
));
// https://github.com/Microsoft/TypeScript/issues/15971
const classWithOptionalMethodAndProperty = createClassDeclaration(
undefined,
/* modifiers */ createNodeArray([createToken(SyntaxKind.DeclareKeyword)]),
/* name */ createIdentifier("X"),
undefined,
undefined,
createNodeArray([
createMethod(
undefined,
undefined,
undefined,
/* name */ createIdentifier("method"),
/* questionToken */ createToken(SyntaxKind.QuestionToken),
undefined,
undefined,
/* type */ createKeywordTypeNode(SyntaxKind.VoidKeyword),
undefined
printsCorrectly("classWithOptionalMethodAndProperty", {}, printer => printer.printNode(
EmitHint.Unspecified,
createClassDeclaration(
/*decorators*/ undefined,
/*modifiers*/ [createToken(SyntaxKind.DeclareKeyword)],
/*name*/ createIdentifier("X"),
/*typeParameters*/ undefined,
/*heritageClauses*/ undefined,
[
createMethod(
/*decorators*/ undefined,
/*modifiers*/ undefined,
/*asteriskToken*/ undefined,
/*name*/ createIdentifier("method"),
/*questionToken*/ createToken(SyntaxKind.QuestionToken),
/*typeParameters*/ undefined,
[],
/*type*/ createKeywordTypeNode(SyntaxKind.VoidKeyword),
/*body*/ undefined
),
createProperty(
/*decorators*/ undefined,
/*modifiers*/ undefined,
/*name*/ createIdentifier("property"),
/*questionToken*/ createToken(SyntaxKind.QuestionToken),
/*type*/ createKeywordTypeNode(SyntaxKind.StringKeyword),
/*initializer*/ undefined
),
]
),
createSourceFile("source.ts", "", ScriptTarget.ES2015)
));
// https://github.com/Microsoft/TypeScript/issues/15651
printsCorrectly("functionTypes", {}, printer => printer.printNode(
EmitHint.Unspecified,
createTupleTypeNode([
createFunctionTypeNode(
/*typeArguments*/ undefined,
[createParameter(
/*decorators*/ undefined,
/*modifiers*/ undefined,
/*dotDotDotToken*/ undefined,
createIdentifier("args")
)],
createKeywordTypeNode(SyntaxKind.AnyKeyword)
),
createProperty(
undefined,
undefined,
/* name */ createIdentifier("property"),
/* questionToken */ createToken(SyntaxKind.QuestionToken),
/* type */ createKeywordTypeNode(SyntaxKind.StringKeyword),
undefined
createFunctionTypeNode(
[createTypeParameterDeclaration("T")],
[createParameter(
/*decorators*/ undefined,
/*modifiers*/ undefined,
/*dotDotDotToken*/ undefined,
createIdentifier("args")
)],
createKeywordTypeNode(SyntaxKind.AnyKeyword)
),
])
);
// tslint:enable boolean-trivia
printsCorrectly("class", {}, printer => printer.printNode(EmitHint.Unspecified, syntheticNode, sourceFile));
printsCorrectly("namespaceExportDeclaration", {}, printer => printer.printNode(EmitHint.Unspecified, createNamespaceExportDeclaration("B"), sourceFile));
printsCorrectly("classWithOptionalMethodAndProperty", {}, printer => printer.printNode(EmitHint.Unspecified, classWithOptionalMethodAndProperty, sourceFile));
createFunctionTypeNode(
/*typeArguments*/ undefined,
[createParameter(
/*decorators*/ undefined,
/*modifiers*/ undefined,
createToken(SyntaxKind.DotDotDotToken),
createIdentifier("args")
)],
createKeywordTypeNode(SyntaxKind.AnyKeyword)
),
createFunctionTypeNode(
/*typeArguments*/ undefined,
[createParameter(
/*decorators*/ undefined,
/*modifiers*/ undefined,
/*dotDotDotToken*/ undefined,
createIdentifier("args"),
createToken(SyntaxKind.QuestionToken)
)],
createKeywordTypeNode(SyntaxKind.AnyKeyword)
),
createFunctionTypeNode(
/*typeArguments*/ undefined,
[createParameter(
/*decorators*/ undefined,
/*modifiers*/ undefined,
/*dotDotDotToken*/ undefined,
createIdentifier("args"),
/*questionToken*/ undefined,
createKeywordTypeNode(SyntaxKind.AnyKeyword)
)],
createKeywordTypeNode(SyntaxKind.AnyKeyword)
),
createFunctionTypeNode(
/*typeArguments*/ undefined,
[createParameter(
/*decorators*/ undefined,
/*modifiers*/ undefined,
/*dotDotDotToken*/ undefined,
createObjectBindingPattern([])
)],
createKeywordTypeNode(SyntaxKind.AnyKeyword)
),
]),
createSourceFile("source.ts", "", ScriptTarget.ES2015)
));
});
});
}

View File

@ -6,7 +6,10 @@ namespace ts.projectSystem {
describe("Project errors", () => {
function checkProjectErrors(projectFiles: server.ProjectFilesWithTSDiagnostics, expectedErrors: string[]) {
assert.isTrue(projectFiles !== undefined, "missing project files");
const errors = projectFiles.projectErrors;
checkProjectErrorsWorker(projectFiles.projectErrors, expectedErrors);
}
function checkProjectErrorsWorker(errors: Diagnostic[], expectedErrors: string[]) {
assert.equal(errors ? errors.length : 0, expectedErrors.length, `expected ${expectedErrors.length} error in the list`);
if (expectedErrors.length) {
for (let i = 0; i < errors.length; i++) {
@ -122,12 +125,13 @@ namespace ts.projectSystem {
projectService.checkNumberOfProjects({ configuredProjects: 1 });
const configuredProject = forEach(projectService.synchronizeProjectList([]), f => f.info.projectName === corruptedConfig.path && f);
assert.isTrue(configuredProject !== undefined, "should find configured project");
checkProjectErrors(configuredProject, [
"')' expected.",
"Declaration or statement expected.",
"Declaration or statement expected.",
"Failed to parse file '/a/b/tsconfig.json'"
checkProjectErrors(configuredProject, []);
const projectErrors = projectService.configuredProjects[0].getAllProjectErrors();
checkProjectErrorsWorker(projectErrors, [
"'{' expected."
]);
assert.isNotNull(projectErrors[0].file);
assert.equal(projectErrors[0].file.fileName, corruptedConfig.path);
}
// fix config and trigger watcher
host.reloadFS([file1, file2, correctConfig]);
@ -137,6 +141,8 @@ namespace ts.projectSystem {
const configuredProject = forEach(projectService.synchronizeProjectList([]), f => f.info.projectName === corruptedConfig.path && f);
assert.isTrue(configuredProject !== undefined, "should find configured project");
checkProjectErrors(configuredProject, []);
const projectErrors = projectService.configuredProjects[0].getAllProjectErrors();
checkProjectErrorsWorker(projectErrors, []);
}
});
@ -166,6 +172,8 @@ namespace ts.projectSystem {
const configuredProject = forEach(projectService.synchronizeProjectList([]), f => f.info.projectName === corruptedConfig.path && f);
assert.isTrue(configuredProject !== undefined, "should find configured project");
checkProjectErrors(configuredProject, []);
const projectErrors = projectService.configuredProjects[0].getAllProjectErrors();
checkProjectErrorsWorker(projectErrors, []);
}
// break config and trigger watcher
host.reloadFS([file1, file2, corruptedConfig]);
@ -174,13 +182,14 @@ namespace ts.projectSystem {
projectService.checkNumberOfProjects({ configuredProjects: 1 });
const configuredProject = forEach(projectService.synchronizeProjectList([]), f => f.info.projectName === corruptedConfig.path && f);
assert.isTrue(configuredProject !== undefined, "should find configured project");
checkProjectErrors(configuredProject, [
"')' expected.",
"Declaration or statement expected.",
"Declaration or statement expected.",
"Failed to parse file '/a/b/tsconfig.json'"
checkProjectErrors(configuredProject, []);
const projectErrors = projectService.configuredProjects[0].getAllProjectErrors();
checkProjectErrorsWorker(projectErrors, [
"'{' expected."
]);
assert.isNotNull(projectErrors[0].file);
assert.equal(projectErrors[0].file.fileName, corruptedConfig.path);
}
});
});
}
}

View File

@ -25,7 +25,7 @@ namespace ts.server {
clearTimeout: noop,
setImmediate: () => 0,
clearImmediate: noop,
createHash: s => s
createHash: Harness.LanguageService.mockHash,
};
const mockLogger: Logger = {
@ -240,8 +240,8 @@ namespace ts.server {
CommandNames.GetCodeFixesFull,
CommandNames.GetSupportedCodeFixes,
CommandNames.GetApplicableRefactors,
CommandNames.GetRefactorCodeActions,
CommandNames.GetRefactorCodeActionsFull,
CommandNames.GetEditsForRefactor,
CommandNames.GetEditsForRefactorFull,
];
it("should not throw when commands are executed with invalid arguments", () => {

View File

@ -9,6 +9,7 @@ namespace ts.projectSystem {
et.service.openClientFile(file.path);
assert.equal(et.getEvents().length, 0);
});
it("only sends an event once", () => {
const file = makeFile("/a.ts");
const tsconfig = makeFile("/tsconfig.json", {});
@ -44,14 +45,15 @@ namespace ts.projectSystem {
it("works with external project", () => {
const file1 = makeFile("/a.ts");
const et = new EventTracker([file1]);
const compilerOptions: ts.CompilerOptions = { strict: true };
const compilerOptions: ts.server.protocol.CompilerOptions = { strict: true };
const projectFileName = "foo.csproj";
const projectFileName = "/hunter2/foo.csproj";
open();
// TODO: Apparently compilerOptions is mutated, so have to repeat it here!
et.assertProjectInfoTelemetryEvent({
projectId: Harness.LanguageService.mockHash("/hunter2/foo.csproj"),
compilerOptions: { strict: true },
compileOnSave: true,
// These properties can't be present for an external project, so they are undefined instead of false.
@ -136,8 +138,6 @@ namespace ts.projectSystem {
declaration: true,
lib: ["es6", "dom"],
checkJs: "" as any as boolean,
};
(compilerOptions as any).unknownCompilerOption = "hunter2"; // These are always ignored.
const tsconfig = makeFile("/tsconfig.json", { compilerOptions, files: ["/a.ts"] });
@ -195,6 +195,7 @@ namespace ts.projectSystem {
const et = new EventTracker([jsconfig, file]);
et.service.openClientFile(file.path);
et.assertProjectInfoTelemetryEvent({
projectId: Harness.LanguageService.mockHash("/jsconfig.json"),
fileStats: fileStats({ js: 1 }),
compilerOptions: autoJsCompilerOptions,
typeAcquisition: {
@ -214,6 +215,7 @@ namespace ts.projectSystem {
et.service.openClientFile(file.path);
et.getEvent<server.ProjectLanguageServiceStateEvent>(server.ProjectLanguageServiceStateEvent, /*mayBeMore*/ true);
et.assertProjectInfoTelemetryEvent({
projectId: Harness.LanguageService.mockHash("/jsconfig.json"),
fileStats: fileStats({ js: 1 }),
compilerOptions: autoJsCompilerOptions,
configFileName: "jsconfig.json",
@ -248,39 +250,37 @@ namespace ts.projectSystem {
}
assertProjectInfoTelemetryEvent(partial: Partial<server.ProjectInfoTelemetryEventData>): void {
assert.deepEqual(this.getEvent<server.ProjectInfoTelemetryEvent>(ts.server.ProjectInfoTelemetryEvent), makePayload(partial));
assert.deepEqual(this.getEvent<server.ProjectInfoTelemetryEvent>(ts.server.ProjectInfoTelemetryEvent), {
projectId: Harness.LanguageService.mockHash("/tsconfig.json"),
fileStats: fileStats({ ts: 1 }),
compilerOptions: {},
extends: false,
files: false,
include: false,
exclude: false,
compileOnSave: false,
typeAcquisition: {
enable: false,
exclude: false,
include: false,
},
configFileName: "tsconfig.json",
projectType: "configured",
languageServiceEnabled: true,
version: ts.version,
...partial,
});
}
getEvent<T extends server.ProjectServiceEvent>(eventName: T["eventName"], mayBeMore = false): T["data"] {
if (mayBeMore) assert(this.events.length !== 0); else assert.equal(this.events.length, 1);
if (mayBeMore) { assert(this.events.length !== 0); }
else { assert.equal(this.events.length, 1); }
const event = this.events.shift();
assert.equal(event.eventName, eventName);
return event.data;
}
}
function makePayload(partial: Partial<server.ProjectInfoTelemetryEventData>): server.ProjectInfoTelemetryEventData {
return {
fileStats: fileStats({ ts: 1 }),
compilerOptions: {},
extends: false,
files: false,
include: false,
exclude: false,
compileOnSave: false,
typeAcquisition: {
enable: false,
exclude: false,
include: false,
},
configFileName: "tsconfig.json",
projectType: "configured",
languageServiceEnabled: true,
version: ts.version,
...partial
};
}
function makeFile(path: string, content: {} = ""): projectSystem.FileOrFolder {
return { path, content: typeof content === "string" ? "" : JSON.stringify(content) };
}

View File

@ -36,7 +36,7 @@ namespace ts {
transpileOptions.reportDiagnostics = true;
justName = "transpile/" + name.replace(/[^a-z0-9\-. ]/ig, "") + (transpileOptions.compilerOptions.jsx ? ".tsx" : ".ts");
justName = "transpile/" + name.replace(/[^a-z0-9\-. ]/ig, "") + (transpileOptions.compilerOptions.jsx ? Extension.Tsx : Extension.Ts);
toBeCompiled = [{
unitName: transpileOptions.fileName,
content: input
@ -88,7 +88,7 @@ namespace ts {
}
it("Correct output for " + justName, () => {
Harness.Baseline.runBaseline(justName.replace(/\.tsx?$/, ".js"), () => {
Harness.Baseline.runBaseline(justName.replace(/\.tsx?$/, ts.Extension.Js), () => {
if (transpileResult.outputText) {
return transpileResult.outputText;
}

View File

@ -3,37 +3,69 @@
namespace ts {
describe("parseConfigFileTextToJson", () => {
function assertParseResult(jsonText: string, expectedConfigObject: { config?: any; error?: Diagnostic }) {
function assertParseResult(jsonText: string, expectedConfigObject: { config?: any; error?: Diagnostic[] }) {
const parsed = ts.parseConfigFileTextToJson("/apath/tsconfig.json", jsonText);
assert.equal(JSON.stringify(parsed), JSON.stringify(expectedConfigObject));
}
function assertParseError(jsonText: string) {
const parsed = ts.parseConfigFileTextToJson("/apath/tsconfig.json", jsonText);
assert.isTrue(undefined === parsed.config);
assert.deepEqual(parsed.config, {});
assert.isTrue(undefined !== parsed.error);
}
function assertParseErrorWithExcludesKeyword(jsonText: string) {
const parsed = ts.parseConfigFileTextToJson("/apath/tsconfig.json", jsonText);
const parsedCommand = ts.parseJsonConfigFileContent(parsed.config, ts.sys, "tests/cases/unittests");
assert.isTrue(parsedCommand.errors && parsedCommand.errors.length === 1 &&
parsedCommand.errors[0].code === ts.Diagnostics.Unknown_option_excludes_Did_you_mean_exclude.code);
{
const parsed = ts.parseConfigFileTextToJson("/apath/tsconfig.json", jsonText);
const parsedCommand = ts.parseJsonConfigFileContent(parsed.config, ts.sys, "tests/cases/unittests");
assert.isTrue(parsedCommand.errors && parsedCommand.errors.length === 1 &&
parsedCommand.errors[0].code === ts.Diagnostics.Unknown_option_excludes_Did_you_mean_exclude.code);
}
{
const parsed = ts.parseJsonText("/apath/tsconfig.json", jsonText);
const parsedCommand = ts.parseJsonSourceFileConfigFileContent(parsed, ts.sys, "tests/cases/unittests");
assert.isTrue(parsedCommand.errors && parsedCommand.errors.length === 1 &&
parsedCommand.errors[0].code === ts.Diagnostics.Unknown_option_excludes_Did_you_mean_exclude.code);
}
}
function getParsedCommandJson(jsonText: string, configFileName: string, basePath: string, allFileList: string[]) {
const parsed = ts.parseConfigFileTextToJson(configFileName, jsonText);
const host: ParseConfigHost = new Utils.MockParseConfigHost(basePath, true, allFileList);
return ts.parseJsonConfigFileContent(parsed.config, host, basePath, /*existingOptions*/ undefined, configFileName);
}
function getParsedCommandJsonNode(jsonText: string, configFileName: string, basePath: string, allFileList: string[]) {
const parsed = ts.parseJsonText(configFileName, jsonText);
const host: ParseConfigHost = new Utils.MockParseConfigHost(basePath, true, allFileList);
return ts.parseJsonSourceFileConfigFileContent(parsed, host, basePath, /*existingOptions*/ undefined, configFileName);
}
function assertParseFileList(jsonText: string, configFileName: string, basePath: string, allFileList: string[], expectedFileList: string[]) {
const json = JSON.parse(jsonText);
const host: ParseConfigHost = new Utils.MockParseConfigHost(basePath, true, allFileList);
const parsed = ts.parseJsonConfigFileContent(json, host, basePath, /*existingOptions*/ undefined, configFileName);
assert.isTrue(arrayIsEqualTo(parsed.fileNames.sort(), expectedFileList.sort()));
{
const parsed = getParsedCommandJson(jsonText, configFileName, basePath, allFileList);
assert.isTrue(arrayIsEqualTo(parsed.fileNames.sort(), expectedFileList.sort()));
}
{
const parsed = getParsedCommandJsonNode(jsonText, configFileName, basePath, allFileList);
assert.isTrue(arrayIsEqualTo(parsed.fileNames.sort(), expectedFileList.sort()));
}
}
function assertParseFileDiagnostics(jsonText: string, configFileName: string, basePath: string, allFileList: string[], expectedDiagnosticCode: number) {
const json = JSON.parse(jsonText);
const host: ParseConfigHost = new Utils.MockParseConfigHost(basePath, true, allFileList);
const parsed = ts.parseJsonConfigFileContent(json, host, basePath, /*existingOptions*/ undefined, configFileName);
assert.isTrue(parsed.errors.length >= 0);
assert.isTrue(parsed.errors.filter(e => e.code === expectedDiagnosticCode).length > 0, `Expected error code ${expectedDiagnosticCode} to be in ${JSON.stringify(parsed.errors)}`);
function assertParseFileDiagnostics(jsonText: string, configFileName: string, basePath: string, allFileList: string[], expectedDiagnosticCode: number, noLocation?: boolean) {
{
const parsed = getParsedCommandJson(jsonText, configFileName, basePath, allFileList);
assert.isTrue(parsed.errors.length >= 0);
assert.isTrue(parsed.errors.filter(e => e.code === expectedDiagnosticCode).length > 0, `Expected error code ${expectedDiagnosticCode} to be in ${JSON.stringify(parsed.errors)}`);
}
{
const parsed = getParsedCommandJsonNode(jsonText, configFileName, basePath, allFileList);
assert.isTrue(parsed.errors.length >= 0);
assert.isTrue(parsed.errors.filter(e => e.code === expectedDiagnosticCode).length > 0, `Expected error code ${expectedDiagnosticCode} to be in ${JSON.stringify(parsed.errors)}`);
if (!noLocation) {
assert.isTrue(parsed.errors.filter(e => e.code === expectedDiagnosticCode && e.file && e.start && e.length).length > 0, `Expected error code ${expectedDiagnosticCode} to be in ${JSON.stringify(parsed.errors)} with location information`);
}
}
}
it("returns empty config for file with only whitespaces", () => {
@ -199,7 +231,9 @@ namespace ts {
}
"files": ["file1.ts"]
}`;
const { configJsonObject, diagnostics } = sanitizeConfigFile("config.json", content);
const result = parseJsonText("config.json", content);
const diagnostics = result.parseDiagnostics;
const configJsonObject = convertToObject(result, diagnostics);
const expectedResult = {
compilerOptions: {
allowJs: true,
@ -229,7 +263,8 @@ namespace ts {
"/apath/tsconfig.json",
"tests/cases/unittests",
["/apath/a.js"],
Diagnostics.No_inputs_were_found_in_config_file_0_Specified_include_paths_were_1_and_exclude_paths_were_2.code);
Diagnostics.No_inputs_were_found_in_config_file_0_Specified_include_paths_were_1_and_exclude_paths_were_2.code,
/*noLocation*/ true);
});
it("generates errors for empty directory", () => {
@ -242,7 +277,8 @@ namespace ts {
"/apath/tsconfig.json",
"tests/cases/unittests",
[],
Diagnostics.No_inputs_were_found_in_config_file_0_Specified_include_paths_were_1_and_exclude_paths_were_2.code);
Diagnostics.No_inputs_were_found_in_config_file_0_Specified_include_paths_were_1_and_exclude_paths_were_2.code,
/*noLocation*/ true);
});
it("generates errors for empty include", () => {
@ -253,7 +289,8 @@ namespace ts {
"/apath/tsconfig.json",
"tests/cases/unittests",
["/apath/a.ts"],
Diagnostics.No_inputs_were_found_in_config_file_0_Specified_include_paths_were_1_and_exclude_paths_were_2.code);
Diagnostics.No_inputs_were_found_in_config_file_0_Specified_include_paths_were_1_and_exclude_paths_were_2.code,
/*noLocation*/ true);
});
it("generates errors for includes with outDir", () => {
@ -267,7 +304,8 @@ namespace ts {
"/apath/tsconfig.json",
"tests/cases/unittests",
["/apath/a.ts"],
Diagnostics.No_inputs_were_found_in_config_file_0_Specified_include_paths_were_1_and_exclude_paths_were_2.code);
Diagnostics.No_inputs_were_found_in_config_file_0_Specified_include_paths_were_1_and_exclude_paths_were_2.code,
/*noLocation*/ true);
});
});
}


@ -13,7 +13,8 @@ namespace ts.projectSystem {
express: "express",
jquery: "jquery",
lodash: "lodash",
moment: "moment"
moment: "moment",
chroma: "chroma-js"
})
};
@ -61,7 +62,6 @@ namespace ts.projectSystem {
super(installTypingHost, globalTypingsCacheLocation, safeList.path, throttleLimit, log);
}
safeFileList = safeList.path;
protected postExecActions: PostExecAction[] = [];
executePendingCommands() {
@ -472,7 +472,7 @@ namespace ts.projectSystem {
}
createHash(s: string): string {
return s;
return Harness.LanguageService.mockHash(s);
}
triggerDirectoryWatcherCallback(directoryName: string, fileName: string): void {
@ -735,7 +735,7 @@ namespace ts.projectSystem {
checkNumberOfConfiguredProjects(projectService, 1);
const project = projectService.configuredProjects[0];
checkProjectActualFiles(project, [file1.path, libFile.path, file2.path]);
checkProjectActualFiles(project, [file1.path, libFile.path, file2.path, configFile.path]);
checkProjectRootFiles(project, [file1.path, file2.path]);
// watching all files except one that was open
checkWatchedFiles(host, [configFile.path, file2.path, libFile.path]);
@ -992,7 +992,7 @@ namespace ts.projectSystem {
checkNumberOfConfiguredProjects(projectService, 1);
const project = projectService.configuredProjects[0];
checkProjectActualFiles(project, [file1.path, nodeModuleFile.path]);
checkProjectActualFiles(project, [file1.path, nodeModuleFile.path, configFile.path]);
checkNumberOfInferredProjects(projectService, 1);
configFile.content = `{
@ -1003,7 +1003,7 @@ namespace ts.projectSystem {
}`;
host.reloadFS(files);
host.triggerFileWatcherCallback(configFile.path);
checkProjectActualFiles(project, [file1.path, classicModuleFile.path]);
checkProjectActualFiles(project, [file1.path, classicModuleFile.path, configFile.path]);
checkNumberOfInferredProjects(projectService, 1);
});
@ -1566,7 +1566,7 @@ namespace ts.projectSystem {
host.reloadFS([file1, file2, file3, configFile]);
host.triggerDirectoryWatcherCallback(getDirectoryPath(configFile.path), configFile.path);
checkNumberOfProjects(projectService, { configuredProjects: 1 });
checkProjectActualFiles(projectService.configuredProjects[0], [file1.path, file2.path, file3.path]);
checkProjectActualFiles(projectService.configuredProjects[0], [file1.path, file2.path, file3.path, configFile.path]);
});
it("correctly migrate files between projects", () => {
@ -1624,7 +1624,7 @@ namespace ts.projectSystem {
projectService.openClientFile(file1.path);
checkNumberOfProjects(projectService, { configuredProjects: 1 });
checkProjectActualFiles(projectService.configuredProjects[0], [file1.path]);
checkProjectActualFiles(projectService.configuredProjects[0], [file1.path, configFile.path]);
host.reloadFS([file1, file2, configFile]);
@ -1655,7 +1655,7 @@ namespace ts.projectSystem {
projectService.openClientFile(file1.path);
checkNumberOfProjects(projectService, { configuredProjects: 1 });
checkProjectActualFiles(projectService.configuredProjects[0], [file1.path]);
checkProjectActualFiles(projectService.configuredProjects[0], [file1.path, configFile.path]);
const modifiedConfigFile = {
path: configFile.path,
@ -1688,7 +1688,7 @@ namespace ts.projectSystem {
projectService.openClientFile(file1.path);
checkNumberOfProjects(projectService, { configuredProjects: 1 });
checkProjectActualFiles(projectService.configuredProjects[0], [file1.path, file2.path]);
checkProjectActualFiles(projectService.configuredProjects[0], [file1.path, file2.path, configFile.path]);
const modifiedConfigFile = {
path: configFile.path,
@ -1769,11 +1769,11 @@ namespace ts.projectSystem {
projectService.openClientFile(file1.path);
checkNumberOfProjects(projectService, { configuredProjects: 1 });
checkProjectActualFiles(projectService.configuredProjects[0], [file1.path, file2.path]);
checkProjectActualFiles(projectService.configuredProjects[0], [file1.path, file2.path, config.path]);
projectService.openClientFile(file2.path);
checkNumberOfProjects(projectService, { configuredProjects: 1 });
checkProjectActualFiles(projectService.configuredProjects[0], [file1.path, file2.path]);
checkProjectActualFiles(projectService.configuredProjects[0], [file1.path, file2.path, config.path]);
host.reloadFS([file1, file2]);
host.triggerFileWatcherCallback(config.path, /*removed*/ true);
@ -1808,13 +1808,13 @@ namespace ts.projectSystem {
});
projectService.openClientFile(f1.path);
projectService.checkNumberOfProjects({ configuredProjects: 1 });
checkProjectActualFiles(projectService.configuredProjects[0], [f1.path]);
checkProjectActualFiles(projectService.configuredProjects[0], [f1.path, config.path]);
projectService.closeClientFile(f1.path);
projectService.openClientFile(f2.path);
projectService.checkNumberOfProjects({ configuredProjects: 1, inferredProjects: 1 });
checkProjectActualFiles(projectService.configuredProjects[0], [f1.path]);
checkProjectActualFiles(projectService.configuredProjects[0], [f1.path, config.path]);
checkProjectActualFiles(projectService.inferredProjects[0], [f2.path]);
});
@ -1838,7 +1838,7 @@ namespace ts.projectSystem {
// HTML file will not be included in any projects yet
checkNumberOfProjects(projectService, { configuredProjects: 1 });
checkProjectActualFiles(projectService.configuredProjects[0], [file1.path]);
checkProjectActualFiles(projectService.configuredProjects[0], [file1.path, config.path]);
// Specify .html extension as mixed content
const extraFileExtensions = [{ extension: ".html", scriptKind: ScriptKind.JS, isMixedContent: true }];
@ -1847,7 +1847,7 @@ namespace ts.projectSystem {
// HTML file still not included in the project as it is closed
checkNumberOfProjects(projectService, { configuredProjects: 1 });
checkProjectActualFiles(projectService.configuredProjects[0], [file1.path]);
checkProjectActualFiles(projectService.configuredProjects[0], [file1.path, config.path]);
// Open HTML file
projectService.applyChangesInOpenFiles(
@ -1857,7 +1857,7 @@ namespace ts.projectSystem {
// Now HTML file is included in the project
checkNumberOfProjects(projectService, { configuredProjects: 1 });
checkProjectActualFiles(projectService.configuredProjects[0], [file1.path, file2.path]);
checkProjectActualFiles(projectService.configuredProjects[0], [file1.path, file2.path, config.path]);
// Check identifiers defined in HTML content are available in .ts file
const project = projectService.configuredProjects[0];
@ -1872,7 +1872,7 @@ namespace ts.projectSystem {
// HTML file is still included in project
checkNumberOfProjects(projectService, { configuredProjects: 1 });
checkProjectActualFiles(projectService.configuredProjects[0], [file1.path, file2.path]);
checkProjectActualFiles(projectService.configuredProjects[0], [file1.path, file2.path, config.path]);
// Check identifiers defined in HTML content are not available in .ts file
completions = project.getLanguageService().getCompletionsAtPosition(file1.path, 5);
@ -2204,7 +2204,8 @@ namespace ts.projectSystem {
projectService.closeClientFile(f1.path);
projectService.checkNumberOfProjects({});
for (const f of [f2, f3]) {
for (const f of [f1, f2, f3]) {
// There shouldn't be any script info as we closed the file that resulted in its creation
const scriptInfo = projectService.getScriptInfoForNormalizedPath(server.toNormalizedPath(f.path));
assert.equal(scriptInfo.containingProjects.length, 0, `expect 0 containing projects for '${f.path}'`);
}
@ -2487,7 +2488,7 @@ namespace ts.projectSystem {
options: {}
});
projectService.checkNumberOfProjects({ configuredProjects: 1 });
checkProjectActualFiles(projectService.configuredProjects[0], [f1.path]);
checkProjectActualFiles(projectService.configuredProjects[0], [f1.path, tsconfig.path]);
// rename tsconfig.json back to lib.ts
host.reloadFS([f1, f2]);
@ -2545,8 +2546,8 @@ namespace ts.projectSystem {
options: {}
});
projectService.checkNumberOfProjects({ configuredProjects: 2 });
checkProjectActualFiles(projectService.configuredProjects[0], [cLib.path]);
checkProjectActualFiles(projectService.configuredProjects[1], [dLib.path]);
checkProjectActualFiles(projectService.configuredProjects[0], [cLib.path, cTsconfig.path]);
checkProjectActualFiles(projectService.configuredProjects[1], [dLib.path, dTsconfig.path]);
// remove one config file
projectService.openExternalProject({
@ -2556,7 +2557,7 @@ namespace ts.projectSystem {
});
projectService.checkNumberOfProjects({ configuredProjects: 1 });
checkProjectActualFiles(projectService.configuredProjects[0], [dLib.path]);
checkProjectActualFiles(projectService.configuredProjects[0], [dLib.path, dTsconfig.path]);
// remove second config file
projectService.openExternalProject({
@ -2576,8 +2577,8 @@ namespace ts.projectSystem {
options: {}
});
projectService.checkNumberOfProjects({ configuredProjects: 2 });
checkProjectActualFiles(projectService.configuredProjects[0], [cLib.path]);
checkProjectActualFiles(projectService.configuredProjects[1], [dLib.path]);
checkProjectActualFiles(projectService.configuredProjects[0], [cLib.path, cTsconfig.path]);
checkProjectActualFiles(projectService.configuredProjects[1], [dLib.path, dTsconfig.path]);
// close all projects - no projects should be opened
projectService.closeExternalProject(projectName);
@ -2633,13 +2634,13 @@ namespace ts.projectSystem {
projectService.openClientFile(app.path);
projectService.checkNumberOfProjects({ configuredProjects: 1 });
checkProjectActualFiles(projectService.configuredProjects[0], [libES5.path, app.path]);
checkProjectActualFiles(projectService.configuredProjects[0], [libES5.path, app.path, config1.path]);
host.reloadFS([libES5, libES2015Promise, app, config2]);
host.triggerFileWatcherCallback(config1.path);
projectService.checkNumberOfProjects({ configuredProjects: 1 });
checkProjectActualFiles(projectService.configuredProjects[0], [libES5.path, libES2015Promise.path, app.path]);
checkProjectActualFiles(projectService.configuredProjects[0], [libES5.path, libES2015Promise.path, app.path, config2.path]);
});
it("should handle non-existing directories in config file", () => {
@ -2694,7 +2695,7 @@ namespace ts.projectSystem {
projectService.openClientFile(f1.path);
projectService.checkNumberOfProjects({ configuredProjects: 1 });
checkProjectActualFiles(projectService.configuredProjects[0], [f1.path, barTypings.path]);
checkProjectActualFiles(projectService.configuredProjects[0], [f1.path, barTypings.path, config.path]);
});
});
@ -2765,7 +2766,7 @@ namespace ts.projectSystem {
projectService.openClientFile(f1.path);
projectService.checkNumberOfProjects({ configuredProjects: 1 });
checkProjectActualFiles(projectService.configuredProjects[0], [f1.path, t1.path]);
checkProjectActualFiles(projectService.configuredProjects[0], [f1.path, t1.path, tsconfig.path]);
// delete t1
host.reloadFS([f1, tsconfig]);
@ -2774,7 +2775,7 @@ namespace ts.projectSystem {
host.runQueuedTimeoutCallbacks();
projectService.checkNumberOfProjects({ configuredProjects: 1 });
checkProjectActualFiles(projectService.configuredProjects[0], [f1.path]);
checkProjectActualFiles(projectService.configuredProjects[0], [f1.path, tsconfig.path]);
// create t2
host.reloadFS([f1, tsconfig, t2]);
@ -2783,7 +2784,7 @@ namespace ts.projectSystem {
host.runQueuedTimeoutCallbacks();
projectService.checkNumberOfProjects({ configuredProjects: 1 });
checkProjectActualFiles(projectService.configuredProjects[0], [f1.path, t2.path]);
checkProjectActualFiles(projectService.configuredProjects[0], [f1.path, t2.path, tsconfig.path]);
});
});
@ -2968,7 +2969,7 @@ namespace ts.projectSystem {
const projectService = createProjectService(host);
projectService.openClientFile(f1.path);
projectService.checkNumberOfProjects({ configuredProjects: 1 });
checkProjectActualFiles(projectService.configuredProjects[0], [f1.path, node.path]);
checkProjectActualFiles(projectService.configuredProjects[0], [f1.path, node.path, config.path]);
});
});
@ -3979,4 +3980,69 @@ namespace ts.projectSystem {
assert.isUndefined(project.getCompilerOptions().maxNodeModuleJsDepth);
});
});
describe("Options Diagnostic locations reported correctly with changes in configFile contents", () => {
it("when options change", () => {
const file = {
path: "/a/b/app.ts",
content: "let x = 10"
};
const configFileContentBeforeComment = `{`;
const configFileContentComment = `
// comment`;
const configFileContentAfterComment = `
"compilerOptions": {
"allowJs": true,
"declaration": true
}
}`;
const configFileContentWithComment = configFileContentBeforeComment + configFileContentComment + configFileContentAfterComment;
const configFileContentWithoutCommentLine = configFileContentBeforeComment + configFileContentAfterComment;
const configFile = {
path: "/a/b/tsconfig.json",
content: configFileContentWithComment
};
const host = createServerHost([file, libFile, configFile]);
const session = createSession(host);
openFilesForSession([file], session);
const projectService = session.getProjectService();
checkNumberOfProjects(projectService, { configuredProjects: 1 });
const projectName = projectService.configuredProjects[0].getProjectName();
const diags = session.executeCommand(<server.protocol.SemanticDiagnosticsSyncRequest>{
type: "request",
command: server.CommandNames.SemanticDiagnosticsSync,
seq: 2,
arguments: { file: configFile.path, projectFileName: projectName, includeLinePosition: true }
}).response;
assert.isTrue(diags.length === 2);
configFile.content = configFileContentWithoutCommentLine;
host.reloadFS([file, configFile]);
host.triggerFileWatcherCallback(configFile.path);
const diagsAfterEdit = session.executeCommand(<server.protocol.SemanticDiagnosticsSyncRequest>{
type: "request",
command: server.CommandNames.SemanticDiagnosticsSync,
seq: 2,
arguments: { file: configFile.path, projectFileName: projectName, includeLinePosition: true }
}).response;
assert.isTrue(diagsAfterEdit.length === 2);
verifyDiagnostic(diags[0], diagsAfterEdit[0]);
verifyDiagnostic(diags[1], diagsAfterEdit[1]);
function verifyDiagnostic(beforeEditDiag: server.protocol.DiagnosticWithLinePosition, afterEditDiag: server.protocol.DiagnosticWithLinePosition) {
assert.equal(beforeEditDiag.message, afterEditDiag.message);
assert.equal(beforeEditDiag.code, afterEditDiag.code);
assert.equal(beforeEditDiag.category, afterEditDiag.category);
assert.equal(beforeEditDiag.startLocation.line, afterEditDiag.startLocation.line + 1);
assert.equal(beforeEditDiag.startLocation.offset, afterEditDiag.startLocation.offset);
assert.equal(beforeEditDiag.endLocation.line, afterEditDiag.endLocation.line + 1);
assert.equal(beforeEditDiag.endLocation.offset, afterEditDiag.endLocation.offset);
}
});
});
}


@ -80,7 +80,7 @@ namespace ts.projectSystem {
const service = createProjectService(host, { typingsInstaller: installer });
service.openClientFile(f1.path);
service.checkNumberOfProjects({ configuredProjects: 1 });
checkProjectActualFiles(service.configuredProjects[0], [f1.path, f2.path]);
checkProjectActualFiles(service.configuredProjects[0], [f1.path, f2.path, config.path]);
installer.installAll(0);
});
});
@ -133,12 +133,12 @@ namespace ts.projectSystem {
checkNumberOfProjects(projectService, { configuredProjects: 1 });
const p = projectService.configuredProjects[0];
checkProjectActualFiles(p, [file1.path]);
checkProjectActualFiles(p, [file1.path, tsconfig.path]);
installer.installAll(/*expectedCount*/ 1);
checkNumberOfProjects(projectService, { configuredProjects: 1 });
checkProjectActualFiles(p, [file1.path, jquery.path]);
checkProjectActualFiles(p, [file1.path, jquery.path, tsconfig.path]);
});
it("inferred project (typings installed)", () => {
@ -684,12 +684,12 @@ namespace ts.projectSystem {
checkNumberOfProjects(projectService, { configuredProjects: 1 });
const p = projectService.configuredProjects[0];
checkProjectActualFiles(p, [app.path]);
checkProjectActualFiles(p, [app.path, jsconfig.path]);
installer.installAll(/*expectedCount*/ 1);
checkNumberOfProjects(projectService, { configuredProjects: 1 });
checkProjectActualFiles(p, [app.path, jqueryDTS.path]);
checkProjectActualFiles(p, [app.path, jqueryDTS.path, jsconfig.path]);
});
it("configured projects discover from bower_components", () => {
@ -730,13 +730,13 @@ namespace ts.projectSystem {
checkNumberOfProjects(projectService, { configuredProjects: 1 });
const p = projectService.configuredProjects[0];
checkProjectActualFiles(p, [app.path]);
checkProjectActualFiles(p, [app.path, jsconfig.path]);
checkWatchedFiles(host, [jsconfig.path, "/bower_components", "/node_modules"]);
installer.installAll(/*expectedCount*/ 1);
checkNumberOfProjects(projectService, { configuredProjects: 1 });
checkProjectActualFiles(p, [app.path, jqueryDTS.path]);
checkProjectActualFiles(p, [app.path, jqueryDTS.path, jsconfig.path]);
});
it("configured projects discover from bower.json", () => {
@ -777,12 +777,12 @@ namespace ts.projectSystem {
checkNumberOfProjects(projectService, { configuredProjects: 1 });
const p = projectService.configuredProjects[0];
checkProjectActualFiles(p, [app.path]);
checkProjectActualFiles(p, [app.path, jsconfig.path]);
installer.installAll(/*expectedCount*/ 1);
checkNumberOfProjects(projectService, { configuredProjects: 1 });
checkProjectActualFiles(p, [app.path, jqueryDTS.path]);
checkProjectActualFiles(p, [app.path, jqueryDTS.path, jsconfig.path]);
});
it("Malformed package.json should be watched", () => {
@ -1009,6 +1009,26 @@ namespace ts.projectSystem {
});
describe("discover typings", () => {
it("should use mappings from safe list", () => {
const app = {
path: "/a/b/app.js",
content: ""
};
const jquery = {
path: "/a/b/jquery.js",
content: ""
};
const chroma = {
path: "/a/b/chroma.min.js",
content: ""
};
const cache = createMap<string>();
const host = createServerHost([app, jquery, chroma]);
const result = JsTyping.discoverTypings(host, [app.path, jquery.path, chroma.path], getDirectoryPath(<Path>app.path), /*safeListPath*/ undefined, cache, { enable: true }, []);
assert.deepEqual(result.newTypingNames, ["jquery", "chroma-js"]);
});
it("should return node for core modules", () => {
const f = {
path: "/a/b/app.js",
@ -1016,6 +1036,7 @@ namespace ts.projectSystem {
};
const host = createServerHost([f]);
const cache = createMap<string>();
for (const name of JsTyping.nodeCoreModuleList) {
const result = JsTyping.discoverTypings(host, [f.path], getDirectoryPath(<Path>f.path), /*safeListPath*/ undefined, cache, { enable: true }, [name, "somename"]);
assert.deepEqual(result.newTypingNames.sort(), ["node", "somename"]);
@ -1040,7 +1061,7 @@ namespace ts.projectSystem {
});
describe("telemetry events", () => {
it ("should be received", () => {
it("should be received", () => {
const f1 = {
path: "/a/app.js",
content: ""
@ -1089,7 +1110,7 @@ namespace ts.projectSystem {
});
describe("progress notifications", () => {
it ("should be sent for success", () => {
it("should be sent for success", () => {
const f1 = {
path: "/a/app.js",
content: ""
@ -1140,7 +1161,7 @@ namespace ts.projectSystem {
checkProjectActualFiles(projectService.inferredProjects[0], [f1.path, commander.path]);
});
it ("should be sent for error", () => {
it("should be sent for error", () => {
const f1 = {
path: "/a/app.js",
content: ""
@ -1185,4 +1206,4 @@ namespace ts.projectSystem {
checkProjectActualFiles(projectService.inferredProjects[0], [f1.path]);
});
});
}
}


@ -3483,7 +3483,7 @@ interface DragEvent extends MouseEvent {
declare var DragEvent: {
prototype: DragEvent;
new(): DragEvent;
new(type: "drag" | "dragend" | "dragenter" | "dragexit" | "dragleave" | "dragover" | "dragstart" | "drop", dragEventInit?: { dataTransfer?: DataTransfer }): DragEvent;
};
interface DynamicsCompressorNode extends AudioNode {
@ -8224,6 +8224,7 @@ interface Navigator extends Object, NavigatorID, NavigatorOnLine, NavigatorConte
readonly serviceWorker: ServiceWorkerContainer;
readonly webdriver: boolean;
readonly hardwareConcurrency: number;
readonly languages: string[];
getGamepads(): Gamepad[];
javaEnabled(): boolean;
msLaunchUri(uri: string, successCallback?: MSLaunchUriCallback, noHandlerCallback?: MSLaunchUriCallback): void;

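These DOM lib changes add a typed DragEvent constructor overload and a readonly Navigator.languages property; a small illustrative use (the values are assumptions):

const dragStart: DragEvent = new DragEvent("dragstart"); // uses the new typed constructor overload
const preferred: string[] = navigator.languages;         // e.g. ["en-US", "en"]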
src/lib/es5.d.ts

@ -1635,7 +1635,7 @@ interface Int8Array {
* @param thisArg An object to which the this keyword can refer in the callbackfn function.
* If thisArg is omitted, undefined is used as the this value.
*/
map(callbackfn: (this: void, value: number, index: number, array: Int8Array) => number, thisArg: any): Int8Array;
map(callbackfn: (this: void, value: number, index: number, array: Int8Array) => number, thisArg?: any): Int8Array;
/**
* Calls the specified callback function for all the elements in an array. The return value of
@ -1902,7 +1902,7 @@ interface Uint8Array {
* @param thisArg An object to which the this keyword can refer in the callbackfn function.
* If thisArg is omitted, undefined is used as the this value.
*/
map(callbackfn: (this: void, value: number, index: number, array: Uint8Array) => number, thisArg: any): Uint8Array;
map(callbackfn: (this: void, value: number, index: number, array: Uint8Array) => number, thisArg?: any): Uint8Array;
/**
* Calls the specified callback function for all the elements in an array. The return value of
@ -2169,7 +2169,7 @@ interface Uint8ClampedArray {
* @param thisArg An object to which the this keyword can refer in the callbackfn function.
* If thisArg is omitted, undefined is used as the this value.
*/
map(callbackfn: (this: void, value: number, index: number, array: Uint8ClampedArray) => number, thisArg: any): Uint8ClampedArray;
map(callbackfn: (this: void, value: number, index: number, array: Uint8ClampedArray) => number, thisArg?: any): Uint8ClampedArray;
/**
* Calls the specified callback function for all the elements in an array. The return value of
@ -2434,7 +2434,7 @@ interface Int16Array {
* @param thisArg An object to which the this keyword can refer in the callbackfn function.
* If thisArg is omitted, undefined is used as the this value.
*/
map(callbackfn: (this: void, value: number, index: number, array: Int16Array) => number, thisArg: any): Int16Array;
map(callbackfn: (this: void, value: number, index: number, array: Int16Array) => number, thisArg?: any): Int16Array;
/**
* Calls the specified callback function for all the elements in an array. The return value of
@ -2702,7 +2702,7 @@ interface Uint16Array {
* @param thisArg An object to which the this keyword can refer in the callbackfn function.
* If thisArg is omitted, undefined is used as the this value.
*/
map(callbackfn: (this: void, value: number, index: number, array: Uint16Array) => number, thisArg: any): Uint16Array;
map(callbackfn: (this: void, value: number, index: number, array: Uint16Array) => number, thisArg?: any): Uint16Array;
/**
* Calls the specified callback function for all the elements in an array. The return value of
@ -3235,7 +3235,7 @@ interface Uint32Array {
* @param thisArg An object to which the this keyword can refer in the callbackfn function.
* If thisArg is omitted, undefined is used as the this value.
*/
map(callbackfn: (this: void, value: number, index: number, array: Uint32Array) => number, thisArg: any): Uint32Array;
map(callbackfn: (this: void, value: number, index: number, array: Uint32Array) => number, thisArg?: any): Uint32Array;
/**
* Calls the specified callback function for all the elements in an array. The return value of
@ -3502,7 +3502,7 @@ interface Float32Array {
* @param thisArg An object to which the this keyword can refer in the callbackfn function.
* If thisArg is omitted, undefined is used as the this value.
*/
map(callbackfn: (this: void, value: number, index: number, array: Float32Array) => number, thisArg: any): Float32Array;
map(callbackfn: (this: void, value: number, index: number, array: Float32Array) => number, thisArg?: any): Float32Array;
/**
* Calls the specified callback function for all the elements in an array. The return value of

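Making thisArg optional on the typed-array map signatures lets a bare callback type-check without a dummy second argument; a trivial illustration:

const doubled = new Int8Array([1, 2, 3]).map(x => x * 2); // thisArg no longer required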

@ -719,20 +719,49 @@ namespace ts.server {
return response.body;
}
getRefactorCodeActions(
getEditsForRefactor(
fileName: string,
_formatOptions: FormatCodeSettings,
positionOrRange: number | TextRange,
refactorName: string) {
refactorName: string,
actionName: string): RefactorEditInfo {
const args = this.createFileLocationOrRangeRequestArgs(positionOrRange, fileName) as protocol.GetRefactorCodeActionsRequestArgs;
args.refactorName = refactorName;
const args = this.createFileLocationOrRangeRequestArgs(positionOrRange, fileName) as protocol.GetEditsForRefactorRequestArgs;
args.refactor = refactorName;
args.action = actionName;
const request = this.processRequest<protocol.GetRefactorCodeActionsRequest>(CommandNames.GetRefactorCodeActions, args);
const response = this.processResponse<protocol.GetRefactorCodeActionsResponse>(request);
const codeActions = response.body.actions;
const request = this.processRequest<protocol.GetEditsForRefactorRequest>(CommandNames.GetEditsForRefactor, args);
const response = this.processResponse<protocol.GetEditsForRefactorResponse>(request);
return map(codeActions, codeAction => this.convertCodeActions(codeAction, fileName));
if (!response.body) {
return {
edits: []
};
}
const edits: FileTextChanges[] = this.convertCodeEditsToTextChanges(response.body.edits);
const renameFilename: string | undefined = response.body.renameFilename;
let renameLocation: number | undefined = undefined;
if (renameFilename !== undefined) {
renameLocation = this.lineOffsetToPosition(renameFilename, response.body.renameLocation);
}
return {
edits,
renameFilename,
renameLocation
};
}
private convertCodeEditsToTextChanges(edits: ts.server.protocol.FileCodeEdits[]): FileTextChanges[] {
return edits.map(edit => {
const fileName = edit.fileName;
return {
fileName,
textChanges: edit.textChanges.map(t => this.convertTextChangeToCodeEdit(t, fileName))
};
});
}
convertCodeActions(entry: protocol.CodeAction, fileName: string): CodeAction {


@ -37,6 +37,8 @@ namespace ts.server {
}
export interface ProjectInfoTelemetryEventData {
/** Cryptographically secure hash of project file location. */
readonly projectId: string;
/** Count of file extensions seen in the project. */
readonly fileStats: FileStats;
/**
@ -563,10 +565,17 @@ namespace ts.server {
}
else {
if (info && (!info.isScriptOpen())) {
// file has been changed which might affect the set of referenced files in projects that include
// this file and set of inferred projects
info.reloadFromFile();
this.updateProjectGraphs(info.containingProjects);
if (info.containingProjects.length === 0) {
// Orphan script info, remove it as we can always reload it on next open
info.stopWatcher();
this.filenameToScriptInfo.remove(info.path);
}
else {
// file has been changed which might affect the set of referenced files in projects that include
// this file and set of inferred projects
info.reloadFromFile();
this.updateProjectGraphs(info.containingProjects);
}
}
}
}
@ -827,10 +836,29 @@ namespace ts.server {
this.assignScriptInfoToInferredProjectIfNecessary(f, /*addToListOfOpenFiles*/ false);
}
}
// Cleanup of script infos that aren't part of any project is postponed to the
// next file open, so that if a file from the same project is opened we won't end up recreating the same script infos
}
if (info.containingProjects.length === 0) {
// if there are no projects that include this script info - delete it
this.filenameToScriptInfo.remove(info.path);
// If the current info is just being closed - add a file watcher to track changes
// But if the file was deleted, handle that instead
if (this.host.fileExists(info.fileName)) {
this.watchClosedScriptInfo(info);
}
else {
this.handleDeletedFile(info);
}
}
private deleteOrphanScriptInfoNotInAnyProject() {
for (const path of this.filenameToScriptInfo.getKeys()) {
const info = this.filenameToScriptInfo.get(path);
if (!info.isScriptOpen() && info.containingProjects.length === 0) {
// if there are no projects that include this script info - delete it
info.stopWatcher();
this.filenameToScriptInfo.remove(info.path);
}
}
}
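A hedged sketch of the intended script-info lifecycle (the file names are made up):

// 1. openClientFile("/a/app.ts")   -> configured project created, ScriptInfo for "/a/app.ts"
// 2. closeClientFile("/a/app.ts")  -> the project may be removed; the orphaned ScriptInfo is kept
//                                     (and watched via watchClosedScriptInfo) so it can be reused
// 3. openClientFile("/a/other.ts") -> deleteOrphanScriptInfoNotInAnyProject() removes any ScriptInfo
//                                     that is still closed and not contained in any project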
@ -948,20 +976,14 @@ namespace ts.server {
configFilename = normalizePath(configFilename);
const configFileContent = this.host.readFile(configFilename);
let errors: Diagnostic[];
const result = parseConfigFileTextToJson(configFilename, configFileContent);
let config = result.config;
if (result.error) {
// try to reparse config file
const { configJsonObject: sanitizedConfig, diagnostics } = sanitizeConfigFile(configFilename, configFileContent);
config = sanitizedConfig;
errors = diagnostics.length ? diagnostics : [result.error];
const result = parseJsonText(configFilename, configFileContent);
if (!result.endOfFileToken) {
result.endOfFileToken = <EndOfFileToken>{ kind: SyntaxKind.EndOfFileToken };
}
const parsedCommandLine = parseJsonConfigFileContent(
config,
const errors = result.parseDiagnostics;
const parsedCommandLine = parseJsonSourceFileConfigFileContent(
result,
this.host,
getDirectoryPath(configFilename),
/*existingOptions*/ {},
@ -970,23 +992,23 @@ namespace ts.server {
this.hostConfiguration.extraFileExtensions);
if (parsedCommandLine.errors.length) {
errors = concatenate(errors, parsedCommandLine.errors);
errors.push(...parsedCommandLine.errors);
}
Debug.assert(!!parsedCommandLine.fileNames);
if (parsedCommandLine.fileNames.length === 0) {
(errors || (errors = [])).push(createCompilerDiagnostic(Diagnostics.The_config_file_0_found_doesn_t_contain_any_source_files, configFilename));
errors.push(createCompilerDiagnostic(Diagnostics.The_config_file_0_found_doesn_t_contain_any_source_files, configFilename));
return { success: false, configFileErrors: errors };
}
const projectOptions: ProjectOptions = {
files: parsedCommandLine.fileNames,
compilerOptions: parsedCommandLine.options,
configHasExtendsProperty: config.extends !== undefined,
configHasFilesProperty: config.files !== undefined,
configHasIncludeProperty: config.include !== undefined,
configHasExcludeProperty: config.exclude !== undefined,
configHasExtendsProperty: parsedCommandLine.raw["extends"] !== undefined,
configHasFilesProperty: parsedCommandLine.raw["files"] !== undefined,
configHasIncludeProperty: parsedCommandLine.raw["include"] !== undefined,
configHasExcludeProperty: parsedCommandLine.raw["exclude"] !== undefined,
wildcardDirectories: createMapFromTemplate(parsedCommandLine.wildcardDirectories),
typeAcquisition: parsedCommandLine.typeAcquisition,
compileOnSave: parsedCommandLine.compileOnSave
@ -1049,6 +1071,7 @@ namespace ts.server {
if (!this.eventHandler) return;
const data: ProjectInfoTelemetryEventData = {
projectId: this.host.createHash(projectKey),
fileStats: countEachFileTypes(project.getScriptInfos()),
compilerOptions: convertCompilerOptionsForTelemetry(project.getCompilerOptions()),
typeAcquisition: convertTypeAcquisition(project.getTypeAcquisition()),
@ -1154,7 +1177,7 @@ namespace ts.server {
return {
success: conversionResult.success,
project,
errors: project.getProjectErrors()
errors: project.getGlobalProjectErrors()
};
}
@ -1300,6 +1323,14 @@ namespace ts.server {
return this.getScriptInfoForNormalizedPath(toNormalizedPath(uncheckedFileName));
}
watchClosedScriptInfo(info: ScriptInfo) {
// do not watch files with mixed content - server doesn't know how to interpret it
if (!info.hasMixedContent) {
const { fileName } = info;
info.setWatcher(this.host.watchFile(fileName, _ => this.onSourceFileChanged(fileName)));
}
}
getOrCreateScriptInfoForNormalizedPath(fileName: NormalizedPath, openedByClient: boolean, fileContent?: string, scriptKind?: ScriptKind, hasMixedContent?: boolean) {
let info = this.getScriptInfoForNormalizedPath(fileName);
if (!info) {
@ -1315,15 +1346,13 @@ namespace ts.server {
}
}
else {
// do not watch files with mixed content - server doesn't know how to interpret it
if (!hasMixedContent) {
info.setWatcher(this.host.watchFile(fileName, _ => this.onSourceFileChanged(fileName)));
}
this.watchClosedScriptInfo(info);
}
}
}
if (info) {
if (openedByClient && !info.isScriptOpen()) {
info.stopWatcher();
info.open(fileContent);
if (hasMixedContent) {
info.registerFileUpdate();
@ -1418,6 +1447,7 @@ namespace ts.server {
for (const p of this.inferredProjects) {
p.updateGraph();
}
this.printProjects();
}
@ -1451,6 +1481,11 @@ namespace ts.server {
// at this point if file is the part of some configured/external project then this project should be created
const info = this.getOrCreateScriptInfoForNormalizedPath(fileName, /*openedByClient*/ true, fileContent, scriptKind, hasMixedContent);
this.assignScriptInfoToInferredProjectIfNecessary(info, /*addToListOfOpenFiles*/ true);
// Delete the orphan files here because there might be orphan script infos (which are not part of any project)
// left over when some file(s) were closed and that resulted in project removal.
// Cleaning up those script infos was postponed so that they could be reused if
// a file from that old project is reopened as a result of opening this file.
this.deleteOrphanScriptInfoNotInAnyProject();
this.printProjects();
return { configFileName, configFileErrors };
}


@ -11,11 +11,11 @@ namespace ts.server {
private filesWithChangedSetOfUnresolvedImports: Path[];
private readonly resolveModuleName: typeof resolveModuleName;
private resolveModuleName: typeof resolveModuleName;
readonly trace: (s: string) => void;
readonly realpath?: (path: string) => string;
constructor(private readonly host: ServerHost, private readonly project: Project, private readonly cancellationToken: HostCancellationToken) {
constructor(private readonly host: ServerHost, private project: Project, private readonly cancellationToken: HostCancellationToken) {
this.cancellationToken = new ThrottledCancellationToken(cancellationToken, project.projectService.throttleWaitMilliseconds);
this.getCanonicalFileName = ts.createGetCanonicalFileName(this.host.useCaseSensitiveFileNames);
@ -47,6 +47,11 @@ namespace ts.server {
}
}
dispose() {
this.project = undefined;
this.resolveModuleName = undefined;
}
public startRecordingFilesWithChangedResolutions() {
this.filesWithChangedSetOfUnresolvedImports = [];
}
@ -238,4 +243,4 @@ namespace ts.server {
this.compilationSettings = opt;
}
}
}
}


@ -25,7 +25,7 @@ namespace ts.server {
result.jsx += 1;
break;
case ScriptKind.TS:
fileExtensionIs(info.fileName, ".d.ts")
fileExtensionIs(info.fileName, Extension.Dts)
? result.dts += 1
: result.ts += 1;
break;
@ -106,6 +106,7 @@ namespace ts.server {
private rootFiles: ScriptInfo[] = [];
private rootFilesMap: FileMap<ScriptInfo> = createFileMap<ScriptInfo>();
private program: ts.Program;
private externalFiles: SortedReadonlyArray<string>;
private cachedUnresolvedImportsPerFile = new UnresolvedImportsMap();
private lastCachedUnresolvedImportsList: SortedReadonlyArray<string>;
@ -115,7 +116,7 @@ namespace ts.server {
public languageServiceEnabled = true;
protected readonly lsHost: LSHost;
protected lsHost: LSHost;
builder: Builder;
/**
@ -215,7 +216,14 @@ namespace ts.server {
}
}
getProjectErrors() {
/**
* Get the errors that don't have any file name associated with them
*/
getGlobalProjectErrors() {
return filter(this.projectErrors, diagnostic => !diagnostic.file);
}
getAllProjectErrors() {
return this.projectErrors;
}
@ -261,8 +269,8 @@ namespace ts.server {
abstract getProjectRootPath(): string | undefined;
abstract getTypeAcquisition(): TypeAcquisition;
getExternalFiles(): string[] {
return [];
getExternalFiles(): SortedReadonlyArray<string> {
return emptyArray as SortedReadonlyArray<string>;
}
getSourceFile(path: Path) {
@ -296,9 +304,15 @@ namespace ts.server {
this.rootFiles = undefined;
this.rootFilesMap = undefined;
this.program = undefined;
this.builder = undefined;
this.cachedUnresolvedImportsPerFile = undefined;
this.projectErrors = undefined;
this.lsHost.dispose();
this.lsHost = undefined;
// signal language service to release source files acquired from document registry
this.languageService.dispose();
this.languageService = undefined;
}
getCompilerOptions() {
@ -356,7 +370,7 @@ namespace ts.server {
return this.getLanguageService().getEmitOutput(info.fileName, emitOnlyDtsFiles);
}
getFileNames(excludeFilesFromExternalLibraries?: boolean) {
getFileNames(excludeFilesFromExternalLibraries?: boolean, excludeConfigFiles?: boolean) {
if (!this.program) {
return [];
}
@ -379,9 +393,39 @@ namespace ts.server {
}
result.push(asNormalizedPath(f.fileName));
}
if (!excludeConfigFiles) {
const configFile = this.program.getCompilerOptions().configFile;
if (configFile) {
result.push(asNormalizedPath(configFile.fileName));
if (configFile.extendedSourceFiles) {
for (const f of configFile.extendedSourceFiles) {
result.push(asNormalizedPath(f));
}
}
}
}
return result;
}
hasConfigFile(configFilePath: NormalizedPath) {
if (this.program && this.languageServiceEnabled) {
const configFile = this.program.getCompilerOptions().configFile;
if (configFile) {
if (configFilePath === asNormalizedPath(configFile.fileName)) {
return true;
}
if (configFile.extendedSourceFiles) {
for (const f of configFile.extendedSourceFiles) {
if (configFilePath === asNormalizedPath(f)) {
return true;
}
}
}
}
}
return false;
}
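A hedged example of what the two methods now report for a project whose tsconfig extends a base config (paths and contents are illustrative):

// /proj/tsconfig.json       -> { "extends": "./tsconfig.base.json", "files": ["app.ts"] }
// /proj/tsconfig.base.json  -> { "compilerOptions": { "strict": true } }
// project.getFileNames() now also lists both config files, e.g.
//   ["/proj/app.ts", "/proj/tsconfig.json", "/proj/tsconfig.base.json"] plus the default libs
// project.hasConfigFile(toNormalizedPath("/proj/tsconfig.base.json"))   // true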
getAllEmittableFiles() {
if (!this.languageServiceEnabled) {
return [];
@ -561,6 +605,24 @@ namespace ts.server {
}
}
}
const oldExternalFiles = this.externalFiles || emptyArray as SortedReadonlyArray<string>;
this.externalFiles = this.getExternalFiles();
enumerateInsertsAndDeletes(this.externalFiles, oldExternalFiles,
// Ensure a ScriptInfo is created for new external files. This is performed indirectly
// by the LSHost for files in the program when the program is retrieved above but
// the program doesn't contain external files so this must be done explicitly.
inserted => {
const scriptInfo = this.projectService.getOrCreateScriptInfo(inserted, /*openedByClient*/ false);
scriptInfo.attachToProject(this);
},
removed => {
const scriptInfoToDetach = this.projectService.getScriptInfo(removed);
if (scriptInfoToDetach) {
scriptInfoToDetach.detachFromProject(this);
}
});
return hasChanges;
}
@ -638,7 +700,7 @@ namespace ts.server {
if (this.lastReportedFileNames && lastKnownVersion === this.lastReportedVersion) {
// if current structure version is the same - return info without any changes
if (this.projectStructureVersion === this.lastReportedVersion && !updatedFileNames) {
return { info, projectErrors: this.projectErrors };
return { info, projectErrors: this.getGlobalProjectErrors() };
}
// compute and return the difference
const lastReportedFileNames = this.lastReportedFileNames;
@ -660,14 +722,14 @@ namespace ts.server {
});
this.lastReportedFileNames = currentFiles;
this.lastReportedVersion = this.projectStructureVersion;
return { info, changes: { added, removed, updated }, projectErrors: this.projectErrors };
return { info, changes: { added, removed, updated }, projectErrors: this.getGlobalProjectErrors() };
}
else {
// unknown version - return everything
const projectFileNames = this.getFileNames();
this.lastReportedFileNames = arrayToMap(projectFileNames, x => x);
this.lastReportedVersion = this.projectStructureVersion;
return { info, files: projectFileNames, projectErrors: this.projectErrors };
return { info, files: projectFileNames, projectErrors: this.getGlobalProjectErrors() };
}
}
@ -757,7 +819,7 @@ namespace ts.server {
setCompilerOptions(options?: CompilerOptions) {
// Avoid manipulating the given options directly
const newOptions = options ? clone(options) : this.getCompilerOptions();
const newOptions = options ? cloneCompilerOptions(options) : this.getCompilerOptions();
if (!newOptions) {
return;
}
@ -956,19 +1018,16 @@ namespace ts.server {
return this.typeAcquisition;
}
getExternalFiles(): string[] {
const items: string[] = [];
for (const plugin of this.plugins) {
if (typeof plugin.getExternalFiles === "function") {
try {
items.push(...plugin.getExternalFiles(this));
}
catch (e) {
this.projectService.logger.info(`A plugin threw an exception in getExternalFiles: ${e}`);
}
getExternalFiles(): SortedReadonlyArray<string> {
return toSortedReadonlyArray(flatMap(this.plugins, plugin => {
if (typeof plugin.getExternalFiles !== "function") return;
try {
return plugin.getExternalFiles(this);
}
}
return items;
catch (e) {
this.projectService.logger.info(`A plugin threw an exception in getExternalFiles: ${e}`);
}
}));
}
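A hedged sketch of a plugin contributing external files (the plugin shape and path are invented for illustration):

const examplePlugin = {
    getExternalFiles: (project: Project): string[] => ["/a/b/generated/types.d.ts"]
};
// Results from all plugins are flattened (flatMap) and sorted into a SortedReadonlyArray<string>;
// a plugin whose getExternalFiles throws is logged and simply contributes nothing.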
watchConfigFile(callback: (project: ConfiguredProject) => void) {
@ -1027,6 +1086,7 @@ namespace ts.server {
if (this.projectFileWatcher) {
this.projectFileWatcher.close();
this.projectFileWatcher = undefined;
}
if (this.typeRootsWatchers) {
@ -1116,4 +1176,4 @@ namespace ts.server {
this.typeAcquisition = newTypeAcquisition;
}
}
}
}


@ -98,8 +98,11 @@ namespace ts.server.protocol {
GetSupportedCodeFixes = "getSupportedCodeFixes",
GetApplicableRefactors = "getApplicableRefactors",
GetRefactorCodeActions = "getRefactorCodeActions",
GetRefactorCodeActionsFull = "getRefactorCodeActions-full",
GetEditsForRefactor = "getEditsForRefactor",
/* @internal */
GetEditsForRefactorFull = "getEditsForRefactor-full",
// NOTE: If updating this, be sure to also update `allCommandNames` in `harness/unittests/session.ts`.
}
/**
@ -401,52 +404,98 @@ namespace ts.server.protocol {
export type FileLocationOrRangeRequestArgs = FileLocationRequestArgs | FileRangeRequestArgs;
/**
* Request refactorings at a given position or selection area.
*/
export interface GetApplicableRefactorsRequest extends Request {
command: CommandTypes.GetApplicableRefactors;
arguments: GetApplicableRefactorsRequestArgs;
}
export type GetApplicableRefactorsRequestArgs = FileLocationOrRangeRequestArgs;
export interface ApplicableRefactorInfo {
name: string;
description: string;
}
/**
* Response is a list of available refactorings.
* Each refactoring exposes one or more "Actions"; a user selects one action to invoke a refactoring
*/
export interface GetApplicableRefactorsResponse extends Response {
body?: ApplicableRefactorInfo[];
}
export interface GetRefactorCodeActionsRequest extends Request {
command: CommandTypes.GetRefactorCodeActions;
arguments: GetRefactorCodeActionsRequestArgs;
/**
* A set of one or more available refactoring actions, grouped under a parent refactoring.
*/
export interface ApplicableRefactorInfo {
/**
* The programmatic name of the refactoring
*/
name: string;
/**
* A description of this refactoring category to show to the user.
* If the refactoring gets inlined (see below), this text will not be visible.
*/
description: string;
/**
* Inlineable refactorings can have their actions hoisted out to the top level
* of a context menu. Non-inlineable refactorings should always be shown inside
* their parent grouping.
*
* If not specified, this value is assumed to be 'true'
*/
inlineable?: boolean;
actions: RefactorActionInfo[];
}
export type GetRefactorCodeActionsRequestArgs = FileLocationOrRangeRequestArgs & {
/* The kind of the applicable refactor */
refactorName: string;
/**
* Represents a single refactoring action - for example, the "Extract Method..." refactor might
* offer several actions, each corresponding to a surrounding class or closure to extract into.
*/
export type RefactorActionInfo = {
/**
* The programmatic name of the refactoring action
*/
name: string;
/**
* A description of this refactoring action to show to the user.
* If the parent refactoring is inlined away, this will be the only text shown,
* so this description should make sense by itself if the parent is inlineable=true
*/
description: string;
};
export type RefactorCodeActions = {
actions: protocol.CodeAction[];
renameLocation?: number
};
/* @internal */
export type RefactorCodeActionsFull = {
actions: ts.CodeAction[];
renameLocation?: number
};
export interface GetRefactorCodeActionsResponse extends Response {
body: RefactorCodeActions;
export interface GetEditsForRefactorRequest extends Request {
command: CommandTypes.GetEditsForRefactor;
arguments: GetEditsForRefactorRequestArgs;
}
/* @internal */
export interface GetRefactorCodeActionsFullResponse extends Response {
body: RefactorCodeActionsFull;
/**
* Request the edits that a particular refactoring action produces.
* Callers must specify the name of the refactor and the name of the action.
*/
export type GetEditsForRefactorRequestArgs = FileLocationOrRangeRequestArgs & {
/* The 'name' property from the refactoring that offered this action */
refactor: string;
/* The 'name' property from the refactoring action */
action: string;
};
export interface GetEditsForRefactorResponse extends Response {
body?: RefactorEditInfo;
}
export type RefactorEditInfo = {
edits: FileCodeEdits[];
/**
* An optional location where the editor should start a rename operation once
* the refactoring edits have been applied
*/
renameLocation?: Location;
renameFilename?: string;
};
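A hedged example of the round trip; the file, range, and edits below are purely illustrative, and the refactor/action names are whatever getApplicableRefactors previously advertised:

// request
//   { "command": "getEditsForRefactor",
//     "arguments": { "file": "/a/app.ts", "startLine": 1, "startOffset": 1, "endLine": 3, "endOffset": 10,
//                    "refactor": "Extract Method", "action": "scope_0" } }
// response body (RefactorEditInfo)
//   { "edits": [ { "fileName": "/a/app.ts",
//                  "textChanges": [ { "start": { "line": 1, "offset": 1 },
//                                     "end": { "line": 3, "offset": 10 },
//                                     "newText": "newFunction();" } ] } ],
//     "renameFilename": "/a/app.ts",
//     "renameLocation": { "line": 5, "offset": 10 } }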
/**
* Request for the available codefixes at a specific position.
*/
@ -1894,6 +1943,13 @@ namespace ts.server.protocol {
source?: string;
}
export interface DiagnosticWithFileName extends Diagnostic {
/**
* Name of the file the diagnostic is in
*/
fileName: string;
}
export interface DiagnosticEventBody {
/**
* The file for which diagnostic information is reported.
@ -1928,7 +1984,7 @@ namespace ts.server.protocol {
/**
* An array of diagnostic information items for the found config file.
*/
diagnostics: Diagnostic[];
diagnostics: DiagnosticWithFileName[];
}
/**


@ -35,6 +35,7 @@ namespace ts.server {
} = require("os");
function getGlobalTypingsCacheLocation() {
const versionMajorMinor = ts.version.match(/\d+\.\d+/)[0];
switch (process.platform) {
case "win32": {
const basePath = process.env.LOCALAPPDATA ||
@ -43,7 +44,7 @@ namespace ts.server {
process.env.USERPROFILE ||
(process.env.HOMEDRIVE && process.env.HOMEPATH && normalizeSlashes(process.env.HOMEDRIVE + process.env.HOMEPATH)) ||
os.tmpdir();
return combinePaths(normalizeSlashes(basePath), "Microsoft/TypeScript");
return combinePaths(combinePaths(normalizeSlashes(basePath), "Microsoft/TypeScript"), versionMajorMinor);
}
case "openbsd":
case "freebsd":
@ -51,7 +52,7 @@ namespace ts.server {
case "linux":
case "android": {
const cacheLocation = getNonWindowsCacheLocation(process.platform === "darwin");
return combinePaths(cacheLocation, "typescript");
return combinePaths(combinePaths(cacheLocation, "typescript"), versionMajorMinor);
}
default:
Debug.fail(`unsupported platform '${process.platform}'`);
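A minimal sketch of how the version-scoped cache path is derived; the non-Windows directories come from getNonWindowsCacheLocation and are assumptions here:

const versionMajorMinor = "2.4.1".match(/\d+\.\d+/)[0]; // "2.4", taken from ts.version
// win32:  <LOCALAPPDATA>/Microsoft/TypeScript/2.4
// darwin: ~/Library/Caches/typescript/2.4
// linux:  ~/.cache/typescript/2.4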
@ -526,7 +527,7 @@ namespace ts.server {
watchedFile.callback(watchedFile.fileName);
}
else if (watchedFile.mtime.getTime() !== stats.mtime.getTime()) {
watchedFile.mtime = getModifiedTime(watchedFile.fileName);
watchedFile.mtime = stats.mtime;
watchedFile.callback(watchedFile.fileName, watchedFile.mtime.getTime() === 0);
}
});


@ -84,14 +84,20 @@ namespace ts.server {
};
}
function formatConfigFileDiag(diag: ts.Diagnostic): protocol.Diagnostic {
return {
start: undefined,
end: undefined,
text: ts.flattenDiagnosticMessageText(diag.messageText, "\n"),
category: DiagnosticCategory[diag.category].toLowerCase(),
source: diag.source
};
function convertToILineInfo(lineAndCharacter: LineAndCharacter): ILineInfo {
return { line: lineAndCharacter.line + 1, offset: lineAndCharacter.character + 1 };
}
function formatConfigFileDiag(diag: ts.Diagnostic, includeFileName: true): protocol.DiagnosticWithFileName;
function formatConfigFileDiag(diag: ts.Diagnostic, includeFileName: false): protocol.Diagnostic;
function formatConfigFileDiag(diag: ts.Diagnostic, includeFileName: boolean): protocol.Diagnostic | protocol.DiagnosticWithFileName {
const start = diag.file && convertToILineInfo(getLineAndCharacterOfPosition(diag.file, diag.start));
const end = diag.file && convertToILineInfo(getLineAndCharacterOfPosition(diag.file, diag.start + diag.length));
const text = ts.flattenDiagnosticMessageText(diag.messageText, "\n");
const { code, source } = diag;
const category = DiagnosticCategory[diag.category].toLowerCase();
return includeFileName ? { start, end, text, code, category, source, fileName: diag.file && diag.file.fileName } :
{ start, end, text, code, category, source };
}
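For reference, protocol locations are 1-based while ts.LineAndCharacter is 0-based, so for example:

// convertToILineInfo({ line: 0, character: 4 })  ->  { line: 1, offset: 5 }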
export interface PendingErrorCheck {
@ -112,7 +118,13 @@ namespace ts.server {
return true;
}
export import CommandNames = protocol.CommandTypes;
// CommandNames used to be exposed before TS 2.4 as a namespace
// In TS 2.4 we switched to an enum; keep this for backward compatibility
// The var assignment ensures that even though CommandTypes is a const enum
// its value is maintained in the output since the file is
// built using --preserveConstEnums.
export type CommandNames = protocol.CommandTypes;
export const CommandNames = (<any>protocol).CommandTypes;
export function formatMessage<T extends protocol.Message>(msg: T, logger: server.Logger, byteLength: (s: string, encoding: string) => number, newLine: string): string {
const verboseLogging = logger.hasLevel(LogLevel.verbose);
@ -378,7 +390,7 @@ namespace ts.server {
}
public configFileDiagnosticEvent(triggerFile: string, configFile: string, diagnostics: ts.Diagnostic[]) {
const bakedDiags = ts.map(diagnostics, formatConfigFileDiag);
const bakedDiags = ts.map(diagnostics, diagnostic => formatConfigFileDiag(diagnostic, /*includeFileName*/ true));
const ev: protocol.ConfigFileDiagnosticEvent = {
seq: 0,
type: "event",
@ -511,9 +523,55 @@ namespace ts.server {
return projectFileName && this.projectService.findProject(projectFileName);
}
private getConfigFileAndProject(args: protocol.FileRequestArgs) {
const project = this.getProject(args.projectFileName);
const file = toNormalizedPath(args.file);
return {
configFile: project && project.hasConfigFile(file) && file,
project
};
}
private getConfigFileDiagnostics(configFile: NormalizedPath, project: Project, includeLinePosition: boolean) {
const projectErrors = project.getAllProjectErrors();
const optionsErrors = project.getLanguageService().getCompilerOptionsDiagnostics();
const diagnosticsForConfigFile = filter(
concatenate(projectErrors, optionsErrors),
diagnostic => diagnostic.file && diagnostic.file.fileName === configFile
);
return includeLinePosition ?
this.convertToDiagnosticsWithLinePositionFromDiagnosticFile(diagnosticsForConfigFile) :
map(
diagnosticsForConfigFile,
diagnostic => formatConfigFileDiag(diagnostic, /*includeFileName*/ false)
);
}
private convertToDiagnosticsWithLinePositionFromDiagnosticFile(diagnostics: Diagnostic[]) {
return diagnostics.map(d => <protocol.DiagnosticWithLinePosition>{
message: flattenDiagnosticMessageText(d.messageText, this.host.newLine),
start: d.start,
length: d.length,
category: DiagnosticCategory[d.category].toLowerCase(),
code: d.code,
startLocation: d.file && convertToILineInfo(getLineAndCharacterOfPosition(d.file, d.start)),
endLocation: d.file && convertToILineInfo(getLineAndCharacterOfPosition(d.file, d.start + d.length))
});
}
private getCompilerOptionsDiagnostics(args: protocol.CompilerOptionsDiagnosticsRequestArgs) {
const project = this.getProject(args.projectFileName);
return this.convertToDiagnosticsWithLinePosition(project.getLanguageService().getCompilerOptionsDiagnostics(), /*scriptInfo*/ undefined);
// Get diagnostics that don't have an associated file with them
// The diagnostics which have a file would be in the config file and
// would be reported as part of configFileDiagnostics
return this.convertToDiagnosticsWithLinePosition(
filter(
project.getLanguageService().getCompilerOptionsDiagnostics(),
diagnostic => !diagnostic.file
),
/*scriptInfo*/ undefined
);
}
private convertToDiagnosticsWithLinePosition(diagnostics: Diagnostic[], scriptInfo: ScriptInfo) {
@ -639,10 +697,20 @@ namespace ts.server {
}
private getSyntacticDiagnosticsSync(args: protocol.SyntacticDiagnosticsSyncRequestArgs): protocol.Diagnostic[] | protocol.DiagnosticWithLinePosition[] {
const { configFile } = this.getConfigFileAndProject(args);
if (configFile) {
// all the config file errors are reported as part of semantic check so nothing to report here
return [];
}
return this.getDiagnosticsWorker(args, /*isSemantic*/ false, (project, file) => project.getLanguageService().getSyntacticDiagnostics(file), args.includeLinePosition);
}
private getSemanticDiagnosticsSync(args: protocol.SemanticDiagnosticsSyncRequestArgs): protocol.Diagnostic[] | protocol.DiagnosticWithLinePosition[] {
const { configFile, project } = this.getConfigFileAndProject(args);
if (configFile) {
return this.getConfigFileDiagnostics(configFile, project, args.includeLinePosition);
}
return this.getDiagnosticsWorker(args, /*isSemantic*/ true, (project, file) => project.getLanguageService().getSemanticDiagnostics(file), args.includeLinePosition);
}
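A hedged example of the effect: a semanticDiagnosticsSync request whose file is the tsconfig itself now yields the config-file diagnostics (paths illustrative):

// { "command": "semanticDiagnosticsSync",
//   "arguments": { "file": "/a/b/tsconfig.json", "projectFileName": "/a/b/tsconfig.json",
//                  "includeLinePosition": true } }
// -> config-file diagnostics (with locations); the syntacticDiagnosticsSync counterpart returns [].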
@ -686,15 +754,15 @@ namespace ts.server {
}
private getProjectInfo(args: protocol.ProjectInfoRequestArgs): protocol.ProjectInfo {
return this.getProjectInfoWorker(args.file, args.projectFileName, args.needFileNameList);
return this.getProjectInfoWorker(args.file, args.projectFileName, args.needFileNameList, /*excludeConfigFiles*/ false);
}
private getProjectInfoWorker(uncheckedFileName: string, projectFileName: string, needFileNameList: boolean) {
private getProjectInfoWorker(uncheckedFileName: string, projectFileName: string, needFileNameList: boolean, excludeConfigFiles: boolean) {
const { project } = this.getFileAndProjectWorker(uncheckedFileName, projectFileName, /*refreshInferredProjects*/ true, /*errorOnMissingProject*/ true);
const projectInfo = {
configFileName: project.getProjectName(),
languageServiceDisabled: !project.languageServiceEnabled,
fileNames: needFileNameList ? project.getFileNames() : undefined
fileNames: needFileNameList ? project.getFileNames(/*excludeFilesFromExternalLibraries*/ false, excludeConfigFiles) : undefined
};
return projectInfo;
}
@ -1425,29 +1493,40 @@ namespace ts.server {
return project.getLanguageService().getApplicableRefactors(file, position || textRange);
}
private getRefactorCodeActions(args: protocol.GetRefactorCodeActionsRequestArgs, simplifiedResult: boolean): protocol.RefactorCodeActions | protocol.RefactorCodeActionsFull {
private getEditsForRefactor(args: protocol.GetEditsForRefactorRequestArgs, simplifiedResult: boolean): ts.RefactorEditInfo | protocol.RefactorEditInfo {
const { file, project } = this.getFileAndProjectWithoutRefreshingInferredProjects(args);
const scriptInfo = project.getScriptInfoForNormalizedPath(file);
const { position, textRange } = this.extractPositionAndRange(args, scriptInfo);
const result: ts.CodeAction[] = project.getLanguageService().getRefactorCodeActions(
const result = project.getLanguageService().getEditsForRefactor(
file,
this.projectService.getFormatCodeOptions(),
position || textRange,
args.refactorName
args.refactor,
args.action
);
if (simplifiedResult) {
// Not full
if (result === undefined) {
return {
actions: result.map(action => this.mapCodeAction(action, scriptInfo))
edits: []
};
}
if (simplifiedResult) {
const file = result.renameFilename;
let location: ILineInfo | undefined = undefined;
if (file !== undefined && result.renameLocation !== undefined) {
const renameScriptInfo = project.getScriptInfoForNormalizedPath(toNormalizedPath(file));
location = renameScriptInfo.positionToLineOffset(result.renameLocation);
}
return {
renameLocation: location,
renameFilename: file,
edits: result.edits.map(change => this.mapTextChangesToCodeEdits(project, change))
};
}
else {
// Full
return {
actions: result
};
return result;
}
}
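For orientation, a rough sketch of the simplified (protocol-level) result this handler produces; the concrete values below are invented for illustration and assume a refactor whose result asks the editor to rename the newly introduced symbol.
// {
//     renameFilename: "/project/a.ts",
//     renameLocation: { line: 12, offset: 18 },
//     edits: [{
//         fileName: "/project/a.ts",
//         textChanges: [{ start: { line: 3, offset: 1 }, end: { line: 9, offset: 2 }, newText: "/* rewritten text */" }]
//     }]
// }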
@ -1505,6 +1584,14 @@ namespace ts.server {
};
}
private mapTextChangesToCodeEdits(project: Project, textChanges: FileTextChanges): protocol.FileCodeEdits {
const scriptInfo = project.getScriptInfoForNormalizedPath(toNormalizedPath(textChanges.fileName));
return {
fileName: textChanges.fileName,
textChanges: textChanges.textChanges.map(textChange => this.convertTextChangeToCodeEdit(textChange, scriptInfo))
};
}
private convertTextChangeToCodeEdit(change: ts.TextChange, scriptInfo: ScriptInfo): protocol.CodeEdit {
return {
start: scriptInfo.positionToLineOffset(change.span.start),
@ -1528,7 +1615,7 @@ namespace ts.server {
}
private getDiagnosticsForProject(next: NextStep, delay: number, fileName: string): void {
const { fileNames, languageServiceDisabled } = this.getProjectInfoWorker(fileName, /*projectFileName*/ undefined, /*needFileNameList*/ true);
const { fileNames, languageServiceDisabled } = this.getProjectInfoWorker(fileName, /*projectFileName*/ undefined, /*needFileNameList*/ true, /*excludeConfigFiles*/ true);
if (languageServiceDisabled) {
return;
}
@ -1544,18 +1631,22 @@ namespace ts.server {
const normalizedFileName = toNormalizedPath(fileName);
const project = this.projectService.getDefaultProjectForFile(normalizedFileName, /*refreshInferredProjects*/ true);
for (const fileNameInProject of fileNamesInProject) {
if (this.getCanonicalFileName(fileNameInProject) === this.getCanonicalFileName(fileName))
if (this.getCanonicalFileName(fileNameInProject) === this.getCanonicalFileName(fileName)) {
highPriorityFiles.push(fileNameInProject);
}
else {
const info = this.projectService.getScriptInfo(fileNameInProject);
if (!info.isScriptOpen()) {
if (fileNameInProject.indexOf(".d.ts") > 0)
if (fileNameInProject.indexOf(Extension.Dts) > 0) {
veryLowPriorityFiles.push(fileNameInProject);
else
}
else {
lowPriorityFiles.push(fileNameInProject);
}
}
else
else {
mediumPriorityFiles.push(fileNameInProject);
}
}
}
@ -1833,11 +1924,11 @@ namespace ts.server {
[CommandNames.GetApplicableRefactors]: (request: protocol.GetApplicableRefactorsRequest) => {
return this.requiredResponse(this.getApplicableRefactors(request.arguments));
},
[CommandNames.GetRefactorCodeActions]: (request: protocol.GetRefactorCodeActionsRequest) => {
return this.requiredResponse(this.getRefactorCodeActions(request.arguments, /*simplifiedResult*/ true));
[CommandNames.GetEditsForRefactor]: (request: protocol.GetEditsForRefactorRequest) => {
return this.requiredResponse(this.getEditsForRefactor(request.arguments, /*simplifiedResult*/ true));
},
[CommandNames.GetRefactorCodeActionsFull]: (request: protocol.GetRefactorCodeActionsRequest) => {
return this.requiredResponse(this.getRefactorCodeActions(request.arguments, /*simplifiedResult*/ false));
[CommandNames.GetEditsForRefactorFull]: (request: protocol.GetEditsForRefactorRequest) => {
return this.requiredResponse(this.getEditsForRefactor(request.arguments, /*simplifiedResult*/ false));
}
});

View File

@ -49,7 +49,7 @@ namespace ts.server {
export function createInstallTypingsRequest(project: Project, typeAcquisition: TypeAcquisition, unresolvedImports: SortedReadonlyArray<string>, cachePath?: string): DiscoverTypings {
return {
projectName: project.getProjectName(),
fileNames: project.getFileNames(/*excludeFilesFromExternalLibraries*/ true),
fileNames: project.getFileNames(/*excludeFilesFromExternalLibraries*/ true, /*excludeConfigFiles*/ true),
compilerOptions: project.getCompilerOptions(),
typeAcquisition,
unresolvedImports,
@ -195,6 +195,37 @@ namespace ts.server {
return <any>arr;
}
export function enumerateInsertsAndDeletes<T>(a: SortedReadonlyArray<T>, b: SortedReadonlyArray<T>, inserted: (item: T) => void, deleted: (item: T) => void, compare?: (a: T, b: T) => Comparison) {
compare = compare || ts.compareValues;
let aIndex = 0;
let bIndex = 0;
const aLen = a.length;
const bLen = b.length;
while (aIndex < aLen && bIndex < bLen) {
const aItem = a[aIndex];
const bItem = b[bIndex];
const compareResult = compare(aItem, bItem);
if (compareResult === Comparison.LessThan) {
inserted(aItem);
aIndex++;
}
else if (compareResult === Comparison.GreaterThan) {
deleted(bItem);
bIndex++;
}
else {
aIndex++;
bIndex++;
}
}
while (aIndex < aLen) {
inserted(a[aIndex++]);
}
while (bIndex < bLen) {
deleted(b[bIndex++]);
}
}
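A minimal usage sketch (invented file names), assuming both arrays are sorted under the default comparer: items present only in the first array are reported as inserted, items present only in the second as deleted.
const newNames = ["a.ts", "b.ts", "d.ts"] as any as SortedReadonlyArray<string>;
const oldNames = ["a.ts", "c.ts", "d.ts"] as any as SortedReadonlyArray<string>;
enumerateInsertsAndDeletes(newNames, oldNames,
    name => console.log(`inserted: ${name}`),   // logs "inserted: b.ts"
    name => console.log(`deleted: ${name}`));   // logs "deleted: c.ts"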
export class ThrottledOperations {
private pendingTimeouts: Map<any> = createMap<any>();
constructor(private readonly host: ServerHost) {

View File

@ -146,7 +146,7 @@ namespace ts.BreakpointResolver {
case SyntaxKind.ForOfStatement:
// span in initializer
return spanInInitializerOfForLike(<ForOfStatement | ForInStatement>node);
return spanInInitializerOfForLike(<ForOfStatement>node);
case SyntaxKind.SwitchStatement:
// span on switch(...)

View File

@ -724,8 +724,8 @@ namespace ts {
pushCommentRange(pos, tag.pos - pos);
}
pushClassification(tag.atToken.pos, tag.atToken.end - tag.atToken.pos, ClassificationType.punctuation);
pushClassification(tag.tagName.pos, tag.tagName.end - tag.tagName.pos, ClassificationType.docCommentTagName);
pushClassification(tag.atToken.pos, tag.atToken.end - tag.atToken.pos, ClassificationType.punctuation); // "@"
pushClassification(tag.tagName.pos, tag.tagName.end - tag.tagName.pos, ClassificationType.docCommentTagName); // e.g. "param"
pos = tag.tagName.end;
@ -814,7 +814,7 @@ namespace ts {
* False will mean that node is not classified and traverse routine should recurse into node contents.
*/
function tryClassifyNode(node: Node): boolean {
if (isJSDocTag(node)) {
if (isJSDoc(node)) {
return true;
}

View File

@ -8,107 +8,161 @@ namespace ts.codefix {
function getActionsForAddMissingMember(context: CodeFixContext): CodeAction[] | undefined {
const sourceFile = context.sourceFile;
const tokenSourceFile = context.sourceFile;
const start = context.span.start;
// This is the identifier of the missing property, e.g.:
// The identifier of the missing property, e.g.:
// this.missing = 1;
// ^^^^^^^
const token = getTokenAtPosition(sourceFile, start, /*includeJsDocComment*/ false);
const token = getTokenAtPosition(tokenSourceFile, start, /*includeJsDocComment*/ false);
if (token.kind !== SyntaxKind.Identifier) {
return undefined;
}
if (!isPropertyAccessExpression(token.parent) || token.parent.expression.kind !== SyntaxKind.ThisKeyword) {
if (!isPropertyAccessExpression(token.parent)) {
return undefined;
}
const classMemberDeclaration = getThisContainer(token, /*includeArrowFunctions*/ false);
if (!isClassElement(classMemberDeclaration)) {
return undefined;
const tokenName = token.getText(tokenSourceFile);
let makeStatic = false;
let classDeclaration: ClassLikeDeclaration;
if (token.parent.expression.kind === SyntaxKind.ThisKeyword) {
const containingClassMemberDeclaration = getThisContainer(token, /*includeArrowFunctions*/ false);
if (!isClassElement(containingClassMemberDeclaration)) {
return undefined;
}
classDeclaration = <ClassLikeDeclaration>containingClassMemberDeclaration.parent;
// Property accesses on `this` in a static method are accesses of a static member.
makeStatic = classDeclaration && hasModifier(containingClassMemberDeclaration, ModifierFlags.Static);
}
else {
const checker = context.program.getTypeChecker();
const leftExpression = token.parent.expression;
const leftExpressionType = checker.getTypeAtLocation(leftExpression);
if (leftExpressionType.flags & TypeFlags.Object) {
const symbol = leftExpressionType.symbol;
if (symbol.flags & SymbolFlags.Class) {
classDeclaration = symbol.declarations && <ClassLikeDeclaration>symbol.declarations[0];
if (leftExpressionType !== checker.getDeclaredTypeOfSymbol(symbol)) {
// The expression is a class symbol but the type is not the instance-side.
makeStatic = true;
}
}
}
}
const classDeclaration = <ClassLikeDeclaration>classMemberDeclaration.parent;
if (!classDeclaration || !isClassLike(classDeclaration)) {
return undefined;
}
const isStatic = hasModifier(classMemberDeclaration, ModifierFlags.Static);
const classDeclarationSourceFile = getSourceFileOfNode(classDeclaration);
const classOpenBrace = getOpenBraceOfClassLike(classDeclaration, classDeclarationSourceFile);
return isInJavaScriptFile(sourceFile) ? getActionsForAddMissingMemberInJavaScriptFile() : getActionsForAddMissingMemberInTypeScriptFile();
return isInJavaScriptFile(classDeclarationSourceFile) ?
getActionsForAddMissingMemberInJavaScriptFile(classDeclaration, makeStatic) :
getActionsForAddMissingMemberInTypeScriptFile(classDeclaration, makeStatic);
function getActionsForAddMissingMemberInJavaScriptFile(): CodeAction[] | undefined {
const memberName = token.getText();
function getActionsForAddMissingMemberInJavaScriptFile(classDeclaration: ClassLikeDeclaration, makeStatic: boolean): CodeAction[] | undefined {
let actions: CodeAction[];
if (isStatic) {
const methodCodeAction = getActionForMethodDeclaration(/*includeTypeScriptSyntax*/ false);
if (methodCodeAction) {
actions = [methodCodeAction];
}
if (makeStatic) {
if (classDeclaration.kind === SyntaxKind.ClassExpression) {
return undefined;
return actions;
}
const className = classDeclaration.name.getText();
return [{
description: formatStringFromArgs(getLocaleSpecificMessage(Diagnostics.Initialize_static_property_0), [memberName]),
changes: [{
fileName: sourceFile.fileName,
textChanges: [{
span: { start: classDeclaration.getEnd(), length: 0 },
newText: `${context.newLineCharacter}${className}.${memberName} = undefined;${context.newLineCharacter}`
}]
}]
}];
const staticInitialization = createStatement(createAssignment(
createPropertyAccess(createIdentifier(className), tokenName),
createIdentifier("undefined")));
const staticInitializationChangeTracker = textChanges.ChangeTracker.fromCodeFixContext(context);
staticInitializationChangeTracker.insertNodeAfter(
classDeclarationSourceFile,
classDeclaration,
staticInitialization,
{ suffix: context.newLineCharacter });
const initializeStaticAction = {
description: formatStringFromArgs(getLocaleSpecificMessage(Diagnostics.Initialize_static_property_0), [tokenName]),
changes: staticInitializationChangeTracker.getChanges()
};
(actions || (actions = [])).push(initializeStaticAction);
return actions;
}
else {
const classConstructor = getFirstConstructorWithBody(classDeclaration);
if (!classConstructor) {
return undefined;
return actions;
}
return [{
description: formatStringFromArgs(getLocaleSpecificMessage(Diagnostics.Initialize_property_0_in_the_constructor), [memberName]),
changes: [{
fileName: sourceFile.fileName,
textChanges: [{
span: { start: classConstructor.body.getEnd() - 1, length: 0 },
newText: `this.${memberName} = undefined;${context.newLineCharacter}`
}]
}]
}];
const propertyInitialization = createStatement(createAssignment(
createPropertyAccess(createThis(), tokenName),
createIdentifier("undefined")));
const propertyInitializationChangeTracker = textChanges.ChangeTracker.fromCodeFixContext(context);
propertyInitializationChangeTracker.insertNodeAt(
classDeclarationSourceFile,
classConstructor.body.getEnd() - 1,
propertyInitialization,
{ prefix: context.newLineCharacter, suffix: context.newLineCharacter });
const initializeAction = {
description: formatStringFromArgs(getLocaleSpecificMessage(Diagnostics.Initialize_property_0_in_the_constructor), [tokenName]),
changes: propertyInitializationChangeTracker.getChanges()
};
(actions || (actions = [])).push(initializeAction);
return actions;
}
}
function getActionsForAddMissingMemberInTypeScriptFile(): CodeAction[] | undefined {
let typeNode: TypeNode;
function getActionsForAddMissingMemberInTypeScriptFile(classDeclaration: ClassLikeDeclaration, makeStatic: boolean): CodeAction[] | undefined {
let actions: CodeAction[];
if (token.parent.parent.kind === SyntaxKind.BinaryExpression) {
const binaryExpression = token.parent.parent as BinaryExpression;
const checker = context.program.getTypeChecker();
const widenedType = checker.getWidenedType(checker.getBaseTypeOfLiteralType(checker.getTypeAtLocation(binaryExpression.right)));
typeNode = checker.typeToTypeNode(widenedType, classDeclaration);
const methodCodeAction = getActionForMethodDeclaration(/*includeTypeScriptSyntax*/ true);
if (methodCodeAction) {
actions = [methodCodeAction];
}
let typeNode: TypeNode;
if (token.parent.parent.kind === SyntaxKind.BinaryExpression) {
const binaryExpression = token.parent.parent as BinaryExpression;
const otherExpression = token.parent === binaryExpression.left ? binaryExpression.right : binaryExpression.left;
const checker = context.program.getTypeChecker();
const widenedType = checker.getWidenedType(checker.getBaseTypeOfLiteralType(checker.getTypeAtLocation(otherExpression)));
typeNode = checker.typeToTypeNode(widenedType, classDeclaration);
}
typeNode = typeNode || createKeywordTypeNode(SyntaxKind.AnyKeyword);
const openBrace = getOpenBraceOfClassLike(classDeclaration, sourceFile);
const property = createProperty(
/*decorators*/undefined,
/*modifiers*/ isStatic ? [createToken(SyntaxKind.StaticKeyword)] : undefined,
token.getText(sourceFile),
/*modifiers*/ makeStatic ? [createToken(SyntaxKind.StaticKeyword)] : undefined,
tokenName,
/*questionToken*/ undefined,
typeNode,
/*initializer*/ undefined);
const propertyChangeTracker = textChanges.ChangeTracker.fromCodeFixContext(context);
propertyChangeTracker.insertNodeAfter(sourceFile, openBrace, property, { suffix: context.newLineCharacter });
propertyChangeTracker.insertNodeAfter(classDeclarationSourceFile, classOpenBrace, property, { suffix: context.newLineCharacter });
const actions = [{
description: formatStringFromArgs(getLocaleSpecificMessage(Diagnostics.Add_declaration_for_missing_property_0), [token.getText()]),
(actions || (actions = [])).push({
description: formatStringFromArgs(getLocaleSpecificMessage(Diagnostics.Declare_property_0), [tokenName]),
changes: propertyChangeTracker.getChanges()
}];
});
if (!isStatic) {
if (!makeStatic) {
// Index signatures cannot have the static modifier.
const stringTypeNode = createKeywordTypeNode(SyntaxKind.StringKeyword);
const indexingParameter = createParameter(
/*decorators*/ undefined,
@ -125,15 +179,32 @@ namespace ts.codefix {
typeNode);
const indexSignatureChangeTracker = textChanges.ChangeTracker.fromCodeFixContext(context);
indexSignatureChangeTracker.insertNodeAfter(sourceFile, openBrace, indexSignature, { suffix: context.newLineCharacter });
indexSignatureChangeTracker.insertNodeAfter(classDeclarationSourceFile, classOpenBrace, indexSignature, { suffix: context.newLineCharacter });
actions.push({
description: formatStringFromArgs(getLocaleSpecificMessage(Diagnostics.Add_index_signature_for_missing_property_0), [token.getText()]),
description: formatStringFromArgs(getLocaleSpecificMessage(Diagnostics.Add_index_signature_for_property_0), [tokenName]),
changes: indexSignatureChangeTracker.getChanges()
});
}
return actions;
}
function getActionForMethodDeclaration(includeTypeScriptSyntax: boolean): CodeAction | undefined {
if (token.parent.parent.kind === SyntaxKind.CallExpression) {
const callExpression = <CallExpression>token.parent.parent;
const methodDeclaration = createMethodFromCallExpression(callExpression, tokenName, includeTypeScriptSyntax, makeStatic);
const methodDeclarationChangeTracker = textChanges.ChangeTracker.fromCodeFixContext(context);
methodDeclarationChangeTracker.insertNodeAfter(classDeclarationSourceFile, classOpenBrace, methodDeclaration, { suffix: context.newLineCharacter });
return {
description: formatStringFromArgs(getLocaleSpecificMessage(makeStatic ?
Diagnostics.Declare_static_method_0 :
Diagnostics.Declare_method_0),
[tokenName]),
changes: methodDeclarationChangeTracker.getChanges()
};
}
}
}
}
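To make the behaviour concrete, an illustrative before/after with invented class and property names:
// Before, in a JavaScript file:
//     class Counter { constructor() { } }
//     Counter.missing++;            // error: property 'missing' does not exist
// "Initialize static property 'missing'" inserts after the class declaration:
//     Counter.missing = undefined;
// For an instance access such as `this.missing = 1;`, "Initialize property 'missing'
// in the constructor" instead inserts `this.missing = undefined;` at the end of the
// constructor body, and the TypeScript variants declare the member on the class instead.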

View File

@ -18,14 +18,14 @@ namespace ts.codefix {
switch (token.kind) {
case ts.SyntaxKind.Identifier:
return deleteIdentifier();
return deleteIdentifierOrPrefixWithUnderscore(<Identifier>token);
case SyntaxKind.PropertyDeclaration:
case SyntaxKind.NamespaceImport:
return deleteNode(token.parent);
return [deleteNode(token.parent)];
default:
return deleteDefault();
return [deleteDefault()];
}
function deleteDefault() {
@ -40,61 +40,69 @@ namespace ts.codefix {
}
}
function deleteIdentifier(): CodeAction[] | undefined {
switch (token.parent.kind) {
function prefixIdentifierWithUnderscore(identifier: Identifier): CodeAction {
const startPosition = identifier.getStart(sourceFile, /*includeJsDocComment*/ false);
return {
description: formatStringFromArgs(getLocaleSpecificMessage(Diagnostics.Prefix_0_with_an_underscore), { 0: token.getText() }),
changes: [{
fileName: sourceFile.path,
textChanges: [{
span: { start: startPosition, length: 0 },
newText: "_"
}]
}]
};
}
function deleteIdentifierOrPrefixWithUnderscore(identifier: Identifier): CodeAction[] | undefined {
const parent = identifier.parent;
switch (parent.kind) {
case ts.SyntaxKind.VariableDeclaration:
return deleteVariableDeclaration(<ts.VariableDeclaration>token.parent);
return deleteVariableDeclarationOrPrefixWithUnderscore(identifier, <ts.VariableDeclaration>parent);
case SyntaxKind.TypeParameter:
const typeParameters = (<DeclarationWithTypeParameters>token.parent.parent).typeParameters;
const typeParameters = (<DeclarationWithTypeParameters>parent.parent).typeParameters;
if (typeParameters.length === 1) {
const previousToken = getTokenAtPosition(sourceFile, typeParameters.pos - 1, /*includeJsDocComment*/ false);
if (!previousToken || previousToken.kind !== SyntaxKind.LessThanToken) {
return deleteRange(typeParameters);
}
const nextToken = getTokenAtPosition(sourceFile, typeParameters.end, /*includeJsDocComment*/ false);
if (!nextToken || nextToken.kind !== SyntaxKind.GreaterThanToken) {
return deleteRange(typeParameters);
}
return deleteNodeRange(previousToken, nextToken);
Debug.assert(previousToken.kind === SyntaxKind.LessThanToken);
Debug.assert(nextToken.kind === SyntaxKind.GreaterThanToken);
return [deleteNodeRange(previousToken, nextToken)];
}
else {
return deleteNodeInList(token.parent);
return [deleteNodeInList(parent)];
}
case ts.SyntaxKind.Parameter:
const functionDeclaration = <FunctionDeclaration>token.parent.parent;
if (functionDeclaration.parameters.length === 1) {
return deleteNode(token.parent);
}
else {
return deleteNodeInList(token.parent);
}
const functionDeclaration = <FunctionDeclaration>parent.parent;
return [functionDeclaration.parameters.length === 1 ? deleteNode(parent) : deleteNodeInList(parent),
prefixIdentifierWithUnderscore(identifier)];
// handle case where 'import a = A;'
case SyntaxKind.ImportEqualsDeclaration:
const importEquals = getAncestor(token, SyntaxKind.ImportEqualsDeclaration);
return deleteNode(importEquals);
const importEquals = getAncestor(identifier, SyntaxKind.ImportEqualsDeclaration);
return [deleteNode(importEquals)];
case SyntaxKind.ImportSpecifier:
const namedImports = <NamedImports>token.parent.parent;
const namedImports = <NamedImports>parent.parent;
if (namedImports.elements.length === 1) {
// Only 1 import and it is unused. So the entire declaration should be removed.
const importSpec = getAncestor(token, SyntaxKind.ImportDeclaration);
return deleteNode(importSpec);
const importSpec = getAncestor(identifier, SyntaxKind.ImportDeclaration);
return [deleteNode(importSpec)];
}
else {
// delete import specifier
return deleteNodeInList(token.parent);
return [deleteNodeInList(parent)];
}
// handle case where "import d, * as ns from './file'"
// or "'import {a, b as ns} from './file'"
case SyntaxKind.ImportClause: // this covers both 'import |d|' and 'import |d,| *'
const importClause = <ImportClause>token.parent;
const importClause = <ImportClause>parent;
if (!importClause.namedBindings) { // |import d from './file'| or |import * as ns from './file'|
const importDecl = getAncestor(importClause, SyntaxKind.ImportDeclaration);
return deleteNode(importDecl);
return [deleteNode(importDecl)];
}
else {
// import |d,| * as ns from './file'
@ -102,64 +110,62 @@ namespace ts.codefix {
const nextToken = getTokenAtPosition(sourceFile, importClause.name.end, /*includeJsDocComment*/ false);
if (nextToken && nextToken.kind === SyntaxKind.CommaToken) {
// shift first non-whitespace position after comma to the start position of the node
return deleteRange({ pos: start, end: skipTrivia(sourceFile.text, nextToken.end, /*stopAfterLineBreaks*/ false, /*stopAtComments*/ true) });
return [deleteRange({ pos: start, end: skipTrivia(sourceFile.text, nextToken.end, /*stopAfterLineBreaks*/ false, /*stopAtComments*/ true) })];
}
else {
return deleteNode(importClause.name);
return [deleteNode(importClause.name)];
}
}
case SyntaxKind.NamespaceImport:
const namespaceImport = <NamespaceImport>token.parent;
if (namespaceImport.name === token && !(<ImportClause>namespaceImport.parent).name) {
const namespaceImport = <NamespaceImport>parent;
if (namespaceImport.name === identifier && !(<ImportClause>namespaceImport.parent).name) {
const importDecl = getAncestor(namespaceImport, SyntaxKind.ImportDeclaration);
return deleteNode(importDecl);
return [deleteNode(importDecl)];
}
else {
const previousToken = getTokenAtPosition(sourceFile, namespaceImport.pos - 1, /*includeJsDocComment*/ false);
if (previousToken && previousToken.kind === SyntaxKind.CommaToken) {
const startPosition = textChanges.getAdjustedStartPosition(sourceFile, previousToken, {}, textChanges.Position.FullStart);
return deleteRange({ pos: startPosition, end: namespaceImport.end });
return [deleteRange({ pos: startPosition, end: namespaceImport.end })];
}
return deleteRange(namespaceImport);
return [deleteRange(namespaceImport)];
}
default:
return deleteDefault();
return [deleteDefault()];
}
}
// token.parent is a variableDeclaration
function deleteVariableDeclaration(varDecl: ts.VariableDeclaration): CodeAction[] | undefined {
function deleteVariableDeclarationOrPrefixWithUnderscore(identifier: Identifier, varDecl: ts.VariableDeclaration): CodeAction[] | undefined {
switch (varDecl.parent.parent.kind) {
case SyntaxKind.ForStatement:
const forStatement = <ForStatement>varDecl.parent.parent;
const forInitializer = <VariableDeclarationList>forStatement.initializer;
if (forInitializer.declarations.length === 1) {
return deleteNode(forInitializer);
}
else {
return deleteNodeInList(varDecl);
}
return [forInitializer.declarations.length === 1 ? deleteNode(forInitializer) : deleteNodeInList(varDecl)];
case SyntaxKind.ForOfStatement:
const forOfStatement = <ForOfStatement>varDecl.parent.parent;
Debug.assert(forOfStatement.initializer.kind === SyntaxKind.VariableDeclarationList);
const forOfInitializer = <VariableDeclarationList>forOfStatement.initializer;
return replaceNode(forOfInitializer.declarations[0], createObjectLiteral());
return [
replaceNode(forOfInitializer.declarations[0], createObjectLiteral()),
prefixIdentifierWithUnderscore(identifier)
];
case SyntaxKind.ForInStatement:
// There is no valid fix in the case of:
// for .. in
return undefined;
return [prefixIdentifierWithUnderscore(identifier)];
default:
const variableStatement = <VariableStatement>varDecl.parent.parent;
if (variableStatement.declarationList.declarations.length === 1) {
return deleteNode(variableStatement);
return [deleteNode(variableStatement)];
}
else {
return deleteNodeInList(varDecl);
return [deleteNodeInList(varDecl)];
}
}
}
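An invented example of the two for-loop cases handled above: in a for..of the unused declaration can be replaced by an empty binding pattern, while in a for..in only the underscore-prefix fix is offered.
// for (const unused of items) { }   ->   for (const {} of items) { }   or   for (const _unused of items) { }
// for (const key in table) { }      ->   for (const _key in table) { }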
@ -184,11 +190,11 @@ namespace ts.codefix {
return makeChange(textChanges.ChangeTracker.fromCodeFixContext(context).replaceNode(sourceFile, n, newNode));
}
function makeChange(changeTracker: textChanges.ChangeTracker) {
return [{
function makeChange(changeTracker: textChanges.ChangeTracker): CodeAction {
return {
description: formatStringFromArgs(getLocaleSpecificMessage(Diagnostics.Remove_declaration_for_Colon_0), { 0: token.getText() }),
changes: changeTracker.getChanges()
}];
};
}
}
});

View File

@ -6,7 +6,7 @@
/// <reference path="fixConstructorForDerivedNeedSuperCall.ts" />
/// <reference path="fixExtendsInterfaceBecomesImplements.ts" />
/// <reference path="fixForgottenThisPropertyAccess.ts" />
/// <reference path='unusedIdentifierFixes.ts' />
/// <reference path='fixUnusedIdentifier.ts' />
/// <reference path='importFixes.ts' />
/// <reference path='disableJsDiagnostics.ts' />
/// <reference path='helpers.ts' />

View File

@ -142,6 +142,50 @@ namespace ts.codefix {
}
}
export function createMethodFromCallExpression(callExpression: CallExpression, methodName: string, includeTypeScriptSyntax: boolean, makeStatic: boolean): MethodDeclaration {
const parameters = createDummyParameters(callExpression.arguments.length, /*names*/ undefined, /*minArgumentCount*/ undefined, includeTypeScriptSyntax);
let typeParameters: TypeParameterDeclaration[];
if (includeTypeScriptSyntax) {
const typeArgCount = length(callExpression.typeArguments);
for (let i = 0; i < typeArgCount; i++) {
const name = typeArgCount < 8 ? String.fromCharCode(CharacterCodes.T + i) : `T${i}`;
const typeParameter = createTypeParameterDeclaration(name, /*constraint*/ undefined, /*defaultType*/ undefined);
(typeParameters ? typeParameters : typeParameters = []).push(typeParameter);
}
}
const newMethod = createMethod(
/*decorators*/ undefined,
/*modifiers*/ makeStatic ? [createToken(SyntaxKind.StaticKeyword)] : undefined,
/*asteriskToken*/ undefined,
methodName,
/*questionToken*/ undefined,
typeParameters,
parameters,
/*type*/ includeTypeScriptSyntax ? createKeywordTypeNode(SyntaxKind.AnyKeyword) : undefined,
createStubbedMethodBody()
);
return newMethod;
}
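Roughly, for an unresolved call such as `this.pad<string>(2, "x")`, the helper produces a stub along these lines when TypeScript syntax is included (the method name and arguments are invented; the exact stubbed body comes from createStubbedMethodBody):
// pad<T>(arg0: any, arg1: any): any {
//     throw new Error("Method not implemented.");
// }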
function createDummyParameters(argCount: number, names: string[] | undefined, minArgumentCount: number | undefined, addAnyType: boolean) {
const parameters: ParameterDeclaration[] = [];
for (let i = 0; i < argCount; i++) {
const newParameter = createParameter(
/*decorators*/ undefined,
/*modifiers*/ undefined,
/*dotDotDotToken*/ undefined,
/*name*/ names && names[i] || `arg${i}`,
/*questionToken*/ minArgumentCount !== undefined && i >= minArgumentCount ? createToken(SyntaxKind.QuestionToken) : undefined,
/*type*/ addAnyType ? createKeywordTypeNode(SyntaxKind.AnyKeyword) : undefined,
/*initializer*/ undefined);
parameters.push(newParameter);
}
return parameters;
}
function createMethodImplementingSignatures(signatures: Signature[], name: PropertyName, optional: boolean, modifiers: Modifier[] | undefined): MethodDeclaration {
/** This is *a* signature with the maximal number of arguments,
* such that if there is a "maximal" signature without rest arguments,
@ -163,19 +207,7 @@ namespace ts.codefix {
const maxNonRestArgs = maxArgsSignature.parameters.length - (maxArgsSignature.hasRestParameter ? 1 : 0);
const maxArgsParameterSymbolNames = maxArgsSignature.parameters.map(symbol => symbol.getName());
const parameters: ParameterDeclaration[] = [];
for (let i = 0; i < maxNonRestArgs; i++) {
const anyType = createKeywordTypeNode(SyntaxKind.AnyKeyword);
const newParameter = createParameter(
/*decorators*/ undefined,
/*modifiers*/ undefined,
/*dotDotDotToken*/ undefined,
maxArgsParameterSymbolNames[i],
/*questionToken*/ i >= minArgumentCount ? createToken(SyntaxKind.QuestionToken) : undefined,
anyType,
/*initializer*/ undefined);
parameters.push(newParameter);
}
const parameters = createDummyParameters(maxNonRestArgs, maxArgsParameterSymbolNames, minArgumentCount, /*addAnyType*/ true);
if (someSigHasRestParameter) {
const anyArrayType = createArrayTypeNode(createKeywordTypeNode(SyntaxKind.AnyKeyword));

View File

@ -138,8 +138,23 @@ namespace ts.codefix {
const currentTokenMeaning = getMeaningFromLocation(token);
if (context.errorCode === Diagnostics._0_refers_to_a_UMD_global_but_the_current_file_is_a_module_Consider_adding_an_import_instead.code) {
const symbol = checker.getAliasedSymbol(checker.getSymbolAtLocation(token));
return getCodeActionForImport(symbol, /*isDefault*/ false, /*isNamespaceImport*/ true);
const umdSymbol = checker.getSymbolAtLocation(token);
let symbol: ts.Symbol;
let symbolName: string;
if (umdSymbol.flags & ts.SymbolFlags.Alias) {
symbol = checker.getAliasedSymbol(umdSymbol);
symbolName = name;
}
else if (isJsxOpeningLikeElement(token.parent) && token.parent.tagName === token) {
// The error wasn't for the symbolAtLocation, it was for the JSX tag itself, which needs access to e.g. `React`.
symbol = checker.getAliasedSymbol(checker.resolveNameAtLocation(token, checker.getJsxNamespace(), SymbolFlags.Value));
symbolName = symbol.name;
}
else {
Debug.fail("Either the symbol or the JSX namespace should be a UMD global if we got here");
}
return getCodeActionForImport(symbol, symbolName, /*isDefault*/ false, /*isNamespaceImport*/ true);
}
const candidateModules = checker.getAmbientModules();
@ -159,7 +174,7 @@ namespace ts.codefix {
if (localSymbol && localSymbol.name === name && checkSymbolHasMeaning(localSymbol, currentTokenMeaning)) {
// check if this symbol is already used
const symbolId = getUniqueSymbolId(localSymbol);
symbolIdActionMap.addActions(symbolId, getCodeActionForImport(moduleSymbol, /*isDefault*/ true));
symbolIdActionMap.addActions(symbolId, getCodeActionForImport(moduleSymbol, name, /*isDefault*/ true));
}
}
@ -167,7 +182,7 @@ namespace ts.codefix {
const exportSymbolWithIdenticalName = checker.tryGetMemberInModuleExports(name, moduleSymbol);
if (exportSymbolWithIdenticalName && checkSymbolHasMeaning(exportSymbolWithIdenticalName, currentTokenMeaning)) {
const symbolId = getUniqueSymbolId(exportSymbolWithIdenticalName);
symbolIdActionMap.addActions(symbolId, getCodeActionForImport(moduleSymbol));
symbolIdActionMap.addActions(symbolId, getCodeActionForImport(moduleSymbol, name));
}
}
@ -218,7 +233,7 @@ namespace ts.codefix {
return declarations ? some(symbol.declarations, decl => !!(getMeaningFromDeclaration(decl) & meaning)) : false;
}
function getCodeActionForImport(moduleSymbol: Symbol, isDefault?: boolean, isNamespaceImport?: boolean): ImportCodeAction[] {
function getCodeActionForImport(moduleSymbol: Symbol, symbolName: string, isDefault?: boolean, isNamespaceImport?: boolean): ImportCodeAction[] {
const existingDeclarations = getImportDeclarations(moduleSymbol);
if (existingDeclarations.length > 0) {
// With an existing import statement, there are more than one actions the user can do.
@ -375,10 +390,10 @@ namespace ts.codefix {
const moduleSpecifierWithoutQuotes = stripQuotes(moduleSpecifier || getModuleSpecifierForNewImport());
const changeTracker = createChangeTracker();
const importClause = isDefault
? createImportClause(createIdentifier(name), /*namedBindings*/ undefined)
? createImportClause(createIdentifier(symbolName), /*namedBindings*/ undefined)
: isNamespaceImport
? createImportClause(/*name*/ undefined, createNamespaceImport(createIdentifier(name)))
: createImportClause(/*name*/ undefined, createNamedImports([createImportSpecifier(/*propertyName*/ undefined, createIdentifier(name))]));
? createImportClause(/*name*/ undefined, createNamespaceImport(createIdentifier(symbolName)))
: createImportClause(/*name*/ undefined, createNamedImports([createImportSpecifier(/*propertyName*/ undefined, createIdentifier(symbolName))]));
const importDecl = createImportDeclaration(/*decorators*/ undefined, /*modifiers*/ undefined, importClause, createLiteral(moduleSpecifierWithoutQuotes));
if (!lastImportDeclaration) {
changeTracker.insertNodeAt(sourceFile, sourceFile.getStart(), importDecl, { suffix: `${context.newLineCharacter}${context.newLineCharacter}` });
@ -392,7 +407,7 @@ namespace ts.codefix {
// are there is already a new line separating code and import statements.
return createCodeAction(
Diagnostics.Import_0_from_1,
[name, `"${moduleSpecifierWithoutQuotes}"`],
[symbolName, `"${moduleSpecifierWithoutQuotes}"`],
changeTracker.getChanges(),
"NewImport",
moduleSpecifierWithoutQuotes
@ -412,8 +427,9 @@ namespace ts.codefix {
removeFileExtension(getRelativePath(moduleFileName, sourceDirectory));
function tryGetModuleNameFromAmbientModule(): string {
if (moduleSymbol.valueDeclaration.kind !== SyntaxKind.SourceFile) {
return moduleSymbol.name;
const decl = moduleSymbol.valueDeclaration;
if (isModuleDeclaration(decl) && isStringLiteral(decl.name)) {
return decl.name.text;
}
}
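For reference, the three import-clause shapes built in getCodeActionForNewImport above correspond roughly to the following statements (module specifier invented):
// import d from "./file";         // isDefault
// import * as ns from "./file";   // isNamespaceImport
// import { x } from "./file";     // otherwise: a named import specifier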

View File

@ -4,6 +4,12 @@
namespace ts.Completions {
export type Log = (message: string) => void;
const enum KeywordCompletionFilters {
None,
ClassElementKeywords, // Keywords inside a class body
ConstructorParameterKeywords, // Keywords at a constructor parameter position
}
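A quick, non-exhaustive illustration of where each filter applies (the keyword lists here are indicative only):
// class C {
//     pri|                     // ClassElementKeywords: e.g. public, private, protected, static, readonly, abstract, async, get, set
//     constructor(pri| ) { }   // ConstructorParameterKeywords: e.g. public, private, protected, readonly
// }
// None: the full, unfiltered keyword list is used elsewhere.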
export function getCompletionsAtPosition(host: LanguageServiceHost, typeChecker: TypeChecker, log: Log, compilerOptions: CompilerOptions, sourceFile: SourceFile, position: number): CompletionInfo | undefined {
if (isInReferenceComment(sourceFile, position)) {
return PathCompletions.getTripleSlashReferenceCompletion(sourceFile, position, compilerOptions, host);
@ -18,16 +24,33 @@ namespace ts.Completions {
return undefined;
}
const { symbols, isGlobalCompletion, isMemberCompletion, isNewIdentifierLocation, location, requestJsDocTagName, requestJsDocTag, hasFilteredClassMemberKeywords } = completionData;
const { symbols, isGlobalCompletion, isMemberCompletion, isNewIdentifierLocation, location, request, keywordFilters } = completionData;
if (requestJsDocTagName) {
// If the current position is a jsDoc tag name, only tag names should be provided for completion
return { isGlobalCompletion: false, isMemberCompletion: false, isNewIdentifierLocation: false, entries: JsDoc.getJSDocTagNameCompletions() };
if (sourceFile.languageVariant === LanguageVariant.JSX &&
location && location.parent && location.parent.kind === SyntaxKind.JsxClosingElement) {
// In a TypeScript JSX element, if the element is not defined, then when the user asks for completion at the closing tag,
// instead of simply giving an unknown value, the completion will return the tag name of the associated opening element.
// For example:
// var x = <div> </ /*1*/> completion list at "1" will contain "div" with type any
const tagName = (<JsxElement>location.parent.parent).openingElement.tagName;
return { isGlobalCompletion: false, isMemberCompletion: true, isNewIdentifierLocation: false,
entries: [{
name: (<JsxTagNameExpression>tagName).getFullText(),
kind: ScriptElementKind.classElement,
kindModifiers: undefined,
sortText: "0",
}]};
}
if (requestJsDocTag) {
// If the current position is a jsDoc tag, only tags should be provided for completion
return { isGlobalCompletion: false, isMemberCompletion: false, isNewIdentifierLocation: false, entries: JsDoc.getJSDocTagCompletions() };
if (request) {
const entries = request.kind === "JsDocTagName"
// If the current position is a jsDoc tag name, only tag names should be provided for completion
? JsDoc.getJSDocTagNameCompletions()
: request.kind === "JsDocTag"
// If the current position is a jsDoc tag, only tags should be provided for completion
? JsDoc.getJSDocTagCompletions()
: JsDoc.getJSDocParameterNameCompletions(request.tag);
return { isGlobalCompletion: false, isMemberCompletion: false, isNewIdentifierLocation: false, entries };
}
const entries: CompletionEntry[] = [];
@ -37,35 +60,20 @@ namespace ts.Completions {
addRange(entries, getJavaScriptCompletionEntries(sourceFile, location.pos, uniqueNames, compilerOptions.target));
}
else {
if (!symbols || symbols.length === 0) {
if (sourceFile.languageVariant === LanguageVariant.JSX &&
location.parent && location.parent.kind === SyntaxKind.JsxClosingElement) {
// In a TypeScript JSX element, if the element is not defined, then when the user asks for completion at the closing tag,
// instead of simply giving an unknown value, the completion will return the tag name of the associated opening element.
// For example:
// var x = <div> </ /*1*/> completion list at "1" will contain "div" with type any
const tagName = (<JsxElement>location.parent.parent).openingElement.tagName;
entries.push({
name: (<Identifier>tagName).text,
kind: undefined,
kindModifiers: undefined,
sortText: "0",
});
}
else if (!hasFilteredClassMemberKeywords) {
if ((!symbols || symbols.length === 0) && keywordFilters === KeywordCompletionFilters.None) {
return undefined;
}
}
getCompletionEntriesFromSymbols(symbols, entries, location, /*performCharacterChecks*/ true, typeChecker, compilerOptions.target, log);
}
if (hasFilteredClassMemberKeywords) {
addRange(entries, classMemberKeywordCompletions);
}
// Add keywords if this is not a member completion list
else if (!isMemberCompletion && !requestJsDocTag && !requestJsDocTagName) {
addRange(entries, keywordCompletions);
// TODO add filter for keyword based on type/value/namespace and also location
// Add keywords if
// - this is not a member completion list (add all keywords), or
// - other keyword filters are enabled for the current scenario (add only those keywords)
if (keywordFilters !== KeywordCompletionFilters.None || !isMemberCompletion) {
addRange(entries, getKeywordCompletions(keywordFilters));
}
return { isGlobalCompletion, isMemberCompletion, isNewIdentifierLocation: isNewIdentifierLocation, entries };
@ -314,7 +322,10 @@ namespace ts.Completions {
}
// Didn't find a symbol with this name. See if we can find a keyword instead.
const keywordCompletion = forEach(keywordCompletions, c => c.name === entryName);
const keywordCompletion = forEach(
getKeywordCompletions(KeywordCompletionFilters.None),
c => c.name === entryName
);
if (keywordCompletion) {
return {
name: entryName,
@ -345,16 +356,27 @@ namespace ts.Completions {
return undefined;
}
function getCompletionData(typeChecker: TypeChecker, log: (message: string) => void, sourceFile: SourceFile, position: number) {
interface CompletionData {
symbols: Symbol[];
isGlobalCompletion: boolean;
isMemberCompletion: boolean;
isNewIdentifierLocation: boolean;
location: Node;
isRightOfDot: boolean;
request?: Request;
keywordFilters: KeywordCompletionFilters;
}
type Request = { kind: "JsDocTagName" } | { kind: "JsDocTag" } | { kind: "JsDocParameterName", tag: JSDocParameterTag };
function getCompletionData(typeChecker: TypeChecker, log: (message: string) => void, sourceFile: SourceFile, position: number): CompletionData {
const isJavaScriptFile = isSourceFileJavaScript(sourceFile);
// JsDoc tag-name is just the name of the JSDoc tagname (exclude "@")
let requestJsDocTagName = false;
// JsDoc tag includes both "@" and tag-name
let requestJsDocTag = false;
let request: Request | undefined;
let start = timestamp();
const currentToken = getTokenAtPosition(sourceFile, position, /*includeJsDocComment*/ false); // TODO: GH#15853
let currentToken = getTokenAtPosition(sourceFile, position, /*includeJsDocComment*/ false); // TODO: GH#15853
// We will check for jsdoc comments with insideComment and getJsDocTagAtPosition. (TODO: that seems rather inefficient to check the same thing so many times.)
log("getCompletionData: Get current token: " + (timestamp() - start));
start = timestamp();
@ -362,12 +384,13 @@ namespace ts.Completions {
const insideComment = isInComment(sourceFile, position, currentToken);
log("getCompletionData: Is inside comment: " + (timestamp() - start));
let insideJsDocTagTypeExpression = false;
if (insideComment) {
if (hasDocComment(sourceFile, position)) {
// The current position is next to the '@' sign, when no tag name has been provided yet.
// Provide a full list of tag names
if (sourceFile.text.charCodeAt(position - 1) === CharacterCodes.at) {
requestJsDocTagName = true;
// The current position is next to the '@' sign, when no tag name has been provided yet.
// Provide a full list of tag names
request = { kind: "JsDocTagName" };
}
else {
// When completion is requested without "@", we will have to check to make sure that
@ -387,37 +410,40 @@ namespace ts.Completions {
// * |c|
// */
const lineStart = getLineStartPositionForPosition(position, sourceFile);
requestJsDocTag = !(sourceFile.text.substring(lineStart, position).match(/[^\*|\s|(/\*\*)]/));
if (!(sourceFile.text.substring(lineStart, position).match(/[^\*|\s|(/\*\*)]/))) {
request = { kind: "JsDocTag" };
}
}
}
// Completion should work inside certain JsDoc tags. For example:
// /** @type {number | string} */
// Completion should work in the brackets
let insideJsDocTagExpression = false;
const tag = getJsDocTagAtPosition(sourceFile, position);
const tag = getJsDocTagAtPosition(currentToken, position);
if (tag) {
if (tag.tagName.pos <= position && position <= tag.tagName.end) {
requestJsDocTagName = true;
request = { kind: "JsDocTagName" };
}
switch (tag.kind) {
case SyntaxKind.JSDocTypeTag:
case SyntaxKind.JSDocParameterTag:
case SyntaxKind.JSDocReturnTag:
const tagWithExpression = <JSDocTypeTag | JSDocParameterTag | JSDocReturnTag>tag;
if (tagWithExpression.typeExpression) {
insideJsDocTagExpression = tagWithExpression.typeExpression.pos < position && position < tagWithExpression.typeExpression.end;
}
break;
if (isTagWithTypeExpression(tag) && tag.typeExpression) {
currentToken = getTokenAtPosition(sourceFile, position, /*includeJsDocComment*/ true);
if (!currentToken ||
(!isDeclarationName(currentToken) &&
(currentToken.parent.kind !== SyntaxKind.JSDocPropertyTag ||
(<JSDocPropertyTag>currentToken.parent).name !== currentToken))) {
// Use as type location if inside tag's type expression
insideJsDocTagTypeExpression = isCurrentlyEditingNode(tag.typeExpression);
}
}
if (isJSDocParameterTag(tag) && (nodeIsMissing(tag.name) || tag.name.pos <= position && position <= tag.name.end)) {
request = { kind: "JsDocParameterName", tag };
}
}
if (requestJsDocTagName || requestJsDocTag) {
return { symbols: undefined, isGlobalCompletion: false, isMemberCompletion: false, isNewIdentifierLocation: false, location: undefined, isRightOfDot: false, requestJsDocTagName, requestJsDocTag, hasFilteredClassMemberKeywords: false };
if (request) {
return { symbols: undefined, isGlobalCompletion: false, isMemberCompletion: false, isNewIdentifierLocation: false, location: undefined, isRightOfDot: false, request, keywordFilters: KeywordCompletionFilters.None };
}
if (!insideJsDocTagExpression) {
if (!insideJsDocTagTypeExpression) {
// Proceed if the current position is in jsDoc tag expression; otherwise it is a normal
// comment or the plain text part of a jsDoc comment, so no completion should be available
log("Returning an empty list because completion was inside a regular comment or plain text part of a JsDoc comment.");
@ -426,7 +452,7 @@ namespace ts.Completions {
}
start = timestamp();
const previousToken = findPrecedingToken(position, sourceFile);
const previousToken = findPrecedingToken(position, sourceFile, /*startNode*/ undefined, insideJsDocTagTypeExpression);
log("getCompletionData: Get previous token 1: " + (timestamp() - start));
// The decision to provide completion depends on the contextToken, which is determined through the previousToken.
@ -437,7 +463,7 @@ namespace ts.Completions {
// Skip this partial identifier and adjust the contextToken to the token that precedes it.
if (contextToken && position <= contextToken.end && isWord(contextToken.kind)) {
const start = timestamp();
contextToken = findPrecedingToken(contextToken.getFullStart(), sourceFile);
contextToken = findPrecedingToken(contextToken.getFullStart(), sourceFile, /*startNode*/ undefined, insideJsDocTagTypeExpression);
log("getCompletionData: Get previous token 2: " + (timestamp() - start));
}
@ -449,7 +475,7 @@ namespace ts.Completions {
let isRightOfOpenTag = false;
let isStartingCloseTag = false;
let location = getTouchingPropertyName(sourceFile, position, /*includeJsDocComment*/ false); // TODO: GH#15853
let location = getTouchingPropertyName(sourceFile, position, insideJsDocTagTypeExpression); // TODO: GH#15853
if (contextToken) {
// Bail out if this is a known invalid completion location
if (isCompletionListBlocker(contextToken)) {
@ -495,7 +521,7 @@ namespace ts.Completions {
// It has a left-hand side, so we're not in an opening JSX tag.
break;
}
// falls through
// falls through
case SyntaxKind.JsxSelfClosingElement:
case SyntaxKind.JsxElement:
@ -513,7 +539,7 @@ namespace ts.Completions {
let isGlobalCompletion = false;
let isMemberCompletion: boolean;
let isNewIdentifierLocation: boolean;
let hasFilteredClassMemberKeywords = false;
let keywordFilters = KeywordCompletionFilters.None;
let symbols: Symbol[] = [];
if (isRightOfDot) {
@ -551,7 +577,21 @@ namespace ts.Completions {
log("getCompletionData: Semantic work: " + (timestamp() - semanticStart));
return { symbols, isGlobalCompletion, isMemberCompletion, isNewIdentifierLocation, location, isRightOfDot: (isRightOfDot || isRightOfOpenTag), requestJsDocTagName, requestJsDocTag, hasFilteredClassMemberKeywords };
return { symbols, isGlobalCompletion, isMemberCompletion, isNewIdentifierLocation, location, isRightOfDot: (isRightOfDot || isRightOfOpenTag), request, keywordFilters };
type JSDocTagWithTypeExpression = JSDocAugmentsTag | JSDocParameterTag | JSDocPropertyTag | JSDocReturnTag | JSDocTypeTag | JSDocTypedefTag;
function isTagWithTypeExpression(tag: JSDocTag): tag is JSDocTagWithTypeExpression {
switch (tag.kind) {
case SyntaxKind.JSDocAugmentsTag:
case SyntaxKind.JSDocParameterTag:
case SyntaxKind.JSDocPropertyTag:
case SyntaxKind.JSDocReturnTag:
case SyntaxKind.JSDocTypeTag:
case SyntaxKind.JSDocTypedefTag:
return true;
}
}
function getTypeScriptMemberSymbols(): void {
// Right of dot member completion list
@ -559,6 +599,9 @@ namespace ts.Completions {
isMemberCompletion = true;
isNewIdentifierLocation = false;
// Since this is a qualified name, check whether it is in a type node location
const isTypeLocation = isPartOfTypeNode(node.parent) || insideJsDocTagTypeExpression;
const isRhsOfImportDeclaration = isInRightSideOfInternalImportEqualsDeclaration(node);
if (node.kind === SyntaxKind.Identifier || node.kind === SyntaxKind.QualifiedName || node.kind === SyntaxKind.PropertyAccessExpression) {
let symbol = typeChecker.getSymbolAtLocation(node);
@ -570,16 +613,24 @@ namespace ts.Completions {
if (symbol && symbol.flags & SymbolFlags.HasExports) {
// Extract module or enum members
const exportedSymbols = typeChecker.getExportsOfModule(symbol);
const isValidValueAccess = (symbol: Symbol) => typeChecker.isValidPropertyAccess(<PropertyAccessExpression>(node.parent), symbol.name);
const isValidTypeAccess = (symbol: Symbol) => symbolCanbeReferencedAtTypeLocation(symbol);
const isValidAccess = isRhsOfImportDeclaration ?
// Any kind is allowed when dotting off namespace in internal import equals declaration
(symbol: Symbol) => isValidTypeAccess(symbol) || isValidValueAccess(symbol) :
isTypeLocation ? isValidTypeAccess : isValidValueAccess;
forEach(exportedSymbols, symbol => {
if (typeChecker.isValidPropertyAccess(<PropertyAccessExpression>(node.parent), symbol.name)) {
if (isValidAccess(symbol)) {
symbols.push(symbol);
}
});
}
}
const type = typeChecker.getTypeAtLocation(node);
addTypeProperties(type);
if (!isTypeLocation) {
const type = typeChecker.getTypeAtLocation(node);
addTypeProperties(type);
}
}
function addTypeProperties(type: Type) {
@ -621,6 +672,16 @@ namespace ts.Completions {
return tryGetImportOrExportClauseCompletionSymbols(namedImportsOrExports);
}
if (tryGetConstructorLikeCompletionContainer(contextToken)) {
// no members, only keywords
isMemberCompletion = false;
// Declaring new property/method/accessor
isNewIdentifierLocation = true;
// Has keywords for constructor parameters
keywordFilters = KeywordCompletionFilters.ConstructorParameterKeywords;
return true;
}
if (classLikeContainer = tryGetClassLikeCompletionContainer(contextToken)) {
// cursor inside class declaration
getGetClassLikeCompletionSymbols(classLikeContainer);
@ -687,13 +748,87 @@ namespace ts.Completions {
isStatement(scopeNode);
}
/// TODO filter meaning based on the current context
const symbolMeanings = SymbolFlags.Type | SymbolFlags.Value | SymbolFlags.Namespace | SymbolFlags.Alias;
symbols = typeChecker.getSymbolsInScope(scopeNode, symbolMeanings);
symbols = filterGlobalCompletion(typeChecker.getSymbolsInScope(scopeNode, symbolMeanings));
return true;
}
function filterGlobalCompletion(symbols: Symbol[]) {
return filter(symbols, symbol => {
if (!isSourceFile(location)) {
// export = /**/ here we want to get all meanings, so any symbol is ok
if (isExportAssignment(location.parent)) {
return true;
}
// This is an alias, follow what it aliases
if (symbol && symbol.flags & SymbolFlags.Alias) {
symbol = typeChecker.getAliasedSymbol(symbol);
}
// import m = /**/ <-- it can only access a namespace (if typing `import = x.`, this would get member symbols, not the namespace)
if (isInRightSideOfInternalImportEqualsDeclaration(location)) {
return !!(symbol.flags & SymbolFlags.Namespace);
}
if (insideJsDocTagTypeExpression ||
(!isContextTokenValueLocation(contextToken) &&
(isPartOfTypeNode(location) || isContextTokenTypeLocation(contextToken)))) {
// It's a type, but you can reach it via namespace.type as well
return symbolCanbeReferencedAtTypeLocation(symbol);
}
}
// expressions are value space (which includes the value namespaces)
return !!(symbol.flags & SymbolFlags.Value);
});
}
function isContextTokenValueLocation(contextToken: Node) {
return contextToken &&
contextToken.kind === SyntaxKind.TypeOfKeyword &&
contextToken.parent.kind === SyntaxKind.TypeQuery;
}
function isContextTokenTypeLocation(contextToken: Node) {
if (contextToken) {
const parentKind = contextToken.parent.kind;
switch (contextToken.kind) {
case SyntaxKind.ColonToken:
return parentKind === SyntaxKind.PropertyDeclaration ||
parentKind === SyntaxKind.PropertySignature ||
parentKind === SyntaxKind.Parameter ||
parentKind === SyntaxKind.VariableDeclaration ||
isFunctionLikeKind(parentKind);
case SyntaxKind.EqualsToken:
return parentKind === SyntaxKind.TypeAliasDeclaration;
case SyntaxKind.AsKeyword:
return parentKind === SyntaxKind.AsExpression;
}
}
}
function symbolCanbeReferencedAtTypeLocation(symbol: Symbol): boolean {
// This is an alias, follow what it aliases
if (symbol && symbol.flags & SymbolFlags.Alias) {
symbol = typeChecker.getAliasedSymbol(symbol);
}
if (symbol.flags & SymbolFlags.Type) {
return true;
}
if (symbol.flags & (SymbolFlags.ValueModule | SymbolFlags.NamespaceModule)) {
const exportedSymbols = typeChecker.getExportsOfModule(symbol);
// If the exported symbols contain a type,
// the symbol can be referenced at locations where a type is allowed
return forEach(exportedSymbols, symbolCanbeReferencedAtTypeLocation);
}
}
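A small invented example of the rule encoded here: a namespace is kept in type-position completions only if following its exports (transitively) reaches something usable as a type.
namespace Values { export const x = 1; }           // filtered out after `let p: `
namespace Shapes { export interface Point { } }    // kept, since `Shapes.Point` is a valid type
let p: Shapes.Point;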
/**
* Finds the first node that "embraces" the position, so that one may
* accurately aggregate locals from the closest containing scope.
@ -825,7 +960,7 @@ namespace ts.Completions {
*
* @returns true if 'symbols' was successfully populated; false otherwise.
*/
function tryGetObjectLikeCompletionSymbols(objectLikeContainer: ObjectLiteralExpression | BindingPattern): boolean {
function tryGetObjectLikeCompletionSymbols(objectLikeContainer: ObjectLiteralExpression | ObjectBindingPattern): boolean {
// We're looking up possible property names from contextual/inferred/declared type.
isMemberCompletion = true;
@ -841,41 +976,36 @@ namespace ts.Completions {
typeMembers = typeChecker.getAllPossiblePropertiesOfType(typeForObject);
existingMembers = (<ObjectLiteralExpression>objectLikeContainer).properties;
}
else if (objectLikeContainer.kind === SyntaxKind.ObjectBindingPattern) {
else {
Debug.assert(objectLikeContainer.kind === SyntaxKind.ObjectBindingPattern);
// We are *only* completing on properties from the type being destructured.
isNewIdentifierLocation = false;
const rootDeclaration = getRootDeclaration(objectLikeContainer.parent);
if (isVariableLike(rootDeclaration)) {
// We don't want to complete using the type acquired by the shape
// of the binding pattern; we are only interested in types acquired
// through type declaration or inference.
// Also proceed if rootDeclaration is a parameter and if its containing function expression/arrow function is contextually typed -
// type of parameter will flow in from the contextual type of the function
let canGetType = !!(rootDeclaration.initializer || rootDeclaration.type);
if (!canGetType && rootDeclaration.kind === SyntaxKind.Parameter) {
if (isExpression(rootDeclaration.parent)) {
canGetType = !!typeChecker.getContextualType(<Expression>rootDeclaration.parent);
}
else if (rootDeclaration.parent.kind === SyntaxKind.MethodDeclaration || rootDeclaration.parent.kind === SyntaxKind.SetAccessor) {
canGetType = isExpression(rootDeclaration.parent.parent) && !!typeChecker.getContextualType(<Expression>rootDeclaration.parent.parent);
}
if (!isVariableLike(rootDeclaration)) throw Debug.fail("Root declaration is not variable-like.");
// We don't want to complete using the type acquired by the shape
// of the binding pattern; we are only interested in types acquired
// through type declaration or inference.
// Also proceed if rootDeclaration is a parameter and if its containing function expression/arrow function is contextually typed -
// type of parameter will flow in from the contextual type of the function
let canGetType = rootDeclaration.initializer || rootDeclaration.type || rootDeclaration.parent.parent.kind === SyntaxKind.ForOfStatement;
if (!canGetType && rootDeclaration.kind === SyntaxKind.Parameter) {
if (isExpression(rootDeclaration.parent)) {
canGetType = !!typeChecker.getContextualType(<Expression>rootDeclaration.parent);
}
if (canGetType) {
const typeForObject = typeChecker.getTypeAtLocation(objectLikeContainer);
if (!typeForObject) return false;
// In a binding pattern, get only known properties. Everywhere else we will get all possible properties.
typeMembers = typeChecker.getPropertiesOfType(typeForObject);
existingMembers = (<ObjectBindingPattern>objectLikeContainer).elements;
else if (rootDeclaration.parent.kind === SyntaxKind.MethodDeclaration || rootDeclaration.parent.kind === SyntaxKind.SetAccessor) {
canGetType = isExpression(rootDeclaration.parent.parent) && !!typeChecker.getContextualType(<Expression>rootDeclaration.parent.parent);
}
}
else {
Debug.fail("Root declaration is not variable-like.");
if (canGetType) {
const typeForObject = typeChecker.getTypeAtLocation(objectLikeContainer);
if (!typeForObject) return false;
// In a binding pattern, get only known properties. Everywhere else we will get all possible properties.
typeMembers = typeChecker.getPropertiesOfType(typeForObject);
existingMembers = (<ObjectBindingPattern>objectLikeContainer).elements;
}
}
else {
Debug.fail("Expected object literal or binding pattern, got " + objectLikeContainer.kind);
}
if (typeMembers && typeMembers.length > 0) {
// Add filtered items to the completion list
@ -934,7 +1064,7 @@ namespace ts.Completions {
// Declaring new property/method/accessor
isNewIdentifierLocation = true;
// Has keywords for class elements
hasFilteredClassMemberKeywords = true;
keywordFilters = KeywordCompletionFilters.ClassElementKeywords;
const baseTypeNode = getClassExtendsHeritageClauseElement(classLikeDeclaration);
const implementsTypeNodes = getClassImplementsHeritageClauseElements(classLikeDeclaration);
@ -984,14 +1114,14 @@ namespace ts.Completions {
* Returns the immediate owning object literal or binding pattern of a context token,
* on the condition that one exists and that the context implies completion should be given.
*/
function tryGetObjectLikeCompletionContainer(contextToken: Node): ObjectLiteralExpression | BindingPattern {
function tryGetObjectLikeCompletionContainer(contextToken: Node): ObjectLiteralExpression | ObjectBindingPattern {
if (contextToken) {
switch (contextToken.kind) {
case SyntaxKind.OpenBraceToken: // const x = { |
case SyntaxKind.CommaToken: // const x = { a: 0, |
const parent = contextToken.parent;
if (parent && (parent.kind === SyntaxKind.ObjectLiteralExpression || parent.kind === SyntaxKind.ObjectBindingPattern)) {
return <ObjectLiteralExpression | BindingPattern>parent;
if (isObjectLiteralExpression(parent) || isObjectBindingPattern(parent)) {
return parent;
}
break;
}
@ -1024,6 +1154,16 @@ namespace ts.Completions {
return isClassElement(node.parent) && isClassLike(node.parent.parent);
}
function isParameterOfConstructorDeclaration(node: Node) {
return isParameter(node) && isConstructorDeclaration(node.parent);
}
function isConstructorParameterCompletion(node: Node) {
return node.parent &&
isParameterOfConstructorDeclaration(node.parent) &&
(isConstructorParameterCompletionKeyword(node.kind) || isDeclarationName(node));
}
/**
* Returns the immediate owning class declaration of a context token,
* on the condition that one exists and that the context implies completion should be given.
@ -1037,8 +1177,14 @@ namespace ts.Completions {
}
break;
// class c {getValue(): number; | }
// class c {getValue(): number, | }
case SyntaxKind.CommaToken:
if (isClassLike(contextToken.parent)) {
return contextToken.parent;
}
break;
// class c {getValue(): number; | }
case SyntaxKind.SemicolonToken:
// class c { method() { } | }
case SyntaxKind.CloseBraceToken:
@ -1050,7 +1196,7 @@ namespace ts.Completions {
default:
if (isFromClassElementDeclaration(contextToken) &&
(isClassMemberCompletionKeyword(contextToken.kind) ||
isClassMemberCompletionKeywordText(contextToken.getText()))) {
isClassMemberCompletionKeywordText(contextToken.getText()))) {
return contextToken.parent.parent as ClassLikeDeclaration;
}
}
@ -1063,6 +1209,26 @@ namespace ts.Completions {
return undefined;
}
/**
* Returns the immediate owning class declaration of a context token,
* on the condition that one exists and that the context implies completion should be given.
*/
function tryGetConstructorLikeCompletionContainer(contextToken: Node): ConstructorDeclaration {
if (contextToken) {
switch (contextToken.kind) {
case SyntaxKind.OpenParenToken:
case SyntaxKind.CommaToken:
return isConstructorDeclaration(contextToken.parent) && contextToken.parent;
default:
if (isConstructorParameterCompletion(contextToken)) {
return contextToken.parent.parent as ConstructorDeclaration;
}
}
}
return undefined;
}
function tryGetContainingJsxElement(contextToken: Node): JsxOpeningLikeElement {
if (contextToken) {
const parent = contextToken.parent;
@ -1126,21 +1292,6 @@ namespace ts.Completions {
return undefined;
}
function isFunction(kind: SyntaxKind): boolean {
if (!isFunctionLikeKind(kind)) {
return false;
}
switch (kind) {
case SyntaxKind.Constructor:
case SyntaxKind.ConstructorType:
case SyntaxKind.FunctionType:
return false;
default:
return true;
}
}
/**
* @returns true if we are certain that the currently edited location must define a new location; false otherwise.
*/
@ -1152,12 +1303,15 @@ namespace ts.Completions {
containingNodeKind === SyntaxKind.VariableDeclarationList ||
containingNodeKind === SyntaxKind.VariableStatement ||
containingNodeKind === SyntaxKind.EnumDeclaration || // enum a { foo, |
isFunction(containingNodeKind) ||
containingNodeKind === SyntaxKind.ClassDeclaration || // class A<T, |
containingNodeKind === SyntaxKind.ClassExpression || // var C = class D<T, |
isFunctionLikeButNotConstructor(containingNodeKind) ||
containingNodeKind === SyntaxKind.InterfaceDeclaration || // interface A<T, |
containingNodeKind === SyntaxKind.ArrayBindingPattern || // var [x, y|
containingNodeKind === SyntaxKind.TypeAliasDeclaration; // type Map, K, |
containingNodeKind === SyntaxKind.TypeAliasDeclaration || // type Map, K, |
// class A<T, |
// var C = class D<T, |
(isClassLike(contextToken.parent) &&
contextToken.parent.typeParameters &&
contextToken.parent.typeParameters.end >= contextToken.pos);
case SyntaxKind.DotToken:
return containingNodeKind === SyntaxKind.ArrayBindingPattern; // var [.|
@ -1170,7 +1324,7 @@ namespace ts.Completions {
case SyntaxKind.OpenParenToken:
return containingNodeKind === SyntaxKind.CatchClause ||
isFunction(containingNodeKind);
isFunctionLikeButNotConstructor(containingNodeKind);
case SyntaxKind.OpenBraceToken:
return containingNodeKind === SyntaxKind.EnumDeclaration || // enum a { |
@ -1188,7 +1342,7 @@ namespace ts.Completions {
containingNodeKind === SyntaxKind.ClassExpression || // var C = class D< |
containingNodeKind === SyntaxKind.InterfaceDeclaration || // interface A< |
containingNodeKind === SyntaxKind.TypeAliasDeclaration || // type List< |
isFunction(containingNodeKind);
isFunctionLikeKind(containingNodeKind);
case SyntaxKind.StaticKeyword:
return containingNodeKind === SyntaxKind.PropertyDeclaration && !isClassLike(contextToken.parent.parent);
@ -1201,7 +1355,7 @@ namespace ts.Completions {
case SyntaxKind.PublicKeyword:
case SyntaxKind.PrivateKeyword:
case SyntaxKind.ProtectedKeyword:
return containingNodeKind === SyntaxKind.Parameter;
return containingNodeKind === SyntaxKind.Parameter && !isConstructorDeclaration(contextToken.parent.parent);
case SyntaxKind.AsKeyword:
return containingNodeKind === SyntaxKind.ImportSpecifier ||
@ -1213,7 +1367,7 @@ namespace ts.Completions {
if (isFromClassElementDeclaration(contextToken)) {
return false;
}
// falls through
// falls through
case SyntaxKind.ClassKeyword:
case SyntaxKind.EnumKeyword:
case SyntaxKind.InterfaceKeyword:
@ -1234,6 +1388,18 @@ namespace ts.Completions {
return false;
}
if (isConstructorParameterCompletion(contextToken)) {
// Constructor parameter completion is available only if
// - it is a modifier of the constructor parameter, or
// - it is the name of the parameter and is not currently being edited
// e.g. constructor(a |<- this shouldn't show completion
if (!isIdentifier(contextToken) ||
isConstructorParameterCompletionKeywordText(contextToken.getText()) ||
isCurrentlyEditingNode(contextToken)) {
return false;
}
}
// Previous token may have been a keyword that was converted to an identifier.
switch (contextToken.getText()) {
case "abstract":
@ -1254,7 +1420,11 @@ namespace ts.Completions {
return true;
}
return false;
return isDeclarationName(contextToken) && !isJsxAttribute(contextToken.parent);
}
function isFunctionLikeButNotConstructor(kind: SyntaxKind) {
return isFunctionLikeKind(kind) && kind !== SyntaxKind.Constructor;
}
function isDotOfNumericLiteral(contextToken: Node): boolean {
@ -1473,14 +1643,45 @@ namespace ts.Completions {
}
// A cache of completion entries for keywords, these do not change between sessions
const keywordCompletions: CompletionEntry[] = [];
for (let i = SyntaxKind.FirstKeyword; i <= SyntaxKind.LastKeyword; i++) {
keywordCompletions.push({
name: tokenToString(i),
kind: ScriptElementKind.keyword,
kindModifiers: ScriptElementKindModifier.none,
sortText: "0"
});
const _keywordCompletions: CompletionEntry[][] = [];
function getKeywordCompletions(keywordFilter: KeywordCompletionFilters): CompletionEntry[] {
const completions = _keywordCompletions[keywordFilter];
if (completions) {
return completions;
}
return _keywordCompletions[keywordFilter] = generateKeywordCompletions(keywordFilter);
type FilterKeywordCompletions = (entryName: string) => boolean;
function generateKeywordCompletions(keywordFilter: KeywordCompletionFilters) {
switch (keywordFilter) {
case KeywordCompletionFilters.None:
return getAllKeywordCompletions();
case KeywordCompletionFilters.ClassElementKeywords:
return getFilteredKeywordCompletions(isClassMemberCompletionKeywordText);
case KeywordCompletionFilters.ConstructorParameterKeywords:
return getFilteredKeywordCompletions(isConstructorParameterCompletionKeywordText);
}
}
function getAllKeywordCompletions() {
const allKeywordsCompletions: CompletionEntry[] = [];
for (let i = SyntaxKind.FirstKeyword; i <= SyntaxKind.LastKeyword; i++) {
allKeywordsCompletions.push({
name: tokenToString(i),
kind: ScriptElementKind.keyword,
kindModifiers: ScriptElementKindModifier.none,
sortText: "0"
});
}
return allKeywordsCompletions;
}
function getFilteredKeywordCompletions(filterFn: FilterKeywordCompletions) {
return filter(
getKeywordCompletions(KeywordCompletionFilters.None),
entry => filterFn(entry.name)
);
}
}
function isClassMemberCompletionKeyword(kind: SyntaxKind) {
@ -1503,8 +1704,19 @@ namespace ts.Completions {
return isClassMemberCompletionKeyword(stringToToken(text));
}
const classMemberKeywordCompletions = filter(keywordCompletions, entry =>
isClassMemberCompletionKeywordText(entry.name));
function isConstructorParameterCompletionKeyword(kind: SyntaxKind) {
switch (kind) {
case SyntaxKind.PublicKeyword:
case SyntaxKind.PrivateKeyword:
case SyntaxKind.ProtectedKeyword:
case SyntaxKind.ReadonlyKeyword:
return true;
}
}
function isConstructorParameterCompletionKeywordText(text: string) {
return isConstructorParameterCompletionKeyword(stringToToken(text));
}
function isEqualityExpression(node: Node): node is BinaryExpression {
return isBinaryExpression(node) && isEqualityOperatorKind(node.operatorToken.kind);
@ -1516,4 +1728,34 @@ namespace ts.Completions {
kind === SyntaxKind.EqualsEqualsEqualsToken ||
kind === SyntaxKind.ExclamationEqualsEqualsToken;
}
/** Get the corresponding JSDocTag node if the position is in a jsDoc comment */
function getJsDocTagAtPosition(node: Node, position: number): JSDocTag | undefined {
const { jsDoc } = getJsDocHavingNode(node);
if (!jsDoc) return undefined;
for (const { pos, end, tags } of jsDoc) {
if (!tags || position < pos || position > end) continue;
for (let i = tags.length - 1; i >= 0; i--) {
const tag = tags[i];
if (position >= tag.pos) {
return tag;
}
}
}
}
function getJsDocHavingNode(node: Node): Node {
if (!isToken(node)) return node;
switch (node.kind) {
case SyntaxKind.VarKeyword:
case SyntaxKind.LetKeyword:
case SyntaxKind.ConstKeyword:
// if the current token is var, let or const, skip the VariableDeclarationList
return node.parent.parent;
default:
return node.parent;
}
}
}
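The new KeywordCompletionFilters.ConstructorParameterKeywords path above narrows keyword suggestions inside a constructor parameter list to the parameter-property modifiers listed in isConstructorParameterCompletionKeyword. A minimal sketch of the code shape this targets (identifiers are illustrative, not taken from the change itself):

class ConnectionOptions {
    constructor(
        public host: string,           // "public" comes from the filtered keyword list
        private readonly port = 443,   // "private" and "readonly" likewise
        protected secure?: boolean     // "protected" is the remaining filtered keyword
    ) { }
}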

View File

@ -1,17 +1,23 @@
/* @internal */
namespace ts.DocumentHighlights {
export function getDocumentHighlights(program: Program, cancellationToken: CancellationToken, sourceFile: SourceFile, position: number, sourceFilesToSearch: SourceFile[]): DocumentHighlights[] {
export function getDocumentHighlights(program: Program, cancellationToken: CancellationToken, sourceFile: SourceFile, position: number, sourceFilesToSearch: SourceFile[]): DocumentHighlights[] | undefined {
const node = getTouchingWord(sourceFile, position, /*includeJsDocComment*/ true);
return node && (getSemanticDocumentHighlights(node, program, cancellationToken, sourceFilesToSearch) || getSyntacticDocumentHighlights(node, sourceFile));
if (!node) return undefined;
if (isJsxOpeningElement(node.parent) && node.parent.tagName === node || isJsxClosingElement(node.parent)) {
// For a JSX element, just highlight the matching tag, not all references.
const { openingElement, closingElement } = node.parent.parent;
const highlightSpans = [openingElement, closingElement].map(({ tagName }) => getHighlightSpanForNode(tagName, sourceFile));
return [{ fileName: sourceFile.fileName, highlightSpans }];
}
return getSemanticDocumentHighlights(node, program, cancellationToken, sourceFilesToSearch) || getSyntacticDocumentHighlights(node, sourceFile);
}
function getHighlightSpanForNode(node: Node, sourceFile: SourceFile): HighlightSpan {
const start = node.getStart(sourceFile);
const end = node.getEnd();
return {
fileName: sourceFile.fileName,
textSpan: createTextSpanFromBounds(start, end),
textSpan: createTextSpanFromNode(node, sourceFile),
kind: HighlightSpanKind.none
};
}
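With the JSX branch above, document highlights on a JSX tag name stay local to the element. A small illustrative .tsx fragment (the Banner component is hypothetical): placing the cursor on either tag name below highlights only the matching opening/closing pair, not every other reference to Banner in the program.

declare function Banner(props: { title: string }): any;
const header = <Banner title="Releases"></Banner>;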

View File

@ -185,7 +185,8 @@ namespace ts.FindAllReferences {
if (entry.type === "node") {
const { node } = entry;
return { textSpan: getTextSpan(node), fileName: node.getSourceFile().fileName, ...implementationKindDisplayParts(node, checker) };
} else {
}
else {
const { textSpan, fileName } = entry;
return { textSpan, fileName, kind: ScriptElementKind.unknown, displayParts: [] };
}
@ -306,7 +307,7 @@ namespace ts.FindAllReferences.Core {
case SyntaxKind.ExportDeclaration:
return true;
case SyntaxKind.CallExpression:
return isRequireCall(node.parent as CallExpression, /*checkArgumentIsStringLiteral*/ false);
return isRequireCall(node.parent as CallExpression, /*checkArgumentIsStringLiteral*/ false) || isImportCall(node.parent as CallExpression);
default:
return false;
}
@ -495,11 +496,10 @@ namespace ts.FindAllReferences.Core {
const { text = stripQuotes(getDeclaredName(this.checker, symbol, location)), allSearchSymbols = undefined } = searchOptions;
const escapedText = escapeIdentifier(text);
const parents = this.options.implementations && getParentSymbolsOfPropertyAccess(location, symbol, this.checker);
return { location, symbol, comingFrom, text, escapedText, parents, includes };
function includes(referenceSymbol: Symbol): boolean {
return allSearchSymbols ? contains(allSearchSymbols, referenceSymbol) : referenceSymbol === symbol;
}
return {
location, symbol, comingFrom, text, escapedText, parents,
includes: referenceSymbol => allSearchSymbols ? contains(allSearchSymbols, referenceSymbol) : referenceSymbol === symbol,
};
}
private readonly symbolIdToReferences: Entry[][] = [];
@ -678,9 +678,7 @@ namespace ts.FindAllReferences.Core {
return parent ? scope.getSourceFile() : scope;
}
function getPossibleSymbolReferencePositions(sourceFile: SourceFile, symbolName: string, container: Node = sourceFile, fullStart = false): number[] {
const start = fullStart ? container.getFullStart() : container.getStart(sourceFile);
const end = container.getEnd();
function getPossibleSymbolReferencePositions(sourceFile: SourceFile, symbolName: string, container: Node = sourceFile): number[] {
const positions: number[] = [];
/// TODO: Cache symbol existence for files to save text search
@ -695,10 +693,10 @@ namespace ts.FindAllReferences.Core {
const sourceLength = text.length;
const symbolNameLength = symbolName.length;
let position = text.indexOf(symbolName, start);
let position = text.indexOf(symbolName, container.pos);
while (position >= 0) {
// If we are past the end, stop looking
if (position > end) break;
if (position > container.end) break;
// We found a match. Make sure it's not part of a larger word (i.e. the char
// before and after it have to be a non-identifier char).
@ -759,8 +757,7 @@ namespace ts.FindAllReferences.Core {
}
function addReferencesForKeywordInFile(sourceFile: SourceFile, kind: SyntaxKind, searchText: string, references: Push<NodeEntry>): void {
// Want fullStart so we can find the symbol in JSDoc comments
const possiblePositions = getPossibleSymbolReferencePositions(sourceFile, searchText, sourceFile, /*fullStart*/ true);
const possiblePositions = getPossibleSymbolReferencePositions(sourceFile, searchText, sourceFile);
for (const position of possiblePositions) {
const referenceLocation = getTouchingPropertyName(sourceFile, position, /*includeJsDocComment*/ true);
if (referenceLocation.kind === kind) {
@ -784,8 +781,7 @@ namespace ts.FindAllReferences.Core {
return;
}
const fullStart = state.options.findInComments || container.jsDoc !== undefined || forEach(search.symbol.declarations, d => d.kind === ts.SyntaxKind.JSDocTypedefTag);
for (const position of getPossibleSymbolReferencePositions(sourceFile, search.text, container, fullStart)) {
for (const position of getPossibleSymbolReferencePositions(sourceFile, search.text, container)) {
getReferencesAtLocation(sourceFile, position, search, state);
}
}
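Because isImportCall is now accepted above, a dynamic import expression counts as an import of its target module when collecting references to that module. An illustrative call site (the "./telemetry" specifier is hypothetical, so this sketch only type-checks once such a module exists):

async function loadTelemetry() {
    // find-all-references on the "./telemetry" module now includes this call site
    return import("./telemetry");
}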

View File

@ -73,54 +73,56 @@ namespace ts.FindAllReferences {
function handleDirectImports(exportingModuleSymbol: Symbol): void {
const theseDirectImports = getDirectImports(exportingModuleSymbol);
if (theseDirectImports) for (const direct of theseDirectImports) {
if (!markSeenDirectImport(direct)) {
continue;
}
if (theseDirectImports) {
for (const direct of theseDirectImports) {
if (!markSeenDirectImport(direct)) {
continue;
}
cancellationToken.throwIfCancellationRequested();
cancellationToken.throwIfCancellationRequested();
switch (direct.kind) {
case SyntaxKind.CallExpression:
if (!isAvailableThroughGlobal) {
const parent = direct.parent!;
if (exportKind === ExportKind.ExportEquals && parent.kind === SyntaxKind.VariableDeclaration) {
const { name } = parent as ts.VariableDeclaration;
if (name.kind === SyntaxKind.Identifier) {
directImports.push(name);
break;
switch (direct.kind) {
case SyntaxKind.CallExpression:
if (!isAvailableThroughGlobal) {
const parent = direct.parent!;
if (exportKind === ExportKind.ExportEquals && parent.kind === SyntaxKind.VariableDeclaration) {
const { name } = parent as ts.VariableDeclaration;
if (name.kind === SyntaxKind.Identifier) {
directImports.push(name);
break;
}
}
// Don't support re-exporting 'require()' calls, so just add a single indirect user.
addIndirectUser(direct.getSourceFile());
}
break;
// Don't support re-exporting 'require()' calls, so just add a single indirect user.
addIndirectUser(direct.getSourceFile());
}
break;
case SyntaxKind.ImportEqualsDeclaration:
handleNamespaceImport(direct, direct.name, hasModifier(direct, ModifierFlags.Export));
break;
case SyntaxKind.ImportEqualsDeclaration:
handleNamespaceImport(direct, direct.name, hasModifier(direct, ModifierFlags.Export));
break;
case SyntaxKind.ImportDeclaration:
const namedBindings = direct.importClause && direct.importClause.namedBindings;
if (namedBindings && namedBindings.kind === SyntaxKind.NamespaceImport) {
handleNamespaceImport(direct, namedBindings.name);
}
else {
directImports.push(direct);
}
break;
case SyntaxKind.ImportDeclaration:
const namedBindings = direct.importClause && direct.importClause.namedBindings;
if (namedBindings && namedBindings.kind === SyntaxKind.NamespaceImport) {
handleNamespaceImport(direct, namedBindings.name);
}
else {
directImports.push(direct);
}
break;
case SyntaxKind.ExportDeclaration:
if (!direct.exportClause) {
// This is `export * from "foo"`, so imports of this module may import the export too.
handleDirectImports(getContainingModuleSymbol(direct, checker));
}
else {
// This is `export { foo } from "foo"` and creates an alias symbol, so recursive search will handle re-exports.
directImports.push(direct);
}
break;
case SyntaxKind.ExportDeclaration:
if (!direct.exportClause) {
// This is `export * from "foo"`, so imports of this module may import the export too.
handleDirectImports(getContainingModuleSymbol(direct, checker));
}
else {
// This is `export { foo } from "foo"` and creates an alias symbol, so recursive search will handle re-exports.
directImports.push(direct);
}
break;
}
}
}
}
@ -160,8 +162,10 @@ namespace ts.FindAllReferences {
const moduleSymbol = checker.getMergedSymbol(sourceFileLike.symbol);
Debug.assert(!!(moduleSymbol.flags & SymbolFlags.Module));
const directImports = getDirectImports(moduleSymbol);
if (directImports) for (const directImport of directImports) {
addIndirectUsers(getSourceFileLikeForImportDeclaration(directImport));
if (directImports) {
for (const directImport of directImports) {
addIndirectUsers(getSourceFileLikeForImportDeclaration(directImport));
}
}
}
@ -183,8 +187,10 @@ namespace ts.FindAllReferences {
importSearches.push([location, symbol]);
}
if (directImports) for (const decl of directImports) {
handleImport(decl);
if (directImports) {
for (const decl of directImports) {
handleImport(decl);
}
}
return { importSearches, singleReferences };
@ -258,25 +264,27 @@ namespace ts.FindAllReferences {
}
function searchForNamedImport(namedBindings: NamedImportsOrExports | undefined): void {
if (namedBindings) for (const element of namedBindings.elements) {
const { name, propertyName } = element;
if ((propertyName || name).text !== exportName) {
continue;
}
if (propertyName) {
// This is `import { foo as bar } from "./a"` or `export { foo as bar } from "./a"`. `foo` isn't a local in the file, so just add it as a single reference.
singleReferences.push(propertyName);
if (!isForRename) { // If renaming `foo`, don't touch `bar`, just `foo`.
// Search locally for `bar`.
addSearch(name, checker.getSymbolAtLocation(name));
if (namedBindings) {
for (const element of namedBindings.elements) {
const { name, propertyName } = element;
if ((propertyName || name).text !== exportName) {
continue;
}
if (propertyName) {
// This is `import { foo as bar } from "./a"` or `export { foo as bar } from "./a"`. `foo` isn't a local in the file, so just add it as a single reference.
singleReferences.push(propertyName);
if (!isForRename) { // If renaming `foo`, don't touch `bar`, just `foo`.
// Search locally for `bar`.
addSearch(name, checker.getSymbolAtLocation(name));
}
}
else {
const localSymbol = element.kind === SyntaxKind.ExportSpecifier && element.propertyName
? checker.getExportSpecifierLocalTargetSymbol(element) // For re-exporting under a different name, we want to get the re-exported symbol.
: checker.getSymbolAtLocation(name);
addSearch(name, localSymbol);
}
}
else {
const localSymbol = element.kind === SyntaxKind.ExportSpecifier && element.propertyName
? checker.getExportSpecifierLocalTargetSymbol(element) // For re-exporting under a different name, we want to get the re-exported symbol.
: checker.getSymbolAtLocation(name);
addSearch(name, localSymbol);
}
}
}
@ -436,8 +444,8 @@ namespace ts.FindAllReferences {
if (parent.kind === SyntaxKind.PropertyAccessExpression) {
// When accessing an export of a JS module, there's no alias. The symbol will still be flagged as an export even though we're at the use.
// So check that we are at the declaration.
return symbol.declarations.some(d => d === parent) && parent.parent.kind === ts.SyntaxKind.BinaryExpression
? getSpecialPropertyExport(parent.parent as ts.BinaryExpression, /*useLhsSymbol*/ false)
return symbol.declarations.some(d => d === parent) && isBinaryExpression(parent.parent)
? getSpecialPropertyExport(parent.parent, /*useLhsSymbol*/ false)
: undefined;
}
else {
@ -449,31 +457,41 @@ namespace ts.FindAllReferences {
else {
const exportNode = getExportNode(parent);
if (exportNode && hasModifier(exportNode, ModifierFlags.Export)) {
if (exportNode.kind === SyntaxKind.ImportEqualsDeclaration && (exportNode as ImportEqualsDeclaration).moduleReference === node) {
if (isImportEqualsDeclaration(exportNode) && exportNode.moduleReference === node) {
// We're at `Y` in `export import X = Y`. This is not the exported symbol, the left-hand-side is. So treat this as an import statement.
if (comingFromExport) {
return undefined;
}
const lhsSymbol = checker.getSymbolAtLocation((exportNode as ImportEqualsDeclaration).name);
const lhsSymbol = checker.getSymbolAtLocation(exportNode.name);
return { kind: ImportExport.Import, symbol: lhsSymbol, isNamedImport: false };
}
else {
return exportInfo(symbol, getExportKindForDeclaration(exportNode));
}
}
else if (parent.kind === SyntaxKind.ExportAssignment) {
// Get the symbol for the `export =` node; its parent is the module it's the export of.
const exportingModuleSymbol = parent.symbol.parent;
Debug.assert(!!exportingModuleSymbol);
return { kind: ImportExport.Export, symbol, exportInfo: { exportingModuleSymbol, exportKind: ExportKind.ExportEquals } };
// If we are in `export = a;`, `parent` is the export assignment.
else if (isExportAssignment(parent)) {
return getExportAssignmentExport(parent);
}
else if (parent.kind === ts.SyntaxKind.BinaryExpression) {
return getSpecialPropertyExport(parent as ts.BinaryExpression, /*useLhsSymbol*/ true);
// If we are in `export = class A {};` at `A`, `parent.parent` is the export assignment.
else if (isExportAssignment(parent.parent)) {
return getExportAssignmentExport(parent.parent);
}
else if (parent.parent.kind === SyntaxKind.BinaryExpression) {
return getSpecialPropertyExport(parent.parent as ts.BinaryExpression, /*useLhsSymbol*/ true);
// Similar for `module.exports =` and `exports.A =`.
else if (isBinaryExpression(parent)) {
return getSpecialPropertyExport(parent, /*useLhsSymbol*/ true);
}
else if (isBinaryExpression(parent.parent)) {
return getSpecialPropertyExport(parent.parent, /*useLhsSymbol*/ true);
}
}
function getExportAssignmentExport(ex: ExportAssignment): ExportedSymbol {
// Get the symbol for the `export =` node; its parent is the module it's the export of.
const exportingModuleSymbol = ex.symbol.parent;
Debug.assert(!!exportingModuleSymbol);
return { kind: ImportExport.Export, symbol, exportInfo: { exportingModuleSymbol, exportKind: ExportKind.ExportEquals } };
}
function getSpecialPropertyExport(node: ts.BinaryExpression, useLhsSymbol: boolean): ExportedSymbol | undefined {
@ -496,21 +514,21 @@ namespace ts.FindAllReferences {
function getImport(): ImportedSymbol | undefined {
const isImport = isNodeImport(node);
if (!isImport) return;
if (!isImport) return undefined;
// A symbol being imported is always an alias. So get what that aliases to find the local symbol.
let importedSymbol = checker.getImmediateAliasedSymbol(symbol);
if (importedSymbol) {
// Search on the local symbol in the exporting module, not the exported symbol.
importedSymbol = skipExportSpecifierSymbol(importedSymbol, checker);
// Similarly, skip past the symbol for 'export ='
if (importedSymbol.name === "export=") {
importedSymbol = checker.getImmediateAliasedSymbol(importedSymbol);
}
if (!importedSymbol) return undefined;
if (symbolName(importedSymbol) === symbol.name) { // If this is a rename import, do not continue searching.
return { kind: ImportExport.Import, symbol: importedSymbol, ...isImport };
}
// Search on the local symbol in the exporting module, not the exported symbol.
importedSymbol = skipExportSpecifierSymbol(importedSymbol, checker);
// Similarly, skip past the symbol for 'export ='
if (importedSymbol.name === "export=") {
importedSymbol = getExportEqualsLocalSymbol(importedSymbol, checker);
}
if (symbolName(importedSymbol) === symbol.name) { // If this is a rename import, do not continue searching.
return { kind: ImportExport.Import, symbol: importedSymbol, ...isImport };
}
}
@ -525,12 +543,29 @@ namespace ts.FindAllReferences {
}
}
function getExportEqualsLocalSymbol(importedSymbol: Symbol, checker: TypeChecker): Symbol {
if (importedSymbol.flags & SymbolFlags.Alias) {
return checker.getImmediateAliasedSymbol(importedSymbol);
}
const decl = importedSymbol.valueDeclaration;
if (isExportAssignment(decl)) { // `export = class {}`
return decl.expression.symbol;
}
else if (isBinaryExpression(decl)) { // `module.exports = class {}`
return decl.right.symbol;
}
Debug.fail();
}
// If a reference is a class expression, the exported node would be its parent.
// If a reference is a variable declaration, the exported node would be the variable statement.
function getExportNode(parent: Node): Node | undefined {
if (parent.kind === SyntaxKind.VariableDeclaration) {
const p = parent as ts.VariableDeclaration;
return p.parent.kind === ts.SyntaxKind.CatchClause ? undefined : p.parent.parent.kind === SyntaxKind.VariableStatement ? p.parent.parent : undefined;
} else {
}
else {
return parent;
}
}
@ -577,12 +612,13 @@ namespace ts.FindAllReferences {
/** If at an export specifier, go to the symbol it refers to. */
function skipExportSpecifierSymbol(symbol: Symbol, checker: TypeChecker): Symbol {
// For `export { foo } from './bar'`, there's nothing to skip, because it does not create a new alias. But `export { foo }` does.
if (symbol.declarations) for (const declaration of symbol.declarations) {
if (isExportSpecifier(declaration) && !(declaration as ExportSpecifier).propertyName && !(declaration as ExportSpecifier).parent.parent.moduleSpecifier) {
return checker.getExportSpecifierLocalTargetSymbol(declaration);
if (symbol.declarations) {
for (const declaration of symbol.declarations) {
if (isExportSpecifier(declaration) && !(declaration as ExportSpecifier).propertyName && !(declaration as ExportSpecifier).parent.parent.moduleSpecifier) {
return checker.getExportSpecifierLocalTargetSymbol(declaration);
}
}
}
return symbol;
}

View File

@ -132,6 +132,25 @@ namespace ts.JsDoc {
}));
}
export function getJSDocParameterNameCompletions(tag: JSDocParameterTag): CompletionEntry[] {
const nameThusFar = tag.name.text;
const jsdoc = tag.parent;
const fn = jsdoc.parent;
if (!ts.isFunctionLike(fn)) return [];
return mapDefined(fn.parameters, param => {
if (!isIdentifier(param.name)) return undefined;
const name = param.name.text;
if (jsdoc.tags.some(t => t !== tag && isJSDocParameterTag(t) && t.name.text === name)
|| nameThusFar !== undefined && !startsWith(name, nameThusFar)) {
return undefined;
}
return { name, kind: ScriptElementKind.parameterElement, kindModifiers: "", sortText: "0" };
});
}
/**
* Checks if position points to a valid position to add JSDoc comments, and if so,
* returns the appropriate template. Otherwise returns an empty string.
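getJSDocParameterNameCompletions above offers the enclosing function's own parameter names inside an @param tag, skipping names already documented by another @param and names that do not start with the typed prefix. A minimal sketch of the situation it serves (identifiers are illustrative):

/**
 * @param width  already documented, so it is not offered again
 * @param h      completion here would offer "height", since the "h" prefix matches
 */
function area(width: number, height: number) {
    return width * height;
}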

View File

@ -76,7 +76,7 @@ namespace ts.JsTyping {
if (!safeList) {
const result = readConfigFile(safeListPath, (path: string) => host.readFile(path));
safeList = result.config ? createMapFromTemplate<string>(result.config) : EmptySafeList;
safeList = createMapFromTemplate<string>(result.config);
}
const filesToWatch: string[] = [];
@ -143,7 +143,7 @@ namespace ts.JsTyping {
/**
* Merge a given list of typingNames to the inferredTypings map
*/
function mergeTypings(typingNames: string[]) {
function mergeTypings(typingNames: ReadonlyArray<string>) {
if (!typingNames) {
return;
}
@ -163,20 +163,18 @@ namespace ts.JsTyping {
filesToWatch.push(jsonPath);
}
const result = readConfigFile(jsonPath, (path: string) => host.readFile(path));
if (result.config) {
const jsonConfig: PackageJson = result.config;
if (jsonConfig.dependencies) {
mergeTypings(getOwnKeys(jsonConfig.dependencies));
}
if (jsonConfig.devDependencies) {
mergeTypings(getOwnKeys(jsonConfig.devDependencies));
}
if (jsonConfig.optionalDependencies) {
mergeTypings(getOwnKeys(jsonConfig.optionalDependencies));
}
if (jsonConfig.peerDependencies) {
mergeTypings(getOwnKeys(jsonConfig.peerDependencies));
}
const jsonConfig: PackageJson = result.config;
if (jsonConfig.dependencies) {
mergeTypings(getOwnKeys(jsonConfig.dependencies));
}
if (jsonConfig.devDependencies) {
mergeTypings(getOwnKeys(jsonConfig.devDependencies));
}
if (jsonConfig.optionalDependencies) {
mergeTypings(getOwnKeys(jsonConfig.optionalDependencies));
}
if (jsonConfig.peerDependencies) {
mergeTypings(getOwnKeys(jsonConfig.peerDependencies));
}
}
@ -192,7 +190,7 @@ namespace ts.JsTyping {
const cleanedTypingNames = map(inferredTypingNames, f => f.replace(/((?:\.|-)min(?=\.|$))|((?:-|\.)\d+)/g, ""));
if (safeList !== EmptySafeList) {
mergeTypings(filter(cleanedTypingNames, f => safeList.has(f)));
mergeTypings(ts.mapDefined(cleanedTypingNames, f => safeList.get(f)));
}
const hasJsxFile = forEach(fileNames, f => ensureScriptKind(f, getScriptKindFromFileName(f)) === ScriptKind.JSX);
@ -222,9 +220,6 @@ namespace ts.JsTyping {
continue;
}
const result = readConfigFile(normalizedFileName, (path: string) => host.readFile(path));
if (!result.config) {
continue;
}
const packageJson: PackageJson = result.config;
// npm 3's package.json contains a "_requiredBy" field

View File

@ -10,7 +10,7 @@ namespace ts.NavigateTo {
for (const sourceFile of sourceFiles) {
cancellationToken.throwIfCancellationRequested();
if (excludeDtsFiles && fileExtensionIs(sourceFile.fileName, ".d.ts")) {
if (excludeDtsFiles && fileExtensionIs(sourceFile.fileName, Extension.Dts)) {
continue;
}

View File

@ -95,9 +95,16 @@ namespace ts {
function tryConsumeImport(): boolean {
let token = scanner.getToken();
if (token === SyntaxKind.ImportKeyword) {
token = nextToken();
if (token === SyntaxKind.StringLiteral) {
if (token === SyntaxKind.OpenParenToken) {
token = nextToken();
if (token === SyntaxKind.StringLiteral) {
// import("mod");
recordModuleName();
return true;
}
}
else if (token === SyntaxKind.StringLiteral) {
// import "mod";
recordModuleName();
return true;
@ -297,7 +304,8 @@ namespace ts {
// import * as NS from "mod"
// import d, {a, b as B} from "mod"
// import i = require("mod");
//
// import("mod");
// export * from "mod"
// export {a as b} from "mod"
// export import i = require("mod")
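Since the pre-processor above now consumes import(...) calls, preProcessFile reports dynamic import specifiers alongside static ones. A quick check against the public ts.preProcessFile API (the embedded source text is illustrative):

import * as ts from "typescript";

const info = ts.preProcessFile(`const settings = import("./settings");`, /*readImportFiles*/ true);
// info.importedFiles is expected to include an entry whose fileName is "./settings".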

View File

@ -8,10 +8,10 @@ namespace ts {
description: string;
/** Compute the associated code actions */
getCodeActions(context: RefactorContext): CodeAction[];
getEditsForAction(context: RefactorContext, actionName: string): RefactorEditInfo | undefined;
/** A fast syntactic check to see if the refactor is applicable at given position. */
isApplicable(context: RefactorContext): boolean;
/** Compute (quickly) which actions are available here */
getAvailableActions(context: RefactorContext): ApplicableRefactorInfo[] | undefined;
}
export interface RefactorContext {
@ -34,7 +34,6 @@ namespace ts {
}
export function getApplicableRefactors(context: RefactorContext): ApplicableRefactorInfo[] | undefined {
let results: ApplicableRefactorInfo[];
const refactorList: Refactor[] = [];
refactors.forEach(refactor => {
@ -44,26 +43,17 @@ namespace ts {
if (context.cancellationToken && context.cancellationToken.isCancellationRequested()) {
return results;
}
if (refactor.isApplicable(context)) {
(results || (results = [])).push({ name: refactor.name, description: refactor.description });
const infos = refactor.getAvailableActions(context);
if (infos && infos.length) {
(results || (results = [])).push(...infos);
}
}
return results;
}
export function getRefactorCodeActions(context: RefactorContext, refactorName: string): CodeAction[] | undefined {
let result: CodeAction[];
export function getEditsForRefactor(context: RefactorContext, refactorName: string, actionName: string): RefactorEditInfo | undefined {
const refactor = refactors.get(refactorName);
if (!refactor) {
return undefined;
}
const codeActions = refactor.getCodeActions(context);
if (codeActions) {
addRange((result || (result = [])), codeActions);
}
return result;
return refactor && refactor.getEditsForAction(context, actionName);
}
}
}

View File

@ -1,16 +1,18 @@
/* @internal */
namespace ts.refactor {
const actionName = "convert";
const convertFunctionToES6Class: Refactor = {
name: "Convert to ES2015 class",
description: Diagnostics.Convert_function_to_an_ES2015_class.message,
getCodeActions,
isApplicable
getEditsForAction,
getAvailableActions
};
registerRefactor(convertFunctionToES6Class);
function isApplicable(context: RefactorContext): boolean {
function getAvailableActions(context: RefactorContext): ApplicableRefactorInfo[] {
const start = context.startPosition;
const node = getTokenAtPosition(context.file, start, /*includeJsDocComment*/ false);
const checker = context.program.getTypeChecker();
@ -20,10 +22,28 @@ namespace ts.refactor {
symbol = (symbol.valueDeclaration as VariableDeclaration).initializer.symbol;
}
return symbol && symbol.flags & SymbolFlags.Function && symbol.members && symbol.members.size > 0;
if (symbol && (symbol.flags & SymbolFlags.Function) && symbol.members && (symbol.members.size > 0)) {
return [
{
name: convertFunctionToES6Class.name,
description: convertFunctionToES6Class.description,
actions: [
{
description: convertFunctionToES6Class.description,
name: actionName
}
]
}
];
}
}
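getAvailableActions above offers the refactoring only when the symbol is a function that has members, i.e. the constructor-function-plus-prototype pattern. A hypothetical before/after of the kind of code it targets (names are illustrative; this shape typically appears in a JavaScript file):

function Timer(interval) {
    this.interval = interval;
}
Timer.prototype.start = function () {
    this.handle = setInterval(() => { }, this.interval);
};
// After "Convert to ES2015 class" this becomes roughly:
// class Timer {
//     constructor(interval) { this.interval = interval; }
//     start() { this.handle = setInterval(() => { }, this.interval); }
// }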
function getCodeActions(context: RefactorContext): CodeAction[] | undefined {
function getEditsForAction(context: RefactorContext, action: string): RefactorEditInfo | undefined {
// Somehow wrong action got invoked?
if (actionName !== action) {
return undefined;
}
const start = context.startPosition;
const sourceFile = context.file;
const checker = context.program.getTypeChecker();
@ -35,7 +55,7 @@ namespace ts.refactor {
const deletes: (() => any)[] = [];
if (!(ctorSymbol.flags & (SymbolFlags.Function | SymbolFlags.Variable))) {
return [];
return undefined;
}
const ctorDeclaration = ctorSymbol.valueDeclaration;
@ -63,7 +83,7 @@ namespace ts.refactor {
}
if (!newClassDeclaration) {
return [];
return undefined;
}
// Because the preceding node could be touched, we need to insert nodes before delete nodes.
@ -72,10 +92,9 @@ namespace ts.refactor {
deleteCallback();
}
return [{
description: formatStringFromArgs(Diagnostics.Convert_function_0_to_class.message, [ctorSymbol.name]),
changes: changeTracker.getChanges()
}];
return {
edits: changeTracker.getChanges()
};
function deleteNode(node: Node, inList = false) {
if (deletedNodes.some(n => isNodeDescendantOf(node, n))) {

View File

@ -37,7 +37,7 @@ namespace ts {
let ruleProvider: formatting.RulesProvider;
function createNode<TKind extends SyntaxKind>(kind: TKind, pos: number, end: number, parent?: Node): NodeObject | TokenObject<TKind> | IdentifierObject {
const node = kind >= SyntaxKind.FirstNode ? new NodeObject(kind, pos, end) :
const node = isNodeKind(kind) ? new NodeObject(kind, pos, end) :
kind === SyntaxKind.Identifier ? new IdentifierObject(SyntaxKind.Identifier, pos, end) :
new TokenObject(kind, pos, end);
node.parent = parent;
@ -103,10 +103,10 @@ namespace ts {
return sourceFile.text.substring(this.getStart(sourceFile), this.getEnd());
}
private addSyntheticNodes(nodes: Node[], pos: number, end: number, useJSDocScanner?: boolean): number {
private addSyntheticNodes(nodes: Node[], pos: number, end: number): number {
scanner.setTextPos(pos);
while (pos < end) {
const token = useJSDocScanner ? scanner.scanJSDocToken() : scanner.scan();
const token = scanner.scan();
Debug.assert(token !== SyntaxKind.EndOfFileToken); // Else it would infinitely loop
const textPos = scanner.getTextPos();
if (textPos <= end) {
@ -136,54 +136,50 @@ namespace ts {
}
private createChildren(sourceFile?: SourceFileLike) {
if (isJSDocTag(this)) {
if (!isNodeKind(this.kind)) {
this._children = emptyArray;
return;
}
if (isJSDocCommentContainingNode(this)) {
/** Don't add trivia for "tokens" since this is in a comment. */
const children: Node[] = [];
this.forEachChild(child => { children.push(child); });
this._children = children;
return;
}
else if (this.kind >= SyntaxKind.FirstNode) {
const children: Node[] = [];
scanner.setText((sourceFile || this.getSourceFile()).text);
let pos = this.pos;
const useJSDocScanner = this.kind >= SyntaxKind.FirstJSDocTagNode && this.kind <= SyntaxKind.LastJSDocTagNode;
const processNode = (node: Node) => {
const isJSDocTagNode = isJSDocTag(node);
if (!isJSDocTagNode && pos < node.pos) {
pos = this.addSyntheticNodes(children, pos, node.pos, useJSDocScanner);
}
children.push(node);
if (!isJSDocTagNode) {
pos = node.end;
}
};
const processNodes = (nodes: NodeArray<Node>) => {
if (pos < nodes.pos) {
pos = this.addSyntheticNodes(children, pos, nodes.pos, useJSDocScanner);
}
children.push(this.createSyntaxList(nodes));
pos = nodes.end;
};
// jsDocComments need to be the first children
if (this.jsDoc) {
for (const jsDocComment of this.jsDoc) {
processNode(jsDocComment);
}
const children: Node[] = [];
scanner.setText((sourceFile || this.getSourceFile()).text);
let pos = this.pos;
const processNode = (node: Node) => {
pos = this.addSyntheticNodes(children, pos, node.pos);
children.push(node);
pos = node.end;
};
const processNodes = (nodes: NodeArray<Node>) => {
if (pos < nodes.pos) {
pos = this.addSyntheticNodes(children, pos, nodes.pos);
}
// For syntactic classifications, all trivia are classified together, including jsdoc comments.
// For that to work, the jsdoc comments should still be the leading trivia of the first child.
// Restoring the scanner position ensures that.
pos = this.pos;
forEachChild(this, processNode, processNodes);
if (pos < this.end) {
this.addSyntheticNodes(children, pos, this.end);
children.push(this.createSyntaxList(nodes));
pos = nodes.end;
};
// jsDocComments need to be the first children
if (this.jsDoc) {
for (const jsDocComment of this.jsDoc) {
processNode(jsDocComment);
}
scanner.setText(undefined);
this._children = children;
}
else {
this._children = emptyArray;
// For syntactic classifications, all trivia are classified together, including jsdoc comments.
// For that to work, the jsdoc comments should still be the leading trivia of the first child.
// Restoring the scanner position ensures that.
pos = this.pos;
forEachChild(this, processNode, processNodes);
if (pos < this.end) {
this.addSyntheticNodes(children, pos, this.end);
}
scanner.setText(undefined);
this._children = children;
}
public getChildCount(sourceFile?: SourceFile): number {
@ -684,8 +680,9 @@ namespace ts {
forEachChild(decl.name, visit);
break;
}
if (decl.initializer)
if (decl.initializer) {
visit(decl.initializer);
}
}
// falls through
case SyntaxKind.EnumMember:
@ -732,6 +729,15 @@ namespace ts {
}
}
class SourceMapSourceObject implements SourceMapSource {
lineMap: number[];
constructor (public fileName: string, public text: string, public skipTrivia?: (pos: number) => number) {}
public getLineAndCharacterOfPosition(pos: number): LineAndCharacter {
return ts.getLineAndCharacterOfPosition(this, pos);
}
}
function getServicesObjectAllocator(): ObjectAllocator {
return {
getNodeConstructor: () => NodeObject,
@ -742,6 +748,7 @@ namespace ts {
getSymbolConstructor: () => SymbolObject,
getTypeConstructor: () => TypeObject,
getSignatureConstructor: () => SignatureObject,
getSourceMapSourceConstructor: () => SourceMapSourceObject,
};
}
@ -815,7 +822,7 @@ namespace ts {
private _compilationSettings: CompilerOptions;
private currentDirectory: string;
constructor(private host: LanguageServiceHost, private getCanonicalFileName: (fileName: string) => string) {
constructor(private host: LanguageServiceHost, getCanonicalFileName: (fileName: string) => string) {
// script id => script index
this.currentDirectory = host.getCurrentDirectory();
this.fileNameToEntry = createFileMap<HostFileInformation>();
@ -850,22 +857,17 @@ namespace ts {
return entry;
}
private getEntry(path: Path): HostFileInformation {
public getEntryByPath(path: Path): HostFileInformation {
return this.fileNameToEntry.get(path);
}
private contains(path: Path): boolean {
public containsEntryByPath(path: Path): boolean {
return this.fileNameToEntry.contains(path);
}
public getOrCreateEntry(fileName: string): HostFileInformation {
const path = toPath(fileName, this.currentDirectory, this.getCanonicalFileName);
return this.getOrCreateEntryByPath(fileName, path);
}
public getOrCreateEntryByPath(fileName: string, path: Path): HostFileInformation {
return this.contains(path)
? this.getEntry(path)
return this.containsEntryByPath(path)
? this.getEntryByPath(path)
: this.createEntry(fileName, path);
}
@ -882,12 +884,12 @@ namespace ts {
}
public getVersion(path: Path): string {
const file = this.getEntry(path);
const file = this.getEntryByPath(path);
return file && file.version;
}
public getScriptSnapshot(path: Path): IScriptSnapshot {
const file = this.getEntry(path);
const file = this.getEntryByPath(path);
return file && file.scriptSnapshot;
}
}
@ -1152,12 +1154,19 @@ namespace ts {
getCurrentDirectory: () => currentDirectory,
fileExists: (fileName): boolean => {
// stub missing host functionality
return hostCache.getOrCreateEntry(fileName) !== undefined;
const path = toPath(fileName, currentDirectory, getCanonicalFileName);
return hostCache.containsEntryByPath(path) ?
!!hostCache.getEntryByPath(path) :
(host.fileExists && host.fileExists(fileName));
},
readFile: (fileName): string => {
// stub missing host functionality
const entry = hostCache.getOrCreateEntry(fileName);
return entry && entry.scriptSnapshot.getText(0, entry.scriptSnapshot.getLength());
const path = toPath(fileName, currentDirectory, getCanonicalFileName);
if (hostCache.containsEntryByPath(path)) {
const entry = hostCache.getEntryByPath(path);
return entry && entry.scriptSnapshot.getText(0, entry.scriptSnapshot.getLength());
}
return host.readFile && host.readFile(fileName);
},
directoryExists: directoryName => {
return directoryProbablyExists(directoryName, host);
@ -1290,8 +1299,20 @@ namespace ts {
}
}
const currentOptions = program.getCompilerOptions();
const newOptions = hostCache.compilationSettings();
// If the compilation settings do not match, then the program is not up-to-date
return compareDataObjects(program.getCompilerOptions(), hostCache.compilationSettings());
if (!compareDataObjects(currentOptions, newOptions)) {
return false;
}
// If everything matches but the text of the config file has changed,
// error locations can change for program options, so update the program
if (currentOptions.configFile && newOptions.configFile) {
return currentOptions.configFile.text === newOptions.configFile.text;
}
return true;
}
}
@ -1309,7 +1330,9 @@ namespace ts {
if (program) {
forEach(program.getSourceFiles(), f =>
documentRegistry.releaseDocument(f.fileName, program.getCompilerOptions()));
program = undefined;
}
host = undefined;
}
/// Diagnostics
@ -1815,6 +1838,13 @@ namespace ts {
return false;
}
switch (openingBrace) {
case CharacterCodes.singleQuote:
case CharacterCodes.doubleQuote:
case CharacterCodes.backtick:
return !isInComment(sourceFile, position);
}
return true;
}
@ -1989,15 +2019,16 @@ namespace ts {
return refactor.getApplicableRefactors(getRefactorContext(file, positionOrRange));
}
function getRefactorCodeActions(
function getEditsForRefactor(
fileName: string,
formatOptions: FormatCodeSettings,
positionOrRange: number | TextRange,
refactorName: string): CodeAction[] | undefined {
refactorName: string,
actionName: string): RefactorEditInfo {
synchronizeHostData();
const file = getValidSourceFile(fileName);
return refactor.getRefactorCodeActions(getRefactorContext(file, positionOrRange, formatOptions), refactorName);
return refactor.getEditsForRefactor(getRefactorContext(file, positionOrRange, formatOptions), refactorName, actionName);
}
return {
@ -2005,8 +2036,6 @@ namespace ts {
cleanupSemanticCache,
getSyntacticDiagnostics,
getSemanticDiagnostics,
getApplicableRefactors,
getRefactorCodeActions,
getCompilerOptionsDiagnostics,
getSyntacticClassifications,
getSemanticClassifications,
@ -2044,7 +2073,9 @@ namespace ts {
getEmitOutput,
getNonBoundSourceFile,
getSourceFile,
getProgram
getProgram,
getApplicableRefactors,
getEditsForRefactor,
};
}

View File

@ -1109,27 +1109,16 @@ namespace ts {
() => {
const text = sourceTextSnapshot.getText(0, sourceTextSnapshot.getLength());
const result = parseConfigFileTextToJson(fileName, text);
if (result.error) {
return {
options: {},
typeAcquisition: {},
files: [],
raw: {},
errors: [realizeDiagnostic(result.error, "\r\n")]
};
}
const result = parseJsonText(fileName, text);
const normalizedFileName = normalizeSlashes(fileName);
const configFile = parseJsonConfigFileContent(result.config, this.host, getDirectoryPath(normalizedFileName), /*existingOptions*/ {}, normalizedFileName);
const configFile = parseJsonSourceFileConfigFileContent(result, this.host, getDirectoryPath(normalizedFileName), /*existingOptions*/ {}, normalizedFileName);
return {
options: configFile.options,
typeAcquisition: configFile.typeAcquisition,
files: configFile.fileNames,
raw: configFile.raw,
errors: realizeDiagnostics(configFile.errors, "\r\n")
errors: realizeDiagnostics(result.parseDiagnostics.concat(configFile.errors), "\r\n")
};
});
}
@ -1248,4 +1237,4 @@ namespace TypeScript.Services {
// TODO: it should be moved into a namespace though.
/* @internal */
const toolsVersion = "2.4";
const toolsVersion = "2.5";

View File

@ -4,8 +4,10 @@ namespace ts.SymbolDisplay {
export function getSymbolKind(typeChecker: TypeChecker, symbol: Symbol, location: Node): ScriptElementKind {
const { flags } = symbol;
if (flags & SymbolFlags.Class) return getDeclarationOfKind(symbol, SyntaxKind.ClassExpression) ?
if (flags & SymbolFlags.Class) {
return getDeclarationOfKind(symbol, SyntaxKind.ClassExpression) ?
ScriptElementKind.localClassElement : ScriptElementKind.classElement;
}
if (flags & SymbolFlags.Enum) return ScriptElementKind.enumElement;
if (flags & SymbolFlags.TypeAlias) return ScriptElementKind.typeElement;
if (flags & SymbolFlags.Interface) return ScriptElementKind.interfaceElement;

View File

@ -130,7 +130,7 @@ namespace ts {
commandLineOptionsStringToEnum = commandLineOptionsStringToEnum || <CommandLineOptionOfCustomType[]>filter(optionDeclarations, o =>
typeof o.type === "object" && !forEachEntry(o.type, v => typeof v !== "number"));
options = clone(options);
options = cloneCompilerOptions(options);
for (const opt of commandLineOptionsStringToEnum) {
if (!hasProperty(options, opt.name)) {

View File

@ -17,6 +17,7 @@
"../compiler/checker.ts",
"../compiler/factory.ts",
"../compiler/visitor.ts",
"../compiler/transformers/utilities.ts",
"../compiler/transformers/ts.ts",
"../compiler/transformers/jsx.ts",
"../compiler/transformers/esnext.ts",

View File

@ -71,6 +71,10 @@ namespace ts {
getLineAndCharacterOfPosition(pos: number): LineAndCharacter;
}
export interface SourceMapSource {
getLineAndCharacterOfPosition(pos: number): LineAndCharacter;
}
/**
* Represents an immutable snapshot of a script at a specified time. Once acquired, the
* snapshot is observably immutable. i.e. the same calls with the same parameters will return
@ -261,8 +265,9 @@ namespace ts {
isValidBraceCompletionAtPosition(fileName: string, position: number, openingBrace: number): boolean;
getCodeFixesAtPosition(fileName: string, start: number, end: number, errorCodes: number[], formatOptions: FormatCodeSettings): CodeAction[];
getApplicableRefactors(fileName: string, positionOrRange: number | TextRange): ApplicableRefactorInfo[];
getRefactorCodeActions(fileName: string, formatOptions: FormatCodeSettings, positionOrRange: number | TextRange, refactorName: string): CodeAction[] | undefined;
getEditsForRefactor(fileName: string, formatOptions: FormatCodeSettings, positionOrRange: number | TextRange, refactorName: string, actionName: string): RefactorEditInfo | undefined;
getEmitOutput(fileName: string, emitOnlyDtsFiles?: boolean): EmitOutput;
@ -353,11 +358,60 @@ namespace ts {
changes: FileTextChanges[];
}
/**
* A set of one or more available refactoring actions, grouped under a parent refactoring.
*/
export interface ApplicableRefactorInfo {
/**
* The programmatic name of the refactoring
*/
name: string;
/**
* A description of this refactoring category to show to the user.
* If the refactoring gets inlined (see below), this text will not be visible.
*/
description: string;
/**
* Inlineable refactorings can have their actions hoisted out to the top level
* of a context menu. Non-inlineable refactorings should always be shown inside
* their parent grouping.
*
* If not specified, this value is assumed to be 'true'
*/
inlineable?: boolean;
actions: RefactorActionInfo[];
}
/**
* Represents a single refactoring action - for example, the "Extract Method..." refactor might
* offer several actions, each corresponding to a surrounding class or closure to extract into.
*/
export type RefactorActionInfo = {
/**
* The programmatic name of the refactoring action
*/
name: string;
/**
* A description of this refactoring action to show to the user.
* If the parent refactoring is inlined away, this will be the only text shown,
* so this description should make sense by itself if the parent is inlineable=true
*/
description: string;
};
/**
* A set of edits to make in response to a refactor action, plus an optional
* location where renaming should be invoked from
*/
export type RefactorEditInfo = {
edits: FileTextChanges[];
renameFilename?: string;
renameLocation?: number;
};
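Together with the getApplicableRefactors/getEditsForRefactor methods declared above, these shapes are consumed roughly as sketched below. The service instance, file name, position, and format settings are placeholders, and the call signatures follow the LanguageService interface as declared in this change:

import * as ts from "typescript";

declare const service: ts.LanguageService;          // assumed to be created by the host
declare const formatOptions: ts.FormatCodeSettings; // assumed editor formatting settings
const fileName = "/project/src/widget.ts";          // hypothetical file
const position = 120;                               // hypothetical cursor offset

for (const refactor of service.getApplicableRefactors(fileName, position) || []) {
    for (const action of refactor.actions) {
        const result = service.getEditsForRefactor(fileName, formatOptions, position, refactor.name, action.name);
        if (result) {
            // result.edits holds the FileTextChanges to apply; if renameFilename and
            // renameLocation are present, the editor can start a rename there afterwards.
        }
    }
}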
export interface TextInsertion {
newText: string;
/** The position in newText the caret should point to after the insertion. */

View File

@ -82,7 +82,7 @@ namespace ts {
else if (node.parent.kind === SyntaxKind.ExportAssignment) {
return SemanticMeaning.All;
}
else if (isInRightSideOfImport(node)) {
else if (isInRightSideOfInternalImportEqualsDeclaration(node)) {
return getMeaningFromRightHandSideOfImportEquals(node);
}
else if (isDeclarationName(node)) {
@ -94,6 +94,10 @@ namespace ts {
else if (isNamespaceReference(node)) {
return SemanticMeaning.Namespace;
}
else if (isTypeParameterDeclaration(node.parent)) {
Debug.assert(isJSDocTemplateTag(node.parent.parent)); // Else would be handled by isDeclarationName
return SemanticMeaning.Type;
}
else {
return SemanticMeaning.Value;
}
@ -114,7 +118,7 @@ namespace ts {
return SemanticMeaning.Namespace;
}
function isInRightSideOfImport(node: Node) {
export function isInRightSideOfInternalImportEqualsDeclaration(node: Node) {
while (node.parent.kind === SyntaxKind.QualifiedName) {
node = node.parent;
}
@ -615,18 +619,21 @@ namespace ts {
return getTouchingToken(sourceFile, position, includeJsDocComment, n => isPropertyName(n.kind));
}
/** Returns the token if position is in [start, end) or if position === end and includeItemAtEndPosition(token) === true */
export function getTouchingToken(sourceFile: SourceFile, position: number, includeJsDocComment: boolean, includeItemAtEndPosition?: (n: Node) => boolean): Node {
return getTokenAtPositionWorker(sourceFile, position, /*allowPositionInLeadingTrivia*/ false, includeItemAtEndPosition, includeJsDocComment);
/**
* Returns the token if position is in [start, end).
* If position === end, returns the preceding token if includePrecedingTokenAtEndPosition(previousToken) === true
*/
export function getTouchingToken(sourceFile: SourceFile, position: number, includeJsDocComment: boolean, includePrecedingTokenAtEndPosition?: (n: Node) => boolean): Node {
return getTokenAtPositionWorker(sourceFile, position, /*allowPositionInLeadingTrivia*/ false, includePrecedingTokenAtEndPosition, /*includeEndPosition*/ false, includeJsDocComment);
}
/** Returns a token if position is in [start-of-leading-trivia, end) */
export function getTokenAtPosition(sourceFile: SourceFile, position: number, includeJsDocComment: boolean): Node {
return getTokenAtPositionWorker(sourceFile, position, /*allowPositionInLeadingTrivia*/ true, /*includeItemAtEndPosition*/ undefined, includeJsDocComment);
export function getTokenAtPosition(sourceFile: SourceFile, position: number, includeJsDocComment: boolean, includeEndPosition?: boolean): Node {
return getTokenAtPositionWorker(sourceFile, position, /*allowPositionInLeadingTrivia*/ true, /*includePrecedingTokenAtEndPosition*/ undefined, includeEndPosition, includeJsDocComment);
}
/** Get the token whose text contains the position */
function getTokenAtPositionWorker(sourceFile: SourceFile, position: number, allowPositionInLeadingTrivia: boolean, includeItemAtEndPosition: (n: Node) => boolean, includeJsDocComment: boolean): Node {
function getTokenAtPositionWorker(sourceFile: SourceFile, position: number, allowPositionInLeadingTrivia: boolean, includePrecedingTokenAtEndPosition: (n: Node) => boolean, includeEndPosition: boolean, includeJsDocComment: boolean): Node {
let current: Node = sourceFile;
outer: while (true) {
if (isToken(current)) {
@ -636,7 +643,7 @@ namespace ts {
// find the child that contains 'position'
for (const child of current.getChildren()) {
if (isJSDocNode(child) && !includeJsDocComment) {
if (!includeJsDocComment && isJSDocNode(child)) {
continue;
}
@ -646,13 +653,13 @@ namespace ts {
}
const end = child.getEnd();
if (position < end || (position === end && child.kind === SyntaxKind.EndOfFileToken)) {
if (position < end || (position === end && (child.kind === SyntaxKind.EndOfFileToken || includeEndPosition))) {
current = child;
continue outer;
}
else if (includeItemAtEndPosition && end === position) {
else if (includePrecedingTokenAtEndPosition && end === position) {
const previousToken = findPrecedingToken(position, sourceFile, child);
if (previousToken && includeItemAtEndPosition(previousToken)) {
if (previousToken && includePrecedingTokenAtEndPosition(previousToken)) {
return previousToken;
}
}
@ -707,7 +714,7 @@ namespace ts {
}
}
export function findPrecedingToken(position: number, sourceFile: SourceFile, startNode?: Node): Node {
export function findPrecedingToken(position: number, sourceFile: SourceFile, startNode?: Node, includeJsDoc?: boolean): Node {
return find(startNode || sourceFile);
function findRightmostToken(n: Node): Node {
@ -738,7 +745,7 @@ namespace ts {
// NOTE: JsxText is a weird kind of node that can contain only whitespaces (since they are not counted as trivia).
// if this is the case - then we should assume that token in question is located in previous child.
if (position < child.end && (nodeHasTokens(child) || child.kind === SyntaxKind.JsxText)) {
const start = child.getStart(sourceFile);
const start = child.getStart(sourceFile, includeJsDoc);
const lookInPreviousChild =
(start >= position) || // cursor in the leading trivia
(child.kind === SyntaxKind.JsxText && start === child.end); // whitespace only JsxText
@ -755,7 +762,7 @@ namespace ts {
}
}
Debug.assert(startNode !== undefined || n.kind === SyntaxKind.SourceFile);
Debug.assert(startNode !== undefined || n.kind === SyntaxKind.SourceFile || isJSDocCommentContainingNode(n));
// Here we know that none of child token nodes embrace the position,
// the only known case is when position is at the end of the file.
@ -901,42 +908,6 @@ namespace ts {
}
}
/**
* Get the corresponding JSDocTag node if the position is in a jsDoc comment
*/
export function getJsDocTagAtPosition(sourceFile: SourceFile, position: number): JSDocTag {
let node = ts.getTokenAtPosition(sourceFile, position, /*includeJsDocComment*/ false);
if (isToken(node)) {
switch (node.kind) {
case SyntaxKind.VarKeyword:
case SyntaxKind.LetKeyword:
case SyntaxKind.ConstKeyword:
// if the current token is var, let or const, skip the VariableDeclarationList
node = node.parent === undefined ? undefined : node.parent.parent;
break;
default:
node = node.parent;
break;
}
}
if (node) {
if (node.jsDoc) {
for (const jsDoc of node.jsDoc) {
if (jsDoc.tags) {
for (const tag of jsDoc.tags) {
if (tag.pos <= position && position <= tag.end) {
return tag;
}
}
}
}
}
}
return undefined;
}
function nodeHasTokens(n: Node): boolean {
// If we have a token or node that has a non-zero width, it must have tokens.
// Note, that getWidth() does not take trivia into account.
@ -1011,6 +982,12 @@ namespace ts {
return false;
}
export function cloneCompilerOptions(options: CompilerOptions): CompilerOptions {
const result = clone(options);
setConfigFileInOptions(result, options && options.configFile);
return result;
}
export function compareDataObjects(dst: any, src: any): boolean {
if (!dst || !src || Object.keys(dst).length !== Object.keys(src).length) {
return false;
@ -1335,30 +1312,6 @@ namespace ts {
return ensureScriptKind(fileName, scriptKind);
}
export function sanitizeConfigFile(configFileName: string, content: string) {
const options: TranspileOptions = {
fileName: "config.js",
compilerOptions: {
target: ScriptTarget.ES2015,
removeComments: true
},
reportDiagnostics: true
};
const { outputText, diagnostics } = ts.transpileModule("(" + content + ")", options);
// Because the content was wrapped in "()", the start position of each diagnostic needs to be shifted back by 1.
// Also, the emitted result will have "(" at the beginning and ");" at the end; we need to strip these
// as well.
const trimmedOutput = outputText.trim();
for (const diagnostic of diagnostics) {
diagnostic.start = diagnostic.start - 1;
}
const {config, error} = parseConfigFileTextToJson(configFileName, trimmedOutput.substring(1, trimmedOutput.length - 2), /*stripComments*/ false);
return {
configJsonObject: config || {},
diagnostics: error ? concatenate(diagnostics, [error]) : diagnostics
};
}
export function getFirstNonSpaceCharacterPosition(text: string, position: number) {
while (isWhiteSpaceLike(text.charCodeAt(position))) {
position += 1;

View File

@ -40,6 +40,7 @@
"end": 27,
"text": "name1"
},
"isBracketed": false,
"comment": "Description"
},
"length": 1,

View File

@ -40,6 +40,7 @@
"end": 32,
"text": "name1"
},
"isBracketed": false,
"comment": "Description"
},
"length": 1,

View File

@ -40,6 +40,7 @@
"end": 29,
"text": "name1"
},
"isBracketed": false,
"comment": ""
},
"length": 1,

View File

@ -40,6 +40,7 @@
"end": 29,
"text": "name1"
},
"isBracketed": false,
"comment": "Description text follows"
},
"length": 1,

View File

@ -40,6 +40,7 @@
"end": 20,
"text": "name1"
},
"isBracketed": false,
"comment": ""
},
"length": 1,

View File

@ -40,6 +40,7 @@
"end": 20,
"text": "name1"
},
"isBracketed": false,
"comment": "Description"
},
"length": 1,

View File

@ -30,6 +30,7 @@
"end": 18,
"text": "foo"
},
"isBracketed": false,
"comment": ""
},
"length": 1,

View File

@ -40,6 +40,7 @@
"end": 29,
"text": "name1"
},
"isBracketed": false,
"comment": ""
},
"1": {
@ -79,6 +80,7 @@
"end": 55,
"text": "name2"
},
"isBracketed": false,
"comment": ""
},
"length": 2,

Some files were not shown because too many files have changed in this diff