Merge branch 'master' into constContexts

# Conflicts:
#	src/compiler/checker.ts
Anders Hejlsberg 2019-01-29 12:29:30 -08:00
commit 08fe06f527
314 changed files with 74291 additions and 6924 deletions

View File

@ -16,7 +16,7 @@ Please fill in the *entire* template below.
-->
<!-- Please try to reproduce the issue with `typescript@next`. It may have already been fixed. -->
**TypeScript Version:** 3.3.0-dev.201xxxxx
**TypeScript Version:** 3.4.0-dev.201xxxxx
<!-- Search terms you tried before logging this (so others can find this issue more easily) -->
**Search Terms:**

View File

@ -121,6 +121,7 @@ Ken Howard <ken@simplicatedweb.com>
Kevin Lang <klang2012@gmail.com>
kimamula <kenji.imamula@gmail.com> # Kenji Imamula
Kitson Kelly <me@kitsonkelly.com>
Krishnadas Babu <krishnadas100033@gmail.com>
Klaus Meinhardt <klaus.meinhardt1@gmail.com>
Kyle Kelley <rgbkrk@gmail.com>
Lorant Pinter <lorant.pinter@prezi.com>

View File

@ -47,6 +47,16 @@ In general, things we find useful when reviewing suggestions are:
# Instructions for Contributing Code
## Tips
### Faster clones
The TypeScript repository is relatively large. To save some time, you might want to clone it without the repo's full history using `git clone --depth=1`.
### Using local builds
Run `gulp build` to build a version of the compiler/language service that reflects changes you've made. You can then run `node <repo-root>/built/local/tsc.js` in place of `tsc` in your project. For example, to run `tsc --watch` from within the root of the repository on a file called `test.ts`, you can run `node ./built/local/tsc.js --watch test.ts`.
## Contributing bug fixes
TypeScript is currently accepting contributions in the form of bug fixes. A bug must have an issue tracking it in the issue tracker that has been approved ("Milestone == Community") by the TypeScript team. Your pull request should include a link to the bug that you are fixing. If you've submitted a PR for a bug, please post a comment in the bug to avoid duplication of effort.

File diff suppressed because it is too large

View File

@ -8,10 +8,8 @@ const path = require("path");
const fold = require("travis-fold");
const ts = require("./lib/typescript");
const del = require("del");
const getDirSize = require("./scripts/build/getDirSize");
const { getDirSize, needsUpdate, flatten } = require("./scripts/build/utils");
const { base64VLQFormatEncode } = require("./scripts/build/sourcemaps");
const needsUpdate = require("./scripts/build/needsUpdate");
const { flatten } = require("./scripts/build/project");
// add node_modules to path so we don't need global modules, prefer the modules by adding them first
var nodeModulesPathPrefix = path.resolve("./node_modules/.bin/") + path.delimiter;
@ -361,7 +359,7 @@ file(ConfigFileFor.tsserverLibrary, [], function () {
compilerOptions: {
"removeComments": false,
"stripInternal": true,
"declarationMap": false,
"declaration": true,
"outFile": "tsserverlibrary.out.js"
}
})

View File

@ -1,5 +1,5 @@
[![Build Status](https://travis-ci.org/Microsoft/TypeScript.svg?branch=master)](https://travis-ci.org/Microsoft/TypeScript)
[![VSTS Build Status](https://typescript.visualstudio.com/_apis/public/build/definitions/cf7ac146-d525-443c-b23c-0d58337efebc/4/badge)](https://typescript.visualstudio.com/TypeScript/_build/latest?definitionId=4&view=logs)
[![VSTS Build Status](https://dev.azure.com/typescript/TypeScript/_apis/build/status/Typescript/node10)](https://dev.azure.com/typescript/TypeScript/_build/latest?definitionId=4&view=logs)
[![npm version](https://badge.fury.io/js/typescript.svg)](https://www.npmjs.com/package/typescript)
[![Downloads](https://img.shields.io/npm/dm/typescript.svg)](https://www.npmjs.com/package/typescript)

View File

@ -239,7 +239,7 @@ TypeScript is a trademark of Microsoft Corporation.
# <a name="1"/>1 Introduction
JavaScript applications such as web e-mail, maps, document editing, and collaboration tools are becoming an increasingly important part of the everyday computing. We designed TypeScript to meet the needs of the JavaScript programming teams that build and maintain large JavaScript programs. TypeScript helps programming teams to define interfaces between software components and to gain insight into the behavior of existing JavaScript libraries. TypeScript also enables teams to reduce naming conflicts by organizing their code into dynamically-loadable modules. TypeScript's optional type system enables JavaScript programmers to use highly-productive development tools and practices: static checking, symbol-based navigation, statement completion, and code re-factoring.
JavaScript applications such as web e-mail, maps, document editing, and collaboration tools are becoming an increasingly important part of the everyday computing. We designed TypeScript to meet the needs of the JavaScript programming teams that build and maintain large JavaScript programs. TypeScript helps programming teams to define interfaces between software components and to gain insight into the behavior of existing JavaScript libraries. TypeScript also enables teams to reduce naming conflicts by organizing their code into dynamically-loadable modules. TypeScript's optional type system enables JavaScript programmers to use highly-productive development tools and practices: static checking, symbol-based navigation, statement completion, and code refactoring.
TypeScript is a syntactic sugar for JavaScript. TypeScript syntax is a superset of ECMAScript 2015 (ES2015) syntax. Every JavaScript program is also a TypeScript program. The TypeScript compiler performs only file-local transformations on TypeScript programs and does not re-order variables declared in TypeScript. This leads to JavaScript output that closely matches the TypeScript input. TypeScript does not transform variable names, making tractable the direct debugging of emitted JavaScript. TypeScript optionally provides source maps, enabling source-level debugging. TypeScript tools typically emit JavaScript upon file save, preserving the test, edit, refresh cycle commonly used in JavaScript development.
@ -263,7 +263,7 @@ function f() {
}
```
To benefit from this inference, a programmer can use the TypeScript language service. For example, a code editor can incorporate the TypeScript language service and use the service to find the members of a string object as in the following screen shot.
To benefit from this inference, a programmer can use the TypeScript language service. For example, a code editor can incorporate the TypeScript language service and use the service to find the members of a string object as in the following screenshot.
&emsp;&emsp;![](images/image1.png)
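As a minimal TypeScript sketch of the inference described above (the function name is invented for this illustration):
```
// The initializer makes TypeScript infer `string` for `s`, so an editor backed
// by the language service can complete String members (charAt, slice, ...)
// after `s.` with no type annotations in the source.
function greeting() {
    var s = "hello";
    return s.toUpperCase(); // inferred return type of greeting: string
}
```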
@ -411,7 +411,7 @@ We mentioned above that the '$' function behaves differently depending on the ty
This signature denotes that a function may be passed as the parameter of the '$' function. When a function is passed to '$', the jQuery library will invoke that function when a DOM document is ready. Because TypeScript supports overloading, tools can use TypeScript to show all available function signatures with their documentation tips and to give the correct documentation once a function has been called with a particular signature.
A typical client would not need to add any additional typing but could just use a community-supplied typing to discover (through statement completion with documentation tips) and verify (through static checking) correct use of the library, as in the following screen shot.
A typical client would not need to add any additional typing but could just use a community-supplied typing to discover (through statement completion with documentation tips) and verify (through static checking) correct use of the library, as in the following screenshot.
&emsp;&emsp;![](images/image2.png)
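A simplified sketch of the two '$' signatures discussed above (not the real jQuery typings; the `text` member is included only for illustration):
```
declare function $(selector: string): { text(content: string): void };
declare function $(readyCallback: () => void): void;

$(() => {                        // resolves to the ready-callback overload
    $("#banner").text("ready");  // resolves to the selector overload
});
```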
@ -628,7 +628,7 @@ JavaScript implementations can use these explicit constants to generate efficien
An important goal of TypeScript is to provide accurate and straightforward types for existing JavaScript programming patterns. To that end, TypeScript includes generic types, discussed in the next section, and *overloading on string parameters*, the topic of this section.
JavaScript programming interfaces often include functions whose behavior is discriminated by a string constant passed to the function. The Document Object Model makes heavy use of this pattern. For example, the following screen shot shows that the 'createElement' method of the 'document' object has multiple signatures, some of which identify the types returned when specific strings are passed into the method.
JavaScript programming interfaces often include functions whose behavior is discriminated by a string constant passed to the function. The Document Object Model makes heavy use of this pattern. For example, the following screenshot shows that the 'createElement' method of the 'document' object has multiple signatures, some of which identify the types returned when specific strings are passed into the method.
&emsp;&emsp;![](images/image3.png)
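A simplified sketch of overloading on string parameters (the real lib.dom.d.ts declarations are more elaborate; `createElement` is declared free-standing here only for illustration):
```
declare function createElement(tagName: "canvas"): HTMLCanvasElement;
declare function createElement(tagName: "span"): HTMLSpanElement;
declare function createElement(tagName: string): HTMLElement;

var canvas = createElement("canvas"); // typed as HTMLCanvasElement
var other = createElement("div");     // no specific overload; falls back to HTMLElement
```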
@ -639,7 +639,7 @@ var span = document.createElement("span");
span.isMultiLine = false; // OK: HTMLSpanElement has isMultiline property
```
In the following screen shot, a programming tool combines information from overloading on string parameters with contextual typing to infer that the type of the variable 'e' is 'MouseEvent' and that therefore 'e' has a 'clientX' property.
In the following screenshot, a programming tool combines information from overloading on string parameters with contextual typing to infer that the type of the variable 'e' is 'MouseEvent' and that therefore 'e' has a 'clientX' property.
&emsp;&emsp;![](images/image4.png)
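A modern equivalent of the pattern in the screenshot, as a brief sketch: the "click" overload of `addEventListener` combines with contextual typing so that 'e' is a 'MouseEvent' without any annotation.
```
document.addEventListener("click", e => {
    console.log(e.clientX, e.clientY); // e is contextually typed as MouseEvent
});
```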

View File

@ -2,7 +2,7 @@
"name": "typescript",
"author": "Microsoft Corp.",
"homepage": "https://www.typescriptlang.org/",
"version": "3.3.0",
"version": "3.4.0",
"license": "Apache-2.0",
"description": "TypeScript is a language for application scale JavaScript development",
"keywords": [
@ -35,10 +35,8 @@
"@types/convert-source-map": "latest",
"@types/del": "latest",
"@types/glob": "latest",
"@types/gulp": "3.X",
"@types/gulp": "^4.0.5",
"@types/gulp-concat": "latest",
"@types/gulp-help": "latest",
"@types/gulp-if": "0.0.33",
"@types/gulp-newer": "latest",
"@types/gulp-rename": "0.0.33",
"@types/gulp-sourcemaps": "0.0.32",
@ -50,7 +48,6 @@
"@types/mocha": "latest",
"@types/node": "8.5.5",
"@types/q": "latest",
"@types/run-sequence": "latest",
"@types/source-map-support": "latest",
"@types/through2": "latest",
"@types/travis-fold": "latest",
@ -63,16 +60,12 @@
"del": "latest",
"fancy-log": "latest",
"fs-extra": "^6.0.1",
"gulp": "3.X",
"gulp-clone": "latest",
"gulp": "^4.0.0",
"gulp-concat": "latest",
"gulp-help": "latest",
"gulp-if": "latest",
"gulp-insert": "latest",
"gulp-newer": "latest",
"gulp-rename": "latest",
"gulp-sourcemaps": "latest",
"gulp-typescript": "latest",
"istanbul": "latest",
"jake": "latest",
"lodash": "4.17.10",
@ -86,7 +79,6 @@
"prex": "^0.4.3",
"q": "latest",
"remove-internal": "^2.9.2",
"run-sequence": "latest",
"source-map-support": "latest",
"through2": "latest",
"travis-fold": "latest",

View File

@ -1,24 +0,0 @@
// @ts-check
const merge2 = require("merge2");
const gulp = require("./gulp");
const rename = require("gulp-rename");
const rm = require("./rm");
const { localBaseline, refBaseline } = require("./tests");
module.exports = baselineAccept;
function baselineAccept(subfolder = "") {
return merge2(baselineCopy(subfolder), baselineDelete(subfolder));
}
function baselineCopy(subfolder = "") {
return gulp.src([`${localBaseline}${subfolder ? `${subfolder}/` : ``}**`, `!${localBaseline}${subfolder}/**/*.delete`], { base: localBaseline })
.pipe(gulp.dest(refBaseline));
}
function baselineDelete(subfolder = "") {
return gulp.src([`${localBaseline}${subfolder ? `${subfolder}/` : ``}**/*.delete`], { base: localBaseline, read: false })
.pipe(rm())
.pipe(rename({ extname: "" }))
.pipe(rm(refBaseline));
}

View File

@ -1,12 +1,10 @@
// @ts-check
const browserify = require("browserify");
const Vinyl = require("./vinyl");
const Vinyl = require("vinyl");
const { Transform } = require("stream");
const { streamFromFile } = require("./utils");
const { replaceContents } = require("./sourcemaps");
module.exports = browserifyFile;
/**
* @param {import("browserify").Options} [opts]
*/
@ -31,4 +29,5 @@ function browserifyFile(opts) {
}
}
});
}
}
exports.browserify = browserifyFile;

View File

@ -1,5 +0,0 @@
// @ts-check
// this just fixes the incorrect types for chalk :/
const chalk = /**@type {import("chalk").Chalk}*/(require("chalk").default || require("chalk"));
module.exports = chalk;

View File

@ -1,19 +0,0 @@
// @ts-check
const replace = require("./replace");
module.exports = exports = convertConstEnum;
/**
* This regexp exists to capture our const enums and replace them with normal enums in our public API
* - this is fine since we compile with preserveConstEnums, and ensures our consumers are not locked
* to the TS version they compile with.
*/
const constEnumCaptureRegexp = /^(\s*)(export )?const enum (\S+) {(\s*)$/gm;
const constEnumReplacement = "$1$2enum $3 {$4";
/**
* Converts `const enum` declarations in a .d.ts file into non-const `enum` declarations.
*/
function convertConstEnum() {
return replace(constEnumCaptureRegexp, constEnumReplacement);
}
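As a quick illustration of what this replacement does to a published declaration file (the `SyntaxKind` line is just an example input, not taken from the build):
```
const capture = /^(\s*)(export )?const enum (\S+) {(\s*)$/gm;
const replacement = "$1$2enum $3 {$4";
const input = "    export const enum SyntaxKind {";
console.log(input.replace(capture, replacement));
// => "    export enum SyntaxKind {"
```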

View File

@ -1,31 +0,0 @@
// @ts-check
module.exports = debounce;
/**
* @param {() => void} cb
* @param {number} timeout
* @param {DebounceOptions} [opts]
*
* @typedef DebounceOptions
* @property {number} [max]
*/
function debounce(cb, timeout, opts = {}) {
if (timeout < 10) timeout = 10;
let max = opts.max || 10;
if (max < timeout) max = timeout;
let minTimer;
let maxTimer;
return trigger;
function trigger() {
if (max > timeout && !maxTimer) maxTimer = setTimeout(done, max);
if (minTimer) clearTimeout(minTimer);
minTimer = setTimeout(done, timeout);
}
function done() {
if (maxTimer) maxTimer = void clearTimeout(maxTimer);
if (minTimer) minTimer = void clearTimeout(minTimer);
cb();
}
}

View File

@ -1,49 +0,0 @@
// @ts-check
const ts = require("../../lib/typescript");
const log = require("fancy-log"); // was `require("gulp-util").log (see https://github.com/gulpjs/gulp-util)
/** @type {FormatDiagnosticsHost} */
const formatDiagnosticsHost = exports.formatDiagnosticsHost = {
getCanonicalFileName: fileName => fileName,
getCurrentDirectory: () => process.cwd(),
getNewLine: () => ts.sys.newLine
};
/**
* @param {Diagnostic[]} diagnostics
* @param {{ cwd?: string, pretty?: boolean }} [options]
*/
function formatDiagnostics(diagnostics, options) {
return options && options.pretty
? ts.formatDiagnosticsWithColorAndContext(diagnostics, getFormatDiagnosticsHost(options && options.cwd))
: ts.formatDiagnostics(diagnostics, getFormatDiagnosticsHost(options && options.cwd));
}
exports.formatDiagnostics = formatDiagnostics;
/**
* @param {Diagnostic[]} diagnostics
* @param {{ cwd?: string }} [options]
*/
function reportDiagnostics(diagnostics, options) {
log(formatDiagnostics(diagnostics, { cwd: options && options.cwd, pretty: process.stdout.isTTY }));
}
exports.reportDiagnostics = reportDiagnostics;
/**
* @param {string | undefined} cwd
* @returns {FormatDiagnosticsHost}
*/
function getFormatDiagnosticsHost(cwd) {
if (!cwd || cwd === process.cwd()) return formatDiagnosticsHost;
return {
getCanonicalFileName: formatDiagnosticsHost.getCanonicalFileName,
getCurrentDirectory: () => cwd,
getNewLine: formatDiagnosticsHost.getNewLine
};
}
/**
* @typedef {import("../../lib/typescript").FormatDiagnosticsHost} FormatDiagnosticsHost
* @typedef {import("../../lib/typescript").Diagnostic} Diagnostic
*/
void 0;

View File

@ -1,58 +0,0 @@
// @ts-check
const cp = require("child_process");
const log = require("fancy-log"); // was `require("gulp-util").log (see https://github.com/gulpjs/gulp-util)
const isWin = /^win/.test(process.platform);
const chalk = require("./chalk");
const { CancellationToken, CancelError } = require("prex");
module.exports = exec;
/**
* Executes the provided command once with the supplied arguments.
* @param {string} cmd
* @param {string[]} args
* @param {ExecOptions} [options]
*
* @typedef ExecOptions
* @property {boolean} [ignoreExitCode]
* @property {import("prex").CancellationToken} [cancelToken]
*/
function exec(cmd, args, options = {}) {
return /**@type {Promise<{exitCode: number}>}*/(new Promise((resolve, reject) => {
const { ignoreExitCode, cancelToken = CancellationToken.none } = options;
cancelToken.throwIfCancellationRequested();
// TODO (weswig): Update child_process types to add windowsVerbatimArguments to the type definition
const subshellFlag = isWin ? "/c" : "-c";
const command = isWin ? [possiblyQuote(cmd), ...args] : [`${cmd} ${args.join(" ")}`];
log(`> ${chalk.green(cmd)} ${args.join(" ")}`);
const proc = cp.spawn(isWin ? "cmd" : "/bin/sh", [subshellFlag, ...command], { stdio: "inherit", windowsVerbatimArguments: true });
const registration = cancelToken.register(() => {
log(`${chalk.red("killing")} '${chalk.green(cmd)} ${args.join(" ")}'...`);
proc.kill("SIGINT");
proc.kill("SIGTERM");
reject(new CancelError());
});
proc.on("exit", exitCode => {
registration.unregister();
if (exitCode === 0 || ignoreExitCode) {
resolve({ exitCode });
}
else {
reject(new Error(`Process exited with code: ${exitCode}`));
}
});
proc.on("error", error => {
registration.unregister();
reject(error);
});
}));
}
/**
* @param {string} cmd
*/
function possiblyQuote(cmd) {
return cmd.indexOf(" ") >= 0 ? `"${cmd}"` : cmd;
}

View File

@ -1,46 +0,0 @@
// @ts-check
module.exports = finished;
/**
* @param {NodeJS.ReadableStream | NodeJS.WritableStream} stream
* @returns {Promise<void>}
*/
function finished(stream) {
return new Promise((resolve, reject) => {
const readable = "readable" in stream && stream.readable;
const writable = "writable" in stream && stream.writable;
let countdown = 0;
const cleanup = () => {
if (readable) stream.removeListener("end", signal);
if (writable) stream.removeListener("finish", signal);
stream.removeListener("error", onerror);
};
const signal = () => {
if (countdown > 0) {
countdown--;
if (countdown === 0) {
cleanup();
resolve();
}
}
};
const onerror = (error) => {
if (countdown > 0) {
countdown = 0;
cleanup();
reject(error);
}
};
stream.once("error", onerror);
if (readable) {
countdown++;
stream.once("end", signal);
}
if (writable) {
countdown++;
stream.once("finish", signal);
}
if (countdown === 0) signal();
});
}

View File

@ -1,12 +0,0 @@
// @ts-check
const log = require("fancy-log"); // was `require("gulp-util").log (see https://github.com/gulpjs/gulp-util)
module.exports = getDiffTool;
function getDiffTool() {
const program = process.env.DIFF;
if (!program) {
log.warn("Add the 'DIFF' environment variable to the path of the program you want to use.");
process.exit(1);
}
return program;
}

View File

@ -1,23 +0,0 @@
// @ts-check
const { lstatSync, readdirSync } = require("fs");
const { join } = require("path");
/**
* Find the size of a directory recursively.
* Symbolic links can cause a loop.
* @param {string} root
* @returns {number} bytes
*/
function getDirSize(root) {
const stats = lstatSync(root);
if (!stats.isDirectory()) {
return stats.size;
}
return readdirSync(root)
.map(file => getDirSize(join(root, file)))
.reduce((acc, num) => acc + num, 0);
}
module.exports = getDirSize;

View File

@ -1,149 +0,0 @@
// @ts-check
const path = require("path");
const child_process = require("child_process");
const fs = require("fs");
const tsc = require("gulp-typescript");
const Vinyl = require("vinyl");
const { Duplex, Readable } = require("stream");
const protocol = require("./protocol");
/**
* @param {string | undefined} tsConfigFileName
* @param {tsc.Settings} settings
* @param {CreateProjectOptions} options
*
* @typedef CreateProjectOptions
* @property {string} [typescript]
* @property {boolean} [parse]
*/
function createProject(tsConfigFileName, settings, options) {
settings = Object.assign({}, settings);
options = Object.assign({}, options);
if (settings.typescript) throw new Error();
const localSettings = Object.assign({}, settings);
if (options.typescript) {
options.typescript = path.resolve(options.typescript);
localSettings.typescript = require(options.typescript);
}
const project = tsConfigFileName === undefined ? tsc.createProject(localSettings) : tsc.createProject(tsConfigFileName, localSettings);
const wrappedProject = /** @type {tsc.Project} */((reporter = tsc.reporter.defaultReporter()) => {
const ts = project.typescript;
const proc = child_process.fork(require.resolve("./worker.js"), [], {
// Prevent errors when debugging gulpfile due to the same debug port being passed to forked children.
execArgv: []
});
/** @type {Map<string, import("vinyl")>} */
const inputs = new Map();
/** @type {Map<string, *>} */
const sourceFiles = new Map();
/** @type {protocol.SourceFileHost & protocol.VinylHost} */
const host = {
getVinyl(path) { return inputs.get(path); },
getSourceFile(fileName) { return sourceFiles.get(fileName); },
createSourceFile(fileName, text, languageVersion) {
if (text === undefined) {
text = fs.readFileSync(fileName, "utf8");
}
/** @type {protocol.SourceFile} */
let file;
if (options.parse) {
file = ts.createSourceFile(fileName, text, languageVersion, /*setParentNodes*/ true);
}
else {
// NOTE: the built-in reporters in gulp-typescript don't actually need a full
// source file, so save time by faking one unless requested.
file = /**@type {protocol.SourceFile}*/({
pos: 0,
end: text.length,
kind: ts.SyntaxKind.SourceFile,
fileName,
text,
languageVersion,
statements: /**@type {*} */([]),
endOfFileToken: { pos: text.length, end: text.length, kind: ts.SyntaxKind.EndOfFileToken },
amdDependencies: /**@type {*} */([]),
referencedFiles: /**@type {*} */([]),
typeReferenceDirectives: /**@type {*} */([]),
libReferenceDirectives: /**@type {*} */([]),
languageVariant: ts.LanguageVariant.Standard,
isDeclarationFile: /\.d\.ts$/.test(fileName),
hasNoDefaultLib: /[\\/]lib\.[^\\/]+\.d\.ts$/.test(fileName)
});
}
sourceFiles.set(fileName, file);
return file;
}
};
/** @type {Duplex & { js?: Readable, dts?: Readable }} */
const compileStream = new Duplex({
objectMode: true,
read() {},
/** @param {*} file */
write(file, _encoding, callback) {
inputs.set(file.path, file);
proc.send(protocol.message.write(file));
callback();
},
final(callback) {
proc.send(protocol.message.final());
callback();
}
});
const jsStream = compileStream.js = new Readable({
objectMode: true,
read() {}
});
const dtsStream = compileStream.dts = new Readable({
objectMode: true,
read() {}
});
proc.send(protocol.message.createProject(tsConfigFileName, settings, options));
proc.on("message", (/**@type {protocol.WorkerMessage}*/ message) => {
switch (message.method) {
case "write": {
const file = protocol.vinylFromJson(message.params);
compileStream.push(file);
if (file.path.endsWith(".d.ts")) {
dtsStream.push(file);
}
else {
jsStream.push(file);
}
break;
}
case "final": {
compileStream.push(null);
jsStream.push(null);
dtsStream.push(null);
proc.kill(); // TODO(rbuckton): pool workers? may not be feasible due to gulp-typescript holding onto memory
break;
}
case "error": {
const error = protocol.errorFromJson(message.params);
compileStream.emit("error", error);
proc.kill(); // TODO(rbuckton): pool workers? may not be feasible due to gulp-typescript holding onto memory
break;
}
case "reporter.error": {
if (reporter.error) {
const error = protocol.typeScriptErrorFromJson(message.params, host);
reporter.error(error, project.typescript);
}
break;
}
case "reporter.finish": {
if (reporter.finish) {
reporter.finish(message.params);
}
}
}
});
return /** @type {*} */(compileStream);
});
return Object.assign(wrappedProject, project);
}
exports.createProject = createProject;

View File

@ -1,281 +0,0 @@
// @ts-check
const Vinyl = require("vinyl");
/**
* @param {File} file
* @returns {*}
*/
function vinylToJson(file) {
if (file.isStream()) throw new TypeError("Streams not supported.");
return {
path: file.path,
cwd: file.cwd,
base: file.base,
contents: file.isBuffer() ? file.contents.toString("utf8") : undefined,
sourceMap: file.sourceMap
};
}
exports.vinylToJson = vinylToJson;
/**
* @param {*} json
* @returns {File}
*/
function vinylFromJson(json) {
return new Vinyl({
path: json.path,
cwd: json.cwd,
base: json.base,
contents: typeof json.contents === "string" ? Buffer.from(json.contents, "utf8") : undefined,
sourceMap: json.sourceMap
});
}
exports.vinylFromJson = vinylFromJson;
/**
* @param {Error} error
* @returns {*}
*/
function errorToJson(error) {
return {
name: error.name,
message: error.message,
stack: error.stack
};
}
exports.errorToJson = errorToJson;
/**
* @param {*} json
* @returns {Error}
*/
function errorFromJson(json) {
const error = new Error();
error.name = json.name;
error.message = json.message;
error.stack = json.stack;
return error;
}
exports.errorFromJson = errorFromJson;
/**
* @param {TypeScriptError} error
* @returns {*}
*/
function typeScriptErrorToJson(error) {
return Object.assign({}, errorToJson(error), {
fullFilename: error.fullFilename,
relativeFilename: error.relativeFilename,
file: error.file && { path: error.file.path },
tsFile: error.tsFile && sourceFileToJson(error.tsFile),
diagnostic: diagnosticToJson(error.diagnostic),
startPosition: error.startPosition,
endPosition: error.endPosition
});
}
exports.typeScriptErrorToJson = typeScriptErrorToJson;
/**
* @param {*} json
* @param {SourceFileHost & VinylHost} host
* @returns {TypeScriptError}
*/
function typeScriptErrorFromJson(json, host) {
const error = /**@type {TypeScriptError}*/(errorFromJson(json));
error.fullFilename = json.fullFilename;
error.relativeFilename = json.relativeFilename;
error.file = json.file && host.getVinyl(json.file.path);
error.tsFile = json.tsFile && sourceFileFromJson(json.tsFile, host);
error.diagnostic = diagnosticFromJson(json.diagnostic, host);
error.startPosition = json.startPosition;
error.endPosition = json.endPosition;
return error;
}
exports.typeScriptErrorFromJson = typeScriptErrorFromJson;
/**
* @param {SourceFile} file
* @returns {*}
*/
function sourceFileToJson(file) {
return {
fileName: file.fileName,
text: file.text,
languageVersion: file.languageVersion
};
}
exports.sourceFileToJson = sourceFileToJson;
/**
* @param {*} json
* @param {SourceFileHost} host
*/
function sourceFileFromJson(json, host) {
return host.getSourceFile(json.fileName)
|| host.createSourceFile(json.fileName, json.text, json.languageVersion);
}
exports.sourceFileFromJson = sourceFileFromJson;
/**
* @param {Diagnostic} diagnostic
* @returns {*}
*/
function diagnosticToJson(diagnostic) {
return Object.assign({}, diagnosticRelatedInformationToJson(diagnostic), {
category: diagnostic.category,
code: diagnostic.code,
source: diagnostic.source,
relatedInformation: diagnostic.relatedInformation && diagnostic.relatedInformation.map(diagnosticRelatedInformationToJson)
});
}
exports.diagnosticToJson = diagnosticToJson;
/**
* @param {*} json
* @param {SourceFileHost} host
* @returns {Diagnostic}
*/
function diagnosticFromJson(json, host) {
return Object.assign({}, diagnosticRelatedInformationFromJson(json, host), {
category: json.category,
code: json.code,
source: json.source,
relatedInformation: json.relatedInformation && json.relatedInformation.map(json => diagnosticRelatedInformationFromJson(json, host))
});
}
exports.diagnosticFromJson = diagnosticFromJson;
/**
* @param {DiagnosticRelatedInformation} diagnostic
* @returns {*}
*/
function diagnosticRelatedInformationToJson(diagnostic) {
return {
file: diagnostic.file && { fileName: diagnostic.file.fileName },
start: diagnostic.start,
length: diagnostic.length,
messageText: diagnostic.messageText
};
}
exports.diagnosticRelatedInformationToJson = diagnosticRelatedInformationToJson;
/**
* @param {*} json
* @param {SourceFileHost} host
* @returns {DiagnosticRelatedInformation}
*/
function diagnosticRelatedInformationFromJson(json, host) {
return {
file: json.file && sourceFileFromJson(json.file, host),
start: json.start,
length: json.length,
messageText: json.messageText,
category: json.category,
code: json.code
};
}
exports.diagnosticRelatedInformationFromJson = diagnosticRelatedInformationFromJson;
exports.message = {};
/**
* @param {string | undefined} tsConfigFileName
* @param {import("gulp-typescript").Settings} settings
* @param {Object} options
* @param {string} [options.typescript]
* @returns {CreateProjectMessage}
*
* @typedef CreateProjectMessage
* @property {"createProject"} method
* @property {CreateProjectParams} params
*
* @typedef CreateProjectParams
* @property {string | undefined} tsConfigFileName
* @property {import("gulp-typescript").Settings} settings
* @property {CreateProjectOptions} options
*
* @typedef CreateProjectOptions
* @property {string} [typescript]
*/
exports.message.createProject = function(tsConfigFileName, settings, options) {
return { method: "createProject", params: { tsConfigFileName, settings, options } };
};
/**
* @param {File} file
* @returns {WriteMessage}
*
* @typedef WriteMessage
* @property {"write"} method
* @property {*} params
*/
exports.message.write = function(file) {
return { method: "write", params: vinylToJson(file) };
};
/**
* @returns {FinalMessage}
*
* @typedef FinalMessage
* @property {"final"} method
*/
exports.message.final = function() {
return { method: "final" };
};
/**
* @param {Error} error
* @returns {ErrorMessage}
*
* @typedef ErrorMessage
* @property {"error"} method
* @property {*} params
*/
exports.message.error = function(error) {
return { method: "error", params: errorToJson(error) };
};
exports.message.reporter = {};
/**
* @param {TypeScriptError} error
* @returns {reporter.ErrorMessage}
*
* @typedef reporter.ErrorMessage
* @property {"reporter.error"} method
* @property {*} params
*/
exports.message.reporter.error = function(error) {
return { method: "reporter.error", params: typeScriptErrorToJson(error) };
};
/**
* @param {*} results
* @returns {reporter.FinishMessage}
*
* @typedef reporter.FinishMessage
* @property {"reporter.finish"} method
* @property {*} params
*/
exports.message.reporter.finish = function(results) {
return { method: "reporter.finish", params: results };
};
/**
* @typedef {import("vinyl")} File
* @typedef {typeof import("typescript")} TypeScriptModule
* @typedef {import("typescript").SourceFile} SourceFile
* @typedef {import("typescript").Diagnostic} Diagnostic
* @typedef {import("typescript").DiagnosticRelatedInformation} DiagnosticRelatedInformation
* @typedef {import("gulp-typescript").reporter.TypeScriptError} TypeScriptError
* @typedef {WriteMessage | FinalMessage | CreateProjectMessage} HostMessage
* @typedef {WriteMessage | FinalMessage | ErrorMessage | reporter.ErrorMessage | reporter.FinishMessage} WorkerMessage
*
* @typedef SourceFileHost
* @property {(fileName: string) => SourceFile | undefined} getSourceFile
* @property {(fileName: string, text: string, languageVersion: number) => SourceFile} createSourceFile
*
* @typedef VinylHost
* @property {(path: string) => File | undefined} getVinyl
*/
void 0;

View File

@ -1,79 +0,0 @@
// @ts-check
const fs = require("fs");
const tsc = require("gulp-typescript");
const { Readable, Writable } = require("stream");
const protocol = require("./protocol");
/** @type {tsc.Project} */
let project;
/** @type {Readable} */
let inputStream;
/** @type {Writable} */
let outputStream;
/** @type {tsc.CompileStream} */
let compileStream;
process.on("message", (/**@type {protocol.HostMessage}*/ message) => {
try {
switch (message.method) {
case "createProject": {
const { tsConfigFileName, settings, options } = message.params;
if (options.typescript) {
settings.typescript = require(options.typescript);
}
project = tsConfigFileName === undefined
? tsc.createProject(settings)
: tsc.createProject(tsConfigFileName, settings);
inputStream = new Readable({
objectMode: true,
read() {}
});
outputStream = new Writable({
objectMode: true,
/**
* @param {*} file
*/
write(file, _, callback) {
process.send(protocol.message.write(file));
callback();
},
final(callback) {
process.send(protocol.message.final());
callback();
}
});
compileStream = project({
error(error) { process.send(protocol.message.reporter.error(error)); },
finish(results) { process.send(protocol.message.reporter.finish(results)); }
});
compileStream.on("error", error => {
process.send(protocol.message.error(error));
});
outputStream.on("error", () => {
/* do nothing */
});
inputStream.pipe(compileStream).pipe(outputStream);
break;
}
case "write": {
const file = protocol.vinylFromJson(message.params);
if (!file.isBuffer()) file.contents = fs.readFileSync(file.path);
inputStream.push(file);
break;
}
case "final": {
inputStream.push(null);
break;
}
}
}
catch (e) {
process.send(protocol.message.error(e));
}
});

View File

@ -1,8 +0,0 @@
// @ts-check
/**
* @typedef {import("gulp").Gulp} Gulp
* @typedef {import("gulp-help").GulpHelp} GulpHelp
* @typedef {GulpHelp & { Gulp: new () => Gulp }} DotGulpModule
* @type {DotGulpModule}
*/
module.exports = require("gulp-help")(require("gulp"));

View File

@ -1,30 +0,0 @@
// @ts-check
const readJson = require("./readJson");
const path = require("path");
const gulp = require("./gulp");
const newer = require("gulp-newer");
const concat = require("gulp-concat");
const merge2 = require("merge2");
/** @type {{ libs: string[], paths?: Record<string, string>, sources?: Record<string, string[]> }} */
const libraries = readJson("./src/lib/libs.json");
const libs = libraries.libs.map(lib => {
const relativeSources = ["header.d.ts"].concat(libraries.sources && libraries.sources[lib] || [lib + ".d.ts"]);
const relativeTarget = libraries.paths && libraries.paths[lib] || ("lib." + lib + ".d.ts");
const sources = relativeSources.map(s => path.posix.join("src/lib", s));
const target = `built/local/${relativeTarget}`;
return { target, relativeTarget, sources };
});
exports.libraryTargets = libs.map(lib => lib.target);
/**
* @param {string[]} prepends
*/
function generateLibs(prepends) {
return merge2(libs.map(({ sources, target, relativeTarget }) =>
gulp.src(prepends.concat(sources))
.pipe(newer(target))
.pipe(concat(relativeTarget, { newLine: "\n\n" }))
.pipe(gulp.dest("built/local"))));
}
exports.generateLibs = generateLibs;

View File

@ -1,14 +0,0 @@
// @ts-check
const mkdirp = require("mkdirp");
module.exports = exports = mkdirpAsync;
/**
* @param {string} dir
* @param {mkdirp.Mode | mkdirp.Options} [opts]
*/
function mkdirpAsync(dir, opts) {
return new Promise((resolve, reject) => mkdirp(dir, opts, (err, made) => err ? reject(err) : resolve(made)));
}
exports.sync = mkdirp.sync;

View File

@ -1,72 +0,0 @@
// @ts-check
const fs = require("fs");
module.exports = needsUpdate;
/**
* @param {string | string[]} source
* @param {string | string[]} dest
* @returns {boolean}
*/
function needsUpdate(source, dest) {
if (typeof source === "string" && typeof dest === "string") {
if (fs.existsSync(dest)) {
const {mtime: outTime} = fs.statSync(dest);
const {mtime: inTime} = fs.statSync(source);
if (+inTime <= +outTime) {
return false;
}
}
}
else if (typeof source === "string" && typeof dest !== "string") {
const {mtime: inTime} = fs.statSync(source);
for (const filepath of dest) {
if (fs.existsSync(filepath)) {
const {mtime: outTime} = fs.statSync(filepath);
if (+inTime > +outTime) {
return true;
}
}
else {
return true;
}
}
return false;
}
else if (typeof source !== "string" && typeof dest === "string") {
if (fs.existsSync(dest)) {
const {mtime: outTime} = fs.statSync(dest);
for (const filepath of source) {
if (fs.existsSync(filepath)) {
const {mtime: inTime} = fs.statSync(filepath);
if (+inTime > +outTime) {
return true;
}
}
else {
return true;
}
}
return false;
}
}
else if (typeof source !== "string" && typeof dest !== "string") {
for (let i = 0; i < source.length; i++) {
if (!dest[i]) {
continue;
}
if (fs.existsSync(dest[i])) {
const {mtime: outTime} = fs.statSync(dest[i]);
const {mtime: inTime} = fs.statSync(source[i]);
if (+inTime > +outTime) {
return true;
}
}
else {
return true;
}
}
return false;
}
return true;
}

View File

@ -4,7 +4,7 @@ const os = require("os");
/** @type {CommandLineOptions} */
module.exports = minimist(process.argv.slice(2), {
boolean: ["debug", "dirty", "inspect", "light", "colors", "lint", "lkg", "soft", "fix", "failed", "keepFailed"],
boolean: ["debug", "dirty", "inspect", "light", "colors", "lint", "lkg", "soft", "fix", "failed", "keepFailed", "force", "built"],
string: ["browser", "tests", "host", "reporter", "stackTraceLimit", "timeout"],
alias: {
"b": "browser",
@ -15,7 +15,7 @@ module.exports = minimist(process.argv.slice(2), {
"r": "reporter",
"c": "colors", "color": "colors",
"w": "workers",
"f": "fix",
"f": "fix"
},
default: {
soft: false,
@ -34,11 +34,16 @@ module.exports = minimist(process.argv.slice(2), {
workers: process.env.workerCount || os.cpus().length,
failed: false,
keepFailed: false,
lkg: false,
dirty: false
lkg: true,
dirty: false,
built: false
}
});
if (module.exports.built) {
module.exports.lkg = false;
}
/**
* @typedef TypedOptions
* @property {boolean} debug
@ -48,6 +53,7 @@ module.exports = minimist(process.argv.slice(2), {
* @property {boolean} colors
* @property {boolean} lint
* @property {boolean} lkg
* @property {boolean} built
* @property {boolean} soft
* @property {boolean} fix
* @property {string} browser

View File

@ -1,20 +1,18 @@
// @ts-check
const stream = require("stream");
const Vinyl = require("./vinyl");
const Vinyl = require("vinyl");
const ts = require("../../lib/typescript");
const fs = require("fs");
const { base64VLQFormatEncode } = require("./sourcemaps");
module.exports = exports = prepend;
/**
* @param {string | ((file: Vinyl) => string)} data
* @param {string | ((file: import("vinyl")) => string)} data
*/
function prepend(data) {
return new stream.Transform({
objectMode: true,
/**
* @param {string | Buffer | Vinyl} input
* @param {string | Buffer | import("vinyl")} input
* @param {(error: Error, data?: any) => void} cb
*/
transform(input, _, cb) {
@ -56,11 +54,11 @@ function prepend(data) {
exports.prepend = prepend;
/**
* @param {string | ((file: Vinyl) => string)} file
* @param {string | ((file: import("vinyl")) => string)} file
*/
function prependFile(file) {
const data = typeof file === "string" ? fs.readFileSync(file, "utf8") :
vinyl => fs.readFileSync(file(vinyl), "utf8");
return prepend(data)
}
exports.file = prependFile;
exports.prependFile = prependFile;

File diff suppressed because it is too large

scripts/build/projects.js (new file, 60 lines)
View File

@ -0,0 +1,60 @@
// @ts-check
const { exec, Debouncer } = require("./utils");
class ProjectQueue {
/**
* @param {(projects: string[], lkg: boolean, force: boolean) => Promise<any>} action
*/
constructor(action) {
/** @type {{ lkg: boolean, force: boolean, projects?: string[], debouncer: Debouncer }[]} */
this._debouncers = [];
this._action = action;
}
/**
* @param {string} project
* @param {object} options
*/
enqueue(project, { lkg = true, force = false } = {}) {
let entry = this._debouncers.find(entry => entry.lkg === lkg && entry.force === force);
if (!entry) {
const debouncer = new Debouncer(100, async () => {
const projects = entry.projects;
if (projects) {
entry.projects = undefined;
await this._action(projects, lkg, force);
}
});
this._debouncers.push(entry = { lkg, force, debouncer });
}
if (!entry.projects) entry.projects = [];
entry.projects.push(project);
return entry.debouncer.enqueue();
}
}
const projectBuilder = new ProjectQueue((projects, lkg, force) => exec(process.execPath, [lkg ? "./lib/tsc" : "./built/local/tsc", "-b", ...(force ? ["--force"] : []), ...projects], { hidePrompt: true }));
/**
* @param {string} project
* @param {object} [options]
* @param {boolean} [options.lkg=true]
* @param {boolean} [options.force=false]
*/
exports.buildProject = (project, { lkg, force } = {}) => projectBuilder.enqueue(project, { lkg, force });
const projectCleaner = new ProjectQueue((projects, lkg) => exec(process.execPath, [lkg ? "./lib/tsc" : "./built/local/tsc", "-b", "--clean", ...projects], { hidePrompt: true }));
/**
* @param {string} project
*/
exports.cleanProject = (project) => projectCleaner.enqueue(project);
const projectWatcher = new ProjectQueue((projects) => exec(process.execPath, ["./lib/tsc", "-b", "--watch", ...projects], { hidePrompt: true }));
/**
* @param {string} project
* @param {object} [options]
* @param {boolean} [options.lkg=true]
*/
exports.watchProject = (project, { lkg } = {}) => projectWatcher.enqueue(project, { lkg });
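A brief usage sketch of the exports above (task names and project paths are invented for illustration; assumes the repository's Node/gulp build environment):
```
const gulp = require("gulp");
const { buildProject, cleanProject } = require("./scripts/build/projects");

// Calls that land within the queue's 100 ms debounce window are batched into a
// single `tsc -b` invocation per distinct { lkg, force } combination.
gulp.task("services", () => buildProject("src/services", { lkg: true }));
gulp.task("clean-services", () => cleanProject("src/services"));
```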

View File

@ -1,17 +0,0 @@
// @ts-check
const ts = require("../../lib/typescript");
const fs = require("fs");
const { reportDiagnostics } = require("./diagnostics");
module.exports = exports = readJson;
/** @param {string} jsonPath */
function readJson(jsonPath) {
const jsonText = fs.readFileSync(jsonPath, "utf8");
const result = ts.parseConfigFileTextToJson(jsonPath, jsonText);
if (result.error) {
reportDiagnostics([result.error]);
throw new Error("An error occurred during parse.");
}
return result.config;
}

View File

@ -1,12 +0,0 @@
// @ts-check
const insert = require("gulp-insert");
/**
* @param {string | RegExp} searchValue
* @param {string | ((...args: string[]) => string)} replacer
*/
function replace(searchValue, replacer) {
return insert.transform(content => content.replace(searchValue, /**@type {string}*/(replacer)));
}
module.exports = replace;

View File

@ -1,84 +0,0 @@
// @ts-check
const { Duplex } = require("stream");
const path = require("path");
const Vinyl = require("vinyl");
const del = require("del");
module.exports = rm;
/**
* @param {string | ((file: File) => string) | Options} [dest]
* @param {Options} [opts]
*/
function rm(dest, opts) {
if (dest && typeof dest === "object") opts = dest, dest = undefined;
let failed = false;
const cwd = path.resolve(opts && opts.cwd || process.cwd());
/** @type {{ file: File, deleted: boolean, promise: Promise<any>, cb: Function }[]} */
const pending = [];
const processDeleted = () => {
if (failed) return;
while (pending.length && pending[0].deleted) {
const { file, cb } = pending.shift();
duplex.push(file);
cb();
}
};
const duplex = new Duplex({
objectMode: true,
/**
* @param {string|Buffer|File} file
*/
write(file, _, cb) {
if (failed) return;
if (typeof file === "string" || Buffer.isBuffer(file)) return cb(new Error("Only Vinyl files are supported."));
const basePath = typeof dest === "string" ? path.resolve(cwd, dest) :
typeof dest === "function" ? path.resolve(cwd, dest(file)) :
file.base;
const filePath = path.resolve(basePath, file.relative);
file.cwd = cwd;
file.base = basePath;
file.path = filePath;
const entry = {
file,
deleted: false,
cb,
promise: del(file.path).then(() => {
entry.deleted = true;
processDeleted();
}, err => {
failed = true;
pending.length = 0;
cb(err);
})
};
pending.push(entry);
},
final(cb) {
processDeleted();
if (pending.length) {
Promise
.all(pending.map(entry => entry.promise))
.then(() => processDeleted())
.then(() => cb(), cb);
return;
}
cb();
},
read() {
}
});
return duplex;
}
/**
* @typedef {import("vinyl")} File
*
* @typedef Options
* @property {string} [cwd]
*/
void 0;

View File

@ -1,12 +1,13 @@
// @ts-check
/// <reference path="../types/ambient.d.ts" />
const path = require("path");
const Vinyl = require("./vinyl");
const convertMap = require("convert-source-map");
const applySourceMap = require("vinyl-sourcemaps-apply");
const through2 = require("through2");
/**
* @param {Vinyl} input
* @param {import("vinyl")} input
* @param {string | Buffer} contents
* @param {string | RawSourceMap} [sourceMap]
*/
@ -16,13 +17,13 @@ function replaceContents(input, contents, sourceMap) {
if (input.sourceMap) {
output.sourceMap = typeof input.sourceMap === "string" ? /**@type {RawSourceMap}*/(JSON.parse(input.sourceMap)) : input.sourceMap;
if (typeof sourceMap === "string") {
sourceMap = /**@type {RawSourceMap}*/(JSON.parse(sourceMap));
sourceMap = /** @type {RawSourceMap} */(JSON.parse(sourceMap));
}
else if (sourceMap === undefined) {
const stringContents = typeof contents === "string" ? contents : contents.toString("utf8");
const newSourceMapConverter = convertMap.fromSource(stringContents);
if (newSourceMapConverter) {
sourceMap = /**@type {RawSourceMap}*/(newSourceMapConverter.toObject());
sourceMap = /** @type {RawSourceMap} */(newSourceMapConverter.toObject());
output.contents = new Buffer(convertMap.removeMapFileComments(stringContents), "utf8");
}
}
@ -31,7 +32,7 @@ function replaceContents(input, contents, sourceMap) {
const base = input.base || cwd;
const sourceRoot = output.sourceMap.sourceRoot;
makeAbsoluteSourceMap(cwd, base, output.sourceMap);
makeAbsoluteSourceMap(cwd, base, sourceMap);
makeAbsoluteSourceMap(cwd, base, /** @type {RawSourceMap} */(sourceMap));
applySourceMap(output, sourceMap);
makeRelativeSourceMap(cwd, base, sourceRoot, output.sourceMap);
}
@ -44,10 +45,12 @@ function replaceContents(input, contents, sourceMap) {
exports.replaceContents = replaceContents;
function removeSourceMaps() {
return through2.obj((/**@type {Vinyl}*/file, _, cb) => {
if (file.sourceMap && file.isBuffer()) {
return through2.obj((/**@type {import("vinyl")}*/file, _, cb) => {
if (file.isBuffer()) {
file.contents = Buffer.from(convertMap.removeMapFileComments(file.contents.toString("utf8")), "utf8");
file.sourceMap = undefined;
if (file.sourceMap) {
file.sourceMap = undefined;
}
}
cb(null, file);
});
@ -59,7 +62,7 @@ exports.removeSourceMaps = removeSourceMaps;
* @param {string | undefined} base
* @param {RawSourceMap} sourceMap
*
* @typedef RawSourceMap
* @typedef {object} RawSourceMap
* @property {string} version
* @property {string} file
* @property {string} [sourceRoot]

View File

@ -1,16 +1,16 @@
// @ts-check
const gulp = require("./gulp");
const gulp = require("gulp");
const del = require("del");
const fs = require("fs");
const os = require("os");
const path = require("path");
const mkdirP = require("./mkdirp");
const mkdirP = require("mkdirp");
const log = require("fancy-log");
const cmdLineOptions = require("./options");
const exec = require("./exec");
const log = require("fancy-log"); // was `require("gulp-util").log (see https://github.com/gulpjs/gulp-util)
const { CancellationToken } = require("prex");
const mochaJs = require.resolve("mocha/bin/_mocha");
const { exec } = require("./utils");
const mochaJs = require.resolve("mocha/bin/_mocha");
exports.localBaseline = "tests/baselines/local/";
exports.refBaseline = "tests/baselines/reference/";
exports.localRwcBaseline = "internal/baselines/rwc/local";
@ -27,7 +27,6 @@ exports.localTest262Baseline = "internal/baselines/test262/local";
async function runConsoleTests(runJs, defaultReporter, runInParallel, watchMode, cancelToken = CancellationToken.none) {
let testTimeout = cmdLineOptions.timeout;
let tests = cmdLineOptions.tests;
const lintFlag = cmdLineOptions.lint;
const debug = cmdLineOptions.debug;
const inspect = cmdLineOptions.inspect;
const runners = cmdLineOptions.runners;
@ -117,9 +116,6 @@ async function runConsoleTests(runJs, defaultReporter, runInParallel, watchMode,
errorStatus = exitCode;
error = new Error(`Process exited with status code ${errorStatus}.`);
}
else if (lintFlag) {
await new Promise((resolve, reject) => gulp.start(["lint"], error => error ? reject(error) : resolve()));
}
}
catch (e) {
errorStatus = undefined;
@ -144,10 +140,10 @@ async function runConsoleTests(runJs, defaultReporter, runInParallel, watchMode,
}
exports.runConsoleTests = runConsoleTests;
function cleanTestDirs() {
return del([exports.localBaseline, exports.localRwcBaseline])
.then(() => mkdirP(exports.localRwcBaseline))
.then(() => mkdirP(exports.localBaseline));
async function cleanTestDirs() {
await del([exports.localBaseline, exports.localRwcBaseline])
mkdirP.sync(exports.localRwcBaseline);
mkdirP.sync(exports.localBaseline);
}
exports.cleanTestDirs = cleanTestDirs;
@ -165,7 +161,7 @@ exports.cleanTestDirs = cleanTestDirs;
function writeTestConfigFile(tests, runners, light, taskConfigsFolder, workerCount, stackTraceLimit, timeout, keepFailed) {
const testConfigContents = JSON.stringify({
test: tests ? [tests] : undefined,
runner: runners ? runners.split(",") : undefined,
runners: runners ? runners.split(",") : undefined,
light,
workerCount,
stackTraceLimit,
@ -192,4 +188,4 @@ function restoreSavedNodeEnv() {
function deleteTemporaryProjectOutput() {
return del(path.join(exports.localBaseline, "projectOutput/"));
}
}

View File

@ -1,435 +0,0 @@
// @ts-check
const path = require("path");
const fs = require("fs");
const log = require("fancy-log"); // was `require("gulp-util").log (see https://github.com/gulpjs/gulp-util)
const ts = require("../../lib/typescript");
const { Duplex } = require("stream");
const chalk = /**@type {*} */(require("chalk"));
const Vinyl = require("vinyl");
/**
* Creates a stream that passes through its inputs only if the project outputs are not up to date
* with respect to the inputs.
* @param {ParsedCommandLine} parsedProject
* @param {UpToDateOptions} [options]
*
* @typedef UpToDateOptions
* @property {boolean | "minimal"} [verbose]
* @property {(configFilePath: string) => ParsedCommandLine | undefined} [parseProject]
*/
function upToDate(parsedProject, options) {
/** @type {File[]} */
const inputs = [];
/** @type {Map<string, File>} */
const inputMap = new Map();
/** @type {Map<string, fs.Stats>} */
const statCache = new Map();
/** @type {UpToDateHost} */
const upToDateHost = {
fileExists(fileName) {
const stats = getStat(fileName);
return stats ? stats.isFile() : false;
},
getModifiedTime(fileName) {
return getStat(fileName).mtime;
},
parseConfigFile: options && options.parseProject
};
const duplex = new Duplex({
objectMode: true,
/**
* @param {string|Buffer|File} file
*/
write(file, _, cb) {
if (typeof file === "string" || Buffer.isBuffer(file)) return cb(new Error("Only Vinyl files are supported."));
inputs.push(file);
inputMap.set(path.resolve(file.path), file);
cb();
},
final(cb) {
const status = getUpToDateStatus(upToDateHost, parsedProject);
reportStatus(parsedProject, status, options);
if (status.type !== UpToDateStatusType.UpToDate) {
for (const input of inputs) duplex.push(input);
}
duplex.push(null);
inputMap.clear();
statCache.clear();
cb();
},
read() {
}
});
return duplex;
function getStat(fileName) {
fileName = path.resolve(fileName);
const inputFile = inputMap.get(fileName);
if (inputFile && inputFile.stat) return inputFile.stat;
let stats = statCache.get(fileName);
if (!stats && fs.existsSync(fileName)) {
stats = fs.statSync(fileName);
statCache.set(fileName, stats);
}
return stats;
}
}
module.exports = exports = upToDate;
/**
* @param {DiagnosticMessage} message
* @param {...string} args
*/
function formatMessage(message, ...args) {
log.info(formatStringFromArgs(message.message, args));
}
/**
* @param {ParsedCommandLine} project
* @param {UpToDateStatus} status
* @param {{verbose?: boolean | "minimal"}} options
*/
function reportStatus(project, status, options) {
switch (options.verbose) {
case "minimal":
switch (status.type) {
case UpToDateStatusType.UpToDate:
log.info(`Project '${fileName(project.options.configFilePath)}' is up to date.`);
break;
default:
log.info(`Project '${fileName(project.options.configFilePath)}' is out of date, rebuilding...`);
break;
}
break;
case true:
/**@type {*}*/(ts).formatUpToDateStatus(project.options.configFilePath, status, fileName, formatMessage);
break;
}
if (!options.verbose) return;
}
/**
* @param {string} file
* @private
*/
function normalizeSlashes(file) {
return file.replace(/\\/g, "/");
}
/**
* @param {string} file
* @private
*/
function fileName(file) {
return chalk.cyan(normalizeSlashes(path.relative(process.cwd(), path.resolve(file))));
}
/**
* @param {string} text
* @param {string[]} args
* @param {number} [baseIndex]
*/
function formatStringFromArgs(text, args, baseIndex = 0) {
return text.replace(/{(\d+)}/g, (_match, index) => args[+index + baseIndex]);
}
const minimumDate = new Date(-8640000000000000);
const maximumDate = new Date(8640000000000000);
const missingFileModifiedTime = new Date(0);
/**
* @typedef {0} UpToDateStatusType.Unbuildable
* @typedef {1} UpToDateStatusType.UpToDate
* @typedef {2} UpToDateStatusType.UpToDateWithUpstreamTypes
* @typedef {3} UpToDateStatusType.OutputMissing
* @typedef {4} UpToDateStatusType.OutOfDateWithSelf
* @typedef {5} UpToDateStatusType.OutOfDateWithUpstream
* @typedef {6} UpToDateStatusType.UpstreamOutOfDate
* @typedef {7} UpToDateStatusType.UpstreamBlocked
* @typedef {8} UpToDateStatusType.ComputingUpstream
* @typedef {9} UpToDateStatusType.ContainerOnly
* @enum {UpToDateStatusType.Unbuildable | UpToDateStatusType.UpToDate | UpToDateStatusType.UpToDateWithUpstreamTypes | UpToDateStatusType.OutputMissing | UpToDateStatusType.OutOfDateWithSelf | UpToDateStatusType.OutOfDateWithUpstream | UpToDateStatusType.UpstreamOutOfDate | UpToDateStatusType.UpstreamBlocked | UpToDateStatusType.ComputingUpstream | UpToDateStatusType.ContainerOnly}
*/
const UpToDateStatusType = {
Unbuildable: /** @type {0} */(0),
UpToDate: /** @type {1} */(1),
UpToDateWithUpstreamTypes: /** @type {2} */(2),
OutputMissing: /** @type {3} */(3),
OutOfDateWithSelf: /** @type {4} */(4),
OutOfDateWithUpstream: /** @type {5} */(5),
UpstreamOutOfDate: /** @type {6} */(6),
UpstreamBlocked: /** @type {7} */(7),
ComputingUpstream: /** @type {8} */(8),
ContainerOnly: /** @type {9} */(9),
};
/**
* @param {Date} date1
* @param {Date} date2
* @returns {Date}
*/
function newer(date1, date2) {
return date2 > date1 ? date2 : date1;
}
/**
* @param {UpToDateHost} host
* @param {ParsedCommandLine | undefined} project
* @returns {UpToDateStatus}
*/
function getUpToDateStatus(host, project) {
if (project === undefined) return { type: UpToDateStatusType.Unbuildable, reason: "File deleted mid-build" };
const prior = host.getLastStatus ? host.getLastStatus(project.options.configFilePath) : undefined;
if (prior !== undefined) {
return prior;
}
const actual = getUpToDateStatusWorker(host, project);
if (host.setLastStatus) {
host.setLastStatus(project.options.configFilePath, actual);
}
return actual;
}
/**
* @param {UpToDateHost} host
* @param {ParsedCommandLine | undefined} project
* @returns {UpToDateStatus}
*/
function getUpToDateStatusWorker(host, project) {
/** @type {string} */
let newestInputFileName = undefined;
let newestInputFileTime = minimumDate;
// Get timestamps of input files
for (const inputFile of project.fileNames) {
if (!host.fileExists(inputFile)) {
return {
type: UpToDateStatusType.Unbuildable,
reason: `${inputFile} does not exist`
};
}
const inputTime = host.getModifiedTime(inputFile) || missingFileModifiedTime;
if (inputTime > newestInputFileTime) {
newestInputFileName = inputFile;
newestInputFileTime = inputTime;
}
}
// Collect the expected outputs of this project
const outputs = /**@type {string[]}*/(/**@type {*}*/(ts).getAllProjectOutputs(project));
if (outputs.length === 0) {
return {
type: UpToDateStatusType.ContainerOnly
};
}
// Now see if all outputs are newer than the newest input
let oldestOutputFileName = "(none)";
let oldestOutputFileTime = maximumDate;
let newestOutputFileName = "(none)";
let newestOutputFileTime = minimumDate;
/** @type {string | undefined} */
let missingOutputFileName;
let newestDeclarationFileContentChangedTime = minimumDate;
let isOutOfDateWithInputs = false;
for (const output of outputs) {
// Output is missing; can stop checking
// Don't immediately return because we can still be upstream-blocked, which is a higher-priority status
if (!host.fileExists(output)) {
missingOutputFileName = output;
break;
}
const outputTime = host.getModifiedTime(output) || missingFileModifiedTime;
if (outputTime < oldestOutputFileTime) {
oldestOutputFileTime = outputTime;
oldestOutputFileName = output;
}
// If an output is older than the newest input, we can stop checking
// Don't immediately return because we can still be upstream-blocked, which is a higher-priority status
if (outputTime < newestInputFileTime) {
isOutOfDateWithInputs = true;
break;
}
if (outputTime > newestOutputFileTime) {
newestOutputFileTime = outputTime;
newestOutputFileName = output;
}
// Keep track of when the most recent time a .d.ts file was changed.
// In addition to file timestamps, we also keep track of when a .d.ts file
// had its file touched but not had its contents changed - this allows us
// to skip a downstream typecheck
if (path.extname(output) === ".d.ts") {
const unchangedTime = host.getUnchangedTime ? host.getUnchangedTime(output) : undefined;
if (unchangedTime !== undefined) {
newestDeclarationFileContentChangedTime = newer(unchangedTime, newestDeclarationFileContentChangedTime);
}
else {
const outputModifiedTime = host.getModifiedTime(output) || missingFileModifiedTime;
newestDeclarationFileContentChangedTime = newer(newestDeclarationFileContentChangedTime, outputModifiedTime);
}
}
}
let pseudoUpToDate = false;
let usesPrepend = false;
/** @type {string | undefined} */
let upstreamChangedProject;
if (project.projectReferences) {
if (host.setLastStatus) host.setLastStatus(project.options.configFilePath, { type: UpToDateStatusType.ComputingUpstream });
for (const ref of project.projectReferences) {
usesPrepend = usesPrepend || !!(ref.prepend);
const resolvedRef = ts.resolveProjectReferencePath(host, ref);
const parsedRef = host.parseConfigFile ? host.parseConfigFile(resolvedRef) : ts.getParsedCommandLineOfConfigFile(resolvedRef, {}, parseConfigHost);
const refStatus = getUpToDateStatus(host, parsedRef);
            // It's a circular reference; ignore the status of this project
if (refStatus.type === UpToDateStatusType.ComputingUpstream) {
continue;
}
// An upstream project is blocked
if (refStatus.type === UpToDateStatusType.Unbuildable) {
return {
type: UpToDateStatusType.UpstreamBlocked,
upstreamProjectName: ref.path
};
}
// If the upstream project is out of date, then so are we (someone shouldn't have asked, though?)
if (refStatus.type !== UpToDateStatusType.UpToDate) {
return {
type: UpToDateStatusType.UpstreamOutOfDate,
upstreamProjectName: ref.path
};
}
// If the upstream project's newest file is older than our oldest output, we
// can't be out of date because of it
if (refStatus.newestInputFileTime && refStatus.newestInputFileTime <= oldestOutputFileTime) {
continue;
}
            // If the upstream project has only changed .d.ts files, and we've built
            // *after* those files, then we're "pseudo up to date" and eligible for a fast rebuild
if (refStatus.newestDeclarationFileContentChangedTime && refStatus.newestDeclarationFileContentChangedTime <= oldestOutputFileTime) {
pseudoUpToDate = true;
upstreamChangedProject = ref.path;
continue;
}
// We have an output older than an upstream output - we are out of date
return {
type: UpToDateStatusType.OutOfDateWithUpstream,
outOfDateOutputFileName: oldestOutputFileName,
newerProjectName: ref.path
};
}
}
if (missingOutputFileName !== undefined) {
return {
type: UpToDateStatusType.OutputMissing,
missingOutputFileName
};
}
if (isOutOfDateWithInputs) {
return {
type: UpToDateStatusType.OutOfDateWithSelf,
outOfDateOutputFileName: oldestOutputFileName,
newerInputFileName: newestInputFileName
};
}
if (usesPrepend && pseudoUpToDate) {
return {
type: UpToDateStatusType.OutOfDateWithUpstream,
outOfDateOutputFileName: oldestOutputFileName,
newerProjectName: upstreamChangedProject
};
}
// Up to date
return {
type: pseudoUpToDate ? UpToDateStatusType.UpToDateWithUpstreamTypes : UpToDateStatusType.UpToDate,
newestDeclarationFileContentChangedTime,
newestInputFileTime,
newestOutputFileTime,
newestInputFileName,
newestOutputFileName,
oldestOutputFileName
};
}
const parseConfigHost = {
useCaseSensitiveFileNames: true,
getCurrentDirectory: () => process.cwd(),
readDirectory: (file) => fs.readdirSync(file),
fileExists: file => fs.existsSync(file) && fs.statSync(file).isFile(),
readFile: file => fs.readFileSync(file, "utf8"),
onUnRecoverableConfigFileDiagnostic: () => undefined
};
/**
* @typedef {import("vinyl")} File
* @typedef {import("../../lib/typescript").ParsedCommandLine & { options: CompilerOptions }} ParsedCommandLine
* @typedef {import("../../lib/typescript").CompilerOptions & { configFilePath?: string }} CompilerOptions
* @typedef {import("../../lib/typescript").DiagnosticMessage} DiagnosticMessage
* @typedef UpToDateHost
* @property {(fileName: string) => boolean} fileExists
* @property {(fileName: string) => Date} getModifiedTime
* @property {(fileName: string) => Date} [getUnchangedTime]
* @property {(configFilePath: string) => ParsedCommandLine | undefined} parseConfigFile
* @property {(configFilePath: string) => UpToDateStatus} [getLastStatus]
* @property {(configFilePath: string, status: UpToDateStatus) => void} [setLastStatus]
*
* @typedef Status.Unbuildable
* @property {UpToDateStatusType.Unbuildable} type
* @property {string} reason
*
* @typedef Status.ContainerOnly
* @property {UpToDateStatusType.ContainerOnly} type
*
* @typedef Status.UpToDate
* @property {UpToDateStatusType.UpToDate | UpToDateStatusType.UpToDateWithUpstreamTypes} type
* @property {Date} [newestInputFileTime]
* @property {string} [newestInputFileName]
* @property {Date} [newestDeclarationFileContentChangedTime]
* @property {Date} [newestOutputFileTime]
* @property {string} [newestOutputFileName]
* @property {string} [oldestOutputFileName]
*
* @typedef Status.OutputMissing
* @property {UpToDateStatusType.OutputMissing} type
* @property {string} missingOutputFileName
*
* @typedef Status.OutOfDateWithSelf
* @property {UpToDateStatusType.OutOfDateWithSelf} type
* @property {string} outOfDateOutputFileName
* @property {string} newerInputFileName
*
* @typedef Status.UpstreamOutOfDate
* @property {UpToDateStatusType.UpstreamOutOfDate} type
* @property {string} upstreamProjectName
*
* @typedef Status.UpstreamBlocked
* @property {UpToDateStatusType.UpstreamBlocked} type
* @property {string} upstreamProjectName
*
* @typedef Status.ComputingUpstream
* @property {UpToDateStatusType.ComputingUpstream} type
*
* @typedef Status.OutOfDateWithUpstream
* @property {UpToDateStatusType.OutOfDateWithUpstream} type
* @property {string} outOfDateOutputFileName
* @property {string} newerProjectName
* @typedef {Status.Unbuildable | Status.ContainerOnly | Status.UpToDate | Status.OutputMissing | Status.OutOfDateWithSelf | Status.UpstreamOutOfDate | Status.UpstreamBlocked | Status.ComputingUpstream | Status.OutOfDateWithUpstream} UpToDateStatus
*/
void 0;
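// Illustrative usage sketch (not part of the original script): drive getUpToDateStatus with a
// host backed by the real file system. The config path and the host wiring below are assumptions
// made only for this example.
function upToDateExample() {
    /** @type {UpToDateHost} */
    const host = {
        fileExists: f => fs.existsSync(f),
        getModifiedTime: f => fs.statSync(f).mtime,
        parseConfigFile: configPath => ts.getParsedCommandLineOfConfigFile(configPath, {}, parseConfigHost)
    };
    const status = getUpToDateStatus(host, host.parseConfigFile("src/compiler/tsconfig.json"));
    console.log(status.type === UpToDateStatusType.UpToDate ? "up to date" : `needs build (status ${status.type})`);
}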

View File

@ -1,7 +1,119 @@
// @ts-check
/// <reference path="../types/ambient.d.ts" />
const fs = require("fs");
const File = require("./vinyl");
const { Readable } = require("stream");
const path = require("path");
const log = require("fancy-log");
const mkdirp = require("mkdirp");
const del = require("del");
const File = require("vinyl");
const ts = require("../../lib/typescript");
const { default: chalk } = require("chalk");
const { spawn } = require("child_process");
const { CancellationToken, CancelError, Deferred } = require("prex");
const { Readable, Duplex } = require("stream");
const isWindows = /^win/.test(process.platform);
/**
* Executes the provided command once with the supplied arguments.
* @param {string} cmd
* @param {string[]} args
* @param {ExecOptions} [options]
*
* @typedef ExecOptions
* @property {boolean} [ignoreExitCode]
* @property {import("prex").CancellationToken} [cancelToken]
* @property {boolean} [hidePrompt]
*/
function exec(cmd, args, options = {}) {
return /**@type {Promise<{exitCode: number}>}*/(new Promise((resolve, reject) => {
const { ignoreExitCode, cancelToken = CancellationToken.none } = options;
cancelToken.throwIfCancellationRequested();
// TODO (weswig): Update child_process types to add windowsVerbatimArguments to the type definition
const subshellFlag = isWindows ? "/c" : "-c";
const command = isWindows ? [possiblyQuote(cmd), ...args] : [`${cmd} ${args.join(" ")}`];
if (!options.hidePrompt) log(`> ${chalk.green(cmd)} ${args.join(" ")}`);
const proc = spawn(isWindows ? "cmd" : "/bin/sh", [subshellFlag, ...command], { stdio: "inherit", windowsVerbatimArguments: true });
const registration = cancelToken.register(() => {
log(`${chalk.red("killing")} '${chalk.green(cmd)} ${args.join(" ")}'...`);
proc.kill("SIGINT");
proc.kill("SIGTERM");
reject(new CancelError());
});
proc.on("exit", exitCode => {
registration.unregister();
if (exitCode === 0 || ignoreExitCode) {
resolve({ exitCode });
}
else {
reject(new Error(`Process exited with code: ${exitCode}`));
}
});
proc.on("error", error => {
registration.unregister();
reject(error);
});
}));
}
exports.exec = exec;
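// Illustrative usage sketch (not part of the original script): run a command and await its exit
// code. The command, arguments, and options are assumptions chosen only for this example.
async function execExample() {
    const { exitCode } = await exec("node", ["--version"], { ignoreExitCode: true });
    log(`node exited with code ${exitCode}`);
}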
/**
* @param {string} cmd
*/
function possiblyQuote(cmd) {
return cmd.indexOf(" ") >= 0 ? `"${cmd}"` : cmd;
}
/**
* @param {ts.Diagnostic[]} diagnostics
* @param {{ cwd?: string, pretty?: boolean }} [options]
*/
function formatDiagnostics(diagnostics, options) {
return options && options.pretty
? ts.formatDiagnosticsWithColorAndContext(diagnostics, getFormatDiagnosticsHost(options && options.cwd))
: ts.formatDiagnostics(diagnostics, getFormatDiagnosticsHost(options && options.cwd));
}
exports.formatDiagnostics = formatDiagnostics;
/**
* @param {ts.Diagnostic[]} diagnostics
* @param {{ cwd?: string }} [options]
*/
function reportDiagnostics(diagnostics, options) {
log(formatDiagnostics(diagnostics, { cwd: options && options.cwd, pretty: process.stdout.isTTY }));
}
exports.reportDiagnostics = reportDiagnostics;
/**
* @param {string | undefined} cwd
* @returns {ts.FormatDiagnosticsHost}
*/
function getFormatDiagnosticsHost(cwd) {
return {
getCanonicalFileName: fileName => fileName,
getCurrentDirectory: () => cwd,
getNewLine: () => ts.sys.newLine,
};
}
exports.getFormatDiagnosticsHost = getFormatDiagnosticsHost;
/**
* Reads JSON data with optional comments using the LKG TypeScript compiler
* @param {string} jsonPath
*/
function readJson(jsonPath) {
const jsonText = fs.readFileSync(jsonPath, "utf8");
const result = ts.parseConfigFileTextToJson(jsonPath, jsonText);
if (result.error) {
reportDiagnostics([result.error]);
throw new Error("An error occurred during parse.");
}
return result.config;
}
exports.readJson = readJson;
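// Illustrative usage sketch (not part of the original script): read a tsconfig-style JSON file
// that may contain comments. The path used here is an assumption for this example.
function readJsonExample() {
    const config = readJson("src/compiler/tsconfig.json");
    log(`project lists ${(config.files || []).length} files`);
}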
/**
* @param {File} file
@ -24,4 +136,299 @@ function streamFromBuffer(buffer) {
}
});
}
exports.streamFromBuffer = streamFromBuffer;
exports.streamFromBuffer = streamFromBuffer;
/**
* @param {string | string[]} source
* @param {string | string[]} dest
* @returns {boolean}
*/
function needsUpdate(source, dest) {
if (typeof source === "string" && typeof dest === "string") {
if (fs.existsSync(dest)) {
const {mtime: outTime} = fs.statSync(dest);
const {mtime: inTime} = fs.statSync(source);
if (+inTime <= +outTime) {
return false;
}
}
}
else if (typeof source === "string" && typeof dest !== "string") {
const {mtime: inTime} = fs.statSync(source);
for (const filepath of dest) {
if (fs.existsSync(filepath)) {
const {mtime: outTime} = fs.statSync(filepath);
if (+inTime > +outTime) {
return true;
}
}
else {
return true;
}
}
return false;
}
else if (typeof source !== "string" && typeof dest === "string") {
if (fs.existsSync(dest)) {
const {mtime: outTime} = fs.statSync(dest);
for (const filepath of source) {
if (fs.existsSync(filepath)) {
const {mtime: inTime} = fs.statSync(filepath);
if (+inTime > +outTime) {
return true;
}
}
else {
return true;
}
}
return false;
}
}
else if (typeof source !== "string" && typeof dest !== "string") {
for (let i = 0; i < source.length; i++) {
if (!dest[i]) {
continue;
}
if (fs.existsSync(dest[i])) {
const {mtime: outTime} = fs.statSync(dest[i]);
const {mtime: inTime} = fs.statSync(source[i]);
if (+inTime > +outTime) {
return true;
}
}
else {
return true;
}
}
return false;
}
return true;
}
exports.needsUpdate = needsUpdate;
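// Illustrative usage sketch (not part of the original script): rebuild only when any input is
// newer than the single output. The file names are assumptions for this example.
function needsUpdateExample() {
    if (needsUpdate(["src/a.ts", "src/b.ts"], "built/local/out.js")) {
        log("inputs are newer than the output; a rebuild is needed");
    }
}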
function getDiffTool() {
const program = process.env.DIFF;
if (!program) {
log.warn("Add the 'DIFF' environment variable to the path of the program you want to use.");
process.exit(1);
}
return program;
}
exports.getDiffTool = getDiffTool;
/**
* Find the size of a directory recursively.
* Symbolic links can cause a loop.
* @param {string} root
* @returns {number} bytes
*/
function getDirSize(root) {
const stats = fs.lstatSync(root);
if (!stats.isDirectory()) {
return stats.size;
}
return fs.readdirSync(root)
.map(file => getDirSize(path.join(root, file)))
.reduce((acc, num) => acc + num, 0);
}
exports.getDirSize = getDirSize;
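// Illustrative usage sketch (not part of the original script): report the size of the build
// output directory. The directory name is an assumption and must exist when this runs.
function getDirSizeExample() {
    const bytes = getDirSize("built/local");
    log(`built/local is ${Math.round(bytes / 1024)} KiB`);
}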
/**
* Flattens a project with project references into a single project.
* @param {string} projectSpec The path to a tsconfig.json file or its containing directory.
* @param {string} flattenedProjectSpec The output path for the flattened tsconfig.json file.
* @param {FlattenOptions} [options] Options used to flatten a project hierarchy.
*
* @typedef FlattenOptions
* @property {string} [cwd] The path to use for the current working directory. Defaults to `process.cwd()`.
* @property {import("../../lib/typescript").CompilerOptions} [compilerOptions] Compiler option overrides.
* @property {boolean} [force] Forces creation of the output project.
* @property {string[]} [exclude] Files to exclude (relative to `cwd`)
*/
function flatten(projectSpec, flattenedProjectSpec, options = {}) {
const cwd = normalizeSlashes(options.cwd ? path.resolve(options.cwd) : process.cwd());
const files = [];
const resolvedOutputSpec = path.resolve(cwd, flattenedProjectSpec);
const resolvedOutputDirectory = path.dirname(resolvedOutputSpec);
const resolvedProjectSpec = resolveProjectSpec(projectSpec, cwd, undefined);
const project = readJson(resolvedProjectSpec);
const skipProjects = /**@type {Set<string>}*/(new Set());
const skipFiles = new Set(options && options.exclude && options.exclude.map(file => normalizeSlashes(path.resolve(cwd, file))));
recur(resolvedProjectSpec, project);
if (options.force || needsUpdate(files, resolvedOutputSpec)) {
const config = {
extends: normalizeSlashes(path.relative(resolvedOutputDirectory, resolvedProjectSpec)),
compilerOptions: options.compilerOptions || {},
files: files.map(file => normalizeSlashes(path.relative(resolvedOutputDirectory, file)))
};
mkdirp.sync(resolvedOutputDirectory);
fs.writeFileSync(resolvedOutputSpec, JSON.stringify(config, undefined, 2), "utf8");
}
/**
* @param {string} projectSpec
* @param {object} project
*/
function recur(projectSpec, project) {
if (skipProjects.has(projectSpec)) return;
        // Track visited projects by their resolved spec so a project referenced twice is only walked once
        skipProjects.add(projectSpec);
if (project.references) {
for (const ref of project.references) {
const referencedSpec = resolveProjectSpec(ref.path, cwd, projectSpec);
const referencedProject = readJson(referencedSpec);
recur(referencedSpec, referencedProject);
}
}
if (project.include) {
throw new Error("Flattened project may not have an 'include' list.");
}
if (!project.files) {
throw new Error("Flattened project must have an explicit 'files' list.");
}
const projectDirectory = path.dirname(projectSpec);
for (let file of project.files) {
file = normalizeSlashes(path.resolve(projectDirectory, file));
if (skipFiles.has(file)) continue;
skipFiles.add(file);
files.push(file);
}
}
}
exports.flatten = flatten;
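// Illustrative usage sketch (not part of the original script): flatten a project that uses
// project references into a single non-composite project. The paths and compiler option
// overrides are assumptions for this example.
function flattenExample() {
    flatten("src/tsserver/tsconfig.json", "built/local/tsserver.flattened.json", {
        compilerOptions: { removeComments: false, outFile: "example.out.js" }
    });
}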
/**
* @param {string} file
*/
function normalizeSlashes(file) {
return file.replace(/\\/g, "/");
}
/**
* @param {string} projectSpec
* @param {string} cwd
* @param {string | undefined} referrer
* @returns {string}
*/
function resolveProjectSpec(projectSpec, cwd, referrer) {
let projectPath = normalizeSlashes(path.resolve(cwd, referrer ? path.dirname(referrer) : "", projectSpec));
const stats = fs.statSync(projectPath);
if (stats.isFile()) return normalizeSlashes(projectPath);
return normalizeSlashes(path.resolve(cwd, projectPath, "tsconfig.json"));
}
/**
* @param {string | ((file: File) => string) | { cwd?: string }} [dest]
* @param {{ cwd?: string }} [opts]
*/
function rm(dest, opts) {
if (dest && typeof dest === "object") opts = dest, dest = undefined;
let failed = false;
const cwd = path.resolve(opts && opts.cwd || process.cwd());
/** @type {{ file: File, deleted: boolean, promise: Promise<any>, cb: Function }[]} */
const pending = [];
const processDeleted = () => {
if (failed) return;
while (pending.length && pending[0].deleted) {
const { file, cb } = pending.shift();
duplex.push(file);
cb();
}
};
const duplex = new Duplex({
objectMode: true,
/**
* @param {string|Buffer|File} file
*/
write(file, _, cb) {
if (failed) return;
if (typeof file === "string" || Buffer.isBuffer(file)) return cb(new Error("Only Vinyl files are supported."));
const basePath = typeof dest === "string" ? path.resolve(cwd, dest) :
typeof dest === "function" ? path.resolve(cwd, dest(file)) :
file.base;
const filePath = path.resolve(basePath, file.relative);
file.cwd = cwd;
file.base = basePath;
file.path = filePath;
const entry = {
file,
deleted: false,
cb,
promise: del(file.path).then(() => {
entry.deleted = true;
processDeleted();
}, err => {
failed = true;
pending.length = 0;
cb(err);
})
};
pending.push(entry);
},
final(cb) {
processDeleted();
if (pending.length) {
Promise
.all(pending.map(entry => entry.promise))
.then(() => processDeleted())
.then(() => cb(), cb);
return;
}
cb();
},
read() {
}
});
return duplex;
}
exports.rm = rm;
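// Illustrative usage sketch (not part of the original script): delete previously emitted files in
// a gulp-style pipeline. `gulp` is not imported by this module, so it is required here purely for
// the example; the glob and base directory are assumptions.
function rmExample() {
    const { src } = require("gulp");
    return src("built/local/**/*.js.map", { base: "built/local" }).pipe(rm());
}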
class Debouncer {
/**
* @param {number} timeout
* @param {() => Promise<any>} action
*/
constructor(timeout, action) {
this._timeout = timeout;
this._action = action;
}
enqueue() {
if (this._timer) {
clearTimeout(this._timer);
this._timer = undefined;
}
if (!this._deferred) {
this._deferred = new Deferred();
}
        this._timer = setTimeout(() => this.run(), this._timeout);
return this._deferred.promise;
}
run() {
if (this._timer) {
clearTimeout(this._timer);
this._timer = undefined;
}
const deferred = this._deferred;
this._deferred = undefined;
this._projects = undefined;
try {
deferred.resolve(this._action());
}
catch (e) {
deferred.reject(e);
}
}
}
exports.Debouncer = Debouncer;
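// Illustrative usage sketch (not part of the original script): coalesce bursts of watch events
// into a single rebuild. The timeout value and the rebuild command are assumptions.
const exampleRebuildDebouncer = new Debouncer(/*timeout*/ 500, () => exec("gulp", ["local"], { ignoreExitCode: true }));
function onWatchEventExample() {
    // Repeated calls within the timeout window share one pending rebuild promise.
    return exampleRebuildDebouncer.enqueue();
}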

View File

@ -1,60 +0,0 @@
// NOTE: This makes it possible to correctly type vinyl Files under @ts-check.
export = File;
declare class File<T extends File.Contents = File.Contents> {
constructor(options?: File.VinylOptions<T>);
cwd: string;
base: string;
path: string;
readonly history: ReadonlyArray<string>;
contents: T;
relative: string;
dirname: string;
basename: string;
stem: string;
extname: string;
symlink: string | null;
stat: import("fs").Stats | null;
sourceMap?: import("./sourcemaps").RawSourceMap | string;
[custom: string]: any;
isBuffer(): this is T extends Buffer ? File<Buffer> : never;
isStream(): this is T extends NodeJS.ReadableStream ? File<NodeJS.ReadableStream> : never;
isNull(): this is T extends null ? File<null> : never;
isDirectory(): this is T extends null ? File.Directory : never;
isSymbolic(): this is T extends null ? File.Symbolic : never;
clone(opts?: { contents?: boolean, deep?: boolean }): this;
}
namespace File {
export interface VinylOptions<T extends Contents = Contents> {
cwd?: string;
base?: string;
path?: string;
history?: ReadonlyArray<string>;
stat?: import("fs").Stats;
contents?: T;
sourceMap?: import("./sourcemaps").RawSourceMap | string;
[custom: string]: any;
}
export type Contents = Buffer | NodeJS.ReadableStream | null;
export type File = import("./vinyl");
export type NullFile = File<null>;
export type BufferFile = File<Buffer>;
export type StreamFile = File<NodeJS.ReadableStream>;
export interface Directory extends NullFile {
isNull(): true;
isDirectory(): true;
isSymbolic(): this is never;
}
export interface Symbolic extends NullFile {
isNull(): true;
isDirectory(): this is never;
isSymbolic(): true;
}
}

View File

@ -1 +0,0 @@
module.exports = require("vinyl");

View File

@ -1,3 +1,5 @@
import { TaskFunction } from "gulp";
declare module "gulp-clone" {
function Clone(): NodeJS.ReadWriteStream;
namespace Clone {
@ -14,3 +16,78 @@ declare module "gulp-insert" {
}
declare module "sorcery";
declare module "vinyl" {
// NOTE: This makes it possible to correctly type vinyl Files under @ts-check.
export = File;
declare class File<T extends File.Contents = File.Contents> {
constructor(options?: File.VinylOptions<T>);
cwd: string;
base: string;
path: string;
readonly history: ReadonlyArray<string>;
contents: T;
relative: string;
dirname: string;
basename: string;
stem: string;
extname: string;
symlink: string | null;
stat: import("fs").Stats | null;
sourceMap?: import("./sourcemaps").RawSourceMap | string;
[custom: string]: any;
isBuffer(): this is T extends Buffer ? File<Buffer> : never;
isStream(): this is T extends NodeJS.ReadableStream ? File<NodeJS.ReadableStream> : never;
isNull(): this is T extends null ? File<null> : never;
isDirectory(): this is T extends null ? File.Directory : never;
isSymbolic(): this is T extends null ? File.Symbolic : never;
clone(opts?: { contents?: boolean, deep?: boolean }): this;
}
namespace File {
export interface VinylOptions<T extends Contents = Contents> {
cwd?: string;
base?: string;
path?: string;
history?: ReadonlyArray<string>;
stat?: import("fs").Stats;
contents?: T;
sourceMap?: import("./sourcemaps").RawSourceMap | string;
[custom: string]: any;
}
export type Contents = Buffer | NodeJS.ReadableStream | null;
export type File = import("./vinyl");
export type NullFile = File<null>;
export type BufferFile = File<Buffer>;
export type StreamFile = File<NodeJS.ReadableStream>;
export interface Directory extends NullFile {
isNull(): true;
isDirectory(): true;
isSymbolic(): this is never;
}
export interface Symbolic extends NullFile {
isNull(): true;
isDirectory(): this is never;
isSymbolic(): true;
}
}
}
declare module "undertaker" {
interface TaskFunctionParams {
flags?: Record<string, string>;
}
}
declare module "gulp-sourcemaps" {
interface WriteOptions {
destPath?: string;
}
}

View File

@ -1,5 +1,5 @@
{
"extends": "../tsconfig-base",
"extends": "../tsconfig-noncomposite-base",
"compilerOptions": {
"outDir": "../../built/local/",
"rootDir": ".",

View File

@ -100,6 +100,8 @@ namespace ts {
IsObjectLiteralOrClassExpressionMethod = 1 << 7,
}
let flowNodeCreated: <T extends FlowNode>(node: T) => T = identity;
const binder = createBinder();
export function bindSourceFile(file: SourceFile, options: CompilerOptions) {
@ -530,6 +532,7 @@ namespace ts {
blockScopeContainer.locals = undefined;
}
if (containerFlags & ContainerFlags.IsControlFlowContainer) {
const saveFlowNodeCreated = flowNodeCreated;
const saveCurrentFlow = currentFlow;
const saveBreakTarget = currentBreakTarget;
const saveContinueTarget = currentContinueTarget;
@ -553,6 +556,7 @@ namespace ts {
currentContinueTarget = undefined;
activeLabels = undefined;
hasExplicitReturn = false;
flowNodeCreated = identity;
bindChildren(node);
// Reset all reachability check related flags on node (for incremental scenarios)
node.flags &= ~NodeFlags.ReachabilityAndEmitFlags;
@ -579,6 +583,7 @@ namespace ts {
currentReturnTarget = saveReturnTarget;
activeLabels = saveActiveLabels;
hasExplicitReturn = saveHasExplicitReturn;
flowNodeCreated = saveFlowNodeCreated;
}
else if (containerFlags & ContainerFlags.IsInterface) {
seenThisKeyword = false;
@ -753,7 +758,7 @@ namespace ts {
function isNarrowableReference(expr: Expression): boolean {
return expr.kind === SyntaxKind.Identifier || expr.kind === SyntaxKind.ThisKeyword || expr.kind === SyntaxKind.SuperKeyword ||
isPropertyAccessExpression(expr) && isNarrowableReference(expr.expression) ||
(isPropertyAccessExpression(expr) || isNonNullExpression(expr) || isParenthesizedExpression(expr)) && isNarrowableReference(expr.expression) ||
isElementAccessExpression(expr) && expr.argumentExpression &&
(isStringLiteral(expr.argumentExpression) || isNumericLiteral(expr.argumentExpression)) &&
isNarrowableReference(expr.expression);
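            // Illustrative example (not part of the compiler source) of what the widened check enables:
            // non-null assertions and parentheses no longer break the reference for narrowing purposes,
            // so `x!.a` and `(x).a` can participate in control flow narrowing just like `x.a`:
            //
            //     declare const x: { a: string | undefined } | undefined;
            //     if (x!.a) {
            //         x!.a.length;    // `x!.a` is narrowed to `string` here
            //     }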
@ -858,7 +863,7 @@ namespace ts {
return antecedent;
}
setFlowNodeReferenced(antecedent);
return { flags, expression, antecedent };
return flowNodeCreated({ flags, expression, antecedent });
}
function createFlowSwitchClause(antecedent: FlowNode, switchStatement: SwitchStatement, clauseStart: number, clauseEnd: number): FlowNode {
@ -866,17 +871,17 @@ namespace ts {
return antecedent;
}
setFlowNodeReferenced(antecedent);
return { flags: FlowFlags.SwitchClause, switchStatement, clauseStart, clauseEnd, antecedent };
return flowNodeCreated({ flags: FlowFlags.SwitchClause, switchStatement, clauseStart, clauseEnd, antecedent });
}
function createFlowAssignment(antecedent: FlowNode, node: Expression | VariableDeclaration | BindingElement): FlowNode {
setFlowNodeReferenced(antecedent);
return { flags: FlowFlags.Assignment, antecedent, node };
return flowNodeCreated({ flags: FlowFlags.Assignment, antecedent, node });
}
function createFlowArrayMutation(antecedent: FlowNode, node: CallExpression | BinaryExpression): FlowNode {
setFlowNodeReferenced(antecedent);
const res: FlowArrayMutation = { flags: FlowFlags.ArrayMutation, antecedent, node };
const res: FlowArrayMutation = flowNodeCreated({ flags: FlowFlags.ArrayMutation, antecedent, node });
return res;
}
@ -1080,8 +1085,16 @@ namespace ts {
function bindTryStatement(node: TryStatement): void {
const preFinallyLabel = createBranchLabel();
const preTryFlow = currentFlow;
// TODO: Every statement in try block is potentially an exit point!
const tryPriors: FlowNode[] = [];
const oldFlowNodeCreated = flowNodeCreated;
// We hook the creation of all flow nodes within the `try` scope and store them so we can add _all_ of them
// as possible antecedents of the start of the `catch` or `finally` blocks.
// Don't bother intercepting the call if there's no finally or catch block that needs the information
if (node.catchClause || node.finallyBlock) {
flowNodeCreated = node => (tryPriors.push(node), node);
}
bind(node.tryBlock);
flowNodeCreated = oldFlowNodeCreated;
addAntecedent(preFinallyLabel, currentFlow);
const flowAfterTry = currentFlow;
@ -1089,12 +1102,32 @@ namespace ts {
if (node.catchClause) {
currentFlow = preTryFlow;
if (tryPriors.length) {
const preCatchFlow = createBranchLabel();
addAntecedent(preCatchFlow, currentFlow);
for (const p of tryPriors) {
addAntecedent(preCatchFlow, p);
}
currentFlow = finishFlowLabel(preCatchFlow);
}
bind(node.catchClause);
addAntecedent(preFinallyLabel, currentFlow);
flowAfterCatch = currentFlow;
}
if (node.finallyBlock) {
// We add the nodes within the `try` block to the `finally`'s antecedents if there's no catch block
// (If there is a `catch` block, it will have all these antecedents instead, and the `finally` will
// have the end of the `try` block and the end of the `catch` block)
if (!node.catchClause) {
if (tryPriors.length) {
for (const p of tryPriors) {
addAntecedent(preFinallyLabel, p);
}
}
}
            // In the finally block, flow is combined from the pre-try flow, the flow from try, and the flow from catch.
            // The pre-try flow is necessary to make sure that finally is reachable even if the flows from both the try and catch blocks are unreachable.
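            // Illustrative example (not part of the compiler source) of why every flow node created inside
            // the `try` block is recorded as a possible antecedent of `catch`/`finally`: an exception can be
            // thrown between any two statements, so `catch` must observe the state after any of them.
            //
            //     let x: string | number = "before";
            //     try {
            //         x = 1;
            //         mightThrow();
            //         x = "after";
            //     }
            //     catch {
            //         x;    // could be "before", 1, or "after", so its type here is string | number
            //     }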
@ -1142,7 +1175,7 @@ namespace ts {
}
}
if (!(currentFlow.flags & FlowFlags.Unreachable)) {
const afterFinallyFlow: AfterFinallyFlow = { flags: FlowFlags.AfterFinally, antecedent: currentFlow };
const afterFinallyFlow: AfterFinallyFlow = flowNodeCreated({ flags: FlowFlags.AfterFinally, antecedent: currentFlow });
preFinallyFlow.lock = afterFinallyFlow;
currentFlow = afterFinallyFlow;
}

View File

@ -49,7 +49,27 @@ namespace ts {
/**
* program corresponding to this state
*/
program: Program;
program: Program | undefined;
/**
* compilerOptions for the program
*/
compilerOptions: CompilerOptions;
/**
* Files pending to be emitted
*/
affectedFilesPendingEmit: ReadonlyArray<Path> | undefined;
/**
* Current index to retrieve pending affected file
*/
affectedFilesPendingEmitIndex: number | undefined;
/**
         * Already seen emitted files
*/
seenEmittedFiles: Map<true> | undefined;
/**
* true if program has been emitted
*/
programEmitComplete?: true;
}
function hasSameKeys<T, U>(map1: ReadonlyMap<T> | undefined, map2: ReadonlyMap<U> | undefined): boolean {
@ -64,6 +84,7 @@ namespace ts {
const state = BuilderState.create(newProgram, getCanonicalFileName, oldState) as BuilderProgramState;
state.program = newProgram;
const compilerOptions = newProgram.getCompilerOptions();
state.compilerOptions = compilerOptions;
// With --out or --outFile, any change affects all semantic diagnostics so no need to cache them
        // With --isolatedModules, emitting a changed file doesn't emit its dependent files, so we can't know which dependent files to retrieve errors for; don't cache the errors
if (!compilerOptions.outFile && !compilerOptions.out && !compilerOptions.isolatedModules) {
@ -72,13 +93,14 @@ namespace ts {
state.changedFilesSet = createMap<true>();
const useOldState = BuilderState.canReuseOldState(state.referencedMap, oldState);
const oldCompilerOptions = useOldState ? oldState!.program.getCompilerOptions() : undefined;
const oldCompilerOptions = useOldState ? oldState!.compilerOptions : undefined;
const canCopySemanticDiagnostics = useOldState && oldState!.semanticDiagnosticsPerFile && !!state.semanticDiagnosticsPerFile &&
!compilerOptionsAffectSemanticDiagnostics(compilerOptions, oldCompilerOptions!);
if (useOldState) {
// Verify the sanity of old state
if (!oldState!.currentChangedFilePath) {
Debug.assert(!oldState!.affectedFiles && (!oldState!.currentAffectedFilesSignatures || !oldState!.currentAffectedFilesSignatures!.size), "Cannot reuse if only few affected files of currentChangedFile were iterated");
const affectedSignatures = oldState!.currentAffectedFilesSignatures;
Debug.assert(!oldState!.affectedFiles && (!affectedSignatures || !affectedSignatures.size), "Cannot reuse if only few affected files of currentChangedFile were iterated");
}
if (canCopySemanticDiagnostics) {
            Debug.assert(!forEachKey(oldState!.changedFilesSet, path => oldState!.semanticDiagnosticsPerFile!.has(path)), "Semantic diagnostics shouldn't be available for changed files");
@ -86,6 +108,10 @@ namespace ts {
// Copy old state's changed files set
copyEntries(oldState!.changedFilesSet, state.changedFilesSet);
if (!compilerOptions.outFile && !compilerOptions.out && oldState!.affectedFilesPendingEmit) {
state.affectedFilesPendingEmit = oldState!.affectedFilesPendingEmit;
state.affectedFilesPendingEmitIndex = oldState!.affectedFilesPendingEmitIndex;
}
}
// Update changed files and copy semantic diagnostics if we can
@ -111,7 +137,7 @@ namespace ts {
state.changedFilesSet.set(sourceFilePath, true);
}
else if (canCopySemanticDiagnostics) {
const sourceFile = state.program.getSourceFileByPath(sourceFilePath as Path)!;
const sourceFile = newProgram.getSourceFileByPath(sourceFilePath as Path)!;
if (sourceFile.isDeclarationFile && !copyDeclarationFileDiagnostics) { return; }
if (sourceFile.hasNoDefaultLib && !copyLibFileDiagnostics) { return; }
@ -131,6 +157,38 @@ namespace ts {
return state;
}
/**
* Releases program and other related not needed properties
*/
function releaseCache(state: BuilderProgramState) {
BuilderState.releaseCache(state);
state.program = undefined;
}
/**
* Creates a clone of the state
*/
function cloneBuilderProgramState(state: Readonly<BuilderProgramState>): BuilderProgramState {
const newState = BuilderState.clone(state) as BuilderProgramState;
newState.semanticDiagnosticsPerFile = cloneMapOrUndefined(state.semanticDiagnosticsPerFile);
newState.changedFilesSet = cloneMap(state.changedFilesSet);
newState.affectedFiles = state.affectedFiles;
newState.affectedFilesIndex = state.affectedFilesIndex;
newState.currentChangedFilePath = state.currentChangedFilePath;
newState.currentAffectedFilesSignatures = cloneMapOrUndefined(state.currentAffectedFilesSignatures);
newState.currentAffectedFilesExportedModulesMap = cloneMapOrUndefined(state.currentAffectedFilesExportedModulesMap);
newState.seenAffectedFiles = cloneMapOrUndefined(state.seenAffectedFiles);
newState.cleanedDiagnosticsOfLibFiles = state.cleanedDiagnosticsOfLibFiles;
newState.semanticDiagnosticsFromOldState = cloneMapOrUndefined(state.semanticDiagnosticsFromOldState);
newState.program = state.program;
newState.compilerOptions = state.compilerOptions;
newState.affectedFilesPendingEmit = state.affectedFilesPendingEmit;
newState.affectedFilesPendingEmitIndex = state.affectedFilesPendingEmitIndex;
newState.seenEmittedFiles = cloneMapOrUndefined(state.seenEmittedFiles);
newState.programEmitComplete = state.programEmitComplete;
return newState;
}
/**
     * Verifies that the source file is OK to be used in calls that aren't handled by next
*/
@ -181,10 +239,11 @@ namespace ts {
// With --out or --outFile all outputs go into single file
// so operations are performed directly on program, return program
const compilerOptions = state.program.getCompilerOptions();
const program = Debug.assertDefined(state.program);
const compilerOptions = program.getCompilerOptions();
if (compilerOptions.outFile || compilerOptions.out) {
Debug.assert(!state.semanticDiagnosticsPerFile);
return state.program;
return program;
}
// Get next batch of affected files
@ -192,13 +251,34 @@ namespace ts {
if (state.exportedModulesMap) {
state.currentAffectedFilesExportedModulesMap = state.currentAffectedFilesExportedModulesMap || createMap<BuilderState.ReferencedSet | false>();
}
state.affectedFiles = BuilderState.getFilesAffectedBy(state, state.program, nextKey.value as Path, cancellationToken, computeHash, state.currentAffectedFilesSignatures, state.currentAffectedFilesExportedModulesMap);
state.affectedFiles = BuilderState.getFilesAffectedBy(state, program, nextKey.value as Path, cancellationToken, computeHash, state.currentAffectedFilesSignatures, state.currentAffectedFilesExportedModulesMap);
state.currentChangedFilePath = nextKey.value as Path;
state.affectedFilesIndex = 0;
state.seenAffectedFiles = state.seenAffectedFiles || createMap<true>();
}
}
/**
* Returns next file to be emitted from files that retrieved semantic diagnostics but did not emit yet
*/
function getNextAffectedFilePendingEmit(state: BuilderProgramState): SourceFile | undefined {
const { affectedFilesPendingEmit } = state;
if (affectedFilesPendingEmit) {
const seenEmittedFiles = state.seenEmittedFiles || (state.seenEmittedFiles = createMap());
for (let i = state.affectedFilesPendingEmitIndex!; i < affectedFilesPendingEmit.length; i++) {
const affectedFile = Debug.assertDefined(state.program).getSourceFileByPath(affectedFilesPendingEmit[i]);
if (affectedFile && !seenEmittedFiles.has(affectedFile.path)) {
// emit this file
state.affectedFilesPendingEmitIndex = i;
return affectedFile;
}
}
state.affectedFilesPendingEmit = undefined;
state.affectedFilesPendingEmitIndex = undefined;
}
return undefined;
}
/**
* Remove the semantic diagnostics cached from old state for affected File and the files that are referencing modules that export entities from affected file
*/
@ -211,9 +291,10 @@ namespace ts {
// Clean lib file diagnostics if its all files excluding default files to emit
if (state.allFilesExcludingDefaultLibraryFile === state.affectedFiles && !state.cleanedDiagnosticsOfLibFiles) {
state.cleanedDiagnosticsOfLibFiles = true;
const options = state.program.getCompilerOptions();
if (forEach(state.program.getSourceFiles(), f =>
state.program.isSourceFileDefaultLibrary(f) &&
const program = Debug.assertDefined(state.program);
const options = program.getCompilerOptions();
if (forEach(program.getSourceFiles(), f =>
program.isSourceFileDefaultLibrary(f) &&
!skipTypeChecking(f, options) &&
removeSemanticDiagnosticsOf(state, f.path)
)) {
@ -316,21 +397,27 @@ namespace ts {
* This is called after completing operation on the next affected file.
* The operations here are postponed to ensure that cancellation during the iteration is handled correctly
*/
function doneWithAffectedFile(state: BuilderProgramState, affected: SourceFile | Program) {
function doneWithAffectedFile(state: BuilderProgramState, affected: SourceFile | Program, isPendingEmit?: boolean) {
if (affected === state.program) {
state.changedFilesSet.clear();
state.programEmitComplete = true;
}
else {
state.seenAffectedFiles!.set((affected as SourceFile).path, true);
state.affectedFilesIndex!++;
if (isPendingEmit) {
state.affectedFilesPendingEmitIndex!++;
}
else {
state.affectedFilesIndex!++;
}
}
}
/**
* Returns the result with affected file
*/
function toAffectedFileResult<T>(state: BuilderProgramState, result: T, affected: SourceFile | Program): AffectedFileResult<T> {
doneWithAffectedFile(state, affected);
function toAffectedFileResult<T>(state: BuilderProgramState, result: T, affected: SourceFile | Program, isPendingEmit?: boolean): AffectedFileResult<T> {
doneWithAffectedFile(state, affected, isPendingEmit);
return { result, affected };
}
@ -349,7 +436,7 @@ namespace ts {
}
// Diagnostics werent cached, get them from program, and cache the result
const diagnostics = state.program.getSemanticDiagnostics(sourceFile, cancellationToken);
const diagnostics = Debug.assertDefined(state.program).getSemanticDiagnostics(sourceFile, cancellationToken);
if (state.semanticDiagnosticsPerFile) {
state.semanticDiagnosticsPerFile.set(path, diagnostics);
}
@ -385,7 +472,7 @@ namespace ts {
rootNames: newProgramOrRootNames,
options: hostOrOptions as CompilerOptions,
host: oldProgramOrHost as CompilerHost,
oldProgram: oldProgram && oldProgram.getProgram(),
oldProgram: oldProgram && oldProgram.getProgramOrUndefined(),
configFileParsingDiagnostics,
projectReferences
});
@ -418,28 +505,31 @@ namespace ts {
/**
         * Computes the hash used for signature verification
*/
const computeHash = host.createHash || identity;
const state = createBuilderProgramState(newProgram, getCanonicalFileName, oldState);
const computeHash = host.createHash || generateDjb2Hash;
let state = createBuilderProgramState(newProgram, getCanonicalFileName, oldState);
let backupState: BuilderProgramState | undefined;
        // To ensure that we aren't storing any references to the old program or the new program without state
newProgram = undefined!; // TODO: GH#18217
oldProgram = undefined;
oldState = undefined;
const result: BuilderProgram = {
getState: () => state,
getProgram: () => state.program,
getCompilerOptions: () => state.program.getCompilerOptions(),
getSourceFile: fileName => state.program.getSourceFile(fileName),
getSourceFiles: () => state.program.getSourceFiles(),
getOptionsDiagnostics: cancellationToken => state.program.getOptionsDiagnostics(cancellationToken),
getGlobalDiagnostics: cancellationToken => state.program.getGlobalDiagnostics(cancellationToken),
getConfigFileParsingDiagnostics: () => configFileParsingDiagnostics || state.program.getConfigFileParsingDiagnostics(),
getSyntacticDiagnostics: (sourceFile, cancellationToken) => state.program.getSyntacticDiagnostics(sourceFile, cancellationToken),
getSemanticDiagnostics,
emit,
getAllDependencies: sourceFile => BuilderState.getAllDependencies(state, state.program, sourceFile),
getCurrentDirectory: () => state.program.getCurrentDirectory()
const result = createRedirectedBuilderProgram(state, configFileParsingDiagnostics);
result.getState = () => state;
result.backupState = () => {
Debug.assert(backupState === undefined);
backupState = cloneBuilderProgramState(state);
};
result.restoreState = () => {
state = Debug.assertDefined(backupState);
backupState = undefined;
};
result.getAllDependencies = sourceFile => BuilderState.getAllDependencies(state, Debug.assertDefined(state.program), sourceFile);
result.getSemanticDiagnostics = getSemanticDiagnostics;
result.emit = emit;
result.releaseProgram = () => {
releaseCache(state);
backupState = undefined;
};
if (kind === BuilderProgramKind.SemanticDiagnosticsBuilderProgram) {
@ -460,18 +550,39 @@ namespace ts {
* in that order would be used to write the files
*/
function emitNextAffectedFile(writeFile?: WriteFileCallback, cancellationToken?: CancellationToken, emitOnlyDtsFiles?: boolean, customTransformers?: CustomTransformers): AffectedFileResult<EmitResult> {
const affected = getNextAffectedFile(state, cancellationToken, computeHash);
let affected = getNextAffectedFile(state, cancellationToken, computeHash);
let isPendingEmitFile = false;
if (!affected) {
// Done
return undefined;
if (!state.compilerOptions.out && !state.compilerOptions.outFile) {
affected = getNextAffectedFilePendingEmit(state);
if (!affected) {
return undefined;
}
isPendingEmitFile = true;
}
else {
const program = Debug.assertDefined(state.program);
                // Check if the program uses any prepend project references; if that's the case we can't keep track of the .js files of those, so emit even though there are no changes
if (state.programEmitComplete || !some(program.getProjectReferences(), ref => !!ref.prepend)) {
state.programEmitComplete = true;
return undefined;
}
affected = program;
}
}
// Mark seen emitted files if there are pending files to be emitted
if (state.affectedFilesPendingEmit && state.program !== affected) {
(state.seenEmittedFiles || (state.seenEmittedFiles = createMap())).set((affected as SourceFile).path, true);
}
return toAffectedFileResult(
state,
// When whole program is affected, do emit only once (eg when --out or --outFile is specified)
// Otherwise just affected file
state.program.emit(affected === state.program ? undefined : affected as SourceFile, writeFile || host.writeFile, cancellationToken, emitOnlyDtsFiles, customTransformers),
affected
Debug.assertDefined(state.program).emit(affected === state.program ? undefined : affected as SourceFile, writeFile || host.writeFile, cancellationToken, emitOnlyDtsFiles, customTransformers),
affected,
isPendingEmitFile
);
}
@ -511,7 +622,7 @@ namespace ts {
};
}
}
return state.program.emit(targetSourceFile, writeFile || host.writeFile, cancellationToken, emitOnlyDtsFiles, customTransformers);
return Debug.assertDefined(state.program).emit(targetSourceFile, writeFile || host.writeFile, cancellationToken, emitOnlyDtsFiles, customTransformers);
}
/**
@ -559,33 +670,74 @@ namespace ts {
*/
function getSemanticDiagnostics(sourceFile?: SourceFile, cancellationToken?: CancellationToken): ReadonlyArray<Diagnostic> {
assertSourceFileOkWithoutNextAffectedCall(state, sourceFile);
const compilerOptions = state.program.getCompilerOptions();
const compilerOptions = Debug.assertDefined(state.program).getCompilerOptions();
if (compilerOptions.outFile || compilerOptions.out) {
Debug.assert(!state.semanticDiagnosticsPerFile);
            // We don't need to cache the diagnostics; just return them from the program
return state.program.getSemanticDiagnostics(sourceFile, cancellationToken);
return Debug.assertDefined(state.program).getSemanticDiagnostics(sourceFile, cancellationToken);
}
if (sourceFile) {
return getSemanticDiagnosticsOfFile(state, sourceFile, cancellationToken);
}
if (kind === BuilderProgramKind.SemanticDiagnosticsBuilderProgram) {
// When semantic builder asks for diagnostics of the whole program,
// ensure that all the affected files are handled
let affected: SourceFile | Program | undefined;
while (affected = getNextAffectedFile(state, cancellationToken, computeHash)) {
doneWithAffectedFile(state, affected);
// When semantic builder asks for diagnostics of the whole program,
// ensure that all the affected files are handled
let affected: SourceFile | Program | undefined;
let affectedFilesPendingEmit: Path[] | undefined;
while (affected = getNextAffectedFile(state, cancellationToken, computeHash)) {
if (affected !== state.program && kind === BuilderProgramKind.EmitAndSemanticDiagnosticsBuilderProgram) {
(affectedFilesPendingEmit || (affectedFilesPendingEmit = [])).push((affected as SourceFile).path);
}
doneWithAffectedFile(state, affected);
}
// In case of emit builder, cache the files to be emitted
if (affectedFilesPendingEmit) {
state.affectedFilesPendingEmit = concatenate(state.affectedFilesPendingEmit, affectedFilesPendingEmit);
// affectedFilesPendingEmitIndex === undefined
                // - means state.affectedFilesPendingEmit was undefined before adding the current affected files,
                //   so start iterating from 0 since the array now consists only of the newly added files
                // - else, continue iterating from the existing index; the current set is appended to the existing files
if (state.affectedFilesPendingEmitIndex === undefined) {
state.affectedFilesPendingEmitIndex = 0;
}
}
let diagnostics: Diagnostic[] | undefined;
for (const sourceFile of state.program.getSourceFiles()) {
for (const sourceFile of Debug.assertDefined(state.program).getSourceFiles()) {
diagnostics = addRange(diagnostics, getSemanticDiagnosticsOfFile(state, sourceFile, cancellationToken));
}
return diagnostics || emptyArray;
}
}
export function createRedirectedBuilderProgram(state: { program: Program | undefined; compilerOptions: CompilerOptions; }, configFileParsingDiagnostics: ReadonlyArray<Diagnostic>): BuilderProgram {
return {
getState: notImplemented,
backupState: noop,
restoreState: noop,
getProgram,
getProgramOrUndefined: () => state.program,
releaseProgram: () => state.program = undefined,
getCompilerOptions: () => state.compilerOptions,
getSourceFile: fileName => getProgram().getSourceFile(fileName),
getSourceFiles: () => getProgram().getSourceFiles(),
getOptionsDiagnostics: cancellationToken => getProgram().getOptionsDiagnostics(cancellationToken),
getGlobalDiagnostics: cancellationToken => getProgram().getGlobalDiagnostics(cancellationToken),
getConfigFileParsingDiagnostics: () => configFileParsingDiagnostics,
getSyntacticDiagnostics: (sourceFile, cancellationToken) => getProgram().getSyntacticDiagnostics(sourceFile, cancellationToken),
getDeclarationDiagnostics: (sourceFile, cancellationToken) => getProgram().getDeclarationDiagnostics(sourceFile, cancellationToken),
getSemanticDiagnostics: (sourceFile, cancellationToken) => getProgram().getSemanticDiagnostics(sourceFile, cancellationToken),
emit: (sourceFile, writeFile, cancellationToken, emitOnlyDts, customTransformers) => getProgram().emit(sourceFile, writeFile, cancellationToken, emitOnlyDts, customTransformers),
getAllDependencies: notImplemented,
getCurrentDirectory: () => getProgram().getCurrentDirectory()
};
function getProgram() {
return Debug.assertDefined(state.program);
}
}
}
namespace ts {
@ -613,10 +765,24 @@ namespace ts {
export interface BuilderProgram {
/*@internal*/
getState(): BuilderProgramState;
/*@internal*/
backupState(): void;
/*@internal*/
restoreState(): void;
/**
* Returns current program
*/
getProgram(): Program;
/**
* Returns current program that could be undefined if the program was released
*/
/*@internal*/
getProgramOrUndefined(): Program | undefined;
/**
* Releases reference to the program, making all the other operations that need program to fail.
*/
/*@internal*/
releaseProgram(): void;
/**
* Get compiler options of the program
*/
@ -645,10 +811,15 @@ namespace ts {
* Get the syntax diagnostics, for all source files if source file is not supplied
*/
getSyntacticDiagnostics(sourceFile?: SourceFile, cancellationToken?: CancellationToken): ReadonlyArray<Diagnostic>;
/**
* Get the declaration diagnostics, for all source files if source file is not supplied
*/
getDeclarationDiagnostics(sourceFile?: SourceFile, cancellationToken?: CancellationToken): ReadonlyArray<DiagnosticWithLocation>;
/**
* Get all the dependencies of the file
*/
getAllDependencies(sourceFile: SourceFile): ReadonlyArray<string>;
/**
* Gets the semantic diagnostics from the program corresponding to this state of file (if provided) or whole program
* The semantic diagnostics are cached and managed here
@ -725,22 +896,7 @@ namespace ts {
export function createAbstractBuilder(newProgram: Program, host: BuilderProgramHost, oldProgram?: BuilderProgram, configFileParsingDiagnostics?: ReadonlyArray<Diagnostic>): BuilderProgram;
export function createAbstractBuilder(rootNames: ReadonlyArray<string> | undefined, options: CompilerOptions | undefined, host?: CompilerHost, oldProgram?: BuilderProgram, configFileParsingDiagnostics?: ReadonlyArray<Diagnostic>, projectReferences?: ReadonlyArray<ProjectReference>): BuilderProgram;
export function createAbstractBuilder(newProgramOrRootNames: Program | ReadonlyArray<string> | undefined, hostOrOptions: BuilderProgramHost | CompilerOptions | undefined, oldProgramOrHost?: CompilerHost | BuilderProgram, configFileParsingDiagnosticsOrOldProgram?: ReadonlyArray<Diagnostic> | BuilderProgram, configFileParsingDiagnostics?: ReadonlyArray<Diagnostic>, projectReferences?: ReadonlyArray<ProjectReference>): BuilderProgram {
const { newProgram: program } = getBuilderCreationParameters(newProgramOrRootNames, hostOrOptions, oldProgramOrHost, configFileParsingDiagnosticsOrOldProgram, configFileParsingDiagnostics, projectReferences);
return {
// Only return program, all other methods are not implemented
getProgram: () => program,
getState: notImplemented,
getCompilerOptions: notImplemented,
getSourceFile: notImplemented,
getSourceFiles: notImplemented,
getOptionsDiagnostics: notImplemented,
getGlobalDiagnostics: notImplemented,
getConfigFileParsingDiagnostics: notImplemented,
getSyntacticDiagnostics: notImplemented,
getSemanticDiagnostics: notImplemented,
emit: notImplemented,
getAllDependencies: notImplemented,
getCurrentDirectory: notImplemented
};
const { newProgram, configFileParsingDiagnostics: newConfigFileParsingDiagnostics } = getBuilderCreationParameters(newProgramOrRootNames, hostOrOptions, oldProgramOrHost, configFileParsingDiagnosticsOrOldProgram, configFileParsingDiagnostics, projectReferences);
return createRedirectedBuilderProgram({ program: newProgram, compilerOptions: newProgram.getCompilerOptions() }, newConfigFileParsingDiagnostics);
}
}

View File

@ -50,11 +50,15 @@ namespace ts {
/**
* Cache of all files excluding default library file for the current program
*/
allFilesExcludingDefaultLibraryFile: ReadonlyArray<SourceFile> | undefined;
allFilesExcludingDefaultLibraryFile?: ReadonlyArray<SourceFile>;
/**
* Cache of all the file names
*/
allFileNames: ReadonlyArray<string> | undefined;
allFileNames?: ReadonlyArray<string>;
}
export function cloneMapOrUndefined<T>(map: ReadonlyMap<T> | undefined) {
return map ? cloneMap(map) : undefined;
}
}
@ -230,9 +234,32 @@ namespace ts.BuilderState {
fileInfos,
referencedMap,
exportedModulesMap,
hasCalledUpdateShapeSignature,
allFilesExcludingDefaultLibraryFile: undefined,
allFileNames: undefined
hasCalledUpdateShapeSignature
};
}
/**
* Releases needed properties
*/
export function releaseCache(state: BuilderState) {
state.allFilesExcludingDefaultLibraryFile = undefined;
state.allFileNames = undefined;
}
/**
* Creates a clone of the state
*/
export function clone(state: Readonly<BuilderState>): BuilderState {
const fileInfos = createMap<FileInfo>();
state.fileInfos.forEach((value, key) => {
fileInfos.set(key, { ...value });
});
        // Don't need to back up the allFiles info since it's just a cache anyway
return {
fileInfos,
referencedMap: cloneMapOrUndefined(state.referencedMap),
exportedModulesMap: cloneMapOrUndefined(state.exportedModulesMap),
hasCalledUpdateShapeSignature: cloneMap(state.hasCalledUpdateShapeSignature),
};
}
@ -505,14 +532,14 @@ namespace ts.BuilderState {
// Start with the paths this file was referenced by
seenFileNamesMap.set(sourceFileWithUpdatedShape.path, sourceFileWithUpdatedShape);
const queue = getReferencedByPaths(state, sourceFileWithUpdatedShape.path);
const queue = getReferencedByPaths(state, sourceFileWithUpdatedShape.resolvedPath);
while (queue.length > 0) {
const currentPath = queue.pop()!;
if (!seenFileNamesMap.has(currentPath)) {
const currentSourceFile = programOfThisState.getSourceFileByPath(currentPath)!;
seenFileNamesMap.set(currentPath, currentSourceFile);
if (currentSourceFile && updateShapeSignature(state, programOfThisState, currentSourceFile, cacheToUpdateSignature, cancellationToken, computeHash!, exportedModulesMapCache)) { // TODO: GH#18217
queue.push(...getReferencedByPaths(state, currentPath));
queue.push(...getReferencedByPaths(state, currentSourceFile.resolvedPath));
}
}
}

View File

@ -67,6 +67,7 @@ namespace ts {
let enumCount = 0;
let instantiationDepth = 0;
let constraintDepth = 0;
let currentNode: Node | undefined;
const emptySymbols = createSymbolTable();
const identityMapper: (type: Type) => Type = identity;
@ -4858,7 +4859,7 @@ namespace ts {
function getLiteralPropertyNameText(name: PropertyName) {
const type = getLiteralTypeFromPropertyName(name);
return type.flags & (TypeFlags.StringLiteral | TypeFlags.NumberLiteral) ? "" + (<LiteralType>type).value : undefined;
return type.flags & (TypeFlags.StringLiteral | TypeFlags.NumberLiteral) ? "" + (<StringLiteralType | NumberLiteralType>type).value : undefined;
}
/** Return the inferred type for a binding element */
@ -5283,17 +5284,18 @@ namespace ts {
let objectFlags = ObjectFlags.ObjectLiteral;
forEach(pattern.elements, e => {
const name = e.propertyName || <Identifier>e.name;
if (isComputedNonLiteralName(name)) {
// do not include computed properties in the implied type
objectFlags |= ObjectFlags.ObjectLiteralPatternWithComputedProperties;
return;
}
if (e.dotDotDotToken) {
stringIndexInfo = createIndexInfo(anyType, /*isReadonly*/ false);
return;
}
const text = getTextOfPropertyName(name);
const exprType = getLiteralTypeFromPropertyName(name);
if (!isTypeUsableAsPropertyName(exprType)) {
// do not include computed properties in the implied type
objectFlags |= ObjectFlags.ObjectLiteralPatternWithComputedProperties;
return;
}
const text = getPropertyNameFromType(exprType);
const flags = SymbolFlags.Property | (e.initializer ? SymbolFlags.Optional : 0);
const symbol = createSymbol(flags, text);
symbol.type = getTypeFromBindingElement(e, includePatternInType, reportErrors);
@ -5434,6 +5436,10 @@ namespace ts {
// Handle variable, parameter or property
if (!pushTypeResolution(symbol, TypeSystemPropertyName.Type)) {
// Symbol is property of some kind that is merged with something - should use `getTypeOfFuncClassEnumModule` and not `getTypeOfVariableOrParameterOrProperty`
if (symbol.flags & SymbolFlags.ValueModule) {
return getTypeOfFuncClassEnumModule(symbol);
}
return errorType;
}
let type: Type | undefined;
@ -5489,6 +5495,10 @@ namespace ts {
}
if (!popTypeResolution()) {
// Symbol is property of some kind that is merged with something - should use `getTypeOfFuncClassEnumModule` and not `getTypeOfVariableOrParameterOrProperty`
if (symbol.flags & SymbolFlags.ValueModule) {
return getTypeOfFuncClassEnumModule(symbol);
}
type = reportCircularityError(symbol);
}
return type;
@ -6394,9 +6404,9 @@ namespace ts {
}
/**
* Indicates whether a type can be used as a late-bound name.
* Indicates whether a type can be used as a property name.
*/
function isTypeUsableAsLateBoundName(type: Type): type is LiteralType | UniqueESSymbolType {
function isTypeUsableAsPropertyName(type: Type): type is StringLiteralType | NumberLiteralType | UniqueESSymbolType {
return !!(type.flags & TypeFlags.StringOrNumberLiteralOrUnique);
}
@ -6411,7 +6421,7 @@ namespace ts {
function isLateBindableName(node: DeclarationName): node is LateBoundName {
return isComputedPropertyName(node)
&& isEntityNameExpression(node.expression)
&& isTypeUsableAsLateBoundName(checkComputedPropertyName(node));
&& isTypeUsableAsPropertyName(checkComputedPropertyName(node));
}
function isLateBoundName(name: __String): boolean {
@ -6443,14 +6453,14 @@ namespace ts {
}
/**
* Gets the symbolic name for a late-bound member from its type.
* Gets the symbolic name for a member from its type.
*/
function getLateBoundNameFromType(type: LiteralType | UniqueESSymbolType): __String {
function getPropertyNameFromType(type: StringLiteralType | NumberLiteralType | UniqueESSymbolType): __String {
if (type.flags & TypeFlags.UniqueESSymbol) {
return `__@${type.symbol.escapedName}@${getSymbolId(type.symbol)}` as __String;
return (<UniqueESSymbolType>type).escapedName;
}
if (type.flags & (TypeFlags.StringLiteral | TypeFlags.NumberLiteral)) {
return escapeLeadingUnderscores("" + (<LiteralType>type).value);
return escapeLeadingUnderscores("" + (<StringLiteralType | NumberLiteralType>type).value);
}
return Debug.fail();
}
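        // Illustrative examples (not part of the compiler source) of the property-name kinds this helper
        // maps to an __String: string literals, numeric literals, and unique symbols.
        //
        //     const s: unique symbol = Symbol();
        //     interface I {
        //         "str": string;    // string literal name
        //         42: boolean;      // numeric literal name
        //         [s]: number;      // unique symbol name
        //     }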
@ -6513,8 +6523,8 @@ namespace ts {
// fall back to the early-bound name of this member.
links.resolvedSymbol = decl.symbol;
const type = checkComputedPropertyName(decl.name);
if (isTypeUsableAsLateBoundName(type)) {
const memberName = getLateBoundNameFromType(type);
if (isTypeUsableAsPropertyName(type)) {
const memberName = getPropertyNameFromType(type);
const symbolFlags = decl.symbol.flags;
// Get or add a late-bound symbol for the member. This allows us to merge late-bound accessor declarations.
@ -6529,9 +6539,9 @@ namespace ts {
// If we have an existing early-bound member, combine its declarations so that we can
// report an error at each declaration.
const declarations = earlySymbol ? concatenate(earlySymbol.declarations, lateSymbol.declarations) : lateSymbol.declarations;
const name = declarationNameToString(decl.name);
forEach(declarations, declaration => error(getNameOfDeclaration(declaration) || declaration, Diagnostics.Duplicate_declaration_0, name));
error(decl.name || decl, Diagnostics.Duplicate_declaration_0, name);
const name = !(type.flags & TypeFlags.UniqueESSymbol) && unescapeLeadingUnderscores(memberName) || declarationNameToString(decl.name);
forEach(declarations, declaration => error(getNameOfDeclaration(declaration) || declaration, Diagnostics.Property_0_was_also_declared_here, name));
error(decl.name || decl, Diagnostics.Duplicate_property_0, name);
lateSymbol = createSymbol(SymbolFlags.None, memberName, CheckFlags.Late);
}
lateSymbol.nameType = type;
@ -7163,8 +7173,8 @@ namespace ts {
const propType = instantiateType(templateType, templateMapper);
// If the current iteration type constituent is a string literal type, create a property.
// Otherwise, for type string create a string index signature.
if (t.flags & TypeFlags.StringOrNumberLiteralOrUnique) {
const propName = getLateBoundNameFromType(t as LiteralType);
if (isTypeUsableAsPropertyName(t)) {
const propName = getPropertyNameFromType(t);
const modifiersProp = getPropertyOfType(modifiersType, propName);
const isOptional = !!(templateModifiers & MappedTypeModifiers.IncludeOptional ||
!(templateModifiers & MappedTypeModifiers.ExcludeOptional) && modifiersProp && modifiersProp.flags & SymbolFlags.Optional);
@ -7349,7 +7359,8 @@ namespace ts {
function isTypeInvalidDueToUnionDiscriminant(contextualType: Type, obj: ObjectLiteralExpression | JsxAttributes): boolean {
const list = obj.properties as NodeArray<ObjectLiteralElementLike | JsxAttributeLike>;
return list.some(property => {
const name = property.name && getTextOfPropertyName(property.name);
const nameType = property.name && getLiteralTypeFromPropertyName(property.name);
const name = nameType && isTypeUsableAsPropertyName(nameType) ? getPropertyNameFromType(nameType) : undefined;
const expected = name === undefined ? undefined : getTypeOfPropertyOfType(contextualType, name);
return !!expected && isLiteralType(expected) && !isTypeIdenticalTo(getTypeOfNode(property), expected);
});
@ -7515,6 +7526,7 @@ namespace ts {
// very high likelihood we're dealing with an infinite generic type that perpetually generates
// new type identities as we descend into it. We stop the recursion here and mark this type
// and the outer types as having circular constraints.
error(currentNode, Diagnostics.Type_instantiation_is_excessively_deep_and_possibly_infinite);
nonTerminating = true;
return t.immediateBaseConstraint = noConstraintType;
}
@ -7736,7 +7748,13 @@ namespace ts {
result.containingType = containingType;
if (!hasNonUniformValueDeclaration && firstValueDeclaration) {
result.valueDeclaration = firstValueDeclaration;
// Inherit information about parent type.
if (firstValueDeclaration.symbol.parent) {
result.parent = firstValueDeclaration.symbol.parent;
}
}
result.declarations = declarations!;
result.nameType = nameType;
result.type = isUnion ? getUnionType(propTypes) : getIntersectionType(propTypes);
@ -8693,21 +8711,17 @@ namespace ts {
* the type of this reference is just the type of the value we resolved to.
*/
function getJSDocTypeReference(node: NodeWithTypeArguments, symbol: Symbol, typeArguments: Type[] | undefined): Type | undefined {
if (!pushTypeResolution(symbol, TypeSystemPropertyName.JSDocTypeReference)) {
return errorType;
}
const assignedType = getAssignedClassType(symbol);
const valueType = getTypeOfSymbol(symbol);
const referenceType = valueType.symbol && valueType.symbol !== symbol && !isInferredClassType(valueType) && getTypeReferenceTypeWorker(node, valueType.symbol, typeArguments);
if (!popTypeResolution()) {
getSymbolLinks(symbol).resolvedJSDocType = errorType;
error(node, Diagnostics.JSDoc_type_0_circularly_references_itself, symbolToString(symbol));
return errorType;
}
if (referenceType || assignedType) {
// TODO: GH#18217 (should the `|| assignedType` be at a lower precedence?)
const type = (referenceType && assignedType ? getIntersectionType([assignedType, referenceType]) : referenceType || assignedType)!;
return getSymbolLinks(symbol).resolvedJSDocType = type;
// In the case of an assignment of a function expression (binary expressions, variable declarations, etc.), we will get the
// correct instance type for the symbol on the LHS by finding the type for RHS. For example if we want to get the type of the symbol `foo`:
// var foo = function() {}
// We will find the static type of the assigned anonymous function.
const staticType = getTypeOfSymbol(symbol);
const instanceType =
staticType.symbol &&
staticType.symbol !== symbol && // Make sure this is an assignment-like expression by checking that symbol -> type -> symbol doesn't round-trip.
getTypeReferenceTypeWorker(node, staticType.symbol, typeArguments); // Get the instance type of the RHS symbol.
if (instanceType) {
return getSymbolLinks(symbol).resolvedJSDocType = instanceType;
}
}
@ -8728,8 +8742,11 @@ namespace ts {
if (symbol.flags & SymbolFlags.Function &&
isJSDocTypeReference(node) &&
(symbol.members || getJSDocClassTag(symbol.valueDeclaration))) {
return getInferredClassType(symbol);
isJSConstructor(symbol.valueDeclaration)) {
const resolved = resolveStructuredTypeMembers(<ObjectType>getTypeOfSymbol(symbol));
if (resolved.callSignatures.length === 1) {
return getReturnTypeOfSignature(resolved.callSignatures[0]);
}
}
}
@ -9225,18 +9242,6 @@ namespace ts {
return includes;
}
function isSubtypeOfAny(source: Type, targets: ReadonlyArray<Type>): boolean {
for (const target of targets) {
if (source !== target && isTypeSubtypeOf(source, target) && (
!(getObjectFlags(getTargetType(source)) & ObjectFlags.Class) ||
!(getObjectFlags(getTargetType(target)) & ObjectFlags.Class) ||
isTypeDerivedFrom(source, target))) {
return true;
}
}
return false;
}
function isSetOfLiteralsFromSameEnum(types: ReadonlyArray<Type>): boolean {
const first = types[0];
if (first.flags & TypeFlags.EnumLiteral) {
@ -9253,17 +9258,42 @@ namespace ts {
return false;
}
function removeSubtypes(types: Type[]) {
if (types.length === 0 || isSetOfLiteralsFromSameEnum(types)) {
return;
function removeSubtypes(types: Type[], primitivesOnly: boolean): boolean {
const len = types.length;
if (len === 0 || isSetOfLiteralsFromSameEnum(types)) {
return true;
}
let i = types.length;
let i = len;
let count = 0;
while (i > 0) {
i--;
if (isSubtypeOfAny(types[i], types)) {
orderedRemoveItemAt(types, i);
const source = types[i];
for (const target of types) {
if (source !== target) {
if (count === 10000) {
// After 10000 subtype checks we estimate the remaining amount of work by assuming the
// same ratio of checks to removals. If the estimated number of remaining type checks is
// greater than an upper limit, we deem the union type too complex to represent. The
// upper limit is 25M for unions of primitives only, and 1M otherwise. This, for example,
// caps union types at 5000 unique literal types and 1000 unique object types.
const estimatedCount = (count / (len - i)) * len;
if (estimatedCount > (primitivesOnly ? 25000000 : 1000000)) {
error(currentNode, Diagnostics.Expression_produces_a_union_type_that_is_too_complex_to_represent);
return false;
}
}
count++;
if (isTypeSubtypeOf(source, target) && (
!(getObjectFlags(getTargetType(source)) & ObjectFlags.Class) ||
!(getObjectFlags(getTargetType(target)) & ObjectFlags.Class) ||
isTypeDerivedFrom(source, target))) {
orderedRemoveItemAt(types, i);
break;
}
}
}
}
return true;
}
function removeRedundantLiteralTypes(types: Type[], includes: TypeFlags) {
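For reference, a quick arithmetic sketch of the budget above (illustrative only; the helper below is not part of this change set). With no removals, each completed outer iteration contributes roughly len inner subtype checks, so once count reaches 10000 the projection (count / (len - i)) * len is approximately len * len, which is how the 25M and 1M budgets become the 5000-literal and 1000-object caps.

// Illustrative sketch only -- reproduces the estimate arithmetic, not the checker logic.
function estimatedTotalChecks(len: number, outerIterationsDone: number): number {
    const count = outerIterationsDone * len;     // checks performed so far (worst case: no removals)
    return (count / outerIterationsDone) * len;  // simplifies to len * len
}
const primitivesCap = estimatedTotalChecks(5000, 2);  // 25_000_000 -- right at the primitives-only limit
const generalCap = estimatedTotalChecks(1000, 10);    // 1_000_000  -- right at the general limit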
@ -9310,7 +9340,9 @@ namespace ts {
}
break;
case UnionReduction.Subtype:
removeSubtypes(typeSet);
if (!removeSubtypes(typeSet, !(includes & TypeFlags.StructuredOrInstantiable))) {
return errorType;
}
break;
}
if (typeSet.length === 0) {
@ -9718,8 +9750,8 @@ namespace ts {
function getPropertyTypeForIndexType(objectType: Type, indexType: Type, accessNode: ElementAccessExpression | IndexedAccessTypeNode | PropertyName | BindingName | SyntheticExpression | undefined, cacheSymbol: boolean, missingType: Type) {
const accessExpression = accessNode && accessNode.kind === SyntaxKind.ElementAccessExpression ? accessNode : undefined;
const propName = isTypeUsableAsLateBoundName(indexType) ?
getLateBoundNameFromType(indexType) :
const propName = isTypeUsableAsPropertyName(indexType) ?
getPropertyNameFromType(indexType) :
accessExpression && checkThatExpressionIsProperSymbolReference(accessExpression.argumentExpression, indexType, /*reportError*/ false) ?
getPropertyNameForKnownSymbolName(idText((<PropertyAccessExpression>accessExpression.argumentExpression).name)) :
accessNode && isPropertyName(accessNode) ?
@ -9810,7 +9842,7 @@ namespace ts {
if (accessNode) {
const indexNode = getIndexNodeForAccessExpression(accessNode);
if (indexType.flags & (TypeFlags.StringLiteral | TypeFlags.NumberLiteral)) {
error(indexNode, Diagnostics.Property_0_does_not_exist_on_type_1, "" + (<LiteralType>indexType).value, typeToString(objectType));
error(indexNode, Diagnostics.Property_0_does_not_exist_on_type_1, "" + (<StringLiteralType | NumberLiteralType>indexType).value, typeToString(objectType));
}
else if (indexType.flags & (TypeFlags.String | TypeFlags.Number)) {
error(indexNode, Diagnostics.Type_0_has_no_matching_index_signature_for_type_1, typeToString(objectType), typeToString(indexType));
@ -10405,6 +10437,7 @@ namespace ts {
function createUniqueESSymbolType(symbol: Symbol) {
const type = <UniqueESSymbolType>createType(TypeFlags.UniqueESSymbol);
type.symbol = symbol;
type.escapedName = `__@${type.symbol.escapedName}@${getSymbolId(type.symbol)}` as __String;
return type;
}
@ -10625,7 +10658,11 @@ namespace ts {
}
function getRestrictiveTypeParameter(tp: TypeParameter) {
return !tp.constraint ? tp : tp.restrictiveInstantiation || (tp.restrictiveInstantiation = createTypeParameter(tp.symbol));
return tp.constraint === unknownType ? tp : tp.restrictiveInstantiation || (
tp.restrictiveInstantiation = createTypeParameter(tp.symbol),
(tp.restrictiveInstantiation as TypeParameter).constraint = unknownType,
tp.restrictiveInstantiation
);
}
function restrictiveMapper(type: Type) {
@ -10933,6 +10970,7 @@ namespace ts {
// We have reached 50 recursive type instantiations and there is a very high likelihood we're dealing
// with a combination of infinite generic types that perpetually generate new type identities. We stop
// the recursion here by yielding the error type.
error(currentNode, Diagnostics.Type_instantiation_is_excessively_deep_and_possibly_infinite);
return errorType;
}
instantiationDepth++;
@ -11198,7 +11236,7 @@ namespace ts {
case SyntaxKind.ArrayLiteralExpression:
return elaborateArrayLiteral(node as ArrayLiteralExpression, source, target, relation);
case SyntaxKind.JsxAttributes:
return elaborateJsxAttributes(node as JsxAttributes, source, target, relation);
return elaborateJsxComponents(node as JsxAttributes, source, target, relation);
case SyntaxKind.ArrowFunction:
return elaborateArrowFunction(node as ArrowFunction, source, target, relation);
}
@ -11297,7 +11335,7 @@ namespace ts {
}
if (resultObj.error) {
const reportedDiag = resultObj.error;
const propertyName = isTypeUsableAsLateBoundName(nameType) ? getLateBoundNameFromType(nameType) : undefined;
const propertyName = isTypeUsableAsPropertyName(nameType) ? getPropertyNameFromType(nameType) : undefined;
const targetProp = propertyName !== undefined ? getPropertyOfType(target, propertyName) : undefined;
let issuedElaboration = false;
@ -11338,8 +11376,113 @@ namespace ts {
}
}
function elaborateJsxAttributes(node: JsxAttributes, source: Type, target: Type, relation: Map<RelationComparisonResult>) {
return elaborateElementwise(generateJsxAttributes(node), source, target, relation);
function *generateJsxChildren(node: JsxElement, getInvalidTextDiagnostic: () => DiagnosticMessage): ElaborationIterator {
if (!length(node.children)) return;
let memberOffset = 0;
for (let i = 0; i < node.children.length; i++) {
const child = node.children[i];
const nameType = getLiteralType(i - memberOffset);
const elem = getElaborationElementForJsxChild(child, nameType, getInvalidTextDiagnostic);
if (elem) {
yield elem;
}
else {
memberOffset++;
}
}
}
function getElaborationElementForJsxChild(child: JsxChild, nameType: LiteralType, getInvalidTextDiagnostic: () => DiagnosticMessage) {
switch (child.kind) {
case SyntaxKind.JsxExpression:
// child is of the type of the expression
return { errorNode: child, innerExpression: child.expression, nameType };
case SyntaxKind.JsxText:
if (child.containsOnlyWhiteSpaces) {
break; // Whitespace-only JSX text isn't real JSX text
}
// child is a string
return { errorNode: child, innerExpression: undefined, nameType, errorMessage: getInvalidTextDiagnostic() };
case SyntaxKind.JsxElement:
case SyntaxKind.JsxSelfClosingElement:
case SyntaxKind.JsxFragment:
// child is of type JSX.Element
return { errorNode: child, innerExpression: child, nameType };
default:
return Debug.assertNever(child, "Found invalid jsx child");
}
}
function elaborateJsxComponents(node: JsxAttributes, source: Type, target: Type, relation: Map<RelationComparisonResult>) {
let result = elaborateElementwise(generateJsxAttributes(node), source, target, relation);
let invalidTextDiagnostic: DiagnosticMessage | undefined;
if (isJsxOpeningElement(node.parent) && isJsxElement(node.parent.parent)) {
const containingElement = node.parent.parent;
const childPropName = getJsxElementChildrenPropertyName(getJsxNamespaceAt(node));
const childrenPropName = childPropName === undefined ? "children" : unescapeLeadingUnderscores(childPropName);
const childrenNameType = getLiteralType(childrenPropName);
const childrenTargetType = getIndexedAccessType(target, childrenNameType);
const validChildren = filter(containingElement.children, i => !isJsxText(i) || !i.containsOnlyWhiteSpaces);
if (!length(validChildren)) {
return result;
}
const moreThanOneRealChildren = length(validChildren) > 1;
const arrayLikeTargetParts = filterType(childrenTargetType, isArrayOrTupleLikeType);
const nonArrayLikeTargetParts = filterType(childrenTargetType, t => !isArrayOrTupleLikeType(t));
if (moreThanOneRealChildren) {
if (arrayLikeTargetParts !== neverType) {
const realSource = createTupleType(checkJsxChildren(containingElement, CheckMode.Normal));
result = elaborateElementwise(generateJsxChildren(containingElement, getInvalidTextualChildDiagnostic), realSource, arrayLikeTargetParts, relation) || result;
}
else if (!isTypeRelatedTo(getIndexedAccessType(source, childrenNameType), childrenTargetType, relation)) {
// arity mismatch
result = true;
error(
containingElement.openingElement.tagName,
Diagnostics.This_JSX_tag_s_0_prop_expects_a_single_child_of_type_1_but_multiple_children_were_provided,
childrenPropName,
typeToString(childrenTargetType)
);
}
}
else {
if (nonArrayLikeTargetParts !== neverType) {
const child = validChildren[0];
const elem = getElaborationElementForJsxChild(child, childrenNameType, getInvalidTextualChildDiagnostic);
if (elem) {
result = elaborateElementwise(
(function*() { yield elem; })(),
source,
target,
relation
) || result;
}
}
else if (!isTypeRelatedTo(getIndexedAccessType(source, childrenNameType), childrenTargetType, relation)) {
// arity mismatch
result = true;
error(
containingElement.openingElement.tagName,
Diagnostics.This_JSX_tag_s_0_prop_expects_type_1_which_requires_multiple_children_but_only_a_single_child_was_provided,
childrenPropName,
typeToString(childrenTargetType)
);
}
}
}
return result;
function getInvalidTextualChildDiagnostic() {
if (!invalidTextDiagnostic) {
const tagNameText = getTextOfNode(node.parent.tagName);
const childPropName = getJsxElementChildrenPropertyName(getJsxNamespaceAt(node));
const childrenPropName = childPropName === undefined ? "children" : unescapeLeadingUnderscores(childPropName);
const childrenTargetType = getIndexedAccessType(target, getLiteralType(childrenPropName));
const diagnostic = Diagnostics._0_components_don_t_accept_text_as_child_elements_Text_in_JSX_has_the_type_string_but_the_expected_type_of_1_is_2;
invalidTextDiagnostic = { ...diagnostic, key: "!!ALREADY FORMATTED!!", message: formatMessage(/*_dummy*/ undefined, diagnostic, tagNameText, childrenPropName, typeToString(childrenTargetType)) };
}
return invalidTextDiagnostic;
}
}
function *generateLimitedTupleElements(node: ArrayLiteralExpression, target: Type): ElaborationIterator {
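A rough usage sketch of the new children elaboration (the components and JSX setup below are assumed, e.g. a React-style JSX namespace; they are not taken from this change):

// Sketch only: 'Card' declares a single-element 'children' prop, so providing two
// children now produces the dedicated "expects a single child" message (2746), and
// textual children against a non-string 'children' type get the "don't accept text
// as child elements" message (2747), instead of a generic assignability error.
declare function Card(props: { children: JSX.Element }): JSX.Element;
declare function Avatar(props: {}): JSX.Element;
const one = <Card><Avatar /></Card>;            // ok
const two = <Card><Avatar /><Avatar /></Card>;  // error 2746: 'children' expects a single child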
@ -11651,11 +11794,11 @@ namespace ts {
if (s & TypeFlags.StringLike && t & TypeFlags.String) return true;
if (s & TypeFlags.StringLiteral && s & TypeFlags.EnumLiteral &&
t & TypeFlags.StringLiteral && !(t & TypeFlags.EnumLiteral) &&
(<LiteralType>source).value === (<LiteralType>target).value) return true;
(<StringLiteralType>source).value === (<StringLiteralType>target).value) return true;
if (s & TypeFlags.NumberLike && t & TypeFlags.Number) return true;
if (s & TypeFlags.NumberLiteral && s & TypeFlags.EnumLiteral &&
t & TypeFlags.NumberLiteral && !(t & TypeFlags.EnumLiteral) &&
(<LiteralType>source).value === (<LiteralType>target).value) return true;
(<NumberLiteralType>source).value === (<NumberLiteralType>target).value) return true;
if (s & TypeFlags.BigIntLike && t & TypeFlags.BigInt) return true;
if (s & TypeFlags.BooleanLike && t & TypeFlags.Boolean) return true;
if (s & TypeFlags.ESSymbolLike && t & TypeFlags.ESSymbol) return true;
@ -11669,7 +11812,6 @@ namespace ts {
if (s & TypeFlags.Undefined && (!strictNullChecks || t & (TypeFlags.Undefined | TypeFlags.Void))) return true;
if (s & TypeFlags.Null && (!strictNullChecks || t & TypeFlags.Null)) return true;
if (s & TypeFlags.Object && t & TypeFlags.NonPrimitive) return true;
if (s & TypeFlags.UniqueESSymbol || t & TypeFlags.UniqueESSymbol) return false;
if (relation === assignableRelation || relation === comparableRelation) {
if (s & TypeFlags.Any) return true;
// Type number or any numeric literal type is assignable to any numeric enum type or any
@ -12569,7 +12711,7 @@ namespace ts {
}
else {
// An empty object type is related to any mapped type that includes a '?' modifier.
if (isPartialMappedType(target) && isEmptyObjectType(source)) {
if (relation !== subtypeRelation && isPartialMappedType(target) && isEmptyObjectType(source)) {
return Ternary.True;
}
if (isGenericMappedType(target)) {
@ -13159,11 +13301,8 @@ namespace ts {
}
function getVariances(type: GenericType): Variance[] {
if (!strictFunctionTypes) {
return emptyArray;
}
if (type === globalArrayType || type === globalReadonlyArrayType) {
// Arrays are known to be covariant, no need to spend time computing this (emptyArray implies covariance for all parameters)
// Arrays and tuples are known to be covariant, no need to spend time computing this (emptyArray implies covariance for all parameters)
if (!strictFunctionTypes || type === globalArrayType || type === globalReadonlyArrayType || type.objectFlags & ObjectFlags.Tuple) {
return emptyArray;
}
return getVariancesWorker(type.typeParameters, type, getMarkerTypeReference);
@ -13479,6 +13618,10 @@ namespace ts {
return isTupleType(type) || !!getPropertyOfType(type, "0" as __String);
}
function isArrayOrTupleLikeType(type: Type): boolean {
return isArrayLikeType(type) || isTupleLikeType(type);
}
function getTupleElementType(type: Type, index: number) {
const propType = getTypeOfPropertyOfType(type, "" + index as __String);
if (propType) {
@ -13575,8 +13718,8 @@ namespace ts {
// no flags for all other types (including non-falsy literal types).
function getFalsyFlags(type: Type): TypeFlags {
return type.flags & TypeFlags.Union ? getFalsyFlagsOfTypes((<UnionType>type).types) :
type.flags & TypeFlags.StringLiteral ? (<LiteralType>type).value === "" ? TypeFlags.StringLiteral : 0 :
type.flags & TypeFlags.NumberLiteral ? (<LiteralType>type).value === 0 ? TypeFlags.NumberLiteral : 0 :
type.flags & TypeFlags.StringLiteral ? (<StringLiteralType>type).value === "" ? TypeFlags.StringLiteral : 0 :
type.flags & TypeFlags.NumberLiteral ? (<NumberLiteralType>type).value === 0 ? TypeFlags.NumberLiteral : 0 :
type.flags & TypeFlags.BigIntLiteral ? isZeroBigInt(<BigIntLiteralType>type) ? TypeFlags.BigIntLiteral : 0 :
type.flags & TypeFlags.BooleanLiteral ? (type === falseType || type === regularFalseType) ? TypeFlags.BooleanLiteral : 0 :
type.flags & TypeFlags.PossiblyFalsy;
@ -13599,8 +13742,8 @@ namespace ts {
type === regularFalseType ||
type === falseType ||
type.flags & (TypeFlags.Void | TypeFlags.Undefined | TypeFlags.Null) ||
type.flags & TypeFlags.StringLiteral && (<LiteralType>type).value === "" ||
type.flags & TypeFlags.NumberLiteral && (<LiteralType>type).value === 0 ||
type.flags & TypeFlags.StringLiteral && (<StringLiteralType>type).value === "" ||
type.flags & TypeFlags.NumberLiteral && (<NumberLiteralType>type).value === 0 ||
type.flags & TypeFlags.BigIntLiteral && isZeroBigInt(<BigIntLiteralType>type) ? type :
neverType;
}
@ -14452,7 +14595,12 @@ namespace ts {
priority |= InferencePriority.MappedTypeConstraint;
inferFromTypes(getIndexType(source), constraintType);
priority = savePriority;
inferFromTypes(getUnionType(map(getPropertiesOfType(source), getTypeOfSymbol)), getTemplateTypeFromMappedType(<MappedType>target));
const valueTypes = compact([
getIndexTypeOfType(source, IndexKind.String),
getIndexTypeOfType(source, IndexKind.Number),
...map(getPropertiesOfType(source), getTypeOfSymbol)
]);
inferFromTypes(getUnionType(valueTypes), getTemplateTypeFromMappedType(<MappedType>target));
return true;
}
return false;
@ -14779,6 +14927,9 @@ namespace ts {
return symbol !== unknownSymbol ? (isConstraintPosition(node) ? "@" : "") + getSymbolId(symbol) : undefined;
case SyntaxKind.ThisKeyword:
return "0";
case SyntaxKind.NonNullExpression:
case SyntaxKind.ParenthesizedExpression:
return getFlowCacheKey((<NonNullExpression | ParenthesizedExpression>node).expression);
case SyntaxKind.PropertyAccessExpression:
case SyntaxKind.ElementAccessExpression:
const propName = getAccessedPropertyName(<AccessExpression>node);
@ -14791,6 +14942,11 @@ namespace ts {
}
function isMatchingReference(source: Node, target: Node): boolean {
switch (target.kind) {
case SyntaxKind.ParenthesizedExpression:
case SyntaxKind.NonNullExpression:
return isMatchingReference(source, (target as NonNullExpression | ParenthesizedExpression).expression);
}
switch (source.kind) {
case SyntaxKind.Identifier:
return target.kind === SyntaxKind.Identifier && getResolvedSymbol(<Identifier>source) === getResolvedSymbol(<Identifier>target) ||
@ -14800,6 +14956,9 @@ namespace ts {
return target.kind === SyntaxKind.ThisKeyword;
case SyntaxKind.SuperKeyword:
return target.kind === SyntaxKind.SuperKeyword;
case SyntaxKind.NonNullExpression:
case SyntaxKind.ParenthesizedExpression:
return isMatchingReference((source as NonNullExpression | ParenthesizedExpression).expression, target);
case SyntaxKind.PropertyAccessExpression:
case SyntaxKind.ElementAccessExpression:
return isAccessExpression(target) &&
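A minimal sketch of what the new ParenthesizedExpression and NonNullExpression cases enable (the variable is hypothetical): flow analysis now matches a reference through parentheses and non-null assertions, so a guard written against (x!) narrows x itself.

// Sketch only: the typeof guard's operand is '(x!)' rather than 'x', but 'x' is still narrowed.
declare const x: string | number | undefined;
if (typeof (x!) === "string") {
    x.toUpperCase(); // 'x' has type 'string' here
}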
@ -14974,7 +15133,7 @@ namespace ts {
return strictNullChecks ? TypeFacts.StringStrictFacts : TypeFacts.StringFacts;
}
if (flags & TypeFlags.StringLiteral) {
const isEmpty = (<LiteralType>type).value === "";
const isEmpty = (<StringLiteralType>type).value === "";
return strictNullChecks ?
isEmpty ? TypeFacts.EmptyStringStrictFacts : TypeFacts.NonEmptyStringStrictFacts :
isEmpty ? TypeFacts.EmptyStringFacts : TypeFacts.NonEmptyStringFacts;
@ -14983,7 +15142,7 @@ namespace ts {
return strictNullChecks ? TypeFacts.NumberStrictFacts : TypeFacts.NumberFacts;
}
if (flags & TypeFlags.NumberLiteral) {
const isZero = (<LiteralType>type).value === 0;
const isZero = (<NumberLiteralType>type).value === 0;
return strictNullChecks ?
isZero ? TypeFacts.ZeroNumberStrictFacts : TypeFacts.NonZeroNumberStrictFacts :
isZero ? TypeFacts.ZeroNumberFacts : TypeFacts.NonZeroNumberFacts;
@ -15046,7 +15205,9 @@ namespace ts {
}
function getTypeOfDestructuredProperty(type: Type, name: PropertyName) {
const text = getTextOfPropertyName(name);
const nameType = getLiteralTypeFromPropertyName(name);
if (!isTypeUsableAsPropertyName(nameType)) return errorType;
const text = getPropertyNameFromType(nameType);
return getConstraintForLocation(getTypeOfPropertyOfType(type, text), name) ||
isNumericLiteralName(text) && getIndexTypeOfType(type, IndexKind.Number) ||
getIndexTypeOfType(type, IndexKind.String) ||
@ -15654,9 +15815,6 @@ namespace ts {
function getTypeAtSwitchClause(flow: FlowSwitchClause): FlowType {
const expr = flow.switchStatement.expression;
if (containsMatchingReferenceDiscriminant(reference, expr)) {
return declaredType;
}
const flowType = getTypeAtFlowNode(flow.antecedent);
let type = getTypeFromFlowType(flowType);
if (isMatchingReference(reference, expr)) {
@ -15671,6 +15829,9 @@ namespace ts {
else if (expr.kind === SyntaxKind.TypeOfExpression && isMatchingReference(reference, (expr as TypeOfExpression).expression)) {
type = narrowBySwitchOnTypeOf(type, flow.switchStatement, flow.clauseStart, flow.clauseEnd);
}
else if (containsMatchingReferenceDiscriminant(reference, expr)) {
type = declaredType;
}
return createFlowType(type, isIncomplete(flowType));
}
@ -16750,7 +16911,7 @@ namespace ts {
else if (isInJS &&
(container.kind === SyntaxKind.FunctionExpression || container.kind === SyntaxKind.FunctionDeclaration) &&
getJSDocClassTag(container)) {
const classType = getJSClassType(container.symbol);
const classType = getJSClassType(getMergedSymbol(container.symbol));
if (classType) {
return getFlowTypeOfReference(node, classType);
}
@ -17178,9 +17339,10 @@ namespace ts {
const parentDeclaration = declaration.parent.parent;
const name = declaration.propertyName || declaration.name;
const parentType = getContextualTypeForVariableLikeDeclaration(parentDeclaration);
if (parentType && !isBindingPattern(name)) {
const text = getTextOfPropertyName(name);
if (text !== undefined) {
if (parentType && !isBindingPattern(name) && !isComputedNonLiteralName(name)) {
const nameType = getLiteralTypeFromPropertyName(name);
if (isTypeUsableAsPropertyName(nameType)) {
const text = getPropertyNameFromType(nameType);
return getTypeOfPropertyOfType(parentType, text);
}
}
@ -17480,11 +17642,23 @@ namespace ts {
return node === conditional.whenTrue || node === conditional.whenFalse ? getContextualType(conditional) : undefined;
}
function getContextualTypeForChildJsxExpression(node: JsxElement) {
function getContextualTypeForChildJsxExpression(node: JsxElement, child: JsxChild) {
const attributesType = getApparentTypeOfContextualType(node.openingElement.tagName);
// JSX expression is in the children of a JSX Element; we will look for a "children" attribute (we get the name from JSX.ElementAttributesProperty)
const jsxChildrenPropertyName = getJsxElementChildrenPropertyName(getJsxNamespaceAt(node));
return attributesType && !isTypeAny(attributesType) && jsxChildrenPropertyName && jsxChildrenPropertyName !== "" ? getTypeOfPropertyOfContextualType(attributesType, jsxChildrenPropertyName) : undefined;
if (!(attributesType && !isTypeAny(attributesType) && jsxChildrenPropertyName && jsxChildrenPropertyName !== "")) {
return undefined;
}
const childIndex = node.children.indexOf(child);
const childFieldType = getTypeOfPropertyOfContextualType(attributesType, jsxChildrenPropertyName);
return childFieldType && mapType(childFieldType, t => {
if (isArrayLikeType(t)) {
return getIndexedAccessType(t, getLiteralType(childIndex));
}
else {
return t;
}
}, /*noReductions*/ true);
}
function getContextualTypeForJsxExpression(node: JsxExpression): Type | undefined {
@ -17492,7 +17666,7 @@ namespace ts {
return isJsxAttributeLike(exprParent)
? getContextualType(node)
: isJsxElement(exprParent)
? getContextualTypeForChildJsxExpression(exprParent)
? getContextualTypeForChildJsxExpression(exprParent, node)
: undefined;
}
@ -18124,10 +18298,9 @@ namespace ts {
}
}
typeFlags |= type.flags;
const nameType = computedNameType && computedNameType.flags & TypeFlags.StringOrNumberLiteralOrUnique ?
<LiteralType | UniqueESSymbolType>computedNameType : undefined;
const nameType = computedNameType && isTypeUsableAsPropertyName(computedNameType) ? computedNameType : undefined;
const prop = nameType ?
createSymbol(SymbolFlags.Property | member.flags, getLateBoundNameFromType(nameType), checkFlags | CheckFlags.Late) :
createSymbol(SymbolFlags.Property | member.flags, getPropertyNameFromType(nameType), checkFlags | CheckFlags.Late) :
createSymbol(SymbolFlags.Property | member.flags, member.escapedName, checkFlags);
if (nameType) {
prop.nameType = nameType;
@ -18416,6 +18589,10 @@ namespace ts {
childrenPropSymbol.type = childrenTypes.length === 1 ?
childrenTypes[0] :
(getArrayLiteralTupleTypeIfApplicable(childrenTypes, childrenContextualType, /*hasRestElement*/ false) || createArrayType(getUnionType(childrenTypes)));
// Fake up a property declaration for the children
childrenPropSymbol.valueDeclaration = createPropertySignature(/*modifiers*/ undefined, unescapeLeadingUnderscores(jsxChildrenPropertyName), /*questionToken*/ undefined, /*type*/ undefined, /*initializer*/ undefined);
childrenPropSymbol.valueDeclaration.parent = attributes;
childrenPropSymbol.valueDeclaration.symbol = childrenPropSymbol;
const childPropMap = createSymbolTable();
childPropMap.set(jsxChildrenPropertyName, childrenPropSymbol);
spread = getSpreadType(spread, createAnonymousType(attributes.symbol, childPropMap, emptyArray, emptyArray, /*stringIndexInfo*/ undefined, /*numberIndexInfo*/ undefined),
@ -20278,6 +20455,12 @@ namespace ts {
if (inferenceContext) {
const typeArgumentTypes = inferTypeArguments(node, candidate, args, excludeArgument, inferenceContext);
checkCandidate = getSignatureInstantiation(candidate, typeArgumentTypes, isInJSFile(candidate.declaration));
// If the original signature has a generic rest type, instantiation may produce a
// signature with different arity and we need to perform another arity check.
if (getNonArrayRestType(candidate) && !hasCorrectArity(node, args, checkCandidate, signatureHelpTrailingComma)) {
candidateForArgumentArityError = checkCandidate;
continue;
}
}
if (!checkApplicableSignature(node, args, checkCandidate, relation, excludeArgument, /*reportErrors*/ false)) {
// Give preference to error candidates that have no rest parameters (as they are more specific)
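A hedged example of the re-check (the helper signatures are made up): with a generic rest parameter, the instantiated signature can have a larger fixed arity than the call supplied, and that mismatch is now reported instead of slipping past the earlier, pre-instantiation arity check.

// Sketch only: after inference, A = [number, number], so the instantiated 'call'
// expects three arguments; the re-check flags the missing one.
declare function call<A extends any[], R>(f: (...args: A) => R, ...args: A): R;
declare function add(x: number, y: number): number;
call(add, 1); // arity error: the second value for 'add' was never provided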
@ -20907,7 +21090,7 @@ namespace ts {
// If the symbol of the node has members, treat it like a constructor.
const symbol = getSymbolOfNode(func);
return !!symbol && symbol.members !== undefined;
return !!symbol && (symbol.members !== undefined || symbol.exports !== undefined && symbol.exports.get("prototype" as __String) !== undefined);
}
return false;
}
@ -20926,10 +21109,6 @@ namespace ts {
inferred = getInferredClassType(symbol);
}
const assigned = getAssignedClassType(symbol);
const valueType = getTypeOfSymbol(symbol);
if (valueType.symbol && !isInferredClassType(valueType) && isJSConstructor(valueType.symbol.valueDeclaration)) {
inferred = getInferredClassType(valueType.symbol);
}
return assigned && inferred ?
getIntersectionType([inferred, assigned]) :
assigned || inferred;
@ -20969,12 +21148,6 @@ namespace ts {
return links.inferredClassType;
}
function isInferredClassType(type: Type) {
return type.symbol
&& getObjectFlags(type) & ObjectFlags.Anonymous
&& getSymbolLinks(type.symbol).inferredClassType === type;
}
/**
* Syntactically and semantically checks a call or new expression.
* @param node The call/new expression to be checked.
@ -20996,21 +21169,10 @@ namespace ts {
declaration.kind !== SyntaxKind.Constructor &&
declaration.kind !== SyntaxKind.ConstructSignature &&
declaration.kind !== SyntaxKind.ConstructorType &&
!isJSDocConstructSignature(declaration)) {
!isJSDocConstructSignature(declaration) &&
!isJSConstructor(declaration)) {
// When resolved signature is a call signature (and not a construct signature) the result type is any, unless
// the declaring function had members created through 'x.prototype.y = expr' or 'this.y = expr' psuedodeclarations
// in a JS file
// Note:JS inferred classes might come from a variable declaration instead of a function declaration.
// In this case, using getResolvedSymbol directly is required to avoid losing the members from the declaration.
let funcSymbol = checkExpression(node.expression).symbol;
if (!funcSymbol && node.expression.kind === SyntaxKind.Identifier) {
funcSymbol = getResolvedSymbol(node.expression as Identifier);
}
const type = funcSymbol && getJSClassType(funcSymbol);
if (type) {
return signature.target ? instantiateType(type, signature.mapper) : type;
}
// When resolved signature is a call signature (and not a construct signature) the result type is any
if (noImplicitAny) {
error(node, Diagnostics.new_expression_whose_target_lacks_a_construct_signature_implicitly_has_an_any_type);
}
@ -22193,7 +22355,7 @@ namespace ts {
if (!(isTypeComparableTo(leftType, stringType) || isTypeAssignableToKind(leftType, TypeFlags.NumberLike | TypeFlags.ESSymbolLike))) {
error(left, Diagnostics.The_left_hand_side_of_an_in_expression_must_be_of_type_any_string_number_or_symbol);
}
if (!isTypeAssignableToKind(rightType, TypeFlags.NonPrimitive | TypeFlags.InstantiableNonPrimitive)) {
if (!allTypesAssignableToKind(rightType, TypeFlags.NonPrimitive | TypeFlags.InstantiableNonPrimitive)) {
error(right, Diagnostics.The_right_hand_side_of_an_in_expression_must_be_of_type_any_an_object_type_or_a_type_parameter);
}
return booleanType;
@ -22214,15 +22376,15 @@ namespace ts {
function checkObjectLiteralDestructuringPropertyAssignment(objectLiteralType: Type, property: ObjectLiteralElementLike, allProperties?: NodeArray<ObjectLiteralElementLike>, rightIsThis = false) {
if (property.kind === SyntaxKind.PropertyAssignment || property.kind === SyntaxKind.ShorthandPropertyAssignment) {
const name = property.name;
const text = getTextOfPropertyName(name);
if (text) {
const exprType = getLiteralTypeFromPropertyName(name);
if (isTypeUsableAsPropertyName(exprType)) {
const text = getPropertyNameFromType(exprType);
const prop = getPropertyOfType(objectLiteralType, text);
if (prop) {
markPropertyAsReferenced(prop, property, rightIsThis);
checkPropertyAccessibility(property, /*isSuper*/ false, objectLiteralType, prop);
}
}
const exprType = getLiteralTypeFromPropertyName(name);
const elementType = getIndexedAccessType(objectLiteralType, exprType, name);
const type = getFlowTypeOfDestructuring(property, elementType);
return checkDestructuringAssignment(property.kind === SyntaxKind.ShorthandPropertyAssignment ? property : property.initializer, type);
@ -22953,7 +23115,7 @@ namespace ts {
return instantiateTypeWithSingleGenericCallSignature(node, uninstantiatedType, checkMode);
}
function instantiateTypeWithSingleGenericCallSignature(node: Expression | MethodDeclaration, type: Type, checkMode?: CheckMode) {
function instantiateTypeWithSingleGenericCallSignature(node: Expression | MethodDeclaration | QualifiedName, type: Type, checkMode?: CheckMode) {
if (checkMode === CheckMode.Inferential) {
const signature = getSingleCallSignature(type);
if (signature && signature.typeParameters) {
@ -23023,15 +23185,10 @@ namespace ts {
// have the wildcard function type; this form of type check is used during overload resolution to exclude
// contextually typed function and arrow expressions in the initial phase.
function checkExpression(node: Expression | QualifiedName, checkMode?: CheckMode, forceTuple?: boolean): Type {
let type: Type;
if (node.kind === SyntaxKind.QualifiedName) {
type = checkQualifiedName(<QualifiedName>node);
}
else {
const uninstantiatedType = checkExpressionWorker(node, checkMode, forceTuple);
type = instantiateTypeWithSingleGenericCallSignature(node, uninstantiatedType, checkMode);
}
const saveCurrentNode = currentNode;
currentNode = node;
const uninstantiatedType = checkExpressionWorker(node, checkMode, forceTuple);
const type = instantiateTypeWithSingleGenericCallSignature(node, uninstantiatedType, checkMode);
if (isConstEnumObjectType(type)) {
// enum object type for const enums are only permitted in:
// - 'left' in property access
@ -23047,6 +23204,7 @@ namespace ts {
error(node, Diagnostics.const_enums_can_only_be_used_in_property_or_index_access_expressions_or_the_right_hand_side_of_an_import_declaration_or_export_assignment_or_type_query);
}
}
currentNode = saveCurrentNode;
return type;
}
@ -23058,7 +23216,7 @@ namespace ts {
return checkExpression(node.expression, checkMode);
}
function checkExpressionWorker(node: Expression, checkMode: CheckMode | undefined, forceTuple?: boolean): Type {
function checkExpressionWorker(node: Expression | QualifiedName, checkMode: CheckMode | undefined, forceTuple?: boolean): Type {
switch (node.kind) {
case SyntaxKind.Identifier:
return checkIdentifier(<Identifier>node);
@ -23091,6 +23249,8 @@ namespace ts {
return checkObjectLiteral(<ObjectLiteralExpression>node, checkMode);
case SyntaxKind.PropertyAccessExpression:
return checkPropertyAccessExpression(<PropertyAccessExpression>node);
case SyntaxKind.QualifiedName:
return checkQualifiedName(<QualifiedName>node);
case SyntaxKind.ElementAccessExpression:
return checkIndexedAccess(<ElementAccessExpression>node);
case SyntaxKind.CallExpression:
@ -24806,9 +24966,17 @@ namespace ts {
return;
}
if (!containsArgumentsReference(decl)) {
error(node.name,
Diagnostics.JSDoc_param_tag_has_name_0_but_there_is_no_parameter_with_that_name,
idText(node.name.kind === SyntaxKind.QualifiedName ? node.name.right : node.name));
if (isQualifiedName(node.name)) {
error(node.name,
Diagnostics.Qualified_name_0_is_not_allowed_without_a_leading_param_object_1,
entityNameToString(node.name),
entityNameToString(node.name.left));
}
else {
error(node.name,
Diagnostics.JSDoc_param_tag_has_name_0_but_there_is_no_parameter_with_that_name,
idText(node.name));
}
}
else if (findLast(getJSDocTags(decl), isJSDocParameterTag) === node &&
node.typeExpression && node.typeExpression.type &&
@ -25537,13 +25705,14 @@ namespace ts {
const parent = node.parent.parent;
const parentType = getTypeForBindingElementParent(parent);
const name = node.propertyName || node.name;
if (!isBindingPattern(name)) {
const nameText = getTextOfPropertyName(name);
if (nameText) {
const property = getPropertyOfType(parentType!, nameText); // TODO: GH#18217
if (!isBindingPattern(name) && parentType) {
const exprType = getLiteralTypeFromPropertyName(name);
if (isTypeUsableAsPropertyName(exprType)) {
const nameText = getPropertyNameFromType(exprType);
const property = getPropertyOfType(parentType, nameText);
if (property) {
markPropertyAsReferenced(property, /*nodeForCheckWriteOnly*/ undefined, /*isThisAccess*/ false); // A destructuring is never a write-only reference.
checkPropertyAccessibility(parent, !!parent.initializer && parent.initializer.kind === SyntaxKind.SuperKeyword, parentType!, property);
checkPropertyAccessibility(parent, !!parent.initializer && parent.initializer.kind === SyntaxKind.SuperKeyword, parentType, property);
}
}
}
@ -25946,7 +26115,7 @@ namespace ts {
? downlevelIteration
? Diagnostics.Type_0_is_not_an_array_type_or_does_not_have_a_Symbol_iterator_method_that_returns_an_iterator
: isIterable
? Diagnostics.Type_0_is_not_an_array_type_Use_compiler_option_downlevelIteration_to_allow_iterating_of_iterators
? Diagnostics.Type_0_is_not_an_array_type_or_a_string_type_Use_compiler_option_downlevelIteration_to_allow_iterating_of_iterators
: Diagnostics.Type_0_is_not_an_array_type
: downlevelIteration
? Diagnostics.Type_0_is_not_an_array_type_or_a_string_type_or_does_not_have_a_Symbol_iterator_method_that_returns_an_iterator
@ -27763,10 +27932,15 @@ namespace ts {
}
function checkSourceElement(node: Node | undefined): void {
if (!node) {
return;
if (node) {
const saveCurrentNode = currentNode;
currentNode = node;
checkSourceElementWorker(node);
currentNode = saveCurrentNode;
}
}
function checkSourceElementWorker(node: Node): void {
if (isInJSFile(node)) {
forEach((node as JSDocContainer).jsDoc, ({ tags }) => forEach(tags, checkSourceElement));
}
@ -28024,32 +28198,36 @@ namespace ts {
function checkDeferredNodes(context: SourceFile) {
const links = getNodeLinks(context);
if (!links.deferredNodes) {
return;
if (links.deferredNodes) {
links.deferredNodes.forEach(checkDeferredNode);
}
links.deferredNodes.forEach(node => {
switch (node.kind) {
case SyntaxKind.FunctionExpression:
case SyntaxKind.ArrowFunction:
case SyntaxKind.MethodDeclaration:
case SyntaxKind.MethodSignature:
checkFunctionExpressionOrObjectLiteralMethodDeferred(<FunctionExpression>node);
break;
case SyntaxKind.GetAccessor:
case SyntaxKind.SetAccessor:
checkAccessorDeclaration(<AccessorDeclaration>node);
break;
case SyntaxKind.ClassExpression:
checkClassExpressionDeferred(<ClassExpression>node);
break;
case SyntaxKind.JsxSelfClosingElement:
checkJsxSelfClosingElementDeferred(<JsxSelfClosingElement>node);
break;
case SyntaxKind.JsxElement:
checkJsxElementDeferred(<JsxElement>node);
break;
}
});
}
function checkDeferredNode(node: Node) {
const saveCurrentNode = currentNode;
currentNode = node;
switch (node.kind) {
case SyntaxKind.FunctionExpression:
case SyntaxKind.ArrowFunction:
case SyntaxKind.MethodDeclaration:
case SyntaxKind.MethodSignature:
checkFunctionExpressionOrObjectLiteralMethodDeferred(<FunctionExpression>node);
break;
case SyntaxKind.GetAccessor:
case SyntaxKind.SetAccessor:
checkAccessorDeclaration(<AccessorDeclaration>node);
break;
case SyntaxKind.ClassExpression:
checkClassExpressionDeferred(<ClassExpression>node);
break;
case SyntaxKind.JsxSelfClosingElement:
checkJsxSelfClosingElementDeferred(<JsxSelfClosingElement>node);
break;
case SyntaxKind.JsxElement:
checkJsxElementDeferred(<JsxElement>node);
break;
}
currentNode = saveCurrentNode;
}
function checkSourceFile(node: SourceFile) {
@ -30017,10 +30195,6 @@ namespace ts {
checkGrammarForDisallowedTrailingComma(parameters, Diagnostics.A_rest_parameter_or_binding_pattern_may_not_have_a_trailing_comma);
}
if (isBindingPattern(parameter.name)) {
return grammarErrorOnNode(parameter.name, Diagnostics.A_rest_element_cannot_contain_a_binding_pattern);
}
if (parameter.questionToken) {
return grammarErrorOnNode(parameter.questionToken, Diagnostics.A_rest_parameter_cannot_be_optional);
}
@ -31086,7 +31260,7 @@ namespace ts {
if (nodeArguments.length !== 1) {
return grammarErrorOnNode(node, Diagnostics.Dynamic_import_must_have_one_specifier_as_an_argument);
}
checkGrammarForDisallowedTrailingComma(nodeArguments);
// see: parseArgumentOrArrayLiteralElement...we use this function, which parses the arguments of a CallExpression, to parse the specifier for dynamic import.
// parseArgumentOrArrayLiteralElement allows a spread element in an argument list, which is not allowed as a specifier in dynamic import.
if (isSpreadElement(nodeArguments[0])) {

View File

@ -1,7 +1,7 @@
namespace ts {
// WARNING: The script `configureNightly.ts` uses a regexp to parse out these values.
// If changing the text in this section, be sure to test `configureNightly` too.
export const versionMajorMinor = "3.3";
export const versionMajorMinor = "3.4";
/** The version of the TypeScript compiler release */
export const version = `${versionMajorMinor}.0-dev`;
}
@ -884,8 +884,11 @@ namespace ts {
/**
* Compacts an array, removing any falsey elements.
*/
export function compact<T>(array: T[]): T[];
export function compact<T>(array: ReadonlyArray<T>): ReadonlyArray<T>;
export function compact<T>(array: (T | undefined | null | false | 0 | "")[]): T[];
export function compact<T>(array: ReadonlyArray<T | undefined | null | false | 0 | "">): ReadonlyArray<T>;
// TSLint thinks these can be combined with the above - they cannot; they'd produce higher-priority inferences and prevent the falsey types from being stripped
export function compact<T>(array: T[]): T[]; // tslint:disable-line unified-signatures
export function compact<T>(array: ReadonlyArray<T>): ReadonlyArray<T>; // tslint:disable-line unified-signatures
export function compact<T>(array: T[]): T[] {
let result: T[] | undefined;
if (array) {
@ -1387,6 +1390,18 @@ namespace ts {
return result;
}
export function copyProperties<T1 extends T2, T2>(first: T1, second: T2) {
for (const id in second) {
if (hasOwnProperty.call(second, id)) {
(first as any)[id] = second[id];
}
}
}
export function maybeBind<T, A extends any[], R>(obj: T, fn: ((this: T, ...args: A) => R) | undefined): ((...args: A) => R) | undefined {
return fn ? fn.bind(obj) : undefined;
}
export interface MultiMap<T> extends Map<T[]> {
/**
* Adds the value to an array of values associated with the key, and returns the array.

View File

@ -2064,10 +2064,6 @@
"category": "Error",
"code": 2567
},
"Type '{0}' is not an array type. Use compiler option '--downlevelIteration' to allow iterating of iterators.": {
"category": "Error",
"code": 2568
},
"Type '{0}' is not an array type or a string type. Use compiler option '--downlevelIteration' to allow iterating of iterators.": {
"category": "Error",
"code": 2569
@ -2140,6 +2136,14 @@
"category": "Error",
"code": 2588
},
"Type instantiation is excessively deep and possibly infinite.": {
"category": "Error",
"code": 2589
},
"Expression produces a union type that is too complex to represent.": {
"category": "Error",
"code": 2590
},
"JSX element attributes type '{0}' may not be a union type.": {
"category": "Error",
"code": 2600
@ -2445,7 +2449,7 @@
"category": "Error",
"code": 2717
},
"Duplicate declaration '{0}'.": {
"Duplicate property '{0}'.": {
"category": "Error",
"code": 2718
},
@ -2505,6 +2509,10 @@
"category": "Error",
"code": 2732
},
"Property '{0}' was also declared here.": {
"category": "Error",
"code": 2733
},
"It is highly likely that you are missing a semicolon.": {
"category": "Error",
"code": 2734
@ -2549,6 +2557,18 @@
"category": "Error",
"code": 2744
},
"This JSX tag's '{0}' prop expects type '{1}' which requires multiple children, but only a single child was provided.": {
"category": "Error",
"code": 2745
},
"This JSX tag's '{0}' prop expects a single child of type '{1}', but multiple children were provided.": {
"category": "Error",
"code": 2746
},
"'{0}' components don't accept text as child elements. Text in JSX has the type 'string', but the expected type of '{1}' is '{2}'.": {
"category": "Error",
"code": 2747
},
"Import declaration '{0}' is using private name '{1}'.": {
"category": "Error",
@ -3957,6 +3977,10 @@
"category": "Error",
"code": 6370
},
"Updating unchanged output timestamps of project '{0}'...": {
"category": "Message",
"code": 6371
},
"The expected type comes from property '{0}' which is declared here on type '{1}'": {
"category": "Message",
@ -4274,6 +4298,10 @@
"category": "Error",
"code": 8031
},
"Qualified name '{0}' is not allowed without a leading '@param {object} {1}'.": {
"category": "Error",
"code": 8032
},
"Only identifiers/qualified-names with optional type arguments are currently supported in a class 'extends' clause.": {
"category": "Error",
"code": 9002
@ -4819,5 +4847,9 @@
"Add names to all parameters without names": {
"category": "Message",
"code": 95073
},
"Enable the 'experimentalDecorators' option in your configuration file": {
"category": "Message",
"code": 95074
}
}

View File

@ -38,17 +38,22 @@ namespace ts {
}
}
/*@internal*/
export function getOutputPathsForBundle(options: CompilerOptions, forceDtsPaths: boolean): EmitFileNames {
const outPath = options.outFile || options.out!;
const jsFilePath = options.emitDeclarationOnly ? undefined : outPath;
const sourceMapFilePath = jsFilePath && getSourceMapFilePath(jsFilePath, options);
const declarationFilePath = (forceDtsPaths || getEmitDeclarations(options)) ? removeFileExtension(outPath) + Extension.Dts : undefined;
const declarationMapPath = declarationFilePath && getAreDeclarationMapsEnabled(options) ? declarationFilePath + ".map" : undefined;
const bundleInfoPath = options.references && jsFilePath ? (removeFileExtension(jsFilePath) + infoExtension) : undefined;
return { jsFilePath, sourceMapFilePath, declarationFilePath, declarationMapPath, bundleInfoPath };
}
/*@internal*/
export function getOutputPathsFor(sourceFile: SourceFile | Bundle, host: EmitHost, forceDtsPaths: boolean): EmitFileNames {
const options = host.getCompilerOptions();
if (sourceFile.kind === SyntaxKind.Bundle) {
const outPath = options.outFile || options.out!;
const jsFilePath = options.emitDeclarationOnly ? undefined : outPath;
const sourceMapFilePath = jsFilePath && getSourceMapFilePath(jsFilePath, options);
const declarationFilePath = (forceDtsPaths || getEmitDeclarations(options)) ? removeFileExtension(outPath) + Extension.Dts : undefined;
const declarationMapPath = declarationFilePath && getAreDeclarationMapsEnabled(options) ? declarationFilePath + ".map" : undefined;
const bundleInfoPath = options.references && jsFilePath ? (removeFileExtension(jsFilePath) + infoExtension) : undefined;
return { jsFilePath, sourceMapFilePath, declarationFilePath, declarationMapPath, bundleInfoPath };
return getOutputPathsForBundle(options, forceDtsPaths);
}
else {
const ownOutputFilePath = getOwnEmitOutputFilePath(sourceFile.fileName, host, getOutputExtension(sourceFile, options));
@ -2561,6 +2566,7 @@ namespace ts {
function emitJsxSelfClosingElement(node: JsxSelfClosingElement) {
writePunctuation("<");
emitJsxTagName(node.tagName);
emitTypeArguments(node, node.typeArguments);
writeSpace();
emit(node.attributes);
writePunctuation("/>");
@ -2577,6 +2583,7 @@ namespace ts {
if (isJsxOpeningElement(node)) {
emitJsxTagName(node.tagName);
emitTypeArguments(node, node.typeArguments);
if (node.attributes.properties && node.attributes.properties.length > 0) {
writeSpace();
}
@ -4370,10 +4377,10 @@ namespace ts {
}
/**
* Skips trivia such as comments and white-space that can optionally overriden by the source map source
* Skips trivia such as comments and white-space that can be optionally overridden by the source-map source
*/
function skipSourceTrivia(source: SourceMapSource, pos: number): number {
return source.skipTrivia ? source.skipTrivia(pos) : skipTrivia(sourceMapSource.text, pos);
return source.skipTrivia ? source.skipTrivia(pos) : skipTrivia(source.text, pos);
}
/**
@ -4389,7 +4396,7 @@ namespace ts {
return;
}
const { line: sourceLine, character: sourceCharacter } = getLineAndCharacterOfPosition(currentSourceFile!, pos);
const { line: sourceLine, character: sourceCharacter } = getLineAndCharacterOfPosition(sourceMapSource, pos);
sourceMapGenerator!.addMapping(
writer.getLine(),
writer.getColumn(),

View File

@ -2630,41 +2630,88 @@ namespace ts {
}
export function createUnparsedSourceFile(text: string): UnparsedSource;
export function createUnparsedSourceFile(inputFile: InputFiles, type: "js" | "dts"): UnparsedSource;
export function createUnparsedSourceFile(text: string, mapPath: string | undefined, map: string | undefined): UnparsedSource;
export function createUnparsedSourceFile(text: string, mapPath?: string, map?: string): UnparsedSource {
export function createUnparsedSourceFile(textOrInputFiles: string | InputFiles, mapPathOrType?: string, map?: string): UnparsedSource {
const node = <UnparsedSource>createNode(SyntaxKind.UnparsedSource);
node.text = text;
node.sourceMapPath = mapPath;
node.sourceMapText = map;
if (!isString(textOrInputFiles)) {
Debug.assert(mapPathOrType === "js" || mapPathOrType === "dts");
node.fileName = mapPathOrType === "js" ? textOrInputFiles.javascriptPath : textOrInputFiles.declarationPath;
node.sourceMapPath = mapPathOrType === "js" ? textOrInputFiles.javascriptMapPath : textOrInputFiles.declarationMapPath;
Object.defineProperties(node, {
text: { get() { return mapPathOrType === "js" ? textOrInputFiles.javascriptText : textOrInputFiles.declarationText; } },
sourceMapText: { get() { return mapPathOrType === "js" ? textOrInputFiles.javascriptMapText : textOrInputFiles.declarationMapText; } },
});
}
else {
node.text = textOrInputFiles;
node.sourceMapPath = mapPathOrType;
node.sourceMapText = map;
}
return node;
}
export function createInputFiles(
javascript: string,
declaration: string
javascriptText: string,
declarationText: string
): InputFiles;
export function createInputFiles(
javascript: string,
declaration: string,
readFileText: (path: string) => string | undefined,
javascriptPath: string,
javascriptMapPath: string | undefined,
declarationPath: string,
declarationMapPath: string | undefined,
): InputFiles;
export function createInputFiles(
javascriptText: string,
declarationText: string,
javascriptMapPath: string | undefined,
javascriptMapText: string | undefined,
declarationMapPath: string | undefined,
declarationMapText: string | undefined
): InputFiles;
export function createInputFiles(
javascript: string,
declaration: string,
javascriptTextOrReadFileText: string | ((path: string) => string | undefined),
declarationTextOrJavascriptPath: string,
javascriptMapPath?: string,
javascriptMapText?: string,
javascriptMapTextOrDeclarationPath?: string,
declarationMapPath?: string,
declarationMapText?: string
): InputFiles {
const node = <InputFiles>createNode(SyntaxKind.InputFiles);
node.javascriptText = javascript;
node.javascriptMapPath = javascriptMapPath;
node.javascriptMapText = javascriptMapText;
node.declarationText = declaration;
node.declarationMapPath = declarationMapPath;
node.declarationMapText = declarationMapText;
if (!isString(javascriptTextOrReadFileText)) {
const cache = createMap<string | false>();
const textGetter = (path: string | undefined) => {
if (path === undefined) return undefined;
let value = cache.get(path);
if (value === undefined) {
value = javascriptTextOrReadFileText(path);
cache.set(path, value !== undefined ? value : false);
}
return value !== false ? value as string : undefined;
};
const definedTextGetter = (path: string) => {
const result = textGetter(path);
return result !== undefined ? result : `/* Input file ${path} was missing */\r\n`;
};
node.javascriptPath = declarationTextOrJavascriptPath;
node.javascriptMapPath = javascriptMapPath;
node.declarationPath = Debug.assertDefined(javascriptMapTextOrDeclarationPath);
node.declarationMapPath = declarationMapPath;
Object.defineProperties(node, {
javascriptText: { get() { return definedTextGetter(declarationTextOrJavascriptPath); } },
javascriptMapText: { get() { return textGetter(javascriptMapPath); } }, // TODO:: if there is inline sourceMap in jsFile, use that
declarationText: { get() { return definedTextGetter(Debug.assertDefined(javascriptMapTextOrDeclarationPath)); } },
declarationMapText: { get() { return textGetter(declarationMapPath); } } // TODO:: if there is inline sourceMap in dtsFile, use that
});
}
else {
node.javascriptText = javascriptTextOrReadFileText;
node.javascriptMapPath = javascriptMapPath;
node.javascriptMapText = javascriptMapTextOrDeclarationPath;
node.declarationText = declarationTextOrJavascriptPath;
node.declarationMapPath = declarationMapPath;
node.declarationMapText = declarationMapText;
}
return node;
}
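A brief consumer-side usage sketch of the lazy overload (the paths are hypothetical): instead of passing texts up front, a read callback plus output paths can be supplied, and each file is read on first access and cached.

import * as ts from "typescript";
// Sketch only: nothing is read until a text property is accessed; results
// (including missing files) are cached per path.
const files = ts.createInputFiles(
    path => ts.sys.readFile(path),  // readFileText
    "out/app.js",                   // javascriptPath
    "out/app.js.map",               // javascriptMapPath
    "out/app.d.ts",                 // declarationPath
    "out/app.d.ts.map"              // declarationMapPath
);
const dts = files.declarationText;  // triggers (and caches) the read of out/app.d.ts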

View File

@ -1093,6 +1093,10 @@ namespace ts {
return currentToken = scanner.reScanTemplateToken();
}
function reScanLessThanToken(): SyntaxKind {
return currentToken = scanner.reScanLessThanToken();
}
function scanJsxIdentifier(): SyntaxKind {
return currentToken = scanner.scanJsxIdentifier();
}
@ -2276,7 +2280,7 @@ namespace ts {
function parseTypeReference(): TypeReferenceNode {
const node = <TypeReferenceNode>createNode(SyntaxKind.TypeReference);
node.typeName = parseEntityName(/*allowReservedWords*/ true, Diagnostics.Type_expected);
if (!scanner.hasPrecedingLineBreak() && token() === SyntaxKind.LessThanToken) {
if (!scanner.hasPrecedingLineBreak() && reScanLessThanToken() === SyntaxKind.LessThanToken) {
node.typeArguments = parseBracketedList(ParsingContext.TypeArguments, parseType, SyntaxKind.LessThanToken, SyntaxKind.GreaterThanToken);
}
return finishNode(node);
@ -4525,7 +4529,8 @@ namespace ts {
function parseCallExpressionRest(expression: LeftHandSideExpression): LeftHandSideExpression {
while (true) {
expression = parseMemberExpressionRest(expression);
if (token() === SyntaxKind.LessThanToken) {
// handle 'foo<<T>()'
if (token() === SyntaxKind.LessThanToken || token() === SyntaxKind.LessThanLessThanToken) {
// See if this is the start of a generic invocation. If so, consume it and
// keep checking for postfix expressions. Otherwise, it's just a '<' that's
// part of an arithmetic expression. Break out so we consume it higher in the
@ -4567,9 +4572,10 @@ namespace ts {
}
function parseTypeArgumentsInExpression() {
if (!parseOptional(SyntaxKind.LessThanToken)) {
if (reScanLessThanToken() !== SyntaxKind.LessThanToken) {
return undefined;
}
nextToken();
const typeArguments = parseDelimitedList(ParsingContext.TypeArguments, parseType);
if (!parseExpected(SyntaxKind.GreaterThanToken)) {
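An illustrative sketch of what the '<<' rescan fixes (the function below is made up): when the first type argument itself starts with '<', such as a generic function type, the scanner used to produce a single '<<' token and the construct failed to parse.

// Sketch only: 'foo<<T>(x: T) => T>()' now parses because '<<' is re-scanned as
// two '<' tokens when a type-argument list is attempted.
declare function foo<T>(): T;
const makeIdentity = foo<<T>(x: T) => T>(); // the type argument is the function type '<T>(x: T) => T'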
@ -7776,17 +7782,18 @@ namespace ts {
const libReferenceDirectives = context.libReferenceDirectives;
forEach(toArray(entryOrList), (arg: PragmaPseudoMap["reference"]) => {
// TODO: GH#18217
const { types, lib, path } = arg!.arguments;
if (arg!.arguments["no-default-lib"]) {
context.hasNoDefaultLib = true;
}
else if (arg!.arguments.types) {
typeReferenceDirectives.push({ pos: arg!.arguments.types!.pos, end: arg!.arguments.types!.end, fileName: arg!.arguments.types!.value });
else if (types) {
typeReferenceDirectives.push({ pos: types.pos, end: types.end, fileName: types.value });
}
else if (arg!.arguments.lib) {
libReferenceDirectives.push({ pos: arg!.arguments.lib!.pos, end: arg!.arguments.lib!.end, fileName: arg!.arguments.lib!.value });
else if (lib) {
libReferenceDirectives.push({ pos: lib.pos, end: lib.end, fileName: lib.value });
}
else if (arg!.arguments.path) {
referencedFiles.push({ pos: arg!.arguments.path!.pos, end: arg!.arguments.path!.end, fileName: arg!.arguments.path!.value });
else if (path) {
referencedFiles.push({ pos: path.pos, end: path.end, fileName: path.value });
}
else {
reportDiagnostic(arg!.range.pos, arg!.range.end - arg!.range.pos, Diagnostics.Invalid_reference_directive_syntax);

View File

@ -69,6 +69,7 @@ namespace ts {
export function createCompilerHost(options: CompilerOptions, setParentNodes?: boolean): CompilerHost {
return createCompilerHostWorker(options, setParentNodes);
}
/*@internal*/
// TODO(shkamat): update this after reworking ts build API
export function createCompilerHostWorker(options: CompilerOptions, setParentNodes?: boolean, system = sys): CompilerHost {
@ -93,7 +94,6 @@ namespace ts {
}
text = "";
}
return text !== undefined ? createSourceFile(fileName, text, languageVersion, setParentNodes) : undefined;
}
@ -203,18 +203,25 @@ namespace ts {
return compilerHost;
}
interface CompilerHostLikeForCache {
fileExists(fileName: string): boolean;
readFile(fileName: string, encoding?: string): string | undefined;
directoryExists?(directory: string): boolean;
createDirectory?(directory: string): void;
writeFile?: WriteFileCallback;
}
/*@internal*/
export function changeCompilerHostToUseCache(
host: CompilerHost,
export function changeCompilerHostLikeToUseCache(
host: CompilerHostLikeForCache,
toPath: (fileName: string) => Path,
useCacheForSourceFile: boolean
getSourceFile?: CompilerHost["getSourceFile"]
) {
const originalReadFile = host.readFile;
const originalFileExists = host.fileExists;
const originalDirectoryExists = host.directoryExists;
const originalCreateDirectory = host.createDirectory;
const originalWriteFile = host.writeFile;
const originalGetSourceFile = host.getSourceFile;
const readFileCache = createMap<string | false>();
const fileExistsCache = createMap<boolean>();
const directoryExistsCache = createMap<boolean>();
@ -242,19 +249,17 @@ namespace ts {
return setReadFileCache(key, fileName);
};
if (useCacheForSourceFile) {
host.getSourceFile = (fileName, languageVersion, onError, shouldCreateNewSourceFile) => {
const key = toPath(fileName);
const value = sourceFileCache.get(key);
if (value) return value;
const getSourceFileWithCache: CompilerHost["getSourceFile"] | undefined = getSourceFile ? (fileName, languageVersion, onError, shouldCreateNewSourceFile) => {
const key = toPath(fileName);
const value = sourceFileCache.get(key);
if (value) return value;
const sourceFile = originalGetSourceFile.call(host, fileName, languageVersion, onError, shouldCreateNewSourceFile);
if (sourceFile && (isDeclarationFileName(fileName) || fileExtensionIs(fileName, Extension.Json))) {
sourceFileCache.set(key, sourceFile);
}
return sourceFile;
};
}
const sourceFile = getSourceFile(fileName, languageVersion, onError, shouldCreateNewSourceFile);
if (sourceFile && (isDeclarationFileName(fileName) || fileExtensionIs(fileName, Extension.Json))) {
sourceFileCache.set(key, sourceFile);
}
return sourceFile;
} : undefined;
// fileExists for any kind of extension
host.fileExists = fileName => {
@ -265,23 +270,25 @@ namespace ts {
fileExistsCache.set(key, !!newValue);
return newValue;
};
host.writeFile = (fileName, data, writeByteOrderMark, onError, sourceFiles) => {
const key = toPath(fileName);
fileExistsCache.delete(key);
if (originalWriteFile) {
host.writeFile = (fileName, data, writeByteOrderMark, onError, sourceFiles) => {
const key = toPath(fileName);
fileExistsCache.delete(key);
const value = readFileCache.get(key);
if (value && value !== data) {
readFileCache.delete(key);
sourceFileCache.delete(key);
}
else if (useCacheForSourceFile) {
const sourceFile = sourceFileCache.get(key);
if (sourceFile && sourceFile.text !== data) {
const value = readFileCache.get(key);
if (value && value !== data) {
readFileCache.delete(key);
sourceFileCache.delete(key);
}
}
originalWriteFile.call(host, fileName, data, writeByteOrderMark, onError, sourceFiles);
};
else if (getSourceFileWithCache) {
const sourceFile = sourceFileCache.get(key);
if (sourceFile && sourceFile.text !== data) {
sourceFileCache.delete(key);
}
}
originalWriteFile.call(host, fileName, data, writeByteOrderMark, onError, sourceFiles);
};
}
// directoryExists
if (originalDirectoryExists && originalCreateDirectory) {
@ -306,7 +313,7 @@ namespace ts {
originalDirectoryExists,
originalCreateDirectory,
originalWriteFile,
originalGetSourceFile,
getSourceFileWithCache,
readFileWithCache
};
}
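The wiring above swaps the host's `readFile`, `fileExists`, `getSourceFile` and `writeFile` for memoized versions keyed by canonical path. A generic sketch of the read-file half of that idea, not the compiler's actual helper (`toKey` stands in for `toPath`; both names are illustrative):

```ts
// A minimal sketch of caching a readFile-style callback by canonical path.
type ReadFile = (fileName: string) => string | undefined;

function withReadFileCache(readFile: ReadFile, toKey: (fileName: string) => string): ReadFile {
    const cache = new Map<string, string | false>(); // false records "file was missing"
    return fileName => {
        const key = toKey(fileName);
        const cached = cache.get(key);
        if (cached !== undefined) return cached === false ? undefined : cached;
        const text = readFile(fileName);
        cache.set(key, text === undefined ? false : text);
        return text;
    };
}
```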
@ -735,7 +742,7 @@ namespace ts {
performance.mark("beforeProgram");
const host = createProgramOptions.host || createCompilerHost(options);
const configParsingHost = parseConfigHostFromCompilerHost(host);
const configParsingHost = parseConfigHostFromCompilerHostLike(host);
let skipDefaultLib = options.noLib;
const getDefaultLibraryFileName = memoize(() => host.getDefaultLibFileName(options));
@ -1449,14 +1456,12 @@ namespace ts {
// Upstream project didn't have outFile set -- skip (error will have been issued earlier)
if (!out) continue;
const dtsFilename = changeExtension(out, ".d.ts");
const js = host.readFile(out) || `/* Input file ${out} was missing */\r\n`;
const jsMapPath = out + ".map"; // TODO: try to read sourceMappingUrl comment from the file
const jsMap = host.readFile(jsMapPath);
const dts = host.readFile(dtsFilename) || `/* Input file ${dtsFilename} was missing */\r\n`;
const dtsMapPath = dtsFilename + ".map";
const dtsMap = host.readFile(dtsMapPath);
const node = createInputFiles(js, dts, jsMap && jsMapPath, jsMap, dtsMap && dtsMapPath, dtsMap);
const { jsFilePath, sourceMapFilePath, declarationFilePath, declarationMapPath } = getOutputPathsForBundle(resolvedRefOpts.options, /*forceDtsPaths*/ true);
const node = createInputFiles(fileName => {
const path = toPath(fileName);
const sourceFile = getSourceFileByPath(path);
return sourceFile ? sourceFile.text : filesByName.has(path) ? undefined : host.readFile(path);
}, jsFilePath! , sourceMapFilePath, declarationFilePath! , declarationMapPath);
nodes.push(node);
}
}
@ -2227,8 +2232,9 @@ namespace ts {
processReferencedFiles(file, isDefaultLib);
processTypeReferenceDirectives(file);
}
processLibReferenceDirectives(file);
if (!options.noLib) {
processLibReferenceDirectives(file);
}
modulesWithElidedImports.set(file.path, false);
processImportedModules(file);
@ -2315,8 +2321,10 @@ namespace ts {
processReferencedFiles(file, isDefaultLib);
processTypeReferenceDirectives(file);
}
if (!options.noLib) {
processLibReferenceDirectives(file);
}
processLibReferenceDirectives(file);
// always process imported modules to record module name resolutions
processImportedModules(file);
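Both hunks above make lib reference directives a no-op when `noLib` is set. A hedged illustration of the kind of directive the new guard applies to:

```ts
// With "noLib": true in the compiler options, the directive below is now skipped
// instead of pulling lib.es2015.d.ts into the program.
/// <reference lib="es2015" />
export const stillCompiles = true;
```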
@ -3101,18 +3109,28 @@ namespace ts {
}
}
interface CompilerHostLike {
useCaseSensitiveFileNames(): boolean;
getCurrentDirectory(): string;
fileExists(fileName: string): boolean;
readFile(fileName: string): string | undefined;
readDirectory?(rootDir: string, extensions: ReadonlyArray<string>, excludes: ReadonlyArray<string> | undefined, includes: ReadonlyArray<string>, depth?: number): string[];
trace?(s: string): void;
onUnRecoverableConfigFileDiagnostic?: DiagnosticReporter;
}
/* @internal */
export function parseConfigHostFromCompilerHost(host: CompilerHost): ParseConfigFileHost {
export function parseConfigHostFromCompilerHostLike(host: CompilerHostLike, directoryStructureHost: DirectoryStructureHost = host): ParseConfigFileHost {
return {
fileExists: f => host.fileExists(f),
fileExists: f => directoryStructureHost.fileExists(f),
readDirectory(root, extensions, excludes, includes, depth) {
Debug.assertDefined(host.readDirectory, "'CompilerHost.readDirectory' must be implemented to correctly process 'projectReferences'");
return host.readDirectory!(root, extensions, excludes, includes, depth);
Debug.assertDefined(directoryStructureHost.readDirectory, "'CompilerHost.readDirectory' must be implemented to correctly process 'projectReferences'");
return directoryStructureHost.readDirectory!(root, extensions, excludes, includes, depth);
},
readFile: f => host.readFile(f),
readFile: f => directoryStructureHost.readFile(f),
useCaseSensitiveFileNames: host.useCaseSensitiveFileNames(),
getCurrentDirectory: () => host.getCurrentDirectory(),
onUnRecoverableConfigFileDiagnostic: () => undefined,
onUnRecoverableConfigFileDiagnostic: host.onUnRecoverableConfigFileDiagnostic || (() => undefined),
trace: host.trace ? (s) => host.trace!(s) : undefined
};
}

View File

@ -31,6 +31,7 @@ namespace ts {
scanJsxIdentifier(): SyntaxKind;
scanJsxAttributeValue(): SyntaxKind;
reScanJsxToken(): JsxTokenSyntaxKind;
reScanLessThanToken(): SyntaxKind;
scanJsxToken(): JsxTokenSyntaxKind;
scanJSDocToken(): JsDocSyntaxKind;
scan(): SyntaxKind;
@ -874,6 +875,7 @@ namespace ts {
scanJsxIdentifier,
scanJsxAttributeValue,
reScanJsxToken,
reScanLessThanToken,
scanJsxToken,
scanJSDocToken,
scan,
@ -1939,6 +1941,14 @@ namespace ts {
return token = scanJsxToken();
}
function reScanLessThanToken(): SyntaxKind {
if (token === SyntaxKind.LessThanLessThanToken) {
pos = tokenPos + 1;
return token = SyntaxKind.LessThanToken;
}
return token;
}
function scanJsxToken(): JsxTokenSyntaxKind {
startPos = tokenPos = pos;

View File

@ -2,6 +2,16 @@ declare function setTimeout(handler: (...args: any[]) => void, timeout: number):
declare function clearTimeout(handle: any): void;
namespace ts {
/**
* djb2 hashing algorithm
* http://www.cse.yorku.ca/~oz/hash.html
*/
/* @internal */
export function generateDjb2Hash(data: string): string {
const chars = data.split("").map(str => str.charCodeAt(0));
return `${chars.reduce((prev, curr) => ((prev << 5) + prev) + curr, 5381)}`;
}
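Since the djb2 helper above is now exported, here is a small self-contained illustration of what the formula computes; the printed values follow from the arithmetic itself, not from the compiler sources:

```ts
// Stand-alone copy of the djb2 formula shown above, for illustration only.
function djb2(data: string): string {
    const chars = data.split("").map(s => s.charCodeAt(0));
    return `${chars.reduce((prev, curr) => ((prev << 5) + prev) + curr, 5381)}`;
}

console.log(djb2(""));                  // "5381": the seed survives an empty input
console.log(djb2("ab") === djb2("ab")); // true: deterministic, so it can serve as a cheap content version
```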
/**
* Set a high stack trace limit to provide more information in case of an error.
* Called for command-line and server use cases.
@ -1115,15 +1125,6 @@ namespace ts {
}
}
/**
* djb2 hashing algorithm
* http://www.cse.yorku.ca/~oz/hash.html
*/
function generateDjb2Hash(data: string): string {
const chars = data.split("").map(str => str.charCodeAt(0));
return `${chars.reduce((prev, curr) => ((prev << 5) + prev) + curr, 5381)}`;
}
function createMD5HashUsingNativeCrypto(data: string): string {
const hash = _crypto!.createHash("md5");
hash.update(data);

View File

@ -207,7 +207,7 @@ namespace ts {
}
), mapDefined(node.prepends, prepend => {
if (prepend.kind === SyntaxKind.InputFiles) {
return createUnparsedSourceFile(prepend.declarationText, prepend.declarationMapPath, prepend.declarationMapText);
return createUnparsedSourceFile(prepend, "dts");
}
}));
bundle.syntheticFileReferences = [];
@ -634,7 +634,10 @@ namespace ts {
if (!isLateVisibilityPaintedStatement(i)) {
return Debug.fail(`Late replaced statement was found which is not handled by the declaration transformer!: ${(ts as any).SyntaxKind ? (ts as any).SyntaxKind[(i as any).kind] : (i as any).kind}`);
}
const priorNeedsDeclare = needsDeclare;
needsDeclare = i.parent && isSourceFile(i.parent) && !(isExternalModule(i.parent) && isBundledEmit);
const result = transformTopLevelDeclaration(i, /*privateDeclaration*/ true);
needsDeclare = priorNeedsDeclare;
lateStatementReplacementMap.set("" + getOriginalNodeId(i), result);
}

View File

@ -1350,8 +1350,8 @@ namespace ts {
* part of a constructor declaration with a
* synthesized call to `super`
*/
function shouldAddRestParameter(node: ParameterDeclaration | undefined, inConstructorWithSynthesizedSuper: boolean) {
return node && node.dotDotDotToken && node.name.kind === SyntaxKind.Identifier && !inConstructorWithSynthesizedSuper;
function shouldAddRestParameter(node: ParameterDeclaration | undefined, inConstructorWithSynthesizedSuper: boolean): node is ParameterDeclaration {
return !!(node && node.dotDotDotToken && !inConstructorWithSynthesizedSuper);
}
/**
@ -1370,11 +1370,11 @@ namespace ts {
}
// `declarationName` is the name of the local declaration for the parameter.
const declarationName = getMutableClone(<Identifier>parameter!.name);
const declarationName = parameter.name.kind === SyntaxKind.Identifier ? getMutableClone(parameter.name) : createTempVariable(/*recordTempVariable*/ undefined);
setEmitFlags(declarationName, EmitFlags.NoSourceMap);
// `expressionName` is the name of the parameter used in expressions.
const expressionName = getSynthesizedClone(<Identifier>parameter!.name);
const expressionName = parameter.name.kind === SyntaxKind.Identifier ? getSynthesizedClone(parameter.name) : declarationName;
const restIndex = node.parameters.length - 1;
const temp = createLoopVariable();
@ -1439,6 +1439,24 @@ namespace ts {
setEmitFlags(forStatement, EmitFlags.CustomPrologue);
startOnNewLine(forStatement);
statements.push(forStatement);
if (parameter.name.kind !== SyntaxKind.Identifier) {
// do the actual destructuring of the rest parameter if necessary
statements.push(
setEmitFlags(
setTextRange(
createVariableStatement(
/*modifiers*/ undefined,
createVariableDeclarationList(
flattenDestructuringBinding(parameter, visitor, context, FlattenLevel.All, expressionName),
)
),
parameter
),
EmitFlags.CustomPrologue
)
);
}
}
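The block added above destructures a rest parameter whose name is a binding pattern during ES5 downleveling. A hedged sketch of the source pattern this supports (the function name is illustrative; the emitted shape is only indicative):

```ts
// Targeting ES5, the rest arguments are first collected into a temporary array and
// then destructured by the emitted variable statement, as arranged above.
function firstAndRest(...[head, ...tail]: number[]) {
    return { head, tail };
}

firstAndRest(1, 2, 3); // { head: 1, tail: [2, 3] }
```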
/**

View File

@ -101,7 +101,7 @@ namespace ts {
function transformBundle(node: Bundle) {
return createBundle(node.sourceFiles.map(transformSourceFile), mapDefined(node.prepends, prepend => {
if (prepend.kind === SyntaxKind.InputFiles) {
return createUnparsedSourceFile(prepend.javascriptText, prepend.javascriptMapPath, prepend.javascriptMapText);
return createUnparsedSourceFile(prepend, "js");
}
return prepend;
}));
@ -1934,8 +1934,13 @@ namespace ts {
case SyntaxKind.ConditionalType:
return serializeTypeList([(<ConditionalTypeNode>node).trueType, (<ConditionalTypeNode>node).falseType]);
case SyntaxKind.TypeQuery:
case SyntaxKind.TypeOperator:
if ((<TypeOperatorNode>node).operator === SyntaxKind.ReadonlyKeyword) {
return serializeTypeNode((<TypeOperatorNode>node).type);
}
break;
case SyntaxKind.TypeQuery:
case SyntaxKind.IndexedAccessType:
case SyntaxKind.MappedType:
case SyntaxKind.TypeLiteral:

View File

@ -119,7 +119,7 @@ namespace ts {
newestDeclarationFileContentChangedTime?: Date;
newestOutputFileTime?: Date;
newestOutputFileName?: string;
oldestOutputFileName?: string;
oldestOutputFileName: string;
}
/**
@ -321,7 +321,7 @@ namespace ts {
return fileExtensionIs(fileName, Extension.Dts);
}
export interface SolutionBuilderHostBase extends CompilerHost {
export interface SolutionBuilderHostBase<T extends BuilderProgram> extends ProgramHost<T> {
getModifiedTime(fileName: string): Date | undefined;
setModifiedTime(fileName: string, date: Date): void;
deleteFile(fileName: string): void;
@ -331,15 +331,17 @@ namespace ts {
// TODO: To do better with watch mode and normal build mode api that creates program and emits files
// This currently helps enable --diagnostics and --extendedDiagnostics
beforeCreateProgram?(options: CompilerOptions): void;
afterProgramEmitAndDiagnostics?(program: Program): void;
afterProgramEmitAndDiagnostics?(program: T): void;
// For testing
now?(): Date;
}
export interface SolutionBuilderHost extends SolutionBuilderHostBase {
export interface SolutionBuilderHost<T extends BuilderProgram> extends SolutionBuilderHostBase<T> {
reportErrorSummary?: ReportEmitErrorSummary;
}
export interface SolutionBuilderWithWatchHost extends SolutionBuilderHostBase, WatchHost {
export interface SolutionBuilderWithWatchHost<T extends BuilderProgram> extends SolutionBuilderHostBase<T>, WatchHost {
}
export interface SolutionBuilder {
@ -372,8 +374,8 @@ namespace ts {
};
}
function createSolutionBuilderHostBase(system = sys, reportDiagnostic?: DiagnosticReporter, reportSolutionBuilderStatus?: DiagnosticReporter) {
const host = createCompilerHostWorker({}, /*setParentNodes*/ undefined, system) as SolutionBuilderHostBase;
function createSolutionBuilderHostBase<T extends BuilderProgram>(system: System, createProgram: CreateProgram<T>, reportDiagnostic?: DiagnosticReporter, reportSolutionBuilderStatus?: DiagnosticReporter) {
const host = createProgramHost(system, createProgram) as SolutionBuilderHostBase<T>;
host.getModifiedTime = system.getModifiedTime ? path => system.getModifiedTime!(path) : () => undefined;
host.setModifiedTime = system.setModifiedTime ? (path, date) => system.setModifiedTime!(path, date) : noop;
host.deleteFile = system.deleteFile ? path => system.deleteFile!(path) : noop;
@ -382,20 +384,16 @@ namespace ts {
return host;
}
export function createSolutionBuilderHost(system = sys, reportDiagnostic?: DiagnosticReporter, reportSolutionBuilderStatus?: DiagnosticReporter, reportErrorSummary?: ReportEmitErrorSummary) {
const host = createSolutionBuilderHostBase(system, reportDiagnostic, reportSolutionBuilderStatus) as SolutionBuilderHost;
export function createSolutionBuilderHost<T extends BuilderProgram = BuilderProgram>(system = sys, createProgram?: CreateProgram<T>, reportDiagnostic?: DiagnosticReporter, reportSolutionBuilderStatus?: DiagnosticReporter, reportErrorSummary?: ReportEmitErrorSummary) {
const host = createSolutionBuilderHostBase(system, createProgram || createAbstractBuilder as any as CreateProgram<T>, reportDiagnostic, reportSolutionBuilderStatus) as SolutionBuilderHost<T>;
host.reportErrorSummary = reportErrorSummary;
return host;
}
export function createSolutionBuilderWithWatchHost(system?: System, reportDiagnostic?: DiagnosticReporter, reportSolutionBuilderStatus?: DiagnosticReporter, reportWatchStatus?: WatchStatusReporter) {
const host = createSolutionBuilderHostBase(system, reportDiagnostic, reportSolutionBuilderStatus) as SolutionBuilderWithWatchHost;
export function createSolutionBuilderWithWatchHost<T extends BuilderProgram = SemanticDiagnosticsBuilderProgram>(system = sys, createProgram?: CreateProgram<T>, reportDiagnostic?: DiagnosticReporter, reportSolutionBuilderStatus?: DiagnosticReporter, reportWatchStatus?: WatchStatusReporter) {
const host = createSolutionBuilderHostBase(system, createProgram || createEmitAndSemanticDiagnosticsBuilderProgram as any as CreateProgram<T>, reportDiagnostic, reportSolutionBuilderStatus) as SolutionBuilderWithWatchHost<T>;
const watchHost = createWatchHost(system, reportWatchStatus);
host.onWatchStatusChange = watchHost.onWatchStatusChange;
host.watchFile = watchHost.watchFile;
host.watchDirectory = watchHost.watchDirectory;
host.setTimeout = watchHost.setTimeout;
host.clearTimeout = watchHost.clearTimeout;
copyProperties(host, watchHost);
return host;
}
@ -413,13 +411,13 @@ namespace ts {
* TODO: use SolutionBuilderWithWatchHost => watchedSolution
* use SolutionBuilderHost => Solution
*/
export function createSolutionBuilder(host: SolutionBuilderHost, rootNames: ReadonlyArray<string>, defaultOptions: BuildOptions): SolutionBuilder;
export function createSolutionBuilder(host: SolutionBuilderWithWatchHost, rootNames: ReadonlyArray<string>, defaultOptions: BuildOptions): SolutionBuilderWithWatch;
export function createSolutionBuilder(host: SolutionBuilderHost | SolutionBuilderWithWatchHost, rootNames: ReadonlyArray<string>, defaultOptions: BuildOptions): SolutionBuilderWithWatch {
const hostWithWatch = host as SolutionBuilderWithWatchHost;
export function createSolutionBuilder<T extends BuilderProgram>(host: SolutionBuilderHost<T>, rootNames: ReadonlyArray<string>, defaultOptions: BuildOptions): SolutionBuilder;
export function createSolutionBuilder<T extends BuilderProgram>(host: SolutionBuilderWithWatchHost<T>, rootNames: ReadonlyArray<string>, defaultOptions: BuildOptions): SolutionBuilderWithWatch;
export function createSolutionBuilder<T extends BuilderProgram>(host: SolutionBuilderHost<T> | SolutionBuilderWithWatchHost<T>, rootNames: ReadonlyArray<string>, defaultOptions: BuildOptions): SolutionBuilderWithWatch {
const hostWithWatch = host as SolutionBuilderWithWatchHost<T>;
const currentDirectory = host.getCurrentDirectory();
const getCanonicalFileName = createGetCanonicalFileName(host.useCaseSensitiveFileNames());
const parseConfigFileHost = parseConfigHostFromCompilerHost(host);
const parseConfigFileHost = parseConfigHostFromCompilerHostLike(host);
// State of the solution
let options = defaultOptions;
@ -434,8 +432,14 @@ namespace ts {
let globalDependencyGraph: DependencyGraph | undefined;
const writeFileName = (s: string) => host.trace && host.trace(s);
let readFileWithCache = (f: string) => host.readFile(f);
let projectCompilerOptions = baseCompilerOptions;
const compilerHost = createCompilerHostFromProgramHost(host, () => projectCompilerOptions);
const originalGetSourceFile = compilerHost.getSourceFile;
const computeHash = host.createHash || generateDjb2Hash;
updateGetSourceFile();
// Watch state
const builderPrograms = createFileMap<T>(toPath);
const diagnostics = createFileMap<ReadonlyArray<Diagnostic>>(toPath);
const projectPendingBuild = createFileMap<ConfigFileProgramReloadLevel>(toPath);
const projectErrorsReported = createFileMap<true>(toPath);
@ -443,6 +447,7 @@ namespace ts {
let nextProjectToBuild = 0;
let timerToBuildInvalidatedProject: any;
let reportFileChangeDetected = false;
const { watchFile, watchFilePath, watchDirectory, writeLog } = createWatchFactory<ResolvedConfigFileName>(host, options);
// Watches for the solution
const allWatchedWildcardDirectories = createFileMap<Map<WildcardDirectoryWatcher>>(toPath);
@ -492,6 +497,27 @@ namespace ts {
clearMap(allWatchedWildcardDirectories, wildCardWatches => clearMap(wildCardWatches, closeFileWatcherOf));
clearMap(allWatchedInputFiles, inputFileWatches => clearMap(inputFileWatches, closeFileWatcher));
clearMap(allWatchedConfigFiles, closeFileWatcher);
builderPrograms.clear();
updateGetSourceFile();
}
function updateGetSourceFile() {
if (options.watch) {
if (compilerHost.getSourceFile === originalGetSourceFile) {
compilerHost.getSourceFile = (...args) => {
const result = originalGetSourceFile.call(compilerHost, ...args);
if (result && options.watch) {
result.version = computeHash.call(host, result.text);
}
return result;
};
}
}
else {
if (compilerHost.getSourceFile !== originalGetSourceFile) {
compilerHost.getSourceFile = originalGetSourceFile;
}
}
}
function isParsedCommandLine(entry: ConfigFileCacheEntry): entry is ParsedCommandLine {
@ -542,9 +568,16 @@ namespace ts {
function watchConfigFile(resolved: ResolvedConfigFileName) {
if (options.watch && !allWatchedConfigFiles.hasKey(resolved)) {
allWatchedConfigFiles.setValue(resolved, hostWithWatch.watchFile(resolved, () => {
invalidateProjectAndScheduleBuilds(resolved, ConfigFileProgramReloadLevel.Full);
}));
allWatchedConfigFiles.setValue(resolved, watchFile(
hostWithWatch,
resolved,
() => {
invalidateProjectAndScheduleBuilds(resolved, ConfigFileProgramReloadLevel.Full);
},
PollingInterval.High,
WatchType.ConfigFile,
resolved
));
}
}
@ -554,20 +587,27 @@ namespace ts {
getOrCreateValueMapFromConfigFileMap(allWatchedWildcardDirectories, resolved),
createMapFromTemplate(parsed.configFileSpecs!.wildcardDirectories),
(dir, flags) => {
return hostWithWatch.watchDirectory(dir, fileOrDirectory => {
const fileOrDirectoryPath = toPath(fileOrDirectory);
if (fileOrDirectoryPath !== toPath(dir) && hasExtension(fileOrDirectoryPath) && !isSupportedSourceFileName(fileOrDirectory, parsed.options)) {
// writeLog(`Project: ${configFileName} Detected file add/remove of non supported extension: ${fileOrDirectory}`);
return;
}
return watchDirectory(
hostWithWatch,
dir,
fileOrDirectory => {
const fileOrDirectoryPath = toPath(fileOrDirectory);
if (fileOrDirectoryPath !== toPath(dir) && hasExtension(fileOrDirectoryPath) && !isSupportedSourceFileName(fileOrDirectory, parsed.options)) {
writeLog(`Project: ${resolved} Detected file add/remove of non supported extension: ${fileOrDirectory}`);
return;
}
if (isOutputFile(fileOrDirectory, parsed)) {
// writeLog(`${fileOrDirectory} is output file`);
return;
}
if (isOutputFile(fileOrDirectory, parsed)) {
writeLog(`${fileOrDirectory} is output file`);
return;
}
invalidateProjectAndScheduleBuilds(resolved, ConfigFileProgramReloadLevel.Partial);
}, !!(flags & WatchDirectoryFlags.Recursive));
invalidateProjectAndScheduleBuilds(resolved, ConfigFileProgramReloadLevel.Partial);
},
flags,
WatchType.WildcardDirectory,
resolved
);
}
);
}
@ -578,9 +618,15 @@ namespace ts {
getOrCreateValueMapFromConfigFileMap(allWatchedInputFiles, resolved),
arrayToMap(parsed.fileNames, toPath),
{
createNewValue: (_key, input) => hostWithWatch.watchFile(input, () => {
invalidateProjectAndScheduleBuilds(resolved, ConfigFileProgramReloadLevel.None);
}),
createNewValue: (path, input) => watchFilePath(
hostWithWatch,
input,
() => invalidateProjectAndScheduleBuilds(resolved, ConfigFileProgramReloadLevel.None),
PollingInterval.Low,
path as Path,
WatchType.SourceFile,
resolved
),
onDeleteValue: closeFileWatcher,
}
);
@ -898,7 +944,7 @@ namespace ts {
}
function reportErrorSummary() {
if (options.watch || (host as SolutionBuilderHost).reportErrorSummary) {
if (options.watch || (host as SolutionBuilderHost<T>).reportErrorSummary) {
// Report errors from the other projects
getGlobalDependencyGraph().buildQueue.forEach(project => {
if (!projectErrorsReported.hasKey(project)) {
@ -911,7 +957,7 @@ namespace ts {
reportWatchStatus(getWatchErrorSummaryDiagnosticMessage(totalErrors), totalErrors);
}
else {
(host as SolutionBuilderHost).reportErrorSummary!(totalErrors);
(host as SolutionBuilderHost<T>).reportErrorSummary!(totalErrors);
}
}
}
@ -944,16 +990,40 @@ namespace ts {
return;
}
if (status.type === UpToDateStatusType.UpToDateWithUpstreamTypes) {
// Fake that files have been built by updating output file stamps
updateOutputTimestamps(proj);
return;
}
const buildResult = buildSingleProject(resolved);
const dependencyGraph = getGlobalDependencyGraph();
const referencingProjects = dependencyGraph.referencingProjectsMap.getValue(resolved);
if (buildResult & BuildResultFlags.AnyErrors) return;
const { referencingProjectsMap, buildQueue } = getGlobalDependencyGraph();
const referencingProjects = referencingProjectsMap.getValue(resolved);
if (!referencingProjects) return;
// Always use build order to queue projects
for (const project of dependencyGraph.buildQueue) {
for (let index = buildQueue.indexOf(resolved) + 1; index < buildQueue.length; index++) {
const project = buildQueue[index];
const prepend = referencingProjects.getValue(project);
// If the project is referenced with prepend, always build downstream projects,
// otherwise queue it only if declaration output changed
if (prepend || (prepend !== undefined && !(buildResult & BuildResultFlags.DeclarationOutputUnchanged))) {
if (prepend !== undefined) {
// If the project is referenced with prepend, always build downstream projects,
// If declaration output is changed, build the project
// otherwise mark the project UpToDateWithUpstreamTypes so it updates output time stamps
const status = projectStatus.getValue(project);
if (prepend || !(buildResult & BuildResultFlags.DeclarationOutputUnchanged)) {
if (status && (status.type === UpToDateStatusType.UpToDate || status.type === UpToDateStatusType.UpToDateWithUpstreamTypes)) {
projectStatus.setValue(project, {
type: UpToDateStatusType.OutOfDateWithUpstream,
outOfDateOutputFileName: status.oldestOutputFileName,
newerProjectName: resolved
});
}
}
else if (status && status.type === UpToDateStatusType.UpToDate) {
status.type = UpToDateStatusType.UpToDateWithUpstreamTypes;
}
addProjToQueue(project);
}
}
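The loop above walks the build queue past the project that was just built and decides, per downstream project, whether to queue a rebuild or merely mark it up to date with upstream types. A simplified model of that queueing rule, using plain maps instead of the compiler's file maps:

```ts
// Simplified model of the rule above: prepend references always rebuild;
// otherwise a downstream project rebuilds only when .d.ts output changed.
function downstreamProjectsToBuild(
    built: string,
    buildQueue: readonly string[],                  // projects in build order
    referencing: Map<string, Map<string, boolean>>, // built -> (downstream -> referenced with prepend?)
    declarationOutputChanged: boolean
): string[] {
    const downstream = referencing.get(built);
    if (!downstream) return [];
    const result: string[] = [];
    for (let i = buildQueue.indexOf(built) + 1; i < buildQueue.length; i++) {
        const project = buildQueue[i];
        const prepend = downstream.get(project);
        if (prepend === undefined) continue;        // does not reference the built project
        if (prepend || declarationOutputChanged) result.push(project);
    }
    return result;
}
```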
@ -1030,22 +1100,23 @@ namespace ts {
return BuildResultFlags.None;
}
const programOptions: CreateProgramOptions = {
projectReferences: configFile.projectReferences,
host,
rootNames: configFile.fileNames,
options: configFile.options,
configFileParsingDiagnostics: configFile.errors
};
if (host.beforeCreateProgram) {
host.beforeCreateProgram(options);
}
const program = createProgram(programOptions);
// TODO: handle resolve module name to cache result in project reference redirect
projectCompilerOptions = configFile.options;
const program = host.createProgram(
configFile.fileNames,
configFile.options,
compilerHost,
builderPrograms.getValue(proj),
configFile.errors,
configFile.projectReferences
);
projectCompilerOptions = baseCompilerOptions;
// Don't emit anything in the presence of syntactic errors or options diagnostics
const syntaxDiagnostics = [
...program.getOptionsDiagnostics(),
...program.getConfigFileParsingDiagnostics(),
...program.getOptionsDiagnostics(),
...program.getGlobalDiagnostics(),
...program.getSyntacticDiagnostics()];
if (syntaxDiagnostics.length) {
return buildErrors(syntaxDiagnostics, BuildResultFlags.SyntaxErrors, "Syntactic");
@ -1057,6 +1128,8 @@ namespace ts {
return buildErrors(semanticDiagnostics, BuildResultFlags.TypeErrors, "Semantic");
}
// Before emitting, back up state so we can revert it if there are declaration errors, handling emit and declaration errors correctly
program.backupState();
let newestDeclarationFileContentChangedTime = minimumDate;
let anyDtsChanged = false;
let declDiagnostics: Diagnostic[] | undefined;
@ -1065,11 +1138,13 @@ namespace ts {
emitFilesAndReportErrors(program, reportDeclarationDiagnostics, writeFileName, /*reportSummary*/ undefined, (name, text, writeByteOrderMark) => outputFiles.push({ name, text, writeByteOrderMark }));
// Don't emit .d.ts if there are decl file errors
if (declDiagnostics) {
program.restoreState();
return buildErrors(declDiagnostics, BuildResultFlags.DeclarationEmitErrors, "Declaration file");
}
// Actual Emit
const emitterDiagnostics = createDiagnosticCollection();
const emittedOutputs = createFileMap<true>(toPath as ToPath);
outputFiles.forEach(({ name, text, writeByteOrderMark }) => {
let priorChangeTime: Date | undefined;
if (!anyDtsChanged && isDeclarationFile(name)) {
@ -1083,7 +1158,8 @@ namespace ts {
}
}
writeFile(host, emitterDiagnostics, name, text, writeByteOrderMark);
emittedOutputs.setValue(name, true);
writeFile(compilerHost, emitterDiagnostics, name, text, writeByteOrderMark);
if (priorChangeTime !== undefined) {
newestDeclarationFileContentChangedTime = newer(priorChangeTime, newestDeclarationFileContentChangedTime);
unchangedOutputs.setValue(name, priorChangeTime);
@ -1095,51 +1171,72 @@ namespace ts {
return buildErrors(emitDiagnostics, BuildResultFlags.EmitErrors, "Emit");
}
// Update time stamps for rest of the outputs
newestDeclarationFileContentChangedTime = updateOutputTimestampsWorker(configFile, newestDeclarationFileContentChangedTime, Diagnostics.Updating_unchanged_output_timestamps_of_project_0, emittedOutputs);
const status: UpToDateStatus = {
type: UpToDateStatusType.UpToDate,
newestDeclarationFileContentChangedTime: anyDtsChanged ? maximumDate : newestDeclarationFileContentChangedTime
newestDeclarationFileContentChangedTime: anyDtsChanged ? maximumDate : newestDeclarationFileContentChangedTime,
oldestOutputFileName: outputFiles.length ? outputFiles[0].name : getFirstProjectOutput(configFile)
};
diagnostics.removeKey(proj);
projectStatus.setValue(proj, status);
if (host.afterProgramEmitAndDiagnostics) {
host.afterProgramEmitAndDiagnostics(program);
}
afterProgramCreate(proj, program);
return resultFlags;
function buildErrors(diagnostics: ReadonlyArray<Diagnostic>, errorFlags: BuildResultFlags, errorType: string) {
resultFlags |= errorFlags;
reportAndStoreErrors(proj, diagnostics);
projectStatus.setValue(proj, { type: UpToDateStatusType.Unbuildable, reason: `${errorType} errors` });
if (host.afterProgramEmitAndDiagnostics) {
host.afterProgramEmitAndDiagnostics(program);
}
afterProgramCreate(proj, program);
return resultFlags;
}
}
function afterProgramCreate(proj: ResolvedConfigFileName, program: T) {
if (host.afterProgramEmitAndDiagnostics) {
host.afterProgramEmitAndDiagnostics(program);
}
if (options.watch) {
program.releaseProgram();
builderPrograms.setValue(proj, program);
}
}
function updateOutputTimestamps(proj: ParsedCommandLine) {
if (options.dry) {
return reportStatus(Diagnostics.A_non_dry_build_would_build_project_0, proj.options.configFilePath!);
}
if (options.verbose) {
reportStatus(Diagnostics.Updating_output_timestamps_of_project_0, proj.options.configFilePath!);
}
const now = new Date();
const outputs = getAllProjectOutputs(proj);
let priorNewestUpdateTime = minimumDate;
for (const file of outputs) {
if (isDeclarationFile(file)) {
priorNewestUpdateTime = newer(priorNewestUpdateTime, host.getModifiedTime(file) || missingFileModifiedTime);
}
host.setModifiedTime(file, now);
}
const priorNewestUpdateTime = updateOutputTimestampsWorker(proj, minimumDate, Diagnostics.Updating_output_timestamps_of_project_0);
projectStatus.setValue(proj.options.configFilePath as ResolvedConfigFilePath, { type: UpToDateStatusType.UpToDate, newestDeclarationFileContentChangedTime: priorNewestUpdateTime } as UpToDateStatus);
}
function updateOutputTimestampsWorker(proj: ParsedCommandLine, priorNewestUpdateTime: Date, verboseMessage: DiagnosticMessage, skipOutputs?: FileMap<true>) {
const outputs = getAllProjectOutputs(proj);
if (!skipOutputs || outputs.length !== skipOutputs.getSize()) {
if (options.verbose) {
reportStatus(verboseMessage, proj.options.configFilePath!);
}
const now = host.now ? host.now() : new Date();
for (const file of outputs) {
if (skipOutputs && skipOutputs.hasKey(file)) {
continue;
}
if (isDeclarationFile(file)) {
priorNewestUpdateTime = newer(priorNewestUpdateTime, host.getModifiedTime(file) || missingFileModifiedTime);
}
host.setModifiedTime(file, now);
if (proj.options.listEmittedFiles) {
writeFileName(`TSFILE: ${file}`);
}
}
}
return priorNewestUpdateTime;
}
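`updateOutputTimestampsWorker` above bumps the modified time of outputs that were not freshly emitted while remembering the newest prior declaration-file time. A rough sketch of a timestamp-only update using Node's fs directly; the compiler goes through `host.getModifiedTime`/`host.setModifiedTime` instead:

```ts
import * as fs from "fs";

// Touch every output that was not just written; content is untouched, only mtime moves.
function touchUnchangedOutputs(outputs: readonly string[], justEmitted: ReadonlySet<string>): void {
    const now = new Date();
    for (const file of outputs) {
        if (justEmitted.has(file)) continue; // freshly written files already carry a new mtime
        fs.utimesSync(file, now, now);
    }
}
```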
function getFilesToClean(): string[] {
// Get the same graph for cleaning we'd use for building
const graph = getGlobalDependencyGraph();
@ -1187,12 +1284,15 @@ namespace ts {
if (options.watch) { reportWatchStatus(Diagnostics.Starting_compilation_in_watch_mode); }
// TODO:: In watch mode as well to use caches for incremental build once we can invalidate caches correctly and have right api
// Override readFile for json files and output .d.ts to cache the text
const { originalReadFile, originalFileExists, originalDirectoryExists,
originalCreateDirectory, originalWriteFile, originalGetSourceFile,
readFileWithCache: newReadFileWithCache
} = changeCompilerHostToUseCache(host, toPath, /*useCacheForSourceFile*/ true);
const savedReadFileWithCache = readFileWithCache;
const savedGetSourceFile = compilerHost.getSourceFile;
const { originalReadFile, originalFileExists, originalDirectoryExists,
originalCreateDirectory, originalWriteFile, getSourceFileWithCache,
readFileWithCache: newReadFileWithCache
} = changeCompilerHostLikeToUseCache(host, toPath, (...args) => savedGetSourceFile.call(compilerHost, ...args));
readFileWithCache = newReadFileWithCache;
compilerHost.getSourceFile = getSourceFileWithCache!;
const graph = getGlobalDependencyGraph();
reportBuildQueue(graph);
@ -1249,8 +1349,8 @@ namespace ts {
host.directoryExists = originalDirectoryExists;
host.createDirectory = originalCreateDirectory;
host.writeFile = originalWriteFile;
compilerHost.getSourceFile = savedGetSourceFile;
readFileWithCache = savedReadFileWithCache;
host.getSourceFile = originalGetSourceFile;
return anyFailed ? ExitStatus.DiagnosticsPresent_OutputsSkipped : ExitStatus.Success;
}
@ -1278,7 +1378,7 @@ namespace ts {
}
function relName(path: string): string {
return convertToRelativePath(path, host.getCurrentDirectory(), f => host.getCanonicalFileName(f));
return convertToRelativePath(path, host.getCurrentDirectory(), f => compilerHost.getCanonicalFileName(f));
}
/**
@ -1311,6 +1411,20 @@ namespace ts {
}
}
function getFirstProjectOutput(project: ParsedCommandLine): string {
if (project.options.outFile || project.options.out) {
return first(getOutFileOutputs(project));
}
for (const inputFile of project.fileNames) {
const outputs = getOutputFileNames(inputFile, project);
if (outputs.length) {
return first(outputs);
}
}
return Debug.fail(`project ${project.options.configFilePath} expected to have at least one output`);
}
export function formatUpToDateStatus<T>(configFileName: string, status: UpToDateStatus, relName: (fileName: string) => string, formatMessage: (message: DiagnosticMessage, ...args: string[]) => T) {
switch (status.type) {
case UpToDateStatusType.OutOfDateWithSelf:

View File

@ -2761,9 +2761,11 @@ namespace ts {
export interface InputFiles extends Node {
kind: SyntaxKind.InputFiles;
javascriptPath?: string;
javascriptText: string;
javascriptMapPath?: string;
javascriptMapText?: string;
declarationPath?: string;
declarationText: string;
declarationMapPath?: string;
declarationMapText?: string;
@ -2771,6 +2773,7 @@ namespace ts {
export interface UnparsedSource extends Node {
kind: SyntaxKind.UnparsedSource;
fileName?: string;
text: string;
sourceMapPath?: string;
sourceMapText?: string;
@ -2827,7 +2830,7 @@ namespace ts {
fileName: string,
data: string,
writeByteOrderMark: boolean,
onError: ((message: string) => void) | undefined,
onError?: (message: string) => void,
sourceFiles?: ReadonlyArray<SourceFile>,
) => void;
@ -3037,8 +3040,10 @@ namespace ts {
/* @internal */ typeToTypeNode(type: Type, enclosingDeclaration?: Node, flags?: NodeBuilderFlags, tracker?: SymbolTracker): TypeNode | undefined; // tslint:disable-line unified-signatures
/** Note that the resulting nodes cannot be checked. */
signatureToSignatureDeclaration(signature: Signature, kind: SyntaxKind, enclosingDeclaration?: Node, flags?: NodeBuilderFlags): SignatureDeclaration & {typeArguments?: NodeArray<TypeNode>} | undefined;
/* @internal */ signatureToSignatureDeclaration(signature: Signature, kind: SyntaxKind, enclosingDeclaration?: Node, flags?: NodeBuilderFlags, tracker?: SymbolTracker): SignatureDeclaration & {typeArguments?: NodeArray<TypeNode>} | undefined; // tslint:disable-line unified-signatures
/** Note that the resulting nodes cannot be checked. */
indexInfoToIndexSignatureDeclaration(indexInfo: IndexInfo, kind: IndexKind, enclosingDeclaration?: Node, flags?: NodeBuilderFlags): IndexSignatureDeclaration | undefined;
/* @internal */ indexInfoToIndexSignatureDeclaration(indexInfo: IndexInfo, kind: IndexKind, enclosingDeclaration?: Node, flags?: NodeBuilderFlags, tracker?: SymbolTracker): IndexSignatureDeclaration | undefined; // tslint:disable-line unified-signatures
/** Note that the resulting nodes cannot be checked. */
symbolToEntityName(symbol: Symbol, meaning: SymbolFlags, enclosingDeclaration?: Node, flags?: NodeBuilderFlags): EntityName | undefined;
/** Note that the resulting nodes cannot be checked. */
@ -3950,6 +3955,7 @@ namespace ts {
// Unique symbol types (TypeFlags.UniqueESSymbol)
export interface UniqueESSymbolType extends Type {
symbol: Symbol;
escapedName: __String;
}
export interface StringLiteralType extends LiteralType {
@ -5001,7 +5007,6 @@ namespace ts {
getDefaultLibLocation?(): string;
writeFile: WriteFileCallback;
getCurrentDirectory(): string;
getDirectories(path: string): string[];
getCanonicalFileName(fileName: string): string;
useCaseSensitiveFileNames(): boolean;
getNewLine(): string;
@ -5863,6 +5868,7 @@ namespace ts {
/** Determines whether we import `foo/index.ts` as "foo", "foo/index", or "foo/index.js" */
readonly importModuleSpecifierEnding?: "minimal" | "index" | "js";
readonly allowTextChangesInNewFiles?: boolean;
readonly providePrefixAndSuffixTextForRename?: boolean;
}
/** Represents a bigint literal value without requiring bigint support */

View File

@ -778,7 +778,8 @@ namespace ts {
case SyntaxKind.NoSubstitutionTemplateLiteral:
return escapeLeadingUnderscores(name.text);
case SyntaxKind.ComputedPropertyName:
return isStringOrNumericLiteralLike(name.expression) ? escapeLeadingUnderscores(name.expression.text) : undefined!; // TODO: GH#18217 Almost all uses of this assume the result to be defined!
if (isStringOrNumericLiteralLike(name.expression)) return escapeLeadingUnderscores(name.expression.text);
return Debug.fail("Text of property name cannot be read from non-literal-valued ComputedPropertyNames");
default:
return Debug.assertNever(name);
}
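The rewritten branch above fails loudly instead of returning `undefined!` when a computed property name is not literal-valued. Illustrative shapes for the two sides of that branch:

```ts
// A literal-valued computed name has readable text; a non-literal one does not,
// and after the change above asking for its text is a hard failure rather than
// an undefined value sneaking through.
const withLiteralKey = { ["known"]: 1 };    // text of the property name is "known"

declare const dynamicKey: string;
const withDynamicKey = { [dynamicKey]: 1 }; // no statically readable text
```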
@ -888,7 +889,7 @@ namespace ts {
}
const isMissing = nodeIsMissing(errorNode);
const pos = isMissing
const pos = isMissing || isJsxText(node)
? errorNode.pos
: skipTrivia(sourceFile.text, errorNode.pos);
@ -7029,6 +7030,7 @@ namespace ts {
};
}
export function formatMessage(_dummy: any, message: DiagnosticMessage, ...args: (string | number | undefined)[]): string;
export function formatMessage(_dummy: any, message: DiagnosticMessage): string {
let text = getLocaleSpecificMessage(message);

View File

@ -88,21 +88,6 @@ namespace ts {
return result;
}
/**
* Program structure needed to emit the files and report diagnostics
*/
export interface ProgramToEmitFilesAndReportErrors {
getCurrentDirectory(): string;
getCompilerOptions(): CompilerOptions;
getSourceFiles(): ReadonlyArray<SourceFile>;
getSyntacticDiagnostics(): ReadonlyArray<Diagnostic>;
getOptionsDiagnostics(): ReadonlyArray<Diagnostic>;
getGlobalDiagnostics(): ReadonlyArray<Diagnostic>;
getSemanticDiagnostics(): ReadonlyArray<Diagnostic>;
getConfigFileParsingDiagnostics(): ReadonlyArray<Diagnostic>;
emit(targetSourceFile?: SourceFile, writeFile?: WriteFileCallback): EmitResult;
}
export type ReportEmitErrorSummary = (errorCount: number) => void;
export function getErrorCountForSummary(diagnostics: ReadonlyArray<Diagnostic>) {
@ -121,6 +106,21 @@ namespace ts {
return `${newLine}${flattenDiagnosticMessageText(d.messageText, newLine)}${newLine}${newLine}`;
}
/**
* Program structure needed to emit the files and report diagnostics
*/
export interface ProgramToEmitFilesAndReportErrors {
getCurrentDirectory(): string;
getCompilerOptions(): CompilerOptions;
getSourceFiles(): ReadonlyArray<SourceFile>;
getSyntacticDiagnostics(): ReadonlyArray<Diagnostic>;
getOptionsDiagnostics(): ReadonlyArray<Diagnostic>;
getGlobalDiagnostics(): ReadonlyArray<Diagnostic>;
getSemanticDiagnostics(): ReadonlyArray<Diagnostic>;
getConfigFileParsingDiagnostics(): ReadonlyArray<Diagnostic>;
emit(targetSourceFile?: SourceFile, writeFile?: WriteFileCallback): EmitResult;
}
/**
* Helper that emits files, reports diagnostics and lists emitted and/or source files depending on compiler options
*/
@ -187,30 +187,110 @@ namespace ts {
const onWatchStatusChange = reportWatchStatus || createWatchStatusReporter(system);
return {
onWatchStatusChange,
watchFile: system.watchFile ? ((path, callback, pollingInterval) => system.watchFile!(path, callback, pollingInterval)) : () => noopFileWatcher,
watchDirectory: system.watchDirectory ? ((path, callback, recursive) => system.watchDirectory!(path, callback, recursive)) : () => noopFileWatcher,
setTimeout: system.setTimeout ? ((callback, ms, ...args: any[]) => system.setTimeout!.call(system, callback, ms, ...args)) : noop,
clearTimeout: system.clearTimeout ? (timeoutId => system.clearTimeout!(timeoutId)) : noop
watchFile: maybeBind(system, system.watchFile) || (() => noopFileWatcher),
watchDirectory: maybeBind(system, system.watchDirectory) || (() => noopFileWatcher),
setTimeout: maybeBind(system, system.setTimeout) || noop,
clearTimeout: maybeBind(system, system.clearTimeout) || noop
};
}
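`createWatchHost` above now forwards optional system callbacks through `maybeBind` rather than hand-written wrappers. A self-contained sketch of that pattern; the helper name here is illustrative, not the compiler's internal declaration:

```ts
// Forward an optional method only when it exists, keeping `this` bound to its owner.
function maybeBindSketch<T, F extends (...args: any[]) => any>(obj: T, fn: F | undefined): F | undefined {
    return fn ? (fn.bind(obj) as F) : undefined;
}

const sysLike = {
    prefix: "[sys] ",
    write(s: string) { console.log(this.prefix + s); }
};

const write = maybeBindSketch(sysLike, sysLike.write) || ((_s: string) => { /* noop fallback */ });
write("hello"); // logs "[sys] hello" because `this` stayed bound to sysLike
```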
export const enum WatchType {
ConfigFile = "Config file",
SourceFile = "Source file",
MissingFile = "Missing file",
WildcardDirectory = "Wild card directory",
FailedLookupLocations = "Failed Lookup Locations",
TypeRoots = "Type roots"
}
interface WatchFactory<X, Y = undefined> extends ts.WatchFactory<X, Y> {
writeLog: (s: string) => void;
}
export function createWatchFactory<Y = undefined>(host: { trace?(s: string): void; }, options: { extendedDiagnostics?: boolean; diagnostics?: boolean; }) {
const watchLogLevel = host.trace ? options.extendedDiagnostics ? WatchLogLevel.Verbose : options.diagnostics ? WatchLogLevel.TriggerOnly : WatchLogLevel.None : WatchLogLevel.None;
const writeLog: (s: string) => void = watchLogLevel !== WatchLogLevel.None ? (s => host.trace!(s)) : noop;
const result = getWatchFactory<WatchType, Y>(watchLogLevel, writeLog) as WatchFactory<WatchType, Y>;
result.writeLog = writeLog;
return result;
}
export function createCompilerHostFromProgramHost(host: ProgramHost<any>, getCompilerOptions: () => CompilerOptions, directoryStructureHost: DirectoryStructureHost = host): CompilerHost {
const useCaseSensitiveFileNames = host.useCaseSensitiveFileNames();
const hostGetNewLine = memoize(() => host.getNewLine());
return {
getSourceFile: (fileName, languageVersion, onError) => {
let text: string | undefined;
try {
performance.mark("beforeIORead");
text = host.readFile(fileName, getCompilerOptions().charset);
performance.mark("afterIORead");
performance.measure("I/O Read", "beforeIORead", "afterIORead");
}
catch (e) {
if (onError) {
onError(e.message);
}
text = "";
}
return text !== undefined ? createSourceFile(fileName, text, languageVersion) : undefined;
},
getDefaultLibLocation: maybeBind(host, host.getDefaultLibLocation),
getDefaultLibFileName: options => host.getDefaultLibFileName(options),
writeFile,
getCurrentDirectory: memoize(() => host.getCurrentDirectory()),
useCaseSensitiveFileNames: () => useCaseSensitiveFileNames,
getCanonicalFileName: createGetCanonicalFileName(useCaseSensitiveFileNames),
getNewLine: () => getNewLineCharacter(getCompilerOptions(), hostGetNewLine),
fileExists: f => host.fileExists(f),
readFile: f => host.readFile(f),
trace: maybeBind(host, host.trace),
directoryExists: maybeBind(directoryStructureHost, directoryStructureHost.directoryExists),
getDirectories: maybeBind(directoryStructureHost, directoryStructureHost.getDirectories),
realpath: maybeBind(host, host.realpath),
getEnvironmentVariable: maybeBind(host, host.getEnvironmentVariable) || (() => ""),
createHash: maybeBind(host, host.createHash),
readDirectory: maybeBind(host, host.readDirectory),
};
function ensureDirectoriesExist(directoryPath: string) {
if (directoryPath.length > getRootLength(directoryPath) && !host.directoryExists!(directoryPath)) {
const parentDirectory = getDirectoryPath(directoryPath);
ensureDirectoriesExist(parentDirectory);
if (host.createDirectory) host.createDirectory(directoryPath);
}
}
function writeFile(fileName: string, text: string, writeByteOrderMark: boolean, onError: (message: string) => void) {
try {
performance.mark("beforeIOWrite");
ensureDirectoriesExist(getDirectoryPath(normalizePath(fileName)));
host.writeFile!(fileName, text, writeByteOrderMark);
performance.mark("afterIOWrite");
performance.measure("I/O Write", "beforeIOWrite", "afterIOWrite");
}
catch (e) {
if (onError) {
onError(e.message);
}
}
}
}
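The `writeFile` helper above creates missing parent directories through the host before writing. A rough Node-only equivalent for illustration; the compiler routes these calls through `host.directoryExists`/`host.createDirectory` rather than fs:

```ts
import * as fs from "fs";
import * as path from "path";

// Walk up until an existing directory is found, then create the missing chain on the way back.
function ensureDirectoriesExistSketch(directoryPath: string): void {
    if (directoryPath && !fs.existsSync(directoryPath)) {
        ensureDirectoriesExistSketch(path.dirname(directoryPath));
        fs.mkdirSync(directoryPath);
    }
}

function writeFileEnsuringDirectories(fileName: string, text: string): void {
    ensureDirectoriesExistSketch(path.dirname(path.normalize(fileName)));
    fs.writeFileSync(fileName, text);
}
```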
/**
* Creates the watch compiler host that can be extended with config file or root file names and options host
*/
function createWatchCompilerHost<T extends BuilderProgram = EmitAndSemanticDiagnosticsBuilderProgram>(system = sys, createProgram: CreateProgram<T> | undefined, reportDiagnostic: DiagnosticReporter, reportWatchStatus?: WatchStatusReporter): WatchCompilerHost<T> {
if (!createProgram) {
createProgram = createEmitAndSemanticDiagnosticsBuilderProgram as any as CreateProgram<T>;
}
export function createProgramHost<T extends BuilderProgram>(system: System, createProgram: CreateProgram<T>): ProgramHost<T> {
const getDefaultLibLocation = memoize(() => getDirectoryPath(normalizePath(system.getExecutingFilePath())));
let host: DirectoryStructureHost = system;
host; // tslint:disable-line no-unused-expression (TODO: `host` is unused!)
const useCaseSensitiveFileNames = () => system.useCaseSensitiveFileNames;
const writeFileName = (s: string) => system.write(s + system.newLine);
const { onWatchStatusChange, watchFile, watchDirectory, setTimeout, clearTimeout } = createWatchHost(system, reportWatchStatus);
return {
useCaseSensitiveFileNames,
useCaseSensitiveFileNames: () => system.useCaseSensitiveFileNames,
getNewLine: () => system.newLine,
getCurrentDirectory: () => system.getCurrentDirectory(),
getCurrentDirectory: memoize(() => system.getCurrentDirectory()),
getDefaultLibLocation,
getDefaultLibFileName: options => combinePaths(getDefaultLibLocation(), getDefaultLibFileName(options)),
fileExists: path => system.fileExists(path),
@ -218,27 +298,25 @@ namespace ts {
directoryExists: path => system.directoryExists(path),
getDirectories: path => system.getDirectories(path),
readDirectory: (path, extensions, exclude, include, depth) => system.readDirectory(path, extensions, exclude, include, depth),
realpath: system.realpath && (path => system.realpath!(path)),
getEnvironmentVariable: system.getEnvironmentVariable && (name => system.getEnvironmentVariable(name)),
watchFile,
watchDirectory,
setTimeout,
clearTimeout,
trace: s => system.write(s),
onWatchStatusChange,
realpath: maybeBind(system, system.realpath),
getEnvironmentVariable: maybeBind(system, system.getEnvironmentVariable),
trace: s => system.write(s + system.newLine),
createDirectory: path => system.createDirectory(path),
writeFile: (path, data, writeByteOrderMark) => system.writeFile(path, data, writeByteOrderMark),
onCachedDirectoryStructureHostCreate: cacheHost => host = cacheHost || system,
createHash: system.createHash && (s => system.createHash!(s)),
createProgram,
afterProgramCreate: emitFilesAndReportErrorUsingBuilder
createHash: maybeBind(system, system.createHash),
createProgram
};
}
function getDefaultLibLocation() {
return getDirectoryPath(normalizePath(system.getExecutingFilePath()));
}
function emitFilesAndReportErrorUsingBuilder(builderProgram: BuilderProgram) {
/**
* Creates the watch compiler host that can be extended with config file or root file names and options host
*/
function createWatchCompilerHost<T extends BuilderProgram = EmitAndSemanticDiagnosticsBuilderProgram>(system = sys, createProgram: CreateProgram<T> | undefined, reportDiagnostic: DiagnosticReporter, reportWatchStatus?: WatchStatusReporter): WatchCompilerHost<T> {
const writeFileName = (s: string) => system.write(s + system.newLine);
const result = createProgramHost(system, createProgram || createEmitAndSemanticDiagnosticsBuilderProgram as any as CreateProgram<T>) as WatchCompilerHost<T>;
copyProperties(result, createWatchHost(system, reportWatchStatus));
result.afterProgramCreate = builderProgram => {
const compilerOptions = builderProgram.getCompilerOptions();
const newLine = getNewLineCharacter(compilerOptions, () => system.newLine);
@ -246,13 +324,14 @@ namespace ts {
builderProgram,
reportDiagnostic,
writeFileName,
errorCount => onWatchStatusChange!(
errorCount => result.onWatchStatusChange!(
createCompilerDiagnostic(getWatchErrorSummaryDiagnosticMessage(errorCount), errorCount),
newLine,
compilerOptions
)
);
}
};
return result;
}
/**
@ -291,6 +370,7 @@ namespace ts {
export type WatchStatusReporter = (diagnostic: Diagnostic, newLine: string, options: CompilerOptions) => void;
/** Creates the program with rootNames and options; if they are undefined, oldProgram and new configFile diagnostics are used to create the new program */
export type CreateProgram<T extends BuilderProgram> = (rootNames: ReadonlyArray<string> | undefined, options: CompilerOptions | undefined, host?: CompilerHost, oldProgram?: T, configFileParsingDiagnostics?: ReadonlyArray<Diagnostic>, projectReferences?: ReadonlyArray<ProjectReference> | undefined) => T;
/** Host that has watch functionality used in --watch mode */
export interface WatchHost {
/** If provided, called with Diagnostic message that informs about change in watch status */
@ -305,19 +385,11 @@ namespace ts {
/** If provided, will be used to reset existing delayed compilation */
clearTimeout?(timeoutId: any): void;
}
export interface WatchCompilerHost<T extends BuilderProgram> extends WatchHost {
// TODO: GH#18217 Optional methods are frequently asserted
export interface ProgramHost<T extends BuilderProgram> {
/**
* Used to create the program when need for program creation or recreation detected
*/
createProgram: CreateProgram<T>;
/** If provided, callback to invoke after every new program creation */
afterProgramCreate?(program: T): void;
// Only for testing
/*@internal*/
maxNumberOfFilesToIterateForInvalidation?: number;
// Sub set of compiler host methods to read and generate new program
useCaseSensitiveFileNames(): boolean;
@ -357,16 +429,25 @@ namespace ts {
/** If provided, used to resolve type reference directives, otherwise typescript's default resolution */
resolveTypeReferenceDirectives?(typeReferenceDirectiveNames: string[], containingFile: string, redirectedReference?: ResolvedProjectReference): (ResolvedTypeReferenceDirective | undefined)[];
}
/** Internal interface used to wire emit through same host */
/*@internal*/
export interface WatchCompilerHost<T extends BuilderProgram> {
export interface ProgramHost<T extends BuilderProgram> {
// TODO: GH#18217 Optional methods are frequently asserted
createDirectory?(path: string): void;
writeFile?(path: string, data: string, writeByteOrderMark?: boolean): void;
onCachedDirectoryStructureHostCreate?(host: CachedDirectoryStructureHost): void;
}
export interface WatchCompilerHost<T extends BuilderProgram> extends ProgramHost<T>, WatchHost {
/** If provided, callback to invoke after every new program creation */
afterProgramCreate?(program: T): void;
// Only for testing
/*@internal*/
maxNumberOfFilesToIterateForInvalidation?: number;
}
/**
* Host to create watch with root files and options
*/
@ -479,8 +560,6 @@ namespace ts {
const useCaseSensitiveFileNames = host.useCaseSensitiveFileNames();
const currentDirectory = host.getCurrentDirectory();
const getCurrentDirectory = () => currentDirectory;
const readFile: (path: string, encoding?: string) => string | undefined = (path, encoding) => host.readFile(path, encoding);
const { configFileName, optionsToExtend: optionsToExtendForConfigFile = {}, createProgram } = host;
let { rootFiles: rootFileNames, options: compilerOptions, projectReferences } = host;
let configFileSpecs: ConfigFileSpecs;
@ -493,15 +572,7 @@ namespace ts {
host.onCachedDirectoryStructureHostCreate(cachedDirectoryStructureHost);
}
const directoryStructureHost: DirectoryStructureHost = cachedDirectoryStructureHost || host;
const parseConfigFileHost: ParseConfigFileHost = {
useCaseSensitiveFileNames,
readDirectory: (path, extensions, exclude, include, depth) => directoryStructureHost.readDirectory!(path, extensions, exclude, include, depth),
fileExists: path => host.fileExists(path),
readFile,
getCurrentDirectory,
onUnRecoverableConfigFileDiagnostic: host.onUnRecoverableConfigFileDiagnostic,
trace: host.trace ? s => host.trace!(s) : undefined
};
const parseConfigFileHost = parseConfigHostFromCompilerHostLike(host, directoryStructureHost);
// From tsc we want to get already parsed result and hence check for rootFileNames
let newLine = updateNewLine();
@ -517,55 +588,37 @@ namespace ts {
newLine = updateNewLine();
}
const trace = host.trace && ((s: string) => { host.trace!(s + newLine); });
const watchLogLevel = trace ? compilerOptions.extendedDiagnostics ? WatchLogLevel.Verbose :
compilerOptions.diagnostics ? WatchLogLevel.TriggerOnly : WatchLogLevel.None : WatchLogLevel.None;
const writeLog: (s: string) => void = watchLogLevel !== WatchLogLevel.None ? trace! : noop; // TODO: GH#18217
const { watchFile, watchFilePath, watchDirectory } = getWatchFactory<string>(watchLogLevel, writeLog);
const { watchFile, watchFilePath, watchDirectory, writeLog } = createWatchFactory<string>(host, compilerOptions);
const getCanonicalFileName = createGetCanonicalFileName(useCaseSensitiveFileNames);
writeLog(`Current directory: ${currentDirectory} CaseSensitiveFileNames: ${useCaseSensitiveFileNames}`);
if (configFileName) {
watchFile(host, configFileName, scheduleProgramReload, PollingInterval.High, "Config file");
watchFile(host, configFileName, scheduleProgramReload, PollingInterval.High, WatchType.ConfigFile);
}
const compilerHost: CompilerHost & ResolutionCacheHost = {
// Members for CompilerHost
getSourceFile: (fileName, languageVersion, onError?, shouldCreateNewSourceFile?) => getVersionedSourceFileByPath(fileName, toPath(fileName), languageVersion, onError, shouldCreateNewSourceFile),
getSourceFileByPath: getVersionedSourceFileByPath,
getDefaultLibLocation: host.getDefaultLibLocation && (() => host.getDefaultLibLocation!()),
getDefaultLibFileName: options => host.getDefaultLibFileName(options),
writeFile,
getCurrentDirectory,
useCaseSensitiveFileNames: () => useCaseSensitiveFileNames,
getCanonicalFileName,
getNewLine: () => newLine,
fileExists,
readFile,
trace,
directoryExists: directoryStructureHost.directoryExists && (path => directoryStructureHost.directoryExists!(path)),
getDirectories: (directoryStructureHost.getDirectories && ((path: string) => directoryStructureHost.getDirectories!(path)))!, // TODO: GH#18217
realpath: host.realpath && (s => host.realpath!(s)),
getEnvironmentVariable: host.getEnvironmentVariable ? (name => host.getEnvironmentVariable!(name)) : (() => ""),
onReleaseOldSourceFile,
createHash: host.createHash && (data => host.createHash!(data)),
// Members for ResolutionCacheHost
toPath,
getCompilationSettings: () => compilerOptions,
watchDirectoryOfFailedLookupLocation: (dir, cb, flags) => watchDirectory(host, dir, cb, flags, "Failed Lookup Locations"),
watchTypeRootsDirectory: (dir, cb, flags) => watchDirectory(host, dir, cb, flags, "Type roots"),
getCachedDirectoryStructureHost: () => cachedDirectoryStructureHost,
onInvalidatedResolution: scheduleProgramUpdate,
onChangedAutomaticTypeDirectiveNames: () => {
hasChangedAutomaticTypeDirectiveNames = true;
scheduleProgramUpdate();
},
maxNumberOfFilesToIterateForInvalidation: host.maxNumberOfFilesToIterateForInvalidation,
getCurrentProgram,
writeLog,
readDirectory: (path, extensions, exclude, include, depth?) => directoryStructureHost.readDirectory!(path, extensions, exclude, include, depth),
const compilerHost = createCompilerHostFromProgramHost(host, () => compilerOptions, directoryStructureHost) as CompilerHost & ResolutionCacheHost;
// Members for CompilerHost
const getNewSourceFile = compilerHost.getSourceFile;
compilerHost.getSourceFile = (fileName, ...args) => getVersionedSourceFileByPath(fileName, toPath(fileName), ...args);
compilerHost.getSourceFileByPath = getVersionedSourceFileByPath;
compilerHost.getNewLine = () => newLine;
compilerHost.fileExists = fileExists;
compilerHost.onReleaseOldSourceFile = onReleaseOldSourceFile;
// Members for ResolutionCacheHost
compilerHost.toPath = toPath;
compilerHost.getCompilationSettings = () => compilerOptions;
compilerHost.watchDirectoryOfFailedLookupLocation = (dir, cb, flags) => watchDirectory(host, dir, cb, flags, WatchType.FailedLookupLocations);
compilerHost.watchTypeRootsDirectory = (dir, cb, flags) => watchDirectory(host, dir, cb, flags, WatchType.TypeRoots);
compilerHost.getCachedDirectoryStructureHost = () => cachedDirectoryStructureHost;
compilerHost.onInvalidatedResolution = scheduleProgramUpdate;
compilerHost.onChangedAutomaticTypeDirectiveNames = () => {
hasChangedAutomaticTypeDirectiveNames = true;
scheduleProgramUpdate();
};
compilerHost.maxNumberOfFilesToIterateForInvalidation = host.maxNumberOfFilesToIterateForInvalidation;
compilerHost.getCurrentProgram = getCurrentProgram;
compilerHost.writeLog = writeLog;
// Cache for the module resolution
const resolutionCache = createResolutionCache(compilerHost, configFileName ?
getDirectoryPath(getNormalizedAbsolutePath(configFileName, currentDirectory)) :
@ -630,11 +683,9 @@ namespace ts {
function createNewProgram(program: Program, hasInvalidatedResolution: HasInvalidatedResolution) {
// Compile the program
if (watchLogLevel !== WatchLogLevel.None) {
writeLog("CreatingProgramWith::");
writeLog(` roots: ${JSON.stringify(rootFileNames)}`);
writeLog(` options: ${JSON.stringify(compilerOptions)}`);
}
writeLog("CreatingProgramWith::");
writeLog(` roots: ${JSON.stringify(rootFileNames)}`);
writeLog(` options: ${JSON.stringify(compilerOptions)}`);
const needsUpdateInTypeRootWatch = hasChangedCompilerOptions || !program;
hasChangedCompilerOptions = false;
@ -708,7 +759,7 @@ namespace ts {
// Create new source file if requested or the versions dont match
if (!hostSourceFile || shouldCreateNewSourceFile || !isFilePresentOnHost(hostSourceFile) || hostSourceFile.version.toString() !== hostSourceFile.sourceFile.version) {
const sourceFile = getNewSourceFile();
const sourceFile = getNewSourceFile(fileName, languageVersion, onError);
if (hostSourceFile) {
if (shouldCreateNewSourceFile) {
hostSourceFile.version++;
@ -719,7 +770,7 @@ namespace ts {
(hostSourceFile as FilePresentOnHost).sourceFile = sourceFile;
sourceFile.version = hostSourceFile.version.toString();
if (!(hostSourceFile as FilePresentOnHost).fileWatcher) {
(hostSourceFile as FilePresentOnHost).fileWatcher = watchFilePath(host, fileName, onSourceFileChange, PollingInterval.Low, path, "Source file");
(hostSourceFile as FilePresentOnHost).fileWatcher = watchFilePath(host, fileName, onSourceFileChange, PollingInterval.Low, path, WatchType.SourceFile);
}
}
else {
@ -733,7 +784,7 @@ namespace ts {
else {
if (sourceFile) {
sourceFile.version = initialVersion.toString();
const fileWatcher = watchFilePath(host, fileName, onSourceFileChange, PollingInterval.Low, path, "Source file");
const fileWatcher = watchFilePath(host, fileName, onSourceFileChange, PollingInterval.Low, path, WatchType.SourceFile);
sourceFilesCache.set(path, { sourceFile, version: initialVersion, fileWatcher });
}
else {
@ -743,23 +794,6 @@ namespace ts {
return sourceFile;
}
return hostSourceFile.sourceFile;
function getNewSourceFile() {
let text: string | undefined;
try {
performance.mark("beforeIORead");
text = host.readFile(fileName, compilerOptions.charset);
performance.mark("afterIORead");
performance.measure("I/O Read", "beforeIORead", "afterIORead");
}
catch (e) {
if (onError) {
onError(e.message);
}
}
return text !== undefined ? createSourceFile(fileName, text, languageVersion) : undefined;
}
}
function nextSourceFileVersion(path: Path) {
@ -907,7 +941,7 @@ namespace ts {
}
function watchMissingFilePath(missingFilePath: Path) {
return watchFilePath(host, missingFilePath, onMissingFileChange, PollingInterval.Medium, missingFilePath, "Missing file");
return watchFilePath(host, missingFilePath, onMissingFileChange, PollingInterval.Medium, missingFilePath, WatchType.MissingFile);
}
function onMissingFileChange(fileName: string, eventKind: FileWatcherEventKind, missingFilePath: Path) {
@ -971,33 +1005,8 @@ namespace ts {
}
},
flags,
"Wild card directories"
WatchType.WildcardDirectory
);
}
function ensureDirectoriesExist(directoryPath: string) {
if (directoryPath.length > getRootLength(directoryPath) && !host.directoryExists!(directoryPath)) {
const parentDirectory = getDirectoryPath(directoryPath);
ensureDirectoriesExist(parentDirectory);
host.createDirectory!(directoryPath);
}
}
function writeFile(fileName: string, text: string, writeByteOrderMark: boolean, onError: (message: string) => void) {
try {
performance.mark("beforeIOWrite");
ensureDirectoriesExist(getDirectoryPath(normalizePath(fileName)));
host.writeFile!(fileName, text, writeByteOrderMark);
performance.mark("afterIOWrite");
performance.measure("I/O Write", "beforeIOWrite", "afterIOWrite");
}
catch (e) {
if (onError) {
onError(e.message);
}
}
}
}
}
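
The hunk above replaces the inline `getNewSourceFile`/`writeFile` helpers and the free-form watch labels with the shared `createCompilerHostFromProgramHost`/`createWatchFactory` factories and the `WatchType` enum. Below is a minimal standalone sketch of the versioned-source-file pattern the watch program keeps relying on; the host and type names are illustrative, not the compiler's.

    interface MinimalHost {
        readFile(fileName: string): string | undefined;
        watchFile(fileName: string, onChange: () => void): { close(): void };
    }

    interface VersionedText {
        text: string;
        version: number;
    }

    // Read a file lazily and cache it; when its watcher fires, bump the version so the
    // next program update can tell the cached text is stale and rebuild the SourceFile.
    function createVersionedReader(host: MinimalHost) {
        const cache = new Map<string, VersionedText>();
        return (fileName: string): VersionedText | undefined => {
            const cached = cache.get(fileName);
            if (cached) return cached;
            const text = host.readFile(fileName);
            if (text === undefined) return undefined;
            const entry: VersionedText = { text, version: 1 };
            cache.set(fileName, entry);
            host.watchFile(fileName, () => { entry.version++; });
            return entry;
        };
    }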

View File

@ -343,10 +343,10 @@ namespace ts {
export interface WatchDirectoryHost {
watchDirectory(path: string, callback: DirectoryWatcherCallback, recursive?: boolean): FileWatcher;
}
export type WatchFile<X, Y> = (host: WatchFileHost, file: string, callback: FileWatcherCallback, pollingInterval: PollingInterval, detailInfo1?: X, detailInfo2?: Y) => FileWatcher;
export type WatchFile<X, Y> = (host: WatchFileHost, file: string, callback: FileWatcherCallback, pollingInterval: PollingInterval, detailInfo1: X, detailInfo2?: Y) => FileWatcher;
export type FilePathWatcherCallback = (fileName: string, eventKind: FileWatcherEventKind, filePath: Path) => void;
export type WatchFilePath<X, Y> = (host: WatchFileHost, file: string, callback: FilePathWatcherCallback, pollingInterval: PollingInterval, path: Path, detailInfo1?: X, detailInfo2?: Y) => FileWatcher;
export type WatchDirectory<X, Y> = (host: WatchDirectoryHost, directory: string, callback: DirectoryWatcherCallback, flags: WatchDirectoryFlags, detailInfo1?: X, detailInfo2?: Y) => FileWatcher;
export type WatchFilePath<X, Y> = (host: WatchFileHost, file: string, callback: FilePathWatcherCallback, pollingInterval: PollingInterval, path: Path, detailInfo1: X, detailInfo2?: Y) => FileWatcher;
export type WatchDirectory<X, Y> = (host: WatchDirectoryHost, directory: string, callback: DirectoryWatcherCallback, flags: WatchDirectoryFlags, detailInfo1: X, detailInfo2?: Y) => FileWatcher;
export interface WatchFactory<X, Y> {
watchFile: WatchFile<X, Y>;
@ -444,7 +444,7 @@ namespace ts {
}
function getWatchInfo<T, X, Y>(file: string, flags: T, detailInfo1: X, detailInfo2: Y | undefined, getDetailWatchInfo: GetDetailWatchInfo<X, Y> | undefined) {
return `WatchInfo: ${file} ${flags} ${getDetailWatchInfo ? getDetailWatchInfo(detailInfo1, detailInfo2) : detailInfo1}`;
return `WatchInfo: ${file} ${flags} ${getDetailWatchInfo ? getDetailWatchInfo(detailInfo1, detailInfo2) : detailInfo2 === undefined ? detailInfo1 : `${detailInfo1} ${detailInfo2}`}`;
}
export function closeFileWatcherOf<T extends { watcher: FileWatcher; }>(objWithWatcher: T) {
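
With the change above, `getWatchInfo` now includes `detailInfo2` in the default log string when no `getDetailWatchInfo` callback is supplied. A standalone sketch of the resulting formatting; the function name is illustrative:

    function formatWatchInfo<X, Y>(
        file: string,
        flags: unknown,
        detailInfo1: X,
        detailInfo2: Y | undefined,
        getDetailWatchInfo?: (detailInfo1: X, detailInfo2: Y | undefined) => string,
    ): string {
        // Prefer the host-provided formatter; otherwise append detailInfo2 only when present.
        const detail = getDetailWatchInfo
            ? getDetailWatchInfo(detailInfo1, detailInfo2)
            : detailInfo2 === undefined
                ? `${detailInfo1}`
                : `${detailInfo1} ${detailInfo2}`;
        return `WatchInfo: ${file} ${flags} ${detail}`;
    }

    // formatWatchInfo("/proj/tsconfig.json", 250, "Config file", "MyProject")
    //   -> "WatchInfo: /proj/tsconfig.json 250 Config file MyProject"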

View File

@ -384,7 +384,8 @@ namespace ts.server {
return notImplemented();
}
getRenameInfo(fileName: string, position: number, findInStrings?: boolean, findInComments?: boolean): RenameInfo {
getRenameInfo(fileName: string, position: number, _options?: RenameInfoOptions, findInStrings?: boolean, findInComments?: boolean): RenameInfo {
// Not passing along 'options' because server should already have those from the 'configure' command
const args: protocol.RenameRequestArgs = { ...this.createFileLocationRequestArgs(fileName, position), findInStrings, findInComments };
const request = this.processRequest<protocol.RenameRequest>(CommandNames.Rename, args);
@ -428,7 +429,7 @@ namespace ts.server {
this.lastRenameEntry.inputs.position !== position ||
this.lastRenameEntry.inputs.findInStrings !== findInStrings ||
this.lastRenameEntry.inputs.findInComments !== findInComments) {
this.getRenameInfo(fileName, position, findInStrings, findInComments);
this.getRenameInfo(fileName, position, { allowRenameOfImportPath: true }, findInStrings, findInComments);
}
return this.lastRenameEntry!.locations;

View File

@ -183,8 +183,9 @@ namespace compiler {
}
public getSourceMapRecord(): string | undefined {
if (this.result!.sourceMaps && this.result!.sourceMaps!.length > 0) {
return Harness.SourceMapRecorder.getSourceMapRecord(this.result!.sourceMaps!, this.program!, Array.from(this.js.values()).filter(d => !ts.fileExtensionIs(d.file, ts.Extension.Json)), Array.from(this.dts.values()));
const maps = this.result!.sourceMaps;
if (maps && maps.length > 0) {
return Harness.SourceMapRecorder.getSourceMapRecord(maps, this.program!, Array.from(this.js.values()).filter(d => !ts.fileExtensionIs(d.file, ts.Extension.Json)), Array.from(this.dts.values()));
}
}

View File

@ -375,7 +375,12 @@ namespace fakes {
}
}
export class SolutionBuilderHost extends CompilerHost implements ts.SolutionBuilderHost {
export class SolutionBuilderHost extends CompilerHost implements ts.SolutionBuilderHost<ts.BuilderProgram> {
createProgram = ts.createAbstractBuilder;
now() {
return new Date(this.sys.vfs.time());
}
diagnostics: ts.Diagnostic[] = [];
reportDiagnostic(diagnostic: ts.Diagnostic) {

View File

@ -939,8 +939,8 @@ namespace FourSlash {
const startFile = this.activeFile.fileName;
for (const fileName of files) {
const searchFileNames = startFile === fileName ? [startFile] : [startFile, fileName];
const highlights = this.getDocumentHighlightsAtCurrentPosition(searchFileNames)!;
if (!highlights.every(dh => ts.contains(searchFileNames, dh.fileName))) {
const highlights = this.getDocumentHighlightsAtCurrentPosition(searchFileNames);
if (highlights && !highlights.every(dh => ts.contains(searchFileNames, dh.fileName))) {
this.raiseError(`When asking for document highlights only in files ${searchFileNames}, got document highlights in ${unique(highlights, dh => dh.fileName)}`);
}
}
@ -1170,7 +1170,7 @@ Actual: ${stringify(fullActual)}`);
}
public verifyRenameLocations(startRanges: ArrayOrSingle<Range>, options: FourSlashInterface.RenameLocationsOptions) {
const { findInStrings = false, findInComments = false, ranges = this.getRanges() } = ts.isArray(options) ? { findInStrings: false, findInComments: false, ranges: options } : options;
const { findInStrings = false, findInComments = false, ranges = this.getRanges(), providePrefixAndSuffixTextForRename = true } = ts.isArray(options) ? { findInStrings: false, findInComments: false, ranges: options, providePrefixAndSuffixTextForRename: true } : options;
for (const startRange of toArray(startRanges)) {
this.goToRangeStart(startRange);
@ -1182,7 +1182,7 @@ Actual: ${stringify(fullActual)}`);
}
const references = this.languageService.findRenameLocations(
this.activeFile.fileName, this.currentCaretPosition, findInStrings, findInComments);
this.activeFile.fileName, this.currentCaretPosition, findInStrings, findInComments, providePrefixAndSuffixTextForRename);
const sort = (locations: ReadonlyArray<ts.RenameLocation> | undefined) =>
locations && ts.sort(locations, (r1, r2) => ts.compareStringsCaseSensitive(r1.fileName, r2.fileName) || r1.textSpan.start - r2.textSpan.start);
@ -1308,8 +1308,8 @@ Actual: ${stringify(fullActual)}`);
}
}
public verifyRenameInfoSucceeded(displayName: string | undefined, fullDisplayName: string | undefined, kind: string | undefined, kindModifiers: string | undefined, fileToRename: string | undefined, expectedRange: Range | undefined): void {
const renameInfo = this.languageService.getRenameInfo(this.activeFile.fileName, this.currentCaretPosition);
public verifyRenameInfoSucceeded(displayName: string | undefined, fullDisplayName: string | undefined, kind: string | undefined, kindModifiers: string | undefined, fileToRename: string | undefined, expectedRange: Range | undefined, renameInfoOptions: ts.RenameInfoOptions | undefined): void {
const renameInfo = this.languageService.getRenameInfo(this.activeFile.fileName, this.currentCaretPosition, renameInfoOptions || { allowRenameOfImportPath: true });
if (!renameInfo.canRename) {
throw this.raiseError("Rename did not succeed");
}
@ -1334,8 +1334,9 @@ Actual: ${stringify(fullActual)}`);
}
}
public verifyRenameInfoFailed(message?: string) {
const renameInfo = this.languageService.getRenameInfo(this.activeFile.fileName, this.currentCaretPosition);
public verifyRenameInfoFailed(message?: string, allowRenameOfImportPath?: boolean) {
allowRenameOfImportPath = allowRenameOfImportPath === undefined ? true : allowRenameOfImportPath;
const renameInfo = this.languageService.getRenameInfo(this.activeFile.fileName, this.currentCaretPosition, { allowRenameOfImportPath });
if (renameInfo.canRename) {
throw this.raiseError("Rename was expected to fail");
}
@ -4091,12 +4092,12 @@ namespace FourSlashInterface {
this.state.verifySemanticClassifications(classifications);
}
public renameInfoSucceeded(displayName?: string, fullDisplayName?: string, kind?: string, kindModifiers?: string, fileToRename?: string, expectedRange?: FourSlash.Range) {
this.state.verifyRenameInfoSucceeded(displayName, fullDisplayName, kind, kindModifiers, fileToRename, expectedRange);
public renameInfoSucceeded(displayName?: string, fullDisplayName?: string, kind?: string, kindModifiers?: string, fileToRename?: string, expectedRange?: FourSlash.Range, options?: ts.RenameInfoOptions) {
this.state.verifyRenameInfoSucceeded(displayName, fullDisplayName, kind, kindModifiers, fileToRename, expectedRange, options);
}
public renameInfoFailed(message?: string) {
this.state.verifyRenameInfoFailed(message);
public renameInfoFailed(message?: string, allowRenameOfImportPath?: boolean) {
this.state.verifyRenameInfoFailed(message, allowRenameOfImportPath);
}
public renameLocations(startRanges: ArrayOrSingle<FourSlash.Range>, options: RenameLocationsOptions) {
@ -5086,6 +5087,7 @@ namespace FourSlashInterface {
readonly findInStrings?: boolean;
readonly findInComments?: boolean;
readonly ranges: ReadonlyArray<RenameLocationOptions>;
readonly providePrefixAndSuffixTextForRename?: boolean;
};
export type RenameLocationOptions = FourSlash.Range | { readonly range: FourSlash.Range, readonly prefixText?: string, readonly suffixText?: string };
}
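
The `verifyRenameLocations` change defaults `providePrefixAndSuffixTextForRename` to `true` while still accepting either a bare array of ranges or a full options object. A standalone sketch of that normalisation pattern, using simplified stand-ins for the fourslash types:

    interface Range { start: number; length: number; }

    type RenameLocationsOptions =
        | ReadonlyArray<Range>
        | {
            readonly findInStrings?: boolean;
            readonly findInComments?: boolean;
            readonly ranges: ReadonlyArray<Range>;
            readonly providePrefixAndSuffixTextForRename?: boolean;
        };

    function isRangeArray(options: RenameLocationsOptions): options is ReadonlyArray<Range> {
        return Array.isArray(options);
    }

    function normalizeRenameOptions(options: RenameLocationsOptions) {
        // The array form gets every flag spelled out so both branches destructure uniformly.
        const {
            findInStrings = false,
            findInComments = false,
            ranges,
            providePrefixAndSuffixTextForRename = true,
        } = isRangeArray(options)
            ? { findInStrings: false, findInComments: false, ranges: options, providePrefixAndSuffixTextForRename: true }
            : options;
        return { findInStrings, findInComments, ranges, providePrefixAndSuffixTextForRename };
    }

    // normalizeRenameOptions([{ start: 0, length: 3 }]).providePrefixAndSuffixTextForRename === true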

View File

@ -469,11 +469,11 @@ namespace Harness.LanguageService {
getSignatureHelpItems(fileName: string, position: number, options: ts.SignatureHelpItemsOptions | undefined): ts.SignatureHelpItems {
return unwrapJSONCallResult(this.shim.getSignatureHelpItems(fileName, position, options));
}
getRenameInfo(fileName: string, position: number): ts.RenameInfo {
return unwrapJSONCallResult(this.shim.getRenameInfo(fileName, position));
getRenameInfo(fileName: string, position: number, options?: ts.RenameInfoOptions): ts.RenameInfo {
return unwrapJSONCallResult(this.shim.getRenameInfo(fileName, position, options));
}
findRenameLocations(fileName: string, position: number, findInStrings: boolean, findInComments: boolean): ts.RenameLocation[] {
return unwrapJSONCallResult(this.shim.findRenameLocations(fileName, position, findInStrings, findInComments));
findRenameLocations(fileName: string, position: number, findInStrings: boolean, findInComments: boolean, providePrefixAndSuffixTextForRename?: boolean): ts.RenameLocation[] {
return unwrapJSONCallResult(this.shim.findRenameLocations(fileName, position, findInStrings, findInComments, providePrefixAndSuffixTextForRename));
}
getDefinitionAtPosition(fileName: string, position: number): ts.DefinitionInfo[] {
return unwrapJSONCallResult(this.shim.getDefinitionAtPosition(fileName, position));

View File

@ -4,5 +4,5 @@ The files within this directory are used to generate `lib.d.ts` and `lib.es6.d.t
## Generated files
Any files ending in `.generated.d.ts` aren't mean to be edited by hand.
Any files ending in `.generated.d.ts` aren't meant to be edited by hand.
If you need to make changes to such files, make a change to the input files for [**our library generator**](https://github.com/Microsoft/TSJS-lib-generator).

src/lib/es5.d.ts vendored

View File

@ -587,7 +587,7 @@ interface TemplateStringsArray extends ReadonlyArray<string> {
/**
* The type of `import.meta`.
*
*
* If you need to declare that a given property exists on `import.meta`,
* this type may be augmented via interface merging.
*/
@ -1913,13 +1913,19 @@ interface Int8ArrayConstructor {
*/
of(...items: number[]): Int8Array;
/**
* Creates an array from an array-like or iterable object.
* @param arrayLike An array-like or iterable object to convert to an array.
*/
from(arrayLike: ArrayLike<number>): Int8Array;
/**
* Creates an array from an array-like or iterable object.
* @param arrayLike An array-like or iterable object to convert to an array.
* @param mapfn A mapping function to call on every element of the array.
* @param thisArg Value of 'this' used to invoke the mapfn.
*/
from(arrayLike: ArrayLike<number>, mapfn?: (v: number, k: number) => number, thisArg?: any): Int8Array;
from<T>(arrayLike: ArrayLike<T>, mapfn: (v: T, k: number) => number, thisArg?: any): Int8Array;
}
@ -2183,13 +2189,19 @@ interface Uint8ArrayConstructor {
*/
of(...items: number[]): Uint8Array;
/**
* Creates an array from an array-like or iterable object.
* @param arrayLike An array-like or iterable object to convert to an array.
*/
from(arrayLike: ArrayLike<number>): Uint8Array;
/**
* Creates an array from an array-like or iterable object.
* @param arrayLike An array-like or iterable object to convert to an array.
* @param mapfn A mapping function to call on every element of the array.
* @param thisArg Value of 'this' used to invoke the mapfn.
*/
from(arrayLike: ArrayLike<number>, mapfn?: (v: number, k: number) => number, thisArg?: any): Uint8Array;
from<T>(arrayLike: ArrayLike<T>, mapfn: (v: T, k: number) => number, thisArg?: any): Uint8Array;
}
declare const Uint8Array: Uint8ArrayConstructor;
@ -2452,13 +2464,19 @@ interface Uint8ClampedArrayConstructor {
*/
of(...items: number[]): Uint8ClampedArray;
/**
* Creates an array from an array-like or iterable object.
* @param arrayLike An array-like or iterable object to convert to an array.
*/
from(arrayLike: ArrayLike<number>): Uint8ClampedArray;
/**
* Creates an array from an array-like or iterable object.
* @param arrayLike An array-like or iterable object to convert to an array.
* @param mapfn A mapping function to call on every element of the array.
* @param thisArg Value of 'this' used to invoke the mapfn.
*/
from(arrayLike: ArrayLike<number>, mapfn?: (v: number, k: number) => number, thisArg?: any): Uint8ClampedArray;
from<T>(arrayLike: ArrayLike<T>, mapfn: (v: T, k: number) => number, thisArg?: any): Uint8ClampedArray;
}
declare const Uint8ClampedArray: Uint8ClampedArrayConstructor;
@ -2719,13 +2737,19 @@ interface Int16ArrayConstructor {
*/
of(...items: number[]): Int16Array;
/**
* Creates an array from an array-like or iterable object.
* @param arrayLike An array-like or iterable object to convert to an array.
*/
from(arrayLike: ArrayLike<number>): Int16Array;
/**
* Creates an array from an array-like or iterable object.
* @param arrayLike An array-like or iterable object to convert to an array.
* @param mapfn A mapping function to call on every element of the array.
* @param thisArg Value of 'this' used to invoke the mapfn.
*/
from(arrayLike: ArrayLike<number>, mapfn?: (v: number, k: number) => number, thisArg?: any): Int16Array;
from<T>(arrayLike: ArrayLike<T>, mapfn: (v: T, k: number) => number, thisArg?: any): Int16Array;
}
@ -2989,13 +3013,19 @@ interface Uint16ArrayConstructor {
*/
of(...items: number[]): Uint16Array;
/**
* Creates an array from an array-like or iterable object.
* @param arrayLike An array-like or iterable object to convert to an array.
*/
from(arrayLike: ArrayLike<number>): Uint16Array;
/**
* Creates an array from an array-like or iterable object.
* @param arrayLike An array-like or iterable object to convert to an array.
* @param mapfn A mapping function to call on every element of the array.
* @param thisArg Value of 'this' used to invoke the mapfn.
*/
from(arrayLike: ArrayLike<number>, mapfn?: (v: number, k: number) => number, thisArg?: any): Uint16Array;
from<T>(arrayLike: ArrayLike<T>, mapfn: (v: T, k: number) => number, thisArg?: any): Uint16Array;
}
@ -3258,13 +3288,19 @@ interface Int32ArrayConstructor {
*/
of(...items: number[]): Int32Array;
/**
* Creates an array from an array-like or iterable object.
* @param arrayLike An array-like or iterable object to convert to an array.
*/
from(arrayLike: ArrayLike<number>): Int32Array;
/**
* Creates an array from an array-like or iterable object.
* @param arrayLike An array-like or iterable object to convert to an array.
* @param mapfn A mapping function to call on every element of the array.
* @param thisArg Value of 'this' used to invoke the mapfn.
*/
from(arrayLike: ArrayLike<number>, mapfn?: (v: number, k: number) => number, thisArg?: any): Int32Array;
from<T>(arrayLike: ArrayLike<T>, mapfn: (v: T, k: number) => number, thisArg?: any): Int32Array;
}
declare const Int32Array: Int32ArrayConstructor;
@ -3526,13 +3562,19 @@ interface Uint32ArrayConstructor {
*/
of(...items: number[]): Uint32Array;
/**
* Creates an array from an array-like or iterable object.
* @param arrayLike An array-like or iterable object to convert to an array.
*/
from(arrayLike: ArrayLike<number>): Uint32Array;
/**
* Creates an array from an array-like or iterable object.
* @param arrayLike An array-like or iterable object to convert to an array.
* @param mapfn A mapping function to call on every element of the array.
* @param thisArg Value of 'this' used to invoke the mapfn.
*/
from(arrayLike: ArrayLike<number>, mapfn?: (v: number, k: number) => number, thisArg?: any): Uint32Array;
from<T>(arrayLike: ArrayLike<T>, mapfn: (v: T, k: number) => number, thisArg?: any): Uint32Array;
}
declare const Uint32Array: Uint32ArrayConstructor;
@ -3795,13 +3837,19 @@ interface Float32ArrayConstructor {
*/
of(...items: number[]): Float32Array;
/**
* Creates an array from an array-like or iterable object.
* @param arrayLike An array-like or iterable object to convert to an array.
*/
from(arrayLike: ArrayLike<number>): Float32Array;
/**
* Creates an array from an array-like or iterable object.
* @param arrayLike An array-like or iterable object to convert to an array.
* @param mapfn A mapping function to call on every element of the array.
* @param thisArg Value of 'this' used to invoke the mapfn.
*/
from(arrayLike: ArrayLike<number>, mapfn?: (v: number, k: number) => number, thisArg?: any): Float32Array;
from<T>(arrayLike: ArrayLike<T>, mapfn: (v: T, k: number) => number, thisArg?: any): Float32Array;
}
@ -4065,13 +4113,19 @@ interface Float64ArrayConstructor {
*/
of(...items: number[]): Float64Array;
/**
* Creates an array from an array-like or iterable object.
* @param arrayLike An array-like or iterable object to convert to an array.
*/
from(arrayLike: ArrayLike<number>): Float64Array;
/**
* Creates an array from an array-like or iterable object.
* @param arrayLike An array-like or iterable object to convert to an array.
* @param mapfn A mapping function to call on every element of the array.
* @param thisArg Value of 'this' used to invoke the mapfn.
*/
from(arrayLike: ArrayLike<number>, mapfn?: (v: number, k: number) => number, thisArg?: any): Float64Array;
from<T>(arrayLike: ArrayLike<T>, mapfn: (v: T, k: number) => number, thisArg?: any): Float64Array;
}
declare const Float64Array: Float64ArrayConstructor;
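
The `from` changes above split each typed-array constructor's signature into a plain overload and a generic overload, so a mapping function is required whenever the source elements are not numbers. A short usage sketch:

    // Plain overload: numeric input needs no mapping function.
    const bytes = Int8Array.from([1, 2, 3]);

    // Generic overload: string input, so a mapping function to number is required.
    const ports = Uint16Array.from(["80", "443", "8080"], s => parseInt(s, 10));

    // Mapping over numeric input still works as before.
    const scaled = Float64Array.from([1, 2, 3], (value, index) => value * index);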

View File

@ -332,19 +332,6 @@ namespace ts.server {
}
}
/* @internal */
export const enum WatchType {
ConfigFilePath = "Config file for the program",
MissingFilePath = "Missing file from program",
WildcardDirectories = "Wild card directory",
ClosedScriptInfo = "Closed Script info",
ConfigFileForInferredRoot = "Config file for the inferred project root",
FailedLookupLocation = "Directory of Failed lookup locations in module resolution",
TypeRoots = "Type root directory",
NodeModulesForClosedScriptInfo = "node_modules for closed script infos in them",
MissingSourceMapFile = "Missing source map file"
}
const enum ConfigFileWatcherStatus {
ReloadingFiles = "Reloading configured projects for files",
ReloadingInferredRootFiles = "Reloading configured projects for only inferred root files",
@ -1035,7 +1022,7 @@ namespace ts.server {
}
},
flags,
WatchType.WildcardDirectories,
WatchType.WildcardDirectory,
project
);
}
@ -1339,7 +1326,7 @@ namespace ts.server {
watches.push(WatchType.ConfigFileForInferredRoot);
}
if (this.configuredProjects.has(canonicalConfigFilePath)) {
watches.push(WatchType.ConfigFilePath);
watches.push(WatchType.ConfigFile);
}
this.logger.info(`ConfigFilePresence:: Current Watches: ${watches}:: File: ${configFileName} Currently impacted open files: RootsOfInferredProjects: ${inferredRoots} OtherOpenFiles: ${otherFiles} Status: ${status}`);
}
@ -1706,7 +1693,7 @@ namespace ts.server {
configFileName,
(_fileName, eventKind) => this.onConfigChangedForConfiguredProject(project, eventKind),
PollingInterval.High,
WatchType.ConfigFilePath,
WatchType.ConfigFile,
project
);
this.configuredProjects.set(project.canonicalConfigFilePath, project);
@ -2777,7 +2764,12 @@ namespace ts.server {
return;
}
const info: OpenFileInfo = { checkJs: !!scriptInfo.getDefaultProject().getSourceFile(scriptInfo.path)!.checkJsDirective };
const project = scriptInfo.getDefaultProject();
if (!project.languageServiceEnabled) {
return;
}
const info: OpenFileInfo = { checkJs: !!project.getSourceFile(scriptInfo.path)!.checkJsDirective };
this.eventHandler({ eventName: OpenFileInfoTelemetryEvent, data: { info } });
}

View File

@ -428,7 +428,7 @@ namespace ts.server {
directory,
cb,
flags,
WatchType.FailedLookupLocation,
WatchType.FailedLookupLocations,
this
);
}
@ -989,7 +989,7 @@ namespace ts.server {
}
},
PollingInterval.Medium,
WatchType.MissingFilePath,
WatchType.MissingFile,
this
);
return fileWatcher;

View File

@ -2905,6 +2905,8 @@ namespace ts.server.protocol {
readonly importModuleSpecifierPreference?: "relative" | "non-relative";
readonly allowTextChangesInNewFiles?: boolean;
readonly lazyConfiguredProjectsFromExternalProject?: boolean;
readonly providePrefixAndSuffixTextForRename?: boolean;
readonly allowRenameOfImportPath?: boolean;
}
export interface CompilerOptions {
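
The two new `UserPreferences` fields are intended to be set up front by the editor. Here is a hypothetical `configure` request body that opts into both; the wrapper fields follow the tsserver request shape, but the exact payload an editor sends may differ:

    const configureRequest = {
        seq: 1,
        type: "request",
        command: "configure",
        arguments: {
            preferences: {
                providePrefixAndSuffixTextForRename: true,
                allowRenameOfImportPath: true,
            },
        },
    };

Later rename requests then pick these preferences up from the session, which is what the `getHostPreferences()` calls in the session change further down rely on.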

View File

@ -314,7 +314,8 @@ namespace ts.server {
defaultProject: Project,
initialLocation: DocumentPosition,
findInStrings: boolean,
findInComments: boolean
findInComments: boolean,
hostPreferences: UserPreferences
): ReadonlyArray<RenameLocation> {
const outputs: RenameLocation[] = [];
@ -323,7 +324,7 @@ namespace ts.server {
defaultProject,
initialLocation,
({ project, location }, tryAddToTodo) => {
for (const output of project.getLanguageService().findRenameLocations(location.fileName, location.pos, findInStrings, findInComments) || emptyArray) {
for (const output of project.getLanguageService().findRenameLocations(location.fileName, location.pos, findInStrings, findInComments, hostPreferences.providePrefixAndSuffixTextForRename) || emptyArray) {
if (!contains(outputs, output, documentSpansEqual) && !tryAddToTodo(project, documentSpanLocation(output))) {
outputs.push(output);
}
@ -1177,7 +1178,8 @@ namespace ts.server {
private getRenameInfo(args: protocol.FileLocationRequestArgs): RenameInfo {
const { file, project } = this.getFileAndProject(args);
const position = this.getPositionInFile(args, file);
return project.getLanguageService().getRenameInfo(file, position);
const preferences = this.getHostPreferences();
return project.getLanguageService().getRenameInfo(file, position, { allowRenameOfImportPath: preferences.allowRenameOfImportPath });
}
private getProjects(args: protocol.FileRequestArgs, getScriptInfoEnsuringProjectsUptoDate?: boolean, ignoreNoProjectError?: boolean): Projects {
@ -1231,12 +1233,13 @@ namespace ts.server {
this.getDefaultProject(args),
{ fileName: args.file, pos: position },
!!args.findInStrings,
!!args.findInComments
!!args.findInComments,
this.getHostPreferences()
);
if (!simplifiedResult) return locations;
const defaultProject = this.getDefaultProject(args);
const renameInfo: protocol.RenameInfo = this.mapRenameInfo(defaultProject.getLanguageService().getRenameInfo(file, position), Debug.assertDefined(this.projectService.getScriptInfo(file)));
const renameInfo: protocol.RenameInfo = this.mapRenameInfo(defaultProject.getLanguageService().getRenameInfo(file, position, { allowRenameOfImportPath: this.getHostPreferences().allowRenameOfImportPath }), Debug.assertDefined(this.projectService.getScriptInfo(file)));
return { info: renameInfo, locs: this.toSpanGroups(locations) };
}

View File

@ -217,3 +217,14 @@ namespace ts.server {
return indentStr + JSON.stringify(json);
}
}
/* @internal */
namespace ts {
// Additional tsserver specific watch information
export const enum WatchType {
ClosedScriptInfo = "Closed Script info",
ConfigFileForInferredRoot = "Config file for the inferred project root",
NodeModulesForClosedScriptInfo = "node_modules for closed script infos in them",
MissingSourceMapFile = "Missing source map file",
}
}
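
The server-specific `WatchType` members above can live in a separate file because enum declarations with the same name in the same namespace merge; the compiler declares these as `const enum`s, which merge the same way as long as every declaration is const. A self-contained sketch of the merging behaviour with illustrative names, both halves shown in one file for brevity:

    enum WatchKind {
        ConfigFile = "Config file",
        SourceFile = "Source file",
    }

    // A second declaration with the same name adds members to the same enum.
    enum WatchKind {
        ClosedScriptInfo = "Closed Script info",
        MissingSourceMapFile = "Missing source map file",
    }

    // Members from both declarations are visible on the merged enum.
    const watched: WatchKind[] = [WatchKind.ConfigFile, WatchKind.ClosedScriptInfo];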

View File

@ -275,7 +275,7 @@ namespace ts.codefix {
inJs: boolean,
preferences: UserPreferences,
): void {
const methodDeclaration = createMethodFromCallExpression(context, callExpression, token.text, inJs, makeStatic, preferences, !isInterfaceDeclaration(typeDecl));
const methodDeclaration = createMethodFromCallExpression(context, callExpression, token.text, inJs, makeStatic, preferences, typeDecl);
const containingMethodDeclaration = getAncestor(callExpression, SyntaxKind.MethodDeclaration);
if (containingMethodDeclaration && containingMethodDeclaration.parent === typeDecl) {

View File

@ -74,7 +74,7 @@ namespace ts.codefix {
const tsconfigObjectLiteral = getTsConfigObjectLiteralExpression(configFile);
if (!tsconfigObjectLiteral) return undefined;
const compilerOptionsProperty = findProperty(tsconfigObjectLiteral, "compilerOptions");
const compilerOptionsProperty = findJsonProperty(tsconfigObjectLiteral, "compilerOptions");
if (!compilerOptionsProperty) {
const newCompilerOptions = createObjectLiteral([makeDefaultBaseUrl(), makeDefaultPaths()]);
changes.insertNodeAtObjectStart(configFile, tsconfigObjectLiteral, createJsonPropertyAssignment("compilerOptions", newCompilerOptions));
@ -94,7 +94,7 @@ namespace ts.codefix {
return createJsonPropertyAssignment("baseUrl", createStringLiteral(defaultBaseUrl));
}
function getOrAddBaseUrl(changes: textChanges.ChangeTracker, tsconfig: TsConfigSourceFile, compilerOptions: ObjectLiteralExpression): string {
const baseUrlProp = findProperty(compilerOptions, "baseUrl");
const baseUrlProp = findJsonProperty(compilerOptions, "baseUrl");
if (baseUrlProp) {
return isStringLiteral(baseUrlProp.initializer) ? baseUrlProp.initializer.text : defaultBaseUrl;
}
@ -112,7 +112,7 @@ namespace ts.codefix {
return createJsonPropertyAssignment("paths", createObjectLiteral([makeDefaultPathMapping()]));
}
function getOrAddPathMapping(changes: textChanges.ChangeTracker, tsconfig: TsConfigSourceFile, compilerOptions: ObjectLiteralExpression) {
const paths = findProperty(compilerOptions, "paths");
const paths = findJsonProperty(compilerOptions, "paths");
if (!paths || !isObjectLiteralExpression(paths.initializer)) {
changes.insertNodeAtObjectStart(tsconfig, compilerOptions, makeDefaultPaths());
return defaultTypesDirectoryName;
@ -129,14 +129,6 @@ namespace ts.codefix {
return defaultTypesDirectoryName;
}
function createJsonPropertyAssignment(name: string, initializer: Expression) {
return createPropertyAssignment(createStringLiteral(name), initializer);
}
function findProperty(obj: ObjectLiteralExpression, name: string): PropertyAssignment | undefined {
return find(obj.properties, (p): p is PropertyAssignment => isPropertyAssignment(p) && !!p.name && isStringLiteral(p.name) && p.name.text === name);
}
function getInstallCommand(fileName: string, packageName: string): InstallPackageAction {
return { type: "install package", file: fileName, packageName };
}

View File

@ -8,9 +8,9 @@ namespace ts.codefix {
registerCodeFix({
errorCodes,
getCodeActions(context) {
const { program, sourceFile, span } = context;
const { sourceFile, span } = context;
const changes = textChanges.ChangeTracker.with(context, t =>
addMissingMembers(getClass(sourceFile, span.start), sourceFile, program.getTypeChecker(), t, context.preferences));
addMissingMembers(getClass(sourceFile, span.start), sourceFile, context, t, context.preferences));
return changes.length === 0 ? undefined : [createCodeFixAction(fixId, changes, Diagnostics.Implement_inherited_abstract_class, fixId, Diagnostics.Implement_all_inherited_abstract_classes)];
},
fixIds: [fixId],
@ -19,7 +19,7 @@ namespace ts.codefix {
return codeFixAll(context, errorCodes, (changes, diag) => {
const classDeclaration = getClass(diag.file, diag.start);
if (addToSeen(seenClassDeclarations, getNodeId(classDeclaration))) {
addMissingMembers(classDeclaration, context.sourceFile, context.program.getTypeChecker(), changes, context.preferences);
addMissingMembers(classDeclaration, context.sourceFile, context, changes, context.preferences);
}
});
},
@ -32,15 +32,16 @@ namespace ts.codefix {
return cast(token.parent, isClassLike);
}
function addMissingMembers(classDeclaration: ClassLikeDeclaration, sourceFile: SourceFile, checker: TypeChecker, changeTracker: textChanges.ChangeTracker, preferences: UserPreferences): void {
function addMissingMembers(classDeclaration: ClassLikeDeclaration, sourceFile: SourceFile, context: TypeConstructionContext, changeTracker: textChanges.ChangeTracker, preferences: UserPreferences): void {
const extendsNode = getEffectiveBaseTypeNode(classDeclaration)!;
const checker = context.program.getTypeChecker();
const instantiatedExtendsType = checker.getTypeAtLocation(extendsNode);
// Note that this is ultimately derived from a map indexed by symbol names,
// so duplicates cannot occur.
const abstractAndNonPrivateExtendsSymbols = checker.getPropertiesOfType(instantiatedExtendsType).filter(symbolPointsToNonPrivateAndAbstractMember);
createMissingMemberNodes(classDeclaration, abstractAndNonPrivateExtendsSymbols, checker, preferences, member => changeTracker.insertNodeAtClassStart(sourceFile, classDeclaration, member));
createMissingMemberNodes(classDeclaration, abstractAndNonPrivateExtendsSymbols, context, preferences, member => changeTracker.insertNodeAtClassStart(sourceFile, classDeclaration, member));
}
function symbolPointsToNonPrivateAndAbstractMember(symbol: Symbol): boolean {

View File

@ -6,11 +6,10 @@ namespace ts.codefix {
registerCodeFix({
errorCodes,
getCodeActions(context) {
const { program, sourceFile, span } = context;
const { sourceFile, span } = context;
const classDeclaration = getClass(sourceFile, span.start);
const checker = program.getTypeChecker();
return mapDefined<ExpressionWithTypeArguments, CodeFixAction>(getClassImplementsHeritageClauseElements(classDeclaration), implementedTypeNode => {
const changes = textChanges.ChangeTracker.with(context, t => addMissingDeclarations(checker, implementedTypeNode, sourceFile, classDeclaration, t, context.preferences));
const changes = textChanges.ChangeTracker.with(context, t => addMissingDeclarations(context, implementedTypeNode, sourceFile, classDeclaration, t, context.preferences));
return changes.length === 0 ? undefined : createCodeFixAction(fixId, changes, [Diagnostics.Implement_interface_0, implementedTypeNode.getText(sourceFile)], fixId, Diagnostics.Implement_all_unimplemented_interfaces);
});
},
@ -21,7 +20,7 @@ namespace ts.codefix {
const classDeclaration = getClass(diag.file, diag.start);
if (addToSeen(seenClassDeclarations, getNodeId(classDeclaration))) {
for (const implementedTypeNode of getClassImplementsHeritageClauseElements(classDeclaration)!) {
addMissingDeclarations(context.program.getTypeChecker(), implementedTypeNode, diag.file, classDeclaration, changes, context.preferences);
addMissingDeclarations(context, implementedTypeNode, diag.file, classDeclaration, changes, context.preferences);
}
}
});
@ -37,13 +36,14 @@ namespace ts.codefix {
}
function addMissingDeclarations(
checker: TypeChecker,
context: TypeConstructionContext,
implementedTypeNode: ExpressionWithTypeArguments,
sourceFile: SourceFile,
classDeclaration: ClassLikeDeclaration,
changeTracker: textChanges.ChangeTracker,
preferences: UserPreferences,
): void {
const checker = context.program.getTypeChecker();
const maybeHeritageClauseSymbol = getHeritageClauseSymbolTable(classDeclaration, checker);
// Note that this is ultimately derived from a map indexed by symbol names,
// so duplicates cannot occur.
@ -60,12 +60,12 @@ namespace ts.codefix {
createMissingIndexSignatureDeclaration(implementedType, IndexKind.String);
}
createMissingMemberNodes(classDeclaration, nonPrivateAndNotExistedInHeritageClauseMembers, checker, preferences, member => changeTracker.insertNodeAtClassStart(sourceFile, classDeclaration, member));
createMissingMemberNodes(classDeclaration, nonPrivateAndNotExistedInHeritageClauseMembers, context, preferences, member => changeTracker.insertNodeAtClassStart(sourceFile, classDeclaration, member));
function createMissingIndexSignatureDeclaration(type: InterfaceType, kind: IndexKind): void {
const indexInfoOfKind = checker.getIndexInfoOfType(type, kind);
if (indexInfoOfKind) {
changeTracker.insertNodeAtClassStart(sourceFile, classDeclaration, checker.indexInfoToIndexSignatureDeclaration(indexInfoOfKind, kind, classDeclaration)!);
changeTracker.insertNodeAtClassStart(sourceFile, classDeclaration, checker.indexInfoToIndexSignatureDeclaration(indexInfoOfKind, kind, classDeclaration, /*flags*/ undefined, getNoopSymbolTrackerWithResolver(context))!);
}
}
}

View File

@ -0,0 +1,24 @@
/* @internal */
namespace ts.codefix {
const fixId = "enableExperimentalDecorators";
const errorCodes = [
Diagnostics.Experimental_support_for_decorators_is_a_feature_that_is_subject_to_change_in_a_future_release_Set_the_experimentalDecorators_option_to_remove_this_warning.code
];
registerCodeFix({
errorCodes,
getCodeActions: (context) => {
const { configFile } = context.program.getCompilerOptions();
if (configFile === undefined) {
return undefined;
}
const changes = textChanges.ChangeTracker.with(context, changeTracker => makeChange(changeTracker, configFile));
return [createCodeFixActionNoFixId(fixId, changes, Diagnostics.Enable_the_experimentalDecorators_option_in_your_configuration_file)];
},
fixIds: [fixId],
});
function makeChange(changeTracker: textChanges.ChangeTracker, configFile: TsConfigSourceFile) {
setJsonCompilerOptionValue(changeTracker, configFile, "experimentalDecorators", createTrue());
}
}
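
For context, the new fix targets code like the following, which triggers the experimental-decorators diagnostic when `experimentalDecorators` is not enabled; applying the fix writes the option into the project's tsconfig.json. The decorator here is only an illustrative example:

    // A simple class decorator that seals the constructor and its prototype.
    function sealed(constructor: Function): void {
        Object.seal(constructor);
        Object.seal(constructor.prototype);
    }

    @sealed
    class Greeter {
        greet(): string {
            return "hello";
        }
    }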

View File

@ -6,23 +6,48 @@ namespace ts.codefix {
* @param possiblyMissingSymbols The collection of symbols to filter and then get insertions for.
* @returns Empty string iff there are no member insertions.
*/
export function createMissingMemberNodes(classDeclaration: ClassLikeDeclaration, possiblyMissingSymbols: ReadonlyArray<Symbol>, checker: TypeChecker, preferences: UserPreferences, out: (node: ClassElement) => void): void {
export function createMissingMemberNodes(classDeclaration: ClassLikeDeclaration, possiblyMissingSymbols: ReadonlyArray<Symbol>, context: TypeConstructionContext, preferences: UserPreferences, out: (node: ClassElement) => void): void {
const classMembers = classDeclaration.symbol.members!;
for (const symbol of possiblyMissingSymbols) {
if (!classMembers.has(symbol.escapedName)) {
addNewNodeForMemberSymbol(symbol, classDeclaration, checker, preferences, out);
addNewNodeForMemberSymbol(symbol, classDeclaration, context, preferences, out);
}
}
}
function getModuleSpecifierResolverHost(context: TypeConstructionContext): SymbolTracker["moduleResolverHost"] {
return {
directoryExists: context.host.directoryExists ? d => context.host.directoryExists!(d) : undefined,
fileExists: context.host.fileExists ? f => context.host.fileExists!(f) : undefined,
getCurrentDirectory: context.host.getCurrentDirectory ? () => context.host.getCurrentDirectory!() : undefined,
readFile: context.host.readFile ? f => context.host.readFile!(f) : undefined,
useCaseSensitiveFileNames: context.host.useCaseSensitiveFileNames ? () => context.host.useCaseSensitiveFileNames!() : undefined,
getSourceFiles: () => context.program.getSourceFiles(),
getCommonSourceDirectory: () => context.program.getCommonSourceDirectory(),
};
}
export function getNoopSymbolTrackerWithResolver(context: TypeConstructionContext): SymbolTracker {
return {
trackSymbol: noop,
moduleResolverHost: getModuleSpecifierResolverHost(context),
};
}
export interface TypeConstructionContext {
program: Program;
host: ModuleSpecifierResolutionHost;
}
/**
* @returns Empty string iff there we can't figure out a representation for `symbol` in `enclosingDeclaration`.
*/
function addNewNodeForMemberSymbol(symbol: Symbol, enclosingDeclaration: ClassLikeDeclaration, checker: TypeChecker, preferences: UserPreferences, out: (node: Node) => void): void {
function addNewNodeForMemberSymbol(symbol: Symbol, enclosingDeclaration: ClassLikeDeclaration, context: TypeConstructionContext, preferences: UserPreferences, out: (node: Node) => void): void {
const declarations = symbol.getDeclarations();
if (!(declarations && declarations.length)) {
return undefined;
}
const checker = context.program.getTypeChecker();
const declaration = declarations[0];
const name = getSynthesizedDeepClone(getNameOfDeclaration(declaration), /*includeTrivia*/ false) as PropertyName;
@ -36,7 +61,7 @@ namespace ts.codefix {
case SyntaxKind.SetAccessor:
case SyntaxKind.PropertySignature:
case SyntaxKind.PropertyDeclaration:
const typeNode = checker.typeToTypeNode(type, enclosingDeclaration);
const typeNode = checker.typeToTypeNode(type, enclosingDeclaration, /*flags*/ undefined, getNoopSymbolTrackerWithResolver(context));
out(createProperty(
/*decorators*/undefined,
modifiers,
@ -83,13 +108,13 @@ namespace ts.codefix {
}
function outputMethod(signature: Signature, modifiers: NodeArray<Modifier> | undefined, name: PropertyName, body?: Block): void {
const method = signatureToMethodDeclaration(checker, signature, enclosingDeclaration, modifiers, name, optional, body);
const method = signatureToMethodDeclaration(context, signature, enclosingDeclaration, modifiers, name, optional, body);
if (method) out(method);
}
}
function signatureToMethodDeclaration(
checker: TypeChecker,
context: TypeConstructionContext,
signature: Signature,
enclosingDeclaration: ClassLikeDeclaration,
modifiers: NodeArray<Modifier> | undefined,
@ -97,7 +122,8 @@ namespace ts.codefix {
optional: boolean,
body: Block | undefined,
): MethodDeclaration | undefined {
const signatureDeclaration = <MethodDeclaration>checker.signatureToSignatureDeclaration(signature, SyntaxKind.MethodDeclaration, enclosingDeclaration, NodeBuilderFlags.NoTruncation | NodeBuilderFlags.SuppressAnyReturnType);
const program = context.program;
const signatureDeclaration = <MethodDeclaration>program.getTypeChecker().signatureToSignatureDeclaration(signature, SyntaxKind.MethodDeclaration, enclosingDeclaration, NodeBuilderFlags.NoTruncation | NodeBuilderFlags.SuppressAnyReturnType, getNoopSymbolTrackerWithResolver(context));
if (!signatureDeclaration) {
return undefined;
}
@ -117,18 +143,20 @@ namespace ts.codefix {
inJs: boolean,
makeStatic: boolean,
preferences: UserPreferences,
body: boolean,
contextNode: Node,
): MethodDeclaration {
const body = !isInterfaceDeclaration(contextNode);
const { typeArguments, arguments: args, parent } = call;
const checker = context.program.getTypeChecker();
const tracker = getNoopSymbolTrackerWithResolver(context);
const types = map(args, arg =>
// Widen the type so we don't emit nonsense annotations like "function fn(x: 3) {"
checker.typeToTypeNode(checker.getBaseTypeOfLiteralType(checker.getTypeAtLocation(arg))));
checker.typeToTypeNode(checker.getBaseTypeOfLiteralType(checker.getTypeAtLocation(arg)), contextNode, /*flags*/ undefined, tracker));
const names = map(args, arg =>
isIdentifier(arg) ? arg.text :
isPropertyAccessExpression(arg) ? arg.name.text : undefined);
const contextualType = checker.getContextualType(call);
const returnType = inJs ? undefined : contextualType && checker.typeToTypeNode(contextualType, call) || createKeywordTypeNode(SyntaxKind.AnyKeyword);
const returnType = inJs ? undefined : contextualType && checker.typeToTypeNode(contextualType, contextNode, /*flags*/ undefined, tracker) || createKeywordTypeNode(SyntaxKind.AnyKeyword);
return createMethod(
/*decorators*/ undefined,
/*modifiers*/ makeStatic ? [createToken(SyntaxKind.StaticKeyword)] : undefined,
@ -249,4 +277,46 @@ namespace ts.codefix {
}
return undefined;
}
export function setJsonCompilerOptionValue(
changeTracker: textChanges.ChangeTracker,
configFile: TsConfigSourceFile,
optionName: string,
optionValue: Expression,
) {
const tsconfigObjectLiteral = getTsConfigObjectLiteralExpression(configFile);
if (!tsconfigObjectLiteral) return undefined;
const compilerOptionsProperty = findJsonProperty(tsconfigObjectLiteral, "compilerOptions");
if (compilerOptionsProperty === undefined) {
changeTracker.insertNodeAtObjectStart(configFile, tsconfigObjectLiteral, createJsonPropertyAssignment(
"compilerOptions",
createObjectLiteral([
createJsonPropertyAssignment(optionName, optionValue),
])));
return;
}
const compilerOptions = compilerOptionsProperty.initializer;
if (!isObjectLiteralExpression(compilerOptions)) {
return;
}
const optionProperty = findJsonProperty(compilerOptions, optionName);
if (optionProperty === undefined) {
changeTracker.insertNodeAtObjectStart(configFile, compilerOptions, createJsonPropertyAssignment(optionName, optionValue));
}
else {
changeTracker.replaceNode(configFile, optionProperty.initializer, optionValue);
}
}
export function createJsonPropertyAssignment(name: string, initializer: Expression) {
return createPropertyAssignment(createStringLiteral(name), initializer);
}
export function findJsonProperty(obj: ObjectLiteralExpression, name: string): PropertyAssignment | undefined {
return find(obj.properties, (p): p is PropertyAssignment => isPropertyAssignment(p) && !!p.name && isStringLiteral(p.name) && p.name.text === name);
}
}
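
`setJsonCompilerOptionValue` above edits the tsconfig syntax tree in place. The same decision logic, sketched over a plain parsed tsconfig object instead of the AST; the names and types are illustrative:

    interface TsconfigLike {
        compilerOptions?: Record<string, unknown>;
        [key: string]: unknown;
    }

    function setCompilerOptionValue(config: TsconfigLike, optionName: string, optionValue: unknown): void {
        if (config.compilerOptions === undefined) {
            // No "compilerOptions" block yet: create one holding just the new option.
            config.compilerOptions = { [optionName]: optionValue };
            return;
        }
        // Existing block: add the option or overwrite its current value.
        config.compilerOptions[optionName] = optionValue;
    }

    const config: TsconfigLike = { include: ["src"] };
    setCompilerOptionValue(config, "experimentalDecorators", true);
    // config is now { include: ["src"], compilerOptions: { experimentalDecorators: true } }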

View File

@ -367,7 +367,9 @@ namespace ts.Completions {
}
function getSymbolName(symbol: Symbol, origin: SymbolOriginInfo | undefined, target: ScriptTarget): string {
return origin && originIsExport(origin) && origin.isDefaultExport && symbol.escapedName === InternalSymbolName.Default
return origin && originIsExport(origin) && (
(origin.isDefaultExport && symbol.escapedName === InternalSymbolName.Default) ||
(symbol.escapedName === InternalSymbolName.ExportEquals))
// Name of "export default foo;" is "foo". Name of "export default 0" is the filename converted to camelCase.
? firstDefined(symbol.declarations, d => isExportAssignment(d) && isIdentifier(d.expression) ? d.expression.text : undefined)
|| codefix.moduleSymbolToValidIdentifier(origin.moduleSymbol, target)
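
As the comment above notes, when the exported value has no usable name, completions fall back to an identifier derived from the module name. An illustrative reimplementation of that kind of fallback, not the compiler's own `moduleSymbolToValidIdentifier`:

    function moduleNameToIdentifier(moduleName: string): string {
        const base = moduleName
            .replace(/^.*[\\/]/, "")    // drop any directory part
            .replace(/\.[^.]+$/, "");   // drop the extension
        return base
            .split(/[^a-zA-Z0-9]+/)
            .filter(part => part.length > 0)
            .map((part, i) => i === 0
                ? part.charAt(0).toLowerCase() + part.slice(1)
                : part.charAt(0).toUpperCase() + part.slice(1))
            .join("");
    }

    // moduleNameToIdentifier("./string-utils.ts") === "stringUtils"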

View File

@ -39,6 +39,11 @@ namespace ts.FindAllReferences {
readonly isForRename?: boolean;
/** True if we are searching for implementations. We will have a different method of adding references if so. */
readonly implementations?: boolean;
/**
* True to opt in for enhanced renaming of shorthand properties and import/export specifiers.
* Default is false for backwards compatibility.
*/
readonly providePrefixAndSuffixTextForRename?: boolean;
}
export function findReferencedSymbols(program: Program, cancellationToken: CancellationToken, sourceFiles: ReadonlyArray<SourceFile>, sourceFile: SourceFile, position: number): ReferencedSymbol[] | undefined {
@ -106,7 +111,7 @@ namespace ts.FindAllReferences {
return flattenEntries(Core.getReferencedSymbolsForNode(position, node, program, sourceFiles, cancellationToken, options, sourceFilesSet));
}
function flattenEntries(referenceSymbols: SymbolAndEntries[] | undefined): ReadonlyArray<Entry> | undefined {
function flattenEntries(referenceSymbols: ReadonlyArray<SymbolAndEntries> | undefined): ReadonlyArray<Entry> | undefined {
return referenceSymbols && flatMap(referenceSymbols, r => r.references);
}
@ -157,8 +162,8 @@ namespace ts.FindAllReferences {
return { displayParts, kind: symbolKind };
}
export function toRenameLocation(entry: Entry, originalNode: Node, checker: TypeChecker): RenameLocation {
return { ...entryToDocumentSpan(entry), ...getPrefixAndSuffixText(entry, originalNode, checker) };
export function toRenameLocation(entry: Entry, originalNode: Node, checker: TypeChecker, providePrefixAndSuffixText: boolean): RenameLocation {
return { ...entryToDocumentSpan(entry), ...(providePrefixAndSuffixText && getPrefixAndSuffixText(entry, originalNode, checker)) };
}
export function toReferenceEntry(entry: Entry): ReferenceEntry {
@ -277,6 +282,11 @@ namespace ts.FindAllReferences {
return createTextSpanFromBounds(start, end);
}
export function getTextSpanOfEntry(entry: Entry) {
return entry.kind === EntryKind.Span ? entry.textSpan :
getTextSpan(entry.node, entry.node.getSourceFile());
}
/** A node is considered a writeAccess iff it is a name of a declaration or a target of an assignment */
function isWriteAccessForReference(node: Node): boolean {
const decl = getDeclarationFromName(node);
@ -348,7 +358,7 @@ namespace ts.FindAllReferences {
/* @internal */
namespace ts.FindAllReferences.Core {
/** Core find-all-references algorithm. Handles special cases before delegating to `getReferencedSymbolsForSymbol`. */
export function getReferencedSymbolsForNode(position: number, node: Node, program: Program, sourceFiles: ReadonlyArray<SourceFile>, cancellationToken: CancellationToken, options: Options = {}, sourceFilesSet: ReadonlyMap<true> = arrayToSet(sourceFiles, f => f.fileName)): SymbolAndEntries[] | undefined {
export function getReferencedSymbolsForNode(position: number, node: Node, program: Program, sourceFiles: ReadonlyArray<SourceFile>, cancellationToken: CancellationToken, options: Options = {}, sourceFilesSet: ReadonlyMap<true> = arrayToSet(sourceFiles, f => f.fileName)): ReadonlyArray<SymbolAndEntries> | undefined {
if (isSourceFile(node)) {
const reference = GoToDefinition.getReferenceAtPosition(node, position, program);
const moduleSymbol = reference && program.getTypeChecker().getMergedSymbol(reference.file.symbol);
@ -363,7 +373,7 @@ namespace ts.FindAllReferences.Core {
}
const checker = program.getTypeChecker();
let symbol = checker.getSymbolAtLocation(node);
const symbol = checker.getSymbolAtLocation(node);
// Could not find a symbol e.g. unknown identifier
if (!symbol) {
@ -375,23 +385,95 @@ namespace ts.FindAllReferences.Core {
return getReferencedSymbolsForModule(program, symbol.parent!, /*excludeImportTypeOfExportEquals*/ false, sourceFiles, sourceFilesSet);
}
let moduleReferences: SymbolAndEntries[] = emptyArray;
const moduleSourceFile = isModuleSymbol(symbol);
let referencedNode: Node | undefined = node;
if (moduleSourceFile) {
const exportEquals = symbol.exports!.get(InternalSymbolName.ExportEquals);
// If !!exportEquals, we're about to add references to `import("mod")` anyway, so don't double-count them.
moduleReferences = getReferencedSymbolsForModule(program, symbol, !!exportEquals, sourceFiles, sourceFilesSet);
if (!exportEquals || !sourceFilesSet.has(moduleSourceFile.fileName)) return moduleReferences;
// Continue to get references to 'export ='.
symbol = skipAlias(exportEquals, checker);
referencedNode = undefined;
const moduleReferences = getReferencedSymbolsForModuleIfDeclaredBySourceFile(symbol, program, sourceFiles, cancellationToken, options, sourceFilesSet);
if (moduleReferences && !(symbol.flags & SymbolFlags.Transient)) {
return moduleReferences;
}
return concatenate(moduleReferences, getReferencedSymbolsForSymbol(symbol, referencedNode, sourceFiles, sourceFilesSet, checker, cancellationToken, options));
const aliasedSymbol = getMergedAliasedSymbolOfNamespaceExportDeclaration(node, symbol, checker);
const moduleReferencesOfExportTarget = aliasedSymbol &&
getReferencedSymbolsForModuleIfDeclaredBySourceFile(aliasedSymbol, program, sourceFiles, cancellationToken, options, sourceFilesSet);
const references = getReferencedSymbolsForSymbol(symbol, node, sourceFiles, sourceFilesSet, checker, cancellationToken, options);
return mergeReferences(program, moduleReferences, references, moduleReferencesOfExportTarget);
}
function isModuleSymbol(symbol: Symbol): SourceFile | undefined {
return symbol.flags & SymbolFlags.Module ? find(symbol.declarations, isSourceFile) : undefined;
function getMergedAliasedSymbolOfNamespaceExportDeclaration(node: Node, symbol: Symbol, checker: TypeChecker) {
if (node.parent && isNamespaceExportDeclaration(node.parent)) {
const aliasedSymbol = checker.getAliasedSymbol(symbol);
const targetSymbol = checker.getMergedSymbol(aliasedSymbol);
if (aliasedSymbol !== targetSymbol) {
return targetSymbol;
}
}
return undefined;
}
function getReferencedSymbolsForModuleIfDeclaredBySourceFile(symbol: Symbol, program: Program, sourceFiles: ReadonlyArray<SourceFile>, cancellationToken: CancellationToken, options: Options, sourceFilesSet: ReadonlyMap<true>) {
const moduleSourceFile = symbol.flags & SymbolFlags.Module ? find(symbol.declarations, isSourceFile) : undefined;
if (!moduleSourceFile) return undefined;
const exportEquals = symbol.exports!.get(InternalSymbolName.ExportEquals);
// If !!exportEquals, we're about to add references to `import("mod")` anyway, so don't double-count them.
const moduleReferences = getReferencedSymbolsForModule(program, symbol, !!exportEquals, sourceFiles, sourceFilesSet);
if (!exportEquals || !sourceFilesSet.has(moduleSourceFile.fileName)) return moduleReferences;
// Continue to get references to 'export ='.
const checker = program.getTypeChecker();
symbol = skipAlias(exportEquals, checker);
return mergeReferences(program, moduleReferences, getReferencedSymbolsForSymbol(symbol, /*node*/ undefined, sourceFiles, sourceFilesSet, checker, cancellationToken, options));
}
/**
* Merges the references by sorting them (by file index in sourceFiles and their location in it) that point to same definition symbol
*/
function mergeReferences(program: Program, ...referencesToMerge: (SymbolAndEntries[] | undefined)[]): SymbolAndEntries[] | undefined {
let result: SymbolAndEntries[] | undefined;
for (const references of referencesToMerge) {
if (!references || !references.length) continue;
if (!result) {
result = references;
continue;
}
for (const entry of references) {
if (!entry.definition || entry.definition.type !== DefinitionKind.Symbol) {
result.push(entry);
continue;
}
const symbol = entry.definition.symbol;
const refIndex = findIndex(result, ref => !!ref.definition &&
ref.definition.type === DefinitionKind.Symbol &&
ref.definition.symbol === symbol);
if (refIndex === -1) {
result.push(entry);
continue;
}
const reference = result[refIndex];
result[refIndex] = {
definition: reference.definition,
references: reference.references.concat(entry.references).sort((entry1, entry2) => {
const entry1File = getSourceFileIndexOfEntry(program, entry1);
const entry2File = getSourceFileIndexOfEntry(program, entry2);
if (entry1File !== entry2File) {
return compareValues(entry1File, entry2File);
}
const entry1Span = getTextSpanOfEntry(entry1);
const entry2Span = getTextSpanOfEntry(entry2);
return entry1Span.start !== entry2Span.start ?
compareValues(entry1Span.start, entry2Span.start) :
compareValues(entry1Span.length, entry2Span.length);
})
};
}
}
return result;
}
function getSourceFileIndexOfEntry(program: Program, entry: Entry) {
const sourceFile = entry.kind === EntryKind.Span ?
program.getSourceFile(entry.fileName)! :
entry.node.getSourceFile();
return program.getSourceFiles().indexOf(sourceFile);
}
function getReferencedSymbolsForModule(program: Program, symbol: Symbol, excludeImportTypeOfExportEquals: boolean, sourceFiles: ReadonlyArray<SourceFile>, sourceFilesSet: ReadonlyMap<true>): SymbolAndEntries[] {
@ -430,7 +512,7 @@ namespace ts.FindAllReferences.Core {
break;
default:
// This may be merged with something.
Debug.fail("Expected a module symbol to be declared by a SourceFile or ModuleDeclaration.");
Debug.assert(!!(symbol.flags & SymbolFlags.Transient), "Expected a module symbol to be declared by a SourceFile or ModuleDeclaration.");
}
}
@ -484,7 +566,7 @@ namespace ts.FindAllReferences.Core {
/** Core find-all-references algorithm for a normal symbol. */
function getReferencedSymbolsForSymbol(originalSymbol: Symbol, node: Node | undefined, sourceFiles: ReadonlyArray<SourceFile>, sourceFilesSet: ReadonlyMap<true>, checker: TypeChecker, cancellationToken: CancellationToken, options: Options): SymbolAndEntries[] {
const symbol = node && skipPastExportOrImportSpecifierOrUnion(originalSymbol, node, checker, !!options.isForRename) || originalSymbol;
const symbol = node && skipPastExportOrImportSpecifierOrUnion(originalSymbol, node, checker, /*useLocalSymbolForExportSpecifier*/ !isForRenameWithPrefixAndSuffixText(options)) || originalSymbol;
// Compute the meaning from the location and the symbol it references
const searchMeaning = node ? getIntersectingMeaningFromDeclarations(node, symbol) : SemanticMeaning.All;
@ -492,7 +574,7 @@ namespace ts.FindAllReferences.Core {
const result: SymbolAndEntries[] = [];
const state = new State(sourceFiles, sourceFilesSet, node ? getSpecialSearchKind(node) : SpecialSearchKind.None, checker, cancellationToken, searchMeaning, options, result);
const exportSpecifier = !options.isForRename ? undefined : find(symbol.declarations, isExportSpecifier);
const exportSpecifier = !isForRenameWithPrefixAndSuffixText(options) ? undefined : find(symbol.declarations, isExportSpecifier);
if (exportSpecifier) {
// When renaming at an export specifier, rename the export and not the thing being exported.
getReferencesAtExportSpecifier(exportSpecifier.name, symbol, exportSpecifier, state.createSearch(node, originalSymbol, /*comingFrom*/ undefined), state, /*addReferencesHere*/ true, /*alwaysGetReferences*/ true);
@ -502,7 +584,7 @@ namespace ts.FindAllReferences.Core {
searchForImportsOfExport(node, symbol, { exportingModuleSymbol: Debug.assertDefined(symbol.parent, "Expected export symbol to have a parent"), exportKind: ExportKind.Default }, state);
}
else {
const search = state.createSearch(node, symbol, /*comingFrom*/ undefined, { allSearchSymbols: node ? populateSearchSymbolSet(symbol, node, checker, !!options.isForRename, !!options.implementations) : [symbol] });
const search = state.createSearch(node, symbol, /*comingFrom*/ undefined, { allSearchSymbols: node ? populateSearchSymbolSet(symbol, node, checker, !!options.isForRename, !!options.providePrefixAndSuffixTextForRename, !!options.implementations) : [symbol] });
// Try to get the smallest valid scope that we can limit our search to;
// otherwise we'll need to search globally (i.e. include each file).
@ -538,14 +620,16 @@ namespace ts.FindAllReferences.Core {
}
/** Handle a few special cases relating to export/import specifiers. */
function skipPastExportOrImportSpecifierOrUnion(symbol: Symbol, node: Node, checker: TypeChecker, isForRename: boolean): Symbol | undefined {
function skipPastExportOrImportSpecifierOrUnion(symbol: Symbol, node: Node, checker: TypeChecker, useLocalSymbolForExportSpecifier: boolean): Symbol | undefined {
const { parent } = node;
if (isExportSpecifier(parent) && !isForRename) {
if (isExportSpecifier(parent) && useLocalSymbolForExportSpecifier) {
return getLocalSymbolForExportSpecifier(node as Identifier, symbol, parent, checker);
}
// If the symbol is declared as part of a declaration like `{ type: "a" } | { type: "b" }`, use the property on the union type to get more references.
return firstDefined(symbol.declarations, decl => {
if (!decl.parent) {
// Ignore UMD module and global merge
if (symbol.flags & SymbolFlags.Transient) return undefined;
// Assertions for GH#21814. We should be handling SourceFile symbols in `getReferencedSymbolsForModule` instead of getting here.
Debug.fail(`Unexpected symbol at ${Debug.showSyntaxKind(node)}: ${Debug.showSymbol(symbol)}`);
}
@ -583,6 +667,12 @@ namespace ts.FindAllReferences.Core {
Class,
}
function getNonModuleSymbolOfMergedModuleSymbol(symbol: Symbol) {
if (!(symbol.flags & (SymbolFlags.Module | SymbolFlags.Transient))) return undefined;
const decl = symbol.declarations && find(symbol.declarations, d => !isSourceFile(d) && !isModuleDeclaration(d));
return decl && decl.symbol;
}
/**
* Holds all state needed for finding references.
* Unlike `Search`, there is only one `State`.
@ -643,7 +733,7 @@ namespace ts.FindAllReferences.Core {
// The other two forms seem to be handled downstream (e.g. in `skipPastExportOrImportSpecifier`), so special-casing the first form
// here appears to be intentional.
const {
text = stripQuotes(unescapeLeadingUnderscores((getLocalSymbolForExportDefault(symbol) || symbol).escapedName)),
text = stripQuotes(unescapeLeadingUnderscores((getLocalSymbolForExportDefault(symbol) || getNonModuleSymbolOfMergedModuleSymbol(symbol) || symbol).escapedName)),
allSearchSymbols = [symbol],
} = searchOptions;
const escapedText = escapeLeadingUnderscores(text);
@ -1071,6 +1161,8 @@ namespace ts.FindAllReferences.Core {
addReferencesHere: boolean,
alwaysGetReferences?: boolean,
): void {
Debug.assert(!alwaysGetReferences || !!state.options.providePrefixAndSuffixTextForRename, "If alwaysGetReferences is true, then prefix/suffix text must be enabled");
const { parent, propertyName, name } = exportSpecifier;
const exportDeclaration = parent.parent;
const localSymbol = getLocalSymbolForExportSpecifier(referenceLocation, referenceSymbol, exportSpecifier, state.checker);
@ -1102,7 +1194,7 @@ namespace ts.FindAllReferences.Core {
}
// For `export { foo as bar }`, rename `foo`, but not `bar`.
if (!state.options.isForRename || alwaysGetReferences) {
if (!isForRenameWithPrefixAndSuffixText(state.options) || alwaysGetReferences) {
const exportKind = referenceLocation.originalKeywordKind === SyntaxKind.DefaultKeyword ? ExportKind.Default : ExportKind.Named;
const exportSymbol = Debug.assertDefined(exportSpecifier.symbol);
const exportInfo = Debug.assertDefined(getExportInfo(exportSymbol, exportKind, state.checker));
@ -1110,7 +1202,7 @@ namespace ts.FindAllReferences.Core {
}
// At `export { x } from "foo"`, also search for the imported symbol `"foo".x`.
if (search.comingFrom !== ImportExport.Export && exportDeclaration.moduleSpecifier && !propertyName && !state.options.isForRename) {
if (search.comingFrom !== ImportExport.Export && exportDeclaration.moduleSpecifier && !propertyName && !isForRenameWithPrefixAndSuffixText(state.options)) {
const imported = state.checker.getExportSpecifierLocalTargetSymbol(exportSpecifier);
if (imported) searchForImportedSymbol(imported, state);
}
@ -1145,7 +1237,7 @@ namespace ts.FindAllReferences.Core {
const { symbol } = importOrExport;
if (importOrExport.kind === ImportExport.Import) {
if (!state.options.isForRename) {
if (!(isForRenameWithPrefixAndSuffixText(state.options))) {
searchForImportedSymbol(symbol, state);
}
}
@ -1514,16 +1606,16 @@ namespace ts.FindAllReferences.Core {
// For certain symbol kinds, we need to include other symbols in the search set.
// This is not needed when searching for re-exports.
function populateSearchSymbolSet(symbol: Symbol, location: Node, checker: TypeChecker, isForRename: boolean, implementations: boolean): Symbol[] {
function populateSearchSymbolSet(symbol: Symbol, location: Node, checker: TypeChecker, isForRename: boolean, providePrefixAndSuffixText: boolean, implementations: boolean): Symbol[] {
const result: Symbol[] = [];
forEachRelatedSymbol<void>(symbol, location, checker, isForRename,
forEachRelatedSymbol<void>(symbol, location, checker, isForRename, !(isForRename && providePrefixAndSuffixText),
(sym, root, base) => { result.push(base || root || sym); },
/*allowBaseTypes*/ () => !implementations);
return result;
}
function forEachRelatedSymbol<T>(
symbol: Symbol, location: Node, checker: TypeChecker, isForRenamePopulateSearchSymbolSet: boolean,
symbol: Symbol, location: Node, checker: TypeChecker, isForRenamePopulateSearchSymbolSet: boolean, onlyIncludeBindingElementAtReferenceLocation: boolean,
cbSymbol: (symbol: Symbol, rootSymbol?: Symbol, baseSymbol?: Symbol, kind?: NodeEntryKind) => T | undefined,
allowBaseTypes: (rootSymbol: Symbol) => boolean,
): T | undefined {
@ -1566,6 +1658,13 @@ namespace ts.FindAllReferences.Core {
if (res2) return res2;
}
const aliasedSymbol = getMergedAliasedSymbolOfNamespaceExportDeclaration(location, symbol, checker);
if (aliasedSymbol) {
// In case of UMD module and global merging, search for global as well
const res = cbSymbol(aliasedSymbol, /*rootSymbol*/ undefined, /*baseSymbol*/ undefined, EntryKind.Node);
if (res) return res;
}
const res = fromRoot(symbol);
if (res) return res;
@ -1577,9 +1676,25 @@ namespace ts.FindAllReferences.Core {
}
// symbolAtLocation for a binding element is the local symbol. See if the search symbol is the property.
// Don't do this when populating search set for a rename -- just rename the local.
// Don't do this when populating search set for a rename when prefix and suffix text will be provided -- just rename the local.
if (!isForRenamePopulateSearchSymbolSet) {
const bindingElementPropertySymbol = isObjectBindingElementWithoutPropertyName(location.parent) ? getPropertySymbolFromBindingElement(checker, location.parent) : undefined;
let bindingElementPropertySymbol: Symbol | undefined;
if (onlyIncludeBindingElementAtReferenceLocation) {
bindingElementPropertySymbol = isObjectBindingElementWithoutPropertyName(location.parent) ? getPropertySymbolFromBindingElement(checker, location.parent) : undefined;
}
else {
bindingElementPropertySymbol = getPropertySymbolOfObjectBindingPatternWithoutPropertyName(symbol, checker);
}
return bindingElementPropertySymbol && fromRoot(bindingElementPropertySymbol, EntryKind.SearchedPropertyFoundLocal);
}
Debug.assert(isForRenamePopulateSearchSymbolSet);
// due to the above assert and the arguments at the uses of this function,
// (onlyIncludeBindingElementAtReferenceLocation <=> !providePrefixAndSuffixTextForRename) holds
const includeOriginalSymbolOfBindingElement = onlyIncludeBindingElementAtReferenceLocation;
if (includeOriginalSymbolOfBindingElement) {
const bindingElementPropertySymbol = getPropertySymbolOfObjectBindingPatternWithoutPropertyName(symbol, checker);
return bindingElementPropertySymbol && fromRoot(bindingElementPropertySymbol, EntryKind.SearchedPropertyFoundLocal);
}
@ -1597,6 +1712,13 @@ namespace ts.FindAllReferences.Core {
? getPropertySymbolsFromBaseTypes(rootSymbol.parent, rootSymbol.name, checker, base => cbSymbol(sym, rootSymbol, base, kind))
: undefined));
}
function getPropertySymbolOfObjectBindingPatternWithoutPropertyName(symbol: Symbol, checker: TypeChecker): Symbol | undefined {
const bindingElement = getDeclarationOfKind<BindingElement>(symbol, SyntaxKind.BindingElement);
if (bindingElement && isObjectBindingElementWithoutPropertyName(bindingElement)) {
return getPropertySymbolFromBindingElement(checker, bindingElement);
}
}
}
interface RelatedSymbol {
@ -1606,6 +1728,7 @@ namespace ts.FindAllReferences.Core {
function getRelatedSymbol(search: Search, referenceSymbol: Symbol, referenceLocation: Node, state: State): RelatedSymbol | undefined {
const { checker } = state;
return forEachRelatedSymbol(referenceSymbol, referenceLocation, checker, /*isForRenamePopulateSearchSymbolSet*/ false,
/*onlyIncludeBindingElementAtReferenceLocation*/ !state.options.isForRename || !!state.options.providePrefixAndSuffixTextForRename,
(sym, rootSymbol, baseSymbol, kind): RelatedSymbol | undefined => search.includes(baseSymbol || rootSymbol || sym)
// For a base type, use the symbol for the derived type. For a synthetic (e.g. union) property, use the union symbol.
? { symbol: rootSymbol && !(getCheckFlags(sym) & CheckFlags.Synthetic) ? rootSymbol : sym, kind }
@ -1651,7 +1774,8 @@ namespace ts.FindAllReferences.Core {
function isImplementation(node: Node): boolean {
return !!(node.flags & NodeFlags.Ambient)
|| (isVariableLike(node) ? hasInitializer(node)
? !(isInterfaceDeclaration(node) || isTypeAliasDeclaration(node))
: (isVariableLike(node) ? hasInitializer(node)
: isFunctionLikeDeclaration(node) ? !!node.body
: isClassLike(node) || isModuleOrEnumDeclaration(node));
}
@ -1696,4 +1820,8 @@ namespace ts.FindAllReferences.Core {
t.symbol && t.symbol.flags & (SymbolFlags.Class | SymbolFlags.Interface) ? t.symbol : undefined);
return res.length === 0 ? undefined : res;
}
function isForRenameWithPrefixAndSuffixText(options: Options) {
return options.isForRename && options.providePrefixAndSuffixTextForRename;
}
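A tiny standalone sketch (plain object literals, no compiler types) of the gating this helper expresses: only a rename request that also opts into prefix/suffix text takes the new code paths.

const samples = [
    { isForRename: false, providePrefixAndSuffixTextForRename: false },
    { isForRename: true, providePrefixAndSuffixTextForRename: false },
    { isForRename: true, providePrefixAndSuffixTextForRename: true },
];
for (const options of samples) {
    // Mirrors isForRenameWithPrefixAndSuffixText: both flags must be set.
    const usesPrefixSuffixPaths = !!(options.isForRename && options.providePrefixAndSuffixTextForRename);
    console.log(options, "=>", usesPrefixSuffixPaths); // true only for the last sample
}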
}
View File
@ -460,6 +460,12 @@ namespace ts.refactor {
const oldImportsNeededByNewFile = new SymbolSet();
const newFileImportsFromOldFile = new SymbolSet();
const containsJsx = find(toMove, statement => !!(statement.transformFlags & TransformFlags.ContainsJsx));
const jsxNamespaceSymbol = getJsxNamespaceSymbol(containsJsx);
if (jsxNamespaceSymbol) { // Might not exist (e.g. in non-compiling code)
oldImportsNeededByNewFile.add(jsxNamespaceSymbol);
}
for (const statement of toMove) {
forEachTopLevelDeclaration(statement, decl => {
movedSymbols.add(Debug.assertDefined(isExpressionStatement(decl) ? checker.getSymbolAtLocation(decl.expression.left) : decl.symbol));
@ -485,6 +491,11 @@ namespace ts.refactor {
for (const statement of oldFile.statements) {
if (contains(toMove, statement)) continue;
// jsxNamespaceSymbol is only set if it is in oldImportsNeededByNewFile.
if (jsxNamespaceSymbol && !!(statement.transformFlags & TransformFlags.ContainsJsx)) {
unusedImportsFromOldFile.delete(jsxNamespaceSymbol);
}
forEachReference(statement, checker, symbol => {
if (movedSymbols.has(symbol)) oldFileImportsFromNewFile.add(symbol);
unusedImportsFromOldFile.delete(symbol);
@ -492,6 +503,23 @@ namespace ts.refactor {
}
return { movedSymbols, newFileImportsFromOldFile, oldFileImportsFromNewFile, oldImportsNeededByNewFile, unusedImportsFromOldFile };
function getJsxNamespaceSymbol(containsJsx: Node | undefined) {
if (containsJsx === undefined) {
return undefined;
}
const jsxNamespace = checker.getJsxNamespace(containsJsx);
// Strictly speaking, this could resolve to a symbol other than the JSX namespace.
// This will produce erroneous output (probably, an incorrectly copied import) but
// is expected to be very rare and easily reversible.
const jsxNamespaceSymbol = checker.resolveName(jsxNamespace, containsJsx, SymbolFlags.Namespace, /*excludeGlobals*/ true);
return !!jsxNamespaceSymbol && some(jsxNamespaceSymbol.declarations, isInImport)
? jsxNamespaceSymbol
: undefined;
}
}
// Below should all be utilities
@ -512,7 +540,7 @@ namespace ts.refactor {
}
function isVariableDeclarationInImport(decl: VariableDeclaration) {
return isSourceFile(decl.parent.parent.parent) &&
decl.initializer && isRequireCall(decl.initializer, /*checkArgumentIsStringLiteralLike*/ true);
!!decl.initializer && isRequireCall(decl.initializer, /*checkArgumentIsStringLiteralLike*/ true);
}
function filterImport(i: SupportedImport, moduleSpecifier: StringLiteralLike, keep: (name: Identifier) => boolean): SupportedImportStatement | undefined {
View File
@ -1,14 +1,14 @@
/* @internal */
namespace ts.Rename {
export function getRenameInfo(program: Program, sourceFile: SourceFile, position: number): RenameInfo {
export function getRenameInfo(program: Program, sourceFile: SourceFile, position: number, options?: RenameInfoOptions): RenameInfo {
const node = getTouchingPropertyName(sourceFile, position);
const renameInfo = node && nodeIsEligibleForRename(node)
? getRenameInfoForNode(node, program.getTypeChecker(), sourceFile, declaration => program.isSourceFileDefaultLibrary(declaration.getSourceFile()))
? getRenameInfoForNode(node, program.getTypeChecker(), sourceFile, declaration => program.isSourceFileDefaultLibrary(declaration.getSourceFile()), options)
: undefined;
return renameInfo || getRenameInfoError(Diagnostics.You_cannot_rename_this_element);
}
function getRenameInfoForNode(node: Node, typeChecker: TypeChecker, sourceFile: SourceFile, isDefinedInLibraryFile: (declaration: Node) => boolean): RenameInfo | undefined {
function getRenameInfoForNode(node: Node, typeChecker: TypeChecker, sourceFile: SourceFile, isDefinedInLibraryFile: (declaration: Node) => boolean, options?: RenameInfoOptions): RenameInfo | undefined {
const symbol = typeChecker.getSymbolAtLocation(node);
if (!symbol) return;
// Only allow a symbol to be renamed if it actually has at least one declaration.
@ -26,7 +26,7 @@ namespace ts.Rename {
}
if (isStringLiteralLike(node) && tryGetImportFromModuleSpecifier(node)) {
return getRenameInfoForModule(node, sourceFile, symbol);
return options && options.allowRenameOfImportPath ? getRenameInfoForModule(node, sourceFile, symbol) : undefined;
}
const kind = SymbolDisplay.getSymbolKind(typeChecker, symbol, node);
View File
@ -1549,7 +1549,7 @@ namespace ts {
return DocumentHighlights.getDocumentHighlights(program, cancellationToken, sourceFile, position, sourceFilesToSearch);
}
function findRenameLocations(fileName: string, position: number, findInStrings: boolean, findInComments: boolean): RenameLocation[] | undefined {
function findRenameLocations(fileName: string, position: number, findInStrings: boolean, findInComments: boolean, providePrefixAndSuffixTextForRename?: boolean): RenameLocation[] | undefined {
synchronizeHostData();
const sourceFile = getValidSourceFile(fileName);
const node = getTouchingPropertyName(sourceFile, position);
@ -1559,7 +1559,8 @@ namespace ts {
({ fileName: sourceFile.fileName, textSpan: createTextSpanFromNode(node.tagName, sourceFile) }));
}
else {
return getReferencesWorker(node, position, { findInStrings, findInComments, isForRename: true }, FindAllReferences.toRenameLocation);
return getReferencesWorker(node, position, { findInStrings, findInComments, providePrefixAndSuffixTextForRename, isForRename: true },
(entry, originalNode, checker) => FindAllReferences.toRenameLocation(entry, originalNode, checker, providePrefixAndSuffixTextForRename || false));
}
}
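A hedged usage sketch of the public API touched here (file name and contents invented; assumes the typescript package at this version): findRenameLocations now accepts providePrefixAndSuffixTextForRename, and getRenameInfo optionally takes a RenameInfoOptions bag.

import * as ts from "typescript";

// Minimal in-memory LanguageServiceHost for a single invented file.
const exampleFileName = "/main.ts";
const exampleText = `const x = 0; const o = { x };`;
const exampleHost: ts.LanguageServiceHost = {
    getScriptFileNames: () => [exampleFileName],
    getScriptVersion: () => "1",
    getScriptSnapshot: name => name === exampleFileName ? ts.ScriptSnapshot.fromString(exampleText) : undefined,
    getCurrentDirectory: () => "/",
    getCompilationSettings: () => ({}),
    getDefaultLibFileName: options => ts.getDefaultLibFilePath(options),
};
const exampleService = ts.createLanguageService(exampleHost);

// Rename at the shorthand property use of `x` in `{ x }`.
const renamePosition = exampleText.lastIndexOf("x");
// The options bag only matters when renaming at a module specifier; it is shown here for illustration.
const renameInfo = exampleService.getRenameInfo(exampleFileName, renamePosition, { allowRenameOfImportPath: true });
if (renameInfo.canRename) {
    // With the new flag, locations may carry prefixText/suffixText (e.g. "x: " for the
    // shorthand property) so that only the local binding has to change.
    const renameLocations = exampleService.findRenameLocations(
        exampleFileName, renamePosition, /*findInStrings*/ false, /*findInComments*/ false,
        /*providePrefixAndSuffixTextForRename*/ true);
    console.log(renameLocations);
}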
@ -2062,9 +2063,9 @@ namespace ts {
}
}
function getRenameInfo(fileName: string, position: number): RenameInfo {
function getRenameInfo(fileName: string, position: number, options?: RenameInfoOptions): RenameInfo {
synchronizeHostData();
return Rename.getRenameInfo(program, getValidSourceFile(fileName), position);
return Rename.getRenameInfo(program, getValidSourceFile(fileName), position, options);
}
function getRefactorContext(file: SourceFile, positionOrRange: number | TextRange, preferences: UserPreferences, formatOptions?: FormatCodeSettings): RefactorContext {
View File
@ -164,13 +164,13 @@ namespace ts {
* Returns a JSON-encoded value of the type:
* { canRename: boolean, localizedErrorMessage: string, displayName: string, fullDisplayName: string, kind: string, kindModifiers: string, triggerSpan: { start; length } }
*/
getRenameInfo(fileName: string, position: number): string;
getRenameInfo(fileName: string, position: number, options?: RenameInfoOptions): string;
/**
* Returns a JSON-encoded value of the type:
* { fileName: string, textSpan: { start: number, length: number } }[]
*/
findRenameLocations(fileName: string, position: number, findInStrings: boolean, findInComments: boolean): string;
findRenameLocations(fileName: string, position: number, findInStrings: boolean, findInComments: boolean, providePrefixAndSuffixTextForRename?: boolean): string;
/**
* Returns a JSON-encoded value of the type:
@ -831,17 +831,17 @@ namespace ts {
);
}
public getRenameInfo(fileName: string, position: number): string {
public getRenameInfo(fileName: string, position: number, options?: RenameInfoOptions): string {
return this.forwardJSONCall(
`getRenameInfo('${fileName}', ${position})`,
() => this.languageService.getRenameInfo(fileName, position)
() => this.languageService.getRenameInfo(fileName, position, options)
);
}
public findRenameLocations(fileName: string, position: number, findInStrings: boolean, findInComments: boolean): string {
public findRenameLocations(fileName: string, position: number, findInStrings: boolean, findInComments: boolean, providePrefixAndSuffixTextForRename?: boolean): string {
return this.forwardJSONCall(
`findRenameLocations('${fileName}', ${position}, ${findInStrings}, ${findInComments})`,
() => this.languageService.findRenameLocations(fileName, position, findInStrings, findInComments)
`findRenameLocations('${fileName}', ${position}, ${findInStrings}, ${findInComments}, ${providePrefixAndSuffixTextForRename})`,
() => this.languageService.findRenameLocations(fileName, position, findInStrings, findInComments, providePrefixAndSuffixTextForRename)
);
}
View File
@ -452,7 +452,7 @@ namespace ts.SignatureHelp {
}
function getContainingArgumentInfo(node: Node, position: number, sourceFile: SourceFile, checker: TypeChecker, isManuallyInvoked: boolean): ArgumentListInfo | undefined {
for (let n = node; isManuallyInvoked || (!isBlock(n) && !isSourceFile(n)); n = n.parent) {
for (let n = node; !isSourceFile(n) && (isManuallyInvoked || !isBlock(n)); n = n.parent) {
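                // Note: with the reordered condition the walk always stops at the SourceFile,
                // even when signature help is manually invoked (previously `isManuallyInvoked`
                // kept the loop climbing past it).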
// If the node is not a subspan of its parent, this is a big problem.
// There have been crashes that might be caused by this violation.
Debug.assert(rangeContainsRange(n.parent, n), "Not a subspan", () => `Child: ${Debug.showSyntaxKind(n)}, parent: ${Debug.showSyntaxKind(n.parent)}`);
View File
@ -61,6 +61,7 @@
"codefixes/fixClassDoesntImplementInheritedAbstractMember.ts",
"codefixes/fixClassSuperMustPrecedeThisAccess.ts",
"codefixes/fixConstructorForDerivedNeedSuperCall.ts",
"codefixes/fixEnableExperimentalDecorators.ts",
"codefixes/fixExtendsInterfaceBecomesImplements.ts",
"codefixes/fixForgottenThisPropertyAccess.ts",
"codefixes/fixUnusedIdentifier.ts",
View File
@ -294,8 +294,8 @@ namespace ts {
getSignatureHelpItems(fileName: string, position: number, options: SignatureHelpItemsOptions | undefined): SignatureHelpItems | undefined;
getRenameInfo(fileName: string, position: number): RenameInfo;
findRenameLocations(fileName: string, position: number, findInStrings: boolean, findInComments: boolean): ReadonlyArray<RenameLocation> | undefined;
getRenameInfo(fileName: string, position: number, options?: RenameInfoOptions): RenameInfo;
findRenameLocations(fileName: string, position: number, findInStrings: boolean, findInComments: boolean, providePrefixAndSuffixTextForRename?: boolean): ReadonlyArray<RenameLocation> | undefined;
getDefinitionAtPosition(fileName: string, position: number): ReadonlyArray<DefinitionInfo> | undefined;
getDefinitionAndBoundSpan(fileName: string, position: number): DefinitionInfoAndBoundSpan | undefined;
@ -848,6 +848,10 @@ namespace ts {
localizedErrorMessage: string;
}
export interface RenameInfoOptions {
readonly allowRenameOfImportPath?: boolean;
}
export interface SignatureHelpParameter {
name: string;
documentation: SymbolDisplayPart[];
View File
@ -1,5 +1,5 @@
{
"extends": "../tsconfig-base",
"extends": "../tsconfig-noncomposite-base",
"compilerOptions": {
"outFile": "../../built/local/run.js",
"composite": false,
View File
@ -85,7 +85,7 @@ namespace ts {
// We shouldn't have any errors about invalid tsconfig files in these tests
assert(config && !error, flattenDiagnosticMessageText(error && error.messageText, "\n"));
const file = parseJsonConfigFileContent(config, parseConfigHostFromCompilerHost(host), getDirectoryPath(entryPointConfigFileName), {}, entryPointConfigFileName);
const file = parseJsonConfigFileContent(config, parseConfigHostFromCompilerHostLike(host), getDirectoryPath(entryPointConfigFileName), {}, entryPointConfigFileName);
file.options.configFilePath = entryPointConfigFileName;
const prog = createProgram({
rootNames: file.fileNames,
View File
@ -95,6 +95,68 @@ namespace ts {
module: ModuleKind.ES2015,
emitDecoratorMetadata: true,
experimentalDecorators: true
});
});
emitsCorrectly("sourceMapExternalSourceFiles",
[
{
file: "source.ts",
// The text preceding 'change' spans two lines so the resulting line-map change is observable
text: `\`multi
line\`
'change'`
},
],
{
before: [
context => node => visitNode(node, function visitor(node: Node): Node {
if (isStringLiteral(node) && node.text === "change") {
const text = "'changed'";
const lineMap = computeLineStarts(text);
setSourceMapRange(node, {
pos: 0, end: text.length, source: {
text,
fileName: "another.html",
lineMap,
getLineAndCharacterOfPosition: pos => computeLineAndCharacterOfPosition(lineMap, pos)
}
});
return node;
}
return visitEachChild(node, visitor, context);
})
]
},
{ sourceMap: true }
);
emitsCorrectly("skipTriviaExternalSourceFiles",
[
{
file: "source.ts",
// The source file contains preceding trivia (e.g. whitespace) to try to confuse the `skipSourceTrivia` function.
text: " original;"
},
],
{
before: [
context => node => visitNode(node, function visitor(node: Node): Node {
if (isIdentifier(node) && node.text === "original") {
const newNode = createIdentifier("changed");
setSourceMapRange(newNode, {
pos: 0,
end: 7,
// Do not provide a custom skipTrivia function for `source`.
source: createSourceMapSource("another.html", "changed;")
});
return newNode;
}
return visitEachChild(node, visitor, context);
})
]
},
{ sourceMap: true }
);
});
}
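For context on the API exercised above, a standalone sketch (invented file names, paraphrasing the tests) of mapping a replacement node to a range in a non-TypeScript file with setSourceMapRange and createSourceMapSource:

import * as ts from "typescript";

const mapToHtml: ts.TransformerFactory<ts.SourceFile> = context => sourceFile =>
    ts.visitNode(sourceFile, function visitor(node: ts.Node): ts.Node {
        if (ts.isIdentifier(node) && node.text === "original") {
            const replacement = ts.createIdentifier("changed");
            // Point the emitted identifier at a range inside another.html instead of the .ts input.
            ts.setSourceMapRange(replacement, {
                pos: 0,
                end: "changed;".length,
                source: ts.createSourceMapSource("another.html", "changed;"),
            });
            return replacement;
        }
        return ts.visitEachChild(node, visitor, context);
    });

const transpiled = ts.transpileModule("  original;", {
    compilerOptions: { sourceMap: true },
    transformers: { before: [mapToHtml] },
});
console.log(transpiled.outputText);
console.log(transpiled.sourceMapText); // expected to list another.html among the sources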
View File
@ -234,39 +234,46 @@ namespace ts {
// Update a timestamp in the middle project
tick();
touch(fs, "/src/logic/index.ts");
const originalWriteFile = fs.writeFileSync;
const writtenFiles = createMap<true>();
fs.writeFileSync = (path, data, encoding) => {
writtenFiles.set(path, true);
originalWriteFile.call(fs, path, data, encoding);
};
// Because we haven't reset the build context, the builder should assume there's nothing to do right now
const status = builder.getUpToDateStatusOfFile(builder.resolveProjectName("/src/logic"));
assert.equal(status.type, UpToDateStatusType.UpToDate, "Project should be assumed to be up-to-date");
verifyInvalidation(/*expectedToWriteTests*/ false);
// Rebuild this project
tick();
builder.invalidateProject("/src/logic");
builder.buildInvalidatedProject();
// The file should be updated
assert.equal(fs.statSync("/src/logic/index.js").mtimeMs, time(), "JS file should have been rebuilt");
assert.isBelow(fs.statSync("/src/tests/index.js").mtimeMs, time(), "Downstream JS file should *not* have been rebuilt");
// Does not build tests or core because there is no change in declaration file
tick();
builder.buildInvalidatedProject();
assert.isBelow(fs.statSync("/src/tests/index.js").mtimeMs, time(), "Downstream JS file should have been rebuilt");
assert.isBelow(fs.statSync("/src/core/index.js").mtimeMs, time(), "Upstream JS file should not have been rebuilt");
// Rebuild this project
tick();
fs.writeFileSync("/src/logic/index.ts", `${fs.readFileSync("/src/logic/index.ts")}
export class cNew {}`);
builder.invalidateProject("/src/logic");
builder.buildInvalidatedProject();
// The file should be updated
assert.equal(fs.statSync("/src/logic/index.js").mtimeMs, time(), "JS file should have been rebuilt");
assert.isBelow(fs.statSync("/src/tests/index.js").mtimeMs, time(), "Downstream JS file should *not* have been rebuilt");
verifyInvalidation(/*expectedToWriteTests*/ true);
// Build downstream projects should update 'tests', but not 'core'
tick();
builder.buildInvalidatedProject();
assert.isBelow(fs.statSync("/src/tests/index.js").mtimeMs, time(), "Downstream JS file should have been rebuilt");
assert.isBelow(fs.statSync("/src/core/index.js").mtimeMs, time(), "Upstream JS file should not have been rebuilt");
function verifyInvalidation(expectedToWriteTests: boolean) {
// Rebuild this project
tick();
builder.invalidateProject("/src/logic");
builder.buildInvalidatedProject();
// The file should be updated
assert.isTrue(writtenFiles.has("/src/logic/index.js"), "JS file should have been rebuilt");
assert.equal(fs.statSync("/src/logic/index.js").mtimeMs, time(), "JS file should have been rebuilt");
assert.isFalse(writtenFiles.has("/src/tests/index.js"), "Downstream JS file should *not* have been rebuilt");
assert.isBelow(fs.statSync("/src/tests/index.js").mtimeMs, time(), "Downstream JS file should *not* have been rebuilt");
writtenFiles.clear();
// Build downstream projects should update 'tests', but not 'core'
tick();
builder.buildInvalidatedProject();
if (expectedToWriteTests) {
assert.isTrue(writtenFiles.has("/src/tests/index.js"), "Downstream JS file should have been rebuilt");
}
else {
assert.equal(writtenFiles.size, 0, "Should not write any new files");
}
assert.equal(fs.statSync("/src/tests/index.js").mtimeMs, time(), "Downstream JS file should have new timestamp");
assert.isBelow(fs.statSync("/src/core/index.js").mtimeMs, time(), "Upstream JS file should not have been rebuilt");
}
});
});
@ -462,11 +469,20 @@ export const b = new A();`);
describe("unittests:: tsbuild - baseline sectioned sourcemaps", () => {
let fs: vfs.FileSystem | undefined;
const actualReadFileMap = createMap<number>();
before(() => {
fs = outFileFs.shadow();
const host = new fakes.SolutionBuilderHost(fs);
const builder = createSolutionBuilder(host, ["/src/third"], { dry: false, force: false, verbose: false });
host.clearDiagnostics();
const originalReadFile = host.readFile;
host.readFile = path => {
// Don't record lib files
if (path.startsWith("/src/")) {
actualReadFileMap.set(path, (actualReadFileMap.get(path) || 0) + 1);
}
return originalReadFile.call(host, path);
};
builder.buildAllProjects();
host.assertDiagnosticMessages(/*none*/);
});
@ -478,6 +494,38 @@ export const b = new A();`);
// tslint:disable-next-line:no-null-keyword
Harness.Baseline.runBaseline("outfile-concat.js", patch ? vfs.formatPatch(patch) : null);
});
it("verify readFile calls", () => {
const expected = [
// Configs
"/src/third/tsconfig.json",
"/src/second/tsconfig.json",
"/src/first/tsconfig.json",
// Source files
"/src/third/third_part1.ts",
"/src/second/second_part1.ts",
"/src/second/second_part2.ts",
"/src/first/first_PART1.ts",
"/src/first/first_part2.ts",
"/src/first/first_part3.ts",
// outputs
"/src/first/bin/first-output.js",
"/src/first/bin/first-output.js.map",
"/src/first/bin/first-output.d.ts",
"/src/first/bin/first-output.d.ts.map",
"/src/2/second-output.js",
"/src/2/second-output.js.map",
"/src/2/second-output.d.ts",
"/src/2/second-output.d.ts.map"
];
assert.equal(actualReadFileMap.size, expected.length, `Expected: ${JSON.stringify(expected)} \nActual: ${JSON.stringify(arrayFrom(actualReadFileMap.entries()))}`);
expected.forEach(expectedValue => {
const actual = actualReadFileMap.get(expectedValue);
assert.equal(actual, 1, `Mismatch in read file call number for: ${expectedValue}\nExpected: ${JSON.stringify(expected)} \nActual: ${JSON.stringify(arrayFrom(actualReadFileMap.entries()))}`);
});
});
});
describe("unittests:: tsbuild - downstream prepend projects always get rebuilt", () => {
View File
@ -2,18 +2,37 @@ namespace ts.tscWatch {
import projectsLocation = TestFSWithWatch.tsbuildProjectsLocation;
import getFilePathInProject = TestFSWithWatch.getTsBuildProjectFilePath;
import getFileFromProject = TestFSWithWatch.getTsBuildProjectFile;
type TsBuildWatchSystem = WatchedSystem & { writtenFiles: Map<true>; };
function createTsBuildWatchSystem(fileOrFolderList: ReadonlyArray<TestFSWithWatch.FileOrFolderOrSymLink>, params?: TestFSWithWatch.TestServerHostCreationParameters) {
const host = createWatchedSystem(fileOrFolderList, params) as TsBuildWatchSystem;
const originalWriteFile = host.writeFile;
host.writtenFiles = createMap<true>();
host.writeFile = (fileName, content) => {
originalWriteFile.call(host, fileName, content);
const path = host.toFullPath(fileName);
host.writtenFiles.set(path, true);
};
return host;
}
export function createSolutionBuilder(system: WatchedSystem, rootNames: ReadonlyArray<string>, defaultOptions?: BuildOptions) {
const host = createSolutionBuilderWithWatchHost(system);
return ts.createSolutionBuilder(host, rootNames, defaultOptions || { watch: true });
}
function createSolutionBuilderWithWatch(host: WatchedSystem, rootNames: ReadonlyArray<string>, defaultOptions?: BuildOptions) {
function createSolutionBuilderWithWatch(host: TsBuildWatchSystem, rootNames: ReadonlyArray<string>, defaultOptions?: BuildOptions) {
const solutionBuilder = createSolutionBuilder(host, rootNames, defaultOptions);
solutionBuilder.buildAllProjects();
solutionBuilder.startWatching();
return solutionBuilder;
}
type OutputFileStamp = [string, Date | undefined, boolean];
function transformOutputToOutputFileStamp(f: string, host: TsBuildWatchSystem): OutputFileStamp {
return [f, host.getModifiedTime(f), host.writtenFiles.has(host.toFullPath(f))] as OutputFileStamp;
}
describe("unittests:: tsbuild-watch program updates", () => {
const project = "sample1";
const enum SubProject {
@ -61,12 +80,11 @@ namespace ts.tscWatch {
return [`${file}.js`, `${file}.d.ts`];
}
type OutputFileStamp = [string, Date | undefined];
function getOutputStamps(host: WatchedSystem, subProject: SubProject, baseFileNameWithoutExtension: string): OutputFileStamp[] {
return getOutputFileNames(subProject, baseFileNameWithoutExtension).map(f => [f, host.getModifiedTime(f)] as OutputFileStamp);
function getOutputStamps(host: TsBuildWatchSystem, subProject: SubProject, baseFileNameWithoutExtension: string): OutputFileStamp[] {
return getOutputFileNames(subProject, baseFileNameWithoutExtension).map(f => transformOutputToOutputFileStamp(f, host));
}
function getOutputFileStamps(host: WatchedSystem, additionalFiles?: ReadonlyArray<[SubProject, string]>): OutputFileStamp[] {
function getOutputFileStamps(host: TsBuildWatchSystem, additionalFiles?: ReadonlyArray<[SubProject, string]>): OutputFileStamp[] {
const result = [
...getOutputStamps(host, SubProject.core, "anotherModule"),
...getOutputStamps(host, SubProject.core, "index"),
@ -76,18 +94,21 @@ namespace ts.tscWatch {
if (additionalFiles) {
additionalFiles.forEach(([subProject, baseFileNameWithoutExtension]) => result.push(...getOutputStamps(host, subProject, baseFileNameWithoutExtension)));
}
host.writtenFiles.clear();
return result;
}
function verifyChangedFiles(actualStamps: OutputFileStamp[], oldTimeStamps: OutputFileStamp[], changedFiles: string[]) {
function verifyChangedFiles(actualStamps: OutputFileStamp[], oldTimeStamps: OutputFileStamp[], changedFiles: ReadonlyArray<string>, modifiedTimeStampFiles: ReadonlyArray<string>) {
for (let i = 0; i < oldTimeStamps.length; i++) {
const actual = actualStamps[i];
const old = oldTimeStamps[i];
if (contains(changedFiles, actual[0])) {
assert.isTrue((actual[1] || 0) > (old[1] || 0), `${actual[0]} expected to written`);
const expectedIsChanged = contains(changedFiles, actual[0]);
assert.equal(actual[2], contains(changedFiles, actual[0]), `Expected ${actual[0]} to be written.`);
if (expectedIsChanged || contains(modifiedTimeStampFiles, actual[0])) {
assert.isTrue((actual[1] || 0) > (old[1] || 0), `${actual[0]} file expected to have newer modified time because it is expected to ${expectedIsChanged ? "be changed" : "have modified time stamp"}`);
}
else {
assert.equal(actual[1], old[1], `${actual[0]} expected to not change`);
assert.equal(actual[1], old[1], `${actual[0]} expected to not change or have timestamp modified.`);
}
}
}
@ -101,7 +122,7 @@ namespace ts.tscWatch {
const testProjectExpectedWatchedDirectoriesRecursive = [projectPath(SubProject.core), projectPath(SubProject.logic)];
function createSolutionInWatchMode(allFiles: ReadonlyArray<File>, defaultOptions?: BuildOptions, disableConsoleClears?: boolean) {
const host = createWatchedSystem(allFiles, { currentDirectory: projectsLocation });
const host = createTsBuildWatchSystem(allFiles, { currentDirectory: projectsLocation });
createSolutionBuilderWithWatch(host, [`${project}/${SubProject.tests}`], defaultOptions);
verifyWatches(host);
checkOutputErrorsInitial(host, emptyArray, disableConsoleClears);
@ -112,7 +133,7 @@ namespace ts.tscWatch {
return host;
}
function verifyWatches(host: WatchedSystem) {
function verifyWatches(host: TsBuildWatchSystem) {
checkWatchedFiles(host, testProjectExpectedWatchedFiles);
checkWatchedDirectories(host, emptyArray, /*recursive*/ false);
checkWatchedDirectories(host, testProjectExpectedWatchedDirectoriesRecursive, /*recursive*/ true);
@ -134,30 +155,50 @@ namespace ts.tscWatch {
const host = createSolutionInWatchMode(allFiles);
return { host, verifyChangeWithFile, verifyChangeAfterTimeout, verifyWatches };
function verifyChangeWithFile(fileName: string, content: string) {
function verifyChangeWithFile(fileName: string, content: string, local?: boolean) {
const outputFileStamps = getOutputFileStamps(host, additionalFiles);
host.writeFile(fileName, content);
verifyChangeAfterTimeout(outputFileStamps);
verifyChangeAfterTimeout(outputFileStamps, local);
}
function verifyChangeAfterTimeout(outputFileStamps: OutputFileStamp[]) {
function verifyChangeAfterTimeout(outputFileStamps: OutputFileStamp[], local?: boolean) {
host.checkTimeoutQueueLengthAndRun(1); // Builds core
const changedCore = getOutputFileStamps(host, additionalFiles);
verifyChangedFiles(changedCore, outputFileStamps, [
...getOutputFileNames(SubProject.core, "anotherModule"), // This should not be written really
...getOutputFileNames(SubProject.core, "index"),
...(additionalFiles ? getOutputFileNames(SubProject.core, newFileWithoutExtension) : emptyArray)
]);
host.checkTimeoutQueueLengthAndRun(1); // Builds logic
verifyChangedFiles(
changedCore,
outputFileStamps,
additionalFiles ?
getOutputFileNames(SubProject.core, newFileWithoutExtension) :
getOutputFileNames(SubProject.core, "index"), // Written files are new file or core index file thats changed
[
...getOutputFileNames(SubProject.core, "anotherModule"),
...(additionalFiles ? getOutputFileNames(SubProject.core, "index") : emptyArray)
]
);
host.checkTimeoutQueueLengthAndRun(1); // Builds logic or updates timestamps
const changedLogic = getOutputFileStamps(host, additionalFiles);
verifyChangedFiles(changedLogic, changedCore, [
...getOutputFileNames(SubProject.logic, "index") // Again these need not be written
]);
verifyChangedFiles(
changedLogic,
changedCore,
additionalFiles || local ?
emptyArray :
getOutputFileNames(SubProject.logic, "index"),
additionalFiles || local ?
getOutputFileNames(SubProject.logic, "index") :
emptyArray
);
host.checkTimeoutQueueLengthAndRun(1); // Builds tests
const changedTests = getOutputFileStamps(host, additionalFiles);
verifyChangedFiles(changedTests, changedLogic, [
...getOutputFileNames(SubProject.tests, "index") // Again these need not be written
]);
verifyChangedFiles(
changedTests,
changedLogic,
additionalFiles || local ?
emptyArray :
getOutputFileNames(SubProject.tests, "index"),
additionalFiles || local ?
getOutputFileNames(SubProject.tests, "index") :
emptyArray
);
host.checkTimeoutQueueLength(0);
checkOutputErrorsIncremental(host, emptyArray);
verifyWatches();
@ -193,19 +234,9 @@ export class someClass2 { }`);
});
it("non local change does not start build of referencing projects", () => {
const host = createSolutionInWatchMode(allFiles);
const outputFileStamps = getOutputFileStamps(host);
host.writeFile(core[1].path, `${core[1].content}
function foo() { }`);
host.checkTimeoutQueueLengthAndRun(1); // Builds core
const changedCore = getOutputFileStamps(host);
verifyChangedFiles(changedCore, outputFileStamps, [
...getOutputFileNames(SubProject.core, "anotherModule"), // This should not be written really
...getOutputFileNames(SubProject.core, "index"),
]);
host.checkTimeoutQueueLength(0);
checkOutputErrorsIncremental(host, emptyArray);
verifyWatches(host);
const { verifyChangeWithFile } = createSolutionInWatchModeToVerifyChanges();
verifyChangeWithFile(core[1].path, `${core[1].content}
function foo() { }`, /*local*/ true);
});
it("builds when new file is added, and its subsequent updates", () => {
@ -242,7 +273,7 @@ export class someClass2 { }`);
it("watches config files that are not present", () => {
const allFiles = [libFile, ...core, logic[1], ...tests];
const host = createWatchedSystem(allFiles, { currentDirectory: projectsLocation });
const host = createTsBuildWatchSystem(allFiles, { currentDirectory: projectsLocation });
createSolutionBuilderWithWatch(host, [`${project}/${SubProject.tests}`]);
checkWatchedFiles(host, [core[0], core[1], core[2]!, logic[0], ...tests].map(f => f.path.toLowerCase())); // tslint:disable-line no-unnecessary-type-assertion (TODO: type assertion should be necessary)
checkWatchedDirectories(host, emptyArray, /*recursive*/ false);
@ -268,14 +299,10 @@ export class someClass2 { }`);
host.writeFile(logic[0].path, logic[0].content);
host.checkTimeoutQueueLengthAndRun(1); // Builds logic
const changedLogic = getOutputFileStamps(host);
verifyChangedFiles(changedLogic, initial, [
...getOutputFileNames(SubProject.logic, "index")
]);
verifyChangedFiles(changedLogic, initial, getOutputFileNames(SubProject.logic, "index"), emptyArray);
host.checkTimeoutQueueLengthAndRun(1); // Builds tests
const changedTests = getOutputFileStamps(host);
verifyChangedFiles(changedTests, changedLogic, [
...getOutputFileNames(SubProject.tests, "index")
]);
verifyChangedFiles(changedTests, changedLogic, getOutputFileNames(SubProject.tests, "index"), emptyArray);
host.checkTimeoutQueueLength(0);
checkOutputErrorsIncremental(host, emptyArray);
verifyWatches(host);
@ -305,7 +332,7 @@ export class someClass2 { }`);
};
const projectFiles = [coreTsConfig, coreIndex, logicTsConfig, logicIndex];
const host = createWatchedSystem([libFile, ...projectFiles], { currentDirectory: projectsLocation });
const host = createTsBuildWatchSystem([libFile, ...projectFiles], { currentDirectory: projectsLocation });
createSolutionBuilderWithWatch(host, [`${project}/${SubProject.logic}`]);
verifyWatches();
checkOutputErrorsInitial(host, emptyArray);
@ -318,6 +345,7 @@ export class someClass2 { }`);
verifyChangeInCore(`${coreIndex.content}
function myFunc() { return 10; }`);
// TODO: a local change does not rebuild logic.js because the builder doesn't find any changes in input files to generate output
// Make a local change to function myFunc
verifyChangeInCore(`${coreIndex.content}
function myFunc() { return 100; }`);
@ -328,14 +356,20 @@ function myFunc() { return 100; }`);
host.checkTimeoutQueueLengthAndRun(1); // Builds core
const changedCore = getOutputFileStamps();
verifyChangedFiles(changedCore, outputFileStamps, [
...getOutputFileNames(SubProject.core, "index")
]);
verifyChangedFiles(
changedCore,
outputFileStamps,
getOutputFileNames(SubProject.core, "index"),
emptyArray
);
host.checkTimeoutQueueLengthAndRun(1); // Builds logic
const changedLogic = getOutputFileStamps();
verifyChangedFiles(changedLogic, changedCore, [
...getOutputFileNames(SubProject.logic, "index")
]);
verifyChangedFiles(
changedLogic,
changedCore,
getOutputFileNames(SubProject.logic, "index"),
emptyArray
);
host.checkTimeoutQueueLength(0);
checkOutputErrorsIncremental(host, emptyArray);
verifyWatches();
@ -346,6 +380,7 @@ function myFunc() { return 100; }`);
...getOutputStamps(host, SubProject.core, "index"),
...getOutputStamps(host, SubProject.logic, "index"),
];
host.writtenFiles.clear();
return result;
}
@ -389,7 +424,7 @@ createSomeObject().message;`
};
const files = [libFile, libraryTs, libraryTsconfig, appTs, appTsconfig];
const host = createWatchedSystem(files, { currentDirectory: `${projectsLocation}/${project}` });
const host = createTsBuildWatchSystem(files, { currentDirectory: `${projectsLocation}/${project}` });
createSolutionBuilderWithWatch(host, ["App"]);
checkOutputErrorsInitial(host, emptyArray);
@ -418,7 +453,7 @@ let y: string = 10;`);
host.checkTimeoutQueueLengthAndRun(1); // Builds logic
const changedLogic = getOutputFileStamps(host);
verifyChangedFiles(changedLogic, outputFileStamps, emptyArray);
verifyChangedFiles(changedLogic, outputFileStamps, emptyArray, emptyArray);
host.checkTimeoutQueueLength(0);
checkOutputErrorsIncremental(host, [
`sample1/logic/index.ts(8,5): error TS2322: Type '10' is not assignable to type 'string'.\n`
@ -429,7 +464,7 @@ let x: string = 10;`);
host.checkTimeoutQueueLengthAndRun(1); // Builds core
const changedCore = getOutputFileStamps(host);
verifyChangedFiles(changedCore, changedLogic, emptyArray);
verifyChangedFiles(changedCore, changedLogic, emptyArray, emptyArray);
host.checkTimeoutQueueLength(0);
checkOutputErrorsIncremental(host, [
`sample1/core/index.ts(5,5): error TS2322: Type '10' is not assignable to type 'string'.\n`,
@ -444,11 +479,118 @@ let x: string = 10;`);
it("when preserveWatchOutput is passed on command line", () => {
verifyIncrementalErrors({ preserveWatchOutput: true, watch: true }, /*disabledConsoleClear*/ true);
});
describe("when declaration emit errors are present", () => {
const solution = "solution";
const subProject = "app";
const subProjectLocation = `${projectsLocation}/${solution}/${subProject}`;
const fileWithError: File = {
path: `${subProjectLocation}/fileWithError.ts`,
content: `export var myClassWithError = class {
tags() { }
private p = 12
};`
};
const fileWithFixedError: File = {
path: fileWithError.path,
content: fileWithError.content.replace("private p = 12", "")
};
const fileWithoutError: File = {
path: `${subProjectLocation}/fileWithoutError.ts`,
content: `export class myClass { }`
};
const tsconfig: File = {
path: `${subProjectLocation}/tsconfig.json`,
content: JSON.stringify({ compilerOptions: { composite: true } })
};
const expectedDtsEmitErrors = [
`${subProject}/fileWithError.ts(1,12): error TS4094: Property 'p' of exported class expression may not be private or protected.\n`
];
const outputs = [
changeExtension(fileWithError.path, Extension.Js),
changeExtension(fileWithError.path, Extension.Dts),
changeExtension(fileWithoutError.path, Extension.Js),
changeExtension(fileWithoutError.path, Extension.Dts)
];
function verifyDtsErrors(host: TsBuildWatchSystem, isIncremental: boolean, expectedErrors: ReadonlyArray<string>) {
(isIncremental ? checkOutputErrorsIncremental : checkOutputErrorsInitial)(host, expectedErrors);
outputs.forEach(f => assert.equal(host.fileExists(f), !expectedErrors.length, `Expected file ${f} to ${!expectedErrors.length ? "exist" : "not exist"}`));
}
function createSolutionWithWatch(withFixedError?: true) {
const files = [libFile, withFixedError ? fileWithFixedError : fileWithError, fileWithoutError, tsconfig];
const host = createTsBuildWatchSystem(files, { currentDirectory: `${projectsLocation}/${solution}` });
createSolutionBuilderWithWatch(host, [subProject]);
verifyDtsErrors(host, /*isIncremental*/ false, withFixedError ? emptyArray : expectedDtsEmitErrors);
return host;
}
function incrementalBuild(host: TsBuildWatchSystem) {
host.checkTimeoutQueueLengthAndRun(1); // Build the app
host.checkTimeoutQueueLength(0);
}
function fixError(host: TsBuildWatchSystem) {
// Fix error
host.writeFile(fileWithError.path, fileWithFixedError.content);
host.writtenFiles.clear();
incrementalBuild(host);
verifyDtsErrors(host, /*isIncremental*/ true, emptyArray);
}
it("when fixing error files all files are emitted", () => {
const host = createSolutionWithWatch();
fixError(host);
});
it("when file with no error changes, declaration errors are reported", () => {
const host = createSolutionWithWatch();
host.writeFile(fileWithoutError.path, fileWithoutError.content.replace(/myClass/g, "myClass2"));
incrementalBuild(host);
verifyDtsErrors(host, /*isIncremental*/ true, expectedDtsEmitErrors);
});
describe("when reporting errors on introducing error", () => {
function createSolutionWithIncrementalError() {
const host = createSolutionWithWatch(/*withFixedError*/ true);
host.writeFile(fileWithError.path, fileWithError.content);
host.writtenFiles.clear();
incrementalBuild(host);
checkOutputErrorsIncremental(host, expectedDtsEmitErrors);
assert.equal(host.writtenFiles.size, 0, `Expected not to write any files: ${arrayFrom(host.writtenFiles.keys())}`);
return host;
}
function verifyWrittenFile(host: TsBuildWatchSystem, f: string) {
assert.isTrue(host.writtenFiles.has(host.toFullPath(f)), `Expected to write ${f}: ${arrayFrom(host.writtenFiles.keys())}`);
}
it("when fixing errors only changed file is emitted", () => {
const host = createSolutionWithIncrementalError();
fixError(host);
assert.equal(host.writtenFiles.size, 2, `Expected to write only changed files: ${arrayFrom(host.writtenFiles.keys())}`);
verifyWrittenFile(host, outputs[0]);
verifyWrittenFile(host, outputs[1]);
});
it("when file with no error changes, declaration errors are reported", () => {
const host = createSolutionWithIncrementalError();
host.writeFile(fileWithoutError.path, fileWithoutError.content.replace(/myClass/g, "myClass2"));
host.writtenFiles.clear();
incrementalBuild(host);
checkOutputErrorsIncremental(host, expectedDtsEmitErrors);
assert.equal(host.writtenFiles.size, 0, `Expected not to write any files: ${arrayFrom(host.writtenFiles.keys())}`);
});
});
});
});
describe("tsc-watch and tsserver works with project references", () => {
describe("invoking when references are already built", () => {
function verifyWatchesOfProject(host: WatchedSystem, expectedWatchedFiles: ReadonlyArray<string>, expectedWatchedDirectoriesRecursive: ReadonlyArray<string>, expectedWatchedDirectories?: ReadonlyArray<string>) {
function verifyWatchesOfProject(host: TsBuildWatchSystem, expectedWatchedFiles: ReadonlyArray<string>, expectedWatchedDirectoriesRecursive: ReadonlyArray<string>, expectedWatchedDirectories?: ReadonlyArray<string>) {
checkWatchedFilesDetailed(host, expectedWatchedFiles, 1);
checkWatchedDirectoriesDetailed(host, expectedWatchedDirectories || emptyArray, 1, /*recursive*/ false);
checkWatchedDirectoriesDetailed(host, expectedWatchedDirectoriesRecursive, 1, /*recursive*/ true);
@ -457,9 +599,9 @@ let x: string = 10;`);
function createSolutionOfProject(allFiles: ReadonlyArray<File>,
currentDirectory: string,
solutionBuilderconfig: string,
getOutputFileStamps: (host: WatchedSystem) => ReadonlyArray<OutputFileStamp>) {
getOutputFileStamps: (host: TsBuildWatchSystem) => ReadonlyArray<OutputFileStamp>) {
// Build the composite project
const host = createWatchedSystem(allFiles, { currentDirectory });
const host = createTsBuildWatchSystem(allFiles, { currentDirectory });
const solutionBuilder = createSolutionBuilder(host, [solutionBuilderconfig], {});
solutionBuilder.buildAllProjects();
const outputFileStamps = getOutputFileStamps(host);
@ -474,7 +616,7 @@ let x: string = 10;`);
currentDirectory: string,
solutionBuilderconfig: string,
watchConfig: string,
getOutputFileStamps: (host: WatchedSystem) => ReadonlyArray<OutputFileStamp>) {
getOutputFileStamps: (host: TsBuildWatchSystem) => ReadonlyArray<OutputFileStamp>) {
// Build the composite project
const { host, solutionBuilder } = createSolutionOfProject(allFiles, currentDirectory, solutionBuilderconfig, getOutputFileStamps);
@ -489,7 +631,7 @@ let x: string = 10;`);
currentDirectory: string,
solutionBuilderconfig: string,
openFileName: string,
getOutputFileStamps: (host: WatchedSystem) => ReadonlyArray<OutputFileStamp>) {
getOutputFileStamps: (host: TsBuildWatchSystem) => ReadonlyArray<OutputFileStamp>) {
// Build the composite project
const { host, solutionBuilder } = createSolutionOfProject(allFiles, currentDirectory, solutionBuilderconfig, getOutputFileStamps);
@ -525,12 +667,12 @@ let x: string = 10;`);
return createSolutionAndServiceOfProject(allFiles, projectsLocation, `${project}/${SubProject.tests}`, tests[1].path, getOutputFileStamps);
}
function verifyWatches(host: WatchedSystem, withTsserver?: boolean) {
function verifyWatches(host: TsBuildWatchSystem, withTsserver?: boolean) {
verifyWatchesOfProject(host, withTsserver ? expectedWatchedFiles.filter(f => f !== tests[1].path.toLowerCase()) : expectedWatchedFiles, expectedWatchedDirectoriesRecursive);
}
function verifyScenario(
edit: (host: WatchedSystem, solutionBuilder: SolutionBuilder) => void,
edit: (host: TsBuildWatchSystem, solutionBuilder: SolutionBuilder) => void,
expectedFilesAfterEdit: ReadonlyArray<string>
) {
it("with tsc-watch", () => {
@ -633,7 +775,7 @@ export function gfoo() {
}
function verifyWatchState(
host: WatchedSystem,
host: TsBuildWatchSystem,
watch: Watch,
expectedProgramFiles: ReadonlyArray<string>,
expectedWatchedFiles: ReadonlyArray<string>,
@ -720,20 +862,20 @@ export function gfoo() {
return createSolutionAndServiceOfProject(allFiles, getProjectPath(project), configToBuild, cTs.path, getOutputFileStamps);
}
function getOutputFileStamps(host: WatchedSystem) {
return expectedFiles.map(file => [file, host.getModifiedTime(file)] as OutputFileStamp);
function getOutputFileStamps(host: TsBuildWatchSystem) {
return expectedFiles.map(file => transformOutputToOutputFileStamp(file, host));
}
function verifyProgram(host: WatchedSystem, watch: Watch) {
function verifyProgram(host: TsBuildWatchSystem, watch: Watch) {
verifyWatchState(host, watch, expectedProgramFiles, expectedWatchedFiles, expectedWatchedDirectoriesRecursive, defaultDependencies, expectedWatchedDirectories);
}
function verifyProject(host: WatchedSystem, service: projectSystem.TestProjectService, orphanInfos?: ReadonlyArray<string>) {
function verifyProject(host: TsBuildWatchSystem, service: projectSystem.TestProjectService, orphanInfos?: ReadonlyArray<string>) {
verifyServerState(host, service, expectedProgramFiles, expectedWatchedFiles, expectedWatchedDirectoriesRecursive, orphanInfos);
}
function verifyServerState(
host: WatchedSystem,
host: TsBuildWatchSystem,
service: projectSystem.TestProjectService,
expectedProgramFiles: ReadonlyArray<string>,
expectedWatchedFiles: ReadonlyArray<string>,
@ -753,13 +895,13 @@ export function gfoo() {
}
function verifyScenario(
edit: (host: WatchedSystem, solutionBuilder: SolutionBuilder) => void,
edit: (host: TsBuildWatchSystem, solutionBuilder: SolutionBuilder) => void,
expectedEditErrors: ReadonlyArray<string>,
expectedProgramFiles: ReadonlyArray<string>,
expectedWatchedFiles: ReadonlyArray<string>,
expectedWatchedDirectoriesRecursive: ReadonlyArray<string>,
dependencies: ReadonlyArray<[string, ReadonlyArray<string>]>,
revert?: (host: WatchedSystem) => void,
revert?: (host: TsBuildWatchSystem) => void,
orphanInfosAfterEdit?: ReadonlyArray<string>,
orphanInfosAfterRevert?: ReadonlyArray<string>) {
it("with tsc-watch", () => {
@ -978,8 +1120,8 @@ export function gfoo() {
[refs.path, [refs.path]],
[cTsFile.path, [cTsFile.path, refs.path, bDts]]
];
function getOutputFileStamps(host: WatchedSystem) {
return expectedFiles.map(file => [file, host.getModifiedTime(file)] as OutputFileStamp);
function getOutputFileStamps(host: TsBuildWatchSystem) {
return expectedFiles.map(file => transformOutputToOutputFileStamp(file, host));
}
const { host, watch } = createSolutionAndWatchModeOfProject(allFiles, getProjectPath(project), "tsconfig.c.json", "tsconfig.c.json", getOutputFileStamps);
verifyWatchState(host, watch, expectedProgramFiles, expectedWatchedFiles, expectedWatchedDirectoriesRecursive, defaultDependencies);
View File
@ -103,6 +103,30 @@ namespace ts.projectSystem {
assert.isFalse(proj3.languageServiceEnabled);
});
it("should not crash when opening a file in a project with a disabled language service", () => {
const file1 = {
path: "/a/b/f1.js",
content: "let x =1;",
fileSize: 50 * 1024 * 1024
};
const file2 = {
path: "/a/b/f2.js",
content: "let x =1;",
fileSize: 100
};
const projName = "proj1";
const host = createServerHost([file1, file2]);
const projectService = createProjectService(host, { useSingleInferredProject: true }, { eventHandler: noop });
projectService.openExternalProject({ rootFiles: toExternalFiles([file1.path, file2.path]), options: {}, projectFileName: projName });
const proj1 = projectService.findProject(projName)!;
assert.isFalse(proj1.languageServiceEnabled);
assert.doesNotThrow(() => projectService.openClientFile(file2.path));
});
describe("ignoreConfigFiles", () => {
it("external project including config file", () => {
const file1 = {
View File
@ -7,8 +7,18 @@ namespace ts.projectSystem {
const session = createSession(createServerHost([aTs, bTs]));
openFilesForSession([bTs], session);
const response = executeSessionRequest<protocol.RenameRequest, protocol.RenameResponse>(session, protocol.CommandTypes.Rename, protocolFileLocationFromSubstring(bTs, 'a";'));
assert.deepEqual<protocol.RenameResponseBody | undefined>(response, {
const response1 = executeSessionRequest<protocol.RenameRequest, protocol.RenameResponse>(session, protocol.CommandTypes.Rename, protocolFileLocationFromSubstring(bTs, 'a";'));
assert.deepEqual<protocol.RenameResponseBody | undefined>(response1, {
info: {
canRename: false,
localizedErrorMessage: "You cannot rename this element."
},
locs: [{ file: bTs.path, locs: [protocolRenameSpanFromSubstring(bTs.content, "./a")] }],
});
session.getProjectService().setHostConfiguration({ preferences: { allowRenameOfImportPath: true } });
const response2 = executeSessionRequest<protocol.RenameRequest, protocol.RenameResponse>(session, protocol.CommandTypes.Rename, protocolFileLocationFromSubstring(bTs, 'a";'));
assert.deepEqual<protocol.RenameResponseBody | undefined>(response2, {
info: {
canRename: true,
fileToRename: aTs.path,
@ -22,13 +32,39 @@ namespace ts.projectSystem {
});
});
it("works with prefixText and suffixText", () => {
it("works with prefixText and suffixText when enabled", () => {
const aTs: File = { path: "/a.ts", content: "const x = 0; const o = { x };" };
const session = createSession(createServerHost([aTs]));
const host = createServerHost([aTs]);
const session = createSession(host);
openFilesForSession([aTs], session);
const response = executeSessionRequest<protocol.RenameRequest, protocol.RenameResponse>(session, protocol.CommandTypes.Rename, protocolFileLocationFromSubstring(aTs, "x"));
assert.deepEqual<protocol.RenameResponseBody | undefined>(response, {
// rename with prefixText and suffixText disabled
const response1 = executeSessionRequest<protocol.RenameRequest, protocol.RenameResponse>(session, protocol.CommandTypes.Rename, protocolFileLocationFromSubstring(aTs, "x"));
assert.deepEqual<protocol.RenameResponseBody | undefined>(response1, {
info: {
canRename: true,
fileToRename: undefined,
displayName: "x",
fullDisplayName: "x",
kind: ScriptElementKind.constElement,
kindModifiers: ScriptElementKindModifier.none,
triggerSpan: protocolTextSpanFromSubstring(aTs.content, "x"),
},
locs: [
{
file: aTs.path,
locs: [
protocolRenameSpanFromSubstring(aTs.content, "x"),
protocolRenameSpanFromSubstring(aTs.content, "x", { index: 1 }),
],
},
],
});
// rename with prefixText and suffixText enabled
session.getProjectService().setHostConfiguration({ preferences: { providePrefixAndSuffixTextForRename: true } });
const response2 = executeSessionRequest<protocol.RenameRequest, protocol.RenameResponse>(session, protocol.CommandTypes.Rename, protocolFileLocationFromSubstring(aTs, "x"));
assert.deepEqual<protocol.RenameResponseBody | undefined>(response2, {
info: {
canRename: true,
fileToRename: undefined,
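To make the prefixText/suffixText behavior exercised above concrete, here is a rough, self-contained sketch of how a client could apply rename spans that carry those fields; the helper and offsets are invented for illustration and are not part of the protocol code:

```ts
// Illustrative only: apply rename spans, honoring optional prefixText/suffixText.
interface RenameSpanSketch { start: number; length: number; prefixText?: string; suffixText?: string; }

function applyRename(source: string, newName: string, spans: ReadonlyArray<RenameSpanSketch>): string {
    // Apply edits back to front so earlier offsets remain valid.
    return [...spans]
        .sort((a, b) => b.start - a.start)
        .reduce((text, span) => {
            const replacement = (span.prefixText || "") + newName + (span.suffixText || "");
            return text.slice(0, span.start) + replacement + text.slice(span.start + span.length);
        }, source);
}

// For `const x = 0; const o = { x };`, renaming x -> y with an "x: " prefix on the
// shorthand-property span keeps the property name stable:
const renamed = applyRename("const x = 0; const o = { x };", "y", [
    { start: 6, length: 1 },                     // the declaration of x
    { start: 25, length: 1, prefixText: "x: " }, // the shorthand property (illustrative span)
]);
// renamed === "const y = 0; const o = { x: y };"
```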

View File

@ -204,12 +204,11 @@ namespace ts {
reportWatchModeWithoutSysSupport();
}
// TODO: change this to host if watch => watchHost otherwise without watch
const buildHost = buildOptions.watch ?
createSolutionBuilderWithWatchHost(sys, reportDiagnostic, createBuilderStatusReporter(sys, shouldBePretty()), createWatchStatusReporter()) :
createSolutionBuilderHost(sys, reportDiagnostic, createBuilderStatusReporter(sys, shouldBePretty()), createReportErrorSummary(buildOptions));
buildHost.beforeCreateProgram = enableStatistics;
buildHost.afterProgramEmitAndDiagnostics = reportStatistics;
createSolutionBuilderWithWatchHost(sys, createEmitAndSemanticDiagnosticsBuilderProgram, reportDiagnostic, createBuilderStatusReporter(sys, shouldBePretty()), createWatchStatusReporter()) :
createSolutionBuilderHost(sys, createAbstractBuilder, reportDiagnostic, createBuilderStatusReporter(sys, shouldBePretty()), createReportErrorSummary(buildOptions));
updateCreateProgram(buildHost);
buildHost.afterProgramEmitAndDiagnostics = (program: BuilderProgram) => reportStatistics(program.getProgram());
const builder = createSolutionBuilder(buildHost, projects, buildOptions);
if (buildOptions.clean) {
@ -234,7 +233,7 @@ namespace ts {
const host = createCompilerHost(options);
const currentDirectory = host.getCurrentDirectory();
const getCanonicalFileName = createGetCanonicalFileName(host.useCaseSensitiveFileNames());
changeCompilerHostToUseCache(host, fileName => toPath(fileName, currentDirectory, getCanonicalFileName), /*useCacheForSourceFile*/ false);
changeCompilerHostLikeToUseCache(host, fileName => toPath(fileName, currentDirectory, getCanonicalFileName));
enableStatistics(options);
const programOptions: CreateProgramOptions = {
@ -255,15 +254,19 @@ namespace ts {
return sys.exit(exitStatus);
}
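The `changeCompilerHostLikeToUseCache` call a few lines up replaces the host's file-system callbacks with cached versions keyed by resolved path. A minimal sketch of that general pattern (illustrative, not the actual helper):

```ts
// Illustrative only: memoize a host's readFile by a caller-supplied key function.
interface HostLikeSketch { readFile(fileName: string): string | undefined; }

function withReadFileCache(host: HostLikeSketch, toKey: (fileName: string) => string): HostLikeSketch {
    const cache = new Map<string, string | undefined>();
    const originalReadFile = host.readFile.bind(host);
    host.readFile = fileName => {
        const key = toKey(fileName);
        if (!cache.has(key)) {
            cache.set(key, originalReadFile(fileName)); // miss: read once and remember the result
        }
        return cache.get(key);
    };
    return host;
}

// Usage sketch (mirrors the call shape above, names assumed):
// withReadFileCache(host, fileName => toPath(fileName, currentDirectory, getCanonicalFileName));
```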
function updateWatchCompilationHost(watchCompilerHost: WatchCompilerHost<EmitAndSemanticDiagnosticsBuilderProgram>) {
const compileUsingBuilder = watchCompilerHost.createProgram;
watchCompilerHost.createProgram = (rootNames, options, host, oldProgram, configFileParsingDiagnostics, projectReferences) => {
function updateCreateProgram<T extends BuilderProgram>(host: { createProgram: CreateProgram<T>; }) {
const compileUsingBuilder = host.createProgram;
host.createProgram = (rootNames, options, host, oldProgram, configFileParsingDiagnostics, projectReferences) => {
Debug.assert(rootNames !== undefined || (options === undefined && !!oldProgram));
if (options !== undefined) {
enableStatistics(options);
}
return compileUsingBuilder(rootNames, options, host, oldProgram, configFileParsingDiagnostics, projectReferences);
};
}
function updateWatchCompilationHost(watchCompilerHost: WatchCompilerHost<EmitAndSemanticDiagnosticsBuilderProgram>) {
updateCreateProgram(watchCompilerHost);
const emitFilesUsingBuilder = watchCompilerHost.afterProgramCreate!; // TODO: GH#18217
watchCompilerHost.afterProgramCreate = builderProgram => {
emitFilesUsingBuilder(builderProgram);
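`updateCreateProgram` above is an instance of a small pattern: wrap a host's factory callback so a side effect (here, enabling statistics) runs before delegating to the original. A stripped-down sketch with invented names:

```ts
// Illustrative only: intercept a host's createProgram-style factory.
interface OptionsSketch { diagnostics?: boolean; }
interface ProgramSketch { rootNames: ReadonlyArray<string>; }
interface ProgramHostSketch { createProgram(rootNames: ReadonlyArray<string>, options?: OptionsSketch): ProgramSketch; }

function wrapCreateProgram(host: ProgramHostSketch, beforeCreate: (options: OptionsSketch) => void): void {
    const original = host.createProgram;
    host.createProgram = (rootNames, options) => {
        if (options !== undefined) {
            beforeCreate(options); // run the side effect, then delegate
        }
        return original(rootNames, options);
    };
}

// Usage sketch:
const hostSketch: ProgramHostSketch = { createProgram: rootNames => ({ rootNames }) };
wrapCreateProgram(hostSketch, options => console.log("statistics enabled for", options));
hostSketch.createProgram(["main.ts"], { diagnostics: true });
```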

View File

@ -1,5 +1,5 @@
{
"extends": "../tsconfig-base",
"extends": "../tsconfig-noncomposite-base",
"compilerOptions": {
"outFile": "../../built/local/tsc.js"
},

View File

@ -0,0 +1,8 @@
{
"extends": "./tsconfig-base",
"compilerOptions": {
"declaration": false,
"declarationMap": false,
"composite": false
}
}
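This new `tsconfig-noncomposite-base.json` sits between the shared base and the bundled outFile projects (tsc above; tsserver and typingsInstaller below): it inherits everything from `tsconfig-base` but switches `declaration`, `declarationMap`, and `composite` off. As a reminder of how `extends` layering resolves, here is a tiny sketch that models configs as objects instead of file paths (values are illustrative only):

```ts
// Illustrative only: with tsconfig `extends`, the extending file's compilerOptions
// override the base's, one level at a time.
interface ConfigSketch {
    extends?: ConfigSketch; // the real field is a path; an object keeps the sketch self-contained
    compilerOptions?: { [option: string]: string | boolean };
}

function resolveCompilerOptions(config: ConfigSketch): { [option: string]: string | boolean } {
    const base = config.extends ? resolveCompilerOptions(config.extends) : {};
    return { ...base, ...config.compilerOptions };
}

// Assumed contents, for illustration:
const tsconfigBase: ConfigSketch = { compilerOptions: { composite: true, declaration: true, declarationMap: true } };
const nonCompositeBase: ConfigSketch = { extends: tsconfigBase, compilerOptions: { declaration: false, declarationMap: false, composite: false } };
const tscProject: ConfigSketch = { extends: nonCompositeBase, compilerOptions: { outFile: "../../built/local/tsc.js" } };

resolveCompilerOptions(tscProject);
// -> { composite: false, declaration: false, declarationMap: false, outFile: "../../built/local/tsc.js" }
```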

View File

@ -967,4 +967,12 @@ namespace ts.server {
(process as any).noAsar = true;
// Start listening
ioSession.listen();
if (Debug.isDebugging) {
Debug.enableDebugInfo();
}
if (ts.sys.tryEnableSourceMapsForHost && /^development$/i.test(ts.sys.getEnvironmentVariable("NODE_ENV"))) {
ts.sys.tryEnableSourceMapsForHost();
}
}

View File

@ -1,5 +1,5 @@
{
"extends": "../tsconfig-base",
"extends": "../tsconfig-noncomposite-base",
"compilerOptions": {
"outFile": "../../built/local/tsserver.js",

View File

@ -1,5 +1,5 @@
{
"extends": "../tsconfig-base",
"extends": "../tsconfig-noncomposite-base",
"compilerOptions": {
"removeComments": true,
"outFile": "../../built/local/typingsInstaller.js",

Some files were not shown because too many files have changed in this diff.