Mirror of https://github.com/microsoft/TypeScript.git (synced 2026-02-07 05:41:22 -06:00)
Merge remote-tracking branch 'origin/master' into release-4.2
Commit 12e76910b2
.travis.yml (28 changed lines)
@@ -1,28 +0,0 @@
language: node_js

node_js:
- 'node'
- '12'
- '10'

env:
- workerCount=3 timeout=600000

matrix:
fast_finish: true

branches:
only:
- master
- /^release-.*/

install:
- npm uninstall typescript --no-save
- npm ci

cache:
directories:
- node_modules

git:
depth: 1
Gulpfile.js (16 changed lines)
@@ -5,7 +5,6 @@ const log = require("fancy-log");
const newer = require("gulp-newer");
const sourcemaps = require("gulp-sourcemaps");
const del = require("del");
const fold = require("travis-fold");
const rename = require("gulp-rename");
const concat = require("gulp-concat");
const merge2 = require("merge2");

@@ -346,9 +345,6 @@ const runEslintRulesTests = () => runConsoleTests("scripts/eslint/built/tests",
task("run-eslint-rules-tests", series(buildEslintRules, runEslintRulesTests));
task("run-eslint-rules-tests").description = "Runs the eslint rule tests";

const lintFoldStart = async () => { if (fold.isTravis()) console.log(fold.start("lint")); };
const lintFoldEnd = async () => { if (fold.isTravis()) console.log(fold.end("lint")); };

/** @type { (folder: string) => { (): Promise<any>; displayName?: string } } */
const eslint = (folder) => async () => {

@@ -374,20 +370,20 @@ const eslint = (folder) => async () => {

const lintScripts = eslint("scripts");
lintScripts.displayName = "lint-scripts";
task("lint-scripts", series([buildEslintRules, lintFoldStart, lintScripts, lintFoldEnd]));
task("lint-scripts", series([buildEslintRules, lintScripts]));
task("lint-scripts").description = "Runs eslint on the scripts sources.";

const lintCompiler = eslint("src");
lintCompiler.displayName = "lint-compiler";
task("lint-compiler", series([buildEslintRules, lintFoldStart, lintCompiler, lintFoldEnd]));
task("lint-compiler", series([buildEslintRules, lintCompiler]));
task("lint-compiler").description = "Runs eslint on the compiler sources.";
task("lint-compiler").flags = {
" --ci": "Runs eslint additional rules",
};

const lint = series([buildEslintRules, lintFoldStart, lintScripts, lintCompiler, lintFoldEnd]);
const lint = series([buildEslintRules, lintScripts, lintCompiler]);
lint.displayName = "lint";
task("lint", series([buildEslintRules, lintFoldStart, lint, lintFoldEnd]));
task("lint", series([buildEslintRules, lint]));
task("lint").description = "Runs eslint on the compiler and scripts sources.";
task("lint").flags = {
" --ci": "Runs eslint additional rules",

@@ -429,9 +425,7 @@ const buildOtherOutputs = parallel(buildCancellationToken, buildTypingsInstaller
task("other-outputs", series(preBuild, buildOtherOutputs));
task("other-outputs").description = "Builds miscelaneous scripts and documents distributed with the LKG";

const buildFoldStart = async () => { if (fold.isTravis()) console.log(fold.start("build")); };
const buildFoldEnd = async () => { if (fold.isTravis()) console.log(fold.end("build")); };
task("local", series(buildFoldStart, preBuild, parallel(localize, buildTsc, buildServer, buildServices, buildLssl, buildOtherOutputs), buildFoldEnd));
task("local", series(preBuild, parallel(localize, buildTsc, buildServer, buildServices, buildLssl, buildOtherOutputs)));
task("local").description = "Builds the full compiler and services";
task("local").flags = {
" --built": "Compile using the built version of the compiler."
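The removed lintFoldStart/lintFoldEnd and buildFoldStart/buildFoldEnd steps were no-op async tasks that only printed Travis log-folding markers, so dropping them from each series leaves the real work unchanged. A minimal sketch of the same composition pattern, using illustrative task names rather than the ones in this Gulpfile:

import { series } from "gulp"; // assumes gulp v4-style task composition and @types/gulp

// Each step is just an async function; series() runs them in order.
const buildEslintRulesSketch = async () => { /* compile the custom lint rules */ };
const lintScriptsSketch = async () => { /* run eslint over ./scripts */ };
const lintCompilerSketch = async () => { /* run eslint over ./src */ };

// Before this change the series also contained fold-start/fold-end steps that only
// emitted Travis log markers; without Travis, the series is just the real work.
export const lint = series(buildEslintRulesSketch, lintScriptsSketch, lintCompilerSketch);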
@@ -1,7 +1,7 @@

# TypeScript

[](https://travis-ci.org/microsoft/TypeScript)
[](https://github.com/microsoft/TypeScript/actions?query=workflow%3ACI)
[](https://dev.azure.com/typescript/TypeScript/_build?definitionId=7)
[](https://www.npmjs.com/package/typescript)
[](https://www.npmjs.com/package/typescript)
@@ -106281,7 +106281,7 @@ var ts;
var oldSourceFile = oldSourceFiles_1[_i];
var newFile = getSourceFileByPath(oldSourceFile.resolvedPath);
if (shouldCreateNewSourceFile || !newFile ||
// old file wasnt redirect but new file is
// old file wasn't redirect but new file is
(oldSourceFile.resolvedPath === oldSourceFile.path && newFile.resolvedPath !== oldSourceFile.path)) {
host.onReleaseOldSourceFile(oldSourceFile, oldProgram.getCompilerOptions(), !!getSourceFileByPath(oldSourceFile.path));
}

@@ -109397,7 +109397,7 @@ var ts;
var newReferences;
// if not using old state, every file is changed
if (!useOldState ||
// File wasnt present in old state
// File wasn't present in old state
!(oldInfo = oldState.fileInfos.get(sourceFilePath)) ||
// versions dont match
oldInfo.version !== info.version ||

@@ -110549,7 +110549,7 @@ var ts;
var resolvedModules = [];
var compilerOptions = resolutionHost.getCompilationSettings();
var hasInvalidatedNonRelativeUnresolvedImport = logChanges && isFileWithInvalidatedNonRelativeUnresolvedImports(path);
// All the resolutions in this file are invalidated if this file wasnt resolved using same redirect
// All the resolutions in this file are invalidated if this file wasn't resolved using same redirect
var program = resolutionHost.getCurrentProgram();
var oldRedirect = program && program.getResolvedProjectReferenceToRedirect(containingFile);
var unmatchedRedirects = oldRedirect ?

@@ -106475,7 +106475,7 @@ var ts;
var oldSourceFile = oldSourceFiles_1[_i];
var newFile = getSourceFileByPath(oldSourceFile.resolvedPath);
if (shouldCreateNewSourceFile || !newFile ||
// old file wasnt redirect but new file is
// old file wasn't redirect but new file is
(oldSourceFile.resolvedPath === oldSourceFile.path && newFile.resolvedPath !== oldSourceFile.path)) {
host.onReleaseOldSourceFile(oldSourceFile, oldProgram.getCompilerOptions(), !!getSourceFileByPath(oldSourceFile.path));
}

@@ -109591,7 +109591,7 @@ var ts;
var newReferences;
// if not using old state, every file is changed
if (!useOldState ||
// File wasnt present in old state
// File wasn't present in old state
!(oldInfo = oldState.fileInfos.get(sourceFilePath)) ||
// versions dont match
oldInfo.version !== info.version ||

@@ -110743,7 +110743,7 @@ var ts;
var resolvedModules = [];
var compilerOptions = resolutionHost.getCompilationSettings();
var hasInvalidatedNonRelativeUnresolvedImport = logChanges && isFileWithInvalidatedNonRelativeUnresolvedImports(path);
// All the resolutions in this file are invalidated if this file wasnt resolved using same redirect
// All the resolutions in this file are invalidated if this file wasn't resolved using same redirect
var program = resolutionHost.getCurrentProgram();
var oldRedirect = program && program.getResolvedProjectReferenceToRedirect(containingFile);
var unmatchedRedirects = oldRedirect ?

@@ -106475,7 +106475,7 @@ var ts;
var oldSourceFile = oldSourceFiles_1[_i];
var newFile = getSourceFileByPath(oldSourceFile.resolvedPath);
if (shouldCreateNewSourceFile || !newFile ||
// old file wasnt redirect but new file is
// old file wasn't redirect but new file is
(oldSourceFile.resolvedPath === oldSourceFile.path && newFile.resolvedPath !== oldSourceFile.path)) {
host.onReleaseOldSourceFile(oldSourceFile, oldProgram.getCompilerOptions(), !!getSourceFileByPath(oldSourceFile.path));
}

@@ -109591,7 +109591,7 @@ var ts;
var newReferences;
// if not using old state, every file is changed
if (!useOldState ||
// File wasnt present in old state
// File wasn't present in old state
!(oldInfo = oldState.fileInfos.get(sourceFilePath)) ||
// versions dont match
oldInfo.version !== info.version ||

@@ -110743,7 +110743,7 @@ var ts;
var resolvedModules = [];
var compilerOptions = resolutionHost.getCompilationSettings();
var hasInvalidatedNonRelativeUnresolvedImport = logChanges && isFileWithInvalidatedNonRelativeUnresolvedImports(path);
// All the resolutions in this file are invalidated if this file wasnt resolved using same redirect
// All the resolutions in this file are invalidated if this file wasn't resolved using same redirect
var program = resolutionHost.getCurrentProgram();
var oldRedirect = program && program.getResolvedProjectReferenceToRedirect(containingFile);
var unmatchedRedirects = oldRedirect ?

@@ -106475,7 +106475,7 @@ var ts;
var oldSourceFile = oldSourceFiles_1[_i];
var newFile = getSourceFileByPath(oldSourceFile.resolvedPath);
if (shouldCreateNewSourceFile || !newFile ||
// old file wasnt redirect but new file is
// old file wasn't redirect but new file is
(oldSourceFile.resolvedPath === oldSourceFile.path && newFile.resolvedPath !== oldSourceFile.path)) {
host.onReleaseOldSourceFile(oldSourceFile, oldProgram.getCompilerOptions(), !!getSourceFileByPath(oldSourceFile.path));
}

@@ -109591,7 +109591,7 @@ var ts;
var newReferences;
// if not using old state, every file is changed
if (!useOldState ||
// File wasnt present in old state
// File wasn't present in old state
!(oldInfo = oldState.fileInfos.get(sourceFilePath)) ||
// versions dont match
oldInfo.version !== info.version ||

@@ -110743,7 +110743,7 @@ var ts;
var resolvedModules = [];
var compilerOptions = resolutionHost.getCompilationSettings();
var hasInvalidatedNonRelativeUnresolvedImport = logChanges && isFileWithInvalidatedNonRelativeUnresolvedImports(path);
// All the resolutions in this file are invalidated if this file wasnt resolved using same redirect
// All the resolutions in this file are invalidated if this file wasn't resolved using same redirect
var program = resolutionHost.getCurrentProgram();
var oldRedirect = program && program.getResolvedProjectReferenceToRedirect(containingFile);
var unmatchedRedirects = oldRedirect ?

@@ -106270,7 +106270,7 @@ var ts;
var oldSourceFile = oldSourceFiles_1[_i];
var newFile = getSourceFileByPath(oldSourceFile.resolvedPath);
if (shouldCreateNewSourceFile || !newFile ||
// old file wasnt redirect but new file is
// old file wasn't redirect but new file is
(oldSourceFile.resolvedPath === oldSourceFile.path && newFile.resolvedPath !== oldSourceFile.path)) {
host.onReleaseOldSourceFile(oldSourceFile, oldProgram.getCompilerOptions(), !!getSourceFileByPath(oldSourceFile.path));
}

@@ -109386,7 +109386,7 @@ var ts;
var newReferences;
// if not using old state, every file is changed
if (!useOldState ||
// File wasnt present in old state
// File wasn't present in old state
!(oldInfo = oldState.fileInfos.get(sourceFilePath)) ||
// versions dont match
oldInfo.version !== info.version ||

@@ -110538,7 +110538,7 @@ var ts;
var resolvedModules = [];
var compilerOptions = resolutionHost.getCompilationSettings();
var hasInvalidatedNonRelativeUnresolvedImport = logChanges && isFileWithInvalidatedNonRelativeUnresolvedImports(path);
// All the resolutions in this file are invalidated if this file wasnt resolved using same redirect
// All the resolutions in this file are invalidated if this file wasn't resolved using same redirect
var program = resolutionHost.getCurrentProgram();
var oldRedirect = program && program.getResolvedProjectReferenceToRedirect(containingFile);
var unmatchedRedirects = oldRedirect ?
package-lock.json (generated, 262 changed lines)
@@ -279,18 +279,18 @@
}
},
"@octokit/auth-token": {
"version": "2.4.4",
"resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-2.4.4.tgz",
"integrity": "sha512-LNfGu3Ro9uFAYh10MUZVaT7X2CnNm2C8IDQmabx+3DygYIQjs9FwzFAHN/0t6mu5HEPhxcb1XOuxdpY82vCg2Q==",
"version": "2.4.5",
"resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-2.4.5.tgz",
"integrity": "sha512-BpGYsPgJt05M7/L/5FoE1PiAbdxXFZkX/3kDYcsvd1v6UhlnE5e96dTDr0ezX/EFwciQxf3cNV0loipsURU+WA==",
"dev": true,
"requires": {
"@octokit/types": "^6.0.0"
"@octokit/types": "^6.0.3"
}
},
"@octokit/core": {
"version": "3.2.4",
"resolved": "https://registry.npmjs.org/@octokit/core/-/core-3.2.4.tgz",
"integrity": "sha512-d9dTsqdePBqOn7aGkyRFe7pQpCXdibSJ5SFnrTr0axevObZrpz3qkWm7t/NjYv5a66z6vhfteriaq4FRz3e0Qg==",
"version": "3.2.5",
"resolved": "https://registry.npmjs.org/@octokit/core/-/core-3.2.5.tgz",
"integrity": "sha512-+DCtPykGnvXKWWQI0E1XD+CCeWSBhB6kwItXqfFmNBlIlhczuDPbg+P6BtLnVBaRJDAjv+1mrUJuRsFSjktopg==",
"dev": true,
"requires": {
"@octokit/auth-token": "^2.4.4",

@@ -302,67 +302,67 @@
}
},
"@octokit/endpoint": {
"version": "6.0.10",
"resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-6.0.10.tgz",
"integrity": "sha512-9+Xef8nT7OKZglfkOMm7IL6VwxXUQyR7DUSU0LH/F7VNqs8vyd7es5pTfz9E7DwUIx7R3pGscxu1EBhYljyu7Q==",
"version": "6.0.11",
"resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-6.0.11.tgz",
"integrity": "sha512-fUIPpx+pZyoLW4GCs3yMnlj2LfoXTWDUVPTC4V3MUEKZm48W+XYpeWSZCv+vYF1ZABUm2CqnDVf1sFtIYrj7KQ==",
"dev": true,
"requires": {
"@octokit/types": "^6.0.0",
"@octokit/types": "^6.0.3",
"is-plain-object": "^5.0.0",
"universal-user-agent": "^6.0.0"
}
},
"@octokit/graphql": {
"version": "4.5.8",
"resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-4.5.8.tgz",
"integrity": "sha512-WnCtNXWOrupfPJgXe+vSmprZJUr0VIu14G58PMlkWGj3cH+KLZEfKMmbUQ6C3Wwx6fdhzVW1CD5RTnBdUHxhhA==",
"version": "4.6.0",
"resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-4.6.0.tgz",
"integrity": "sha512-CJ6n7izLFXLvPZaWzCQDjU/RP+vHiZmWdOunaCS87v+2jxMsW9FB5ktfIxybRBxZjxuJGRnxk7xJecWTVxFUYQ==",
"dev": true,
"requires": {
"@octokit/request": "^5.3.0",
"@octokit/types": "^6.0.0",
"@octokit/types": "^6.0.3",
"universal-user-agent": "^6.0.0"
}
},
"@octokit/openapi-types": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-2.2.0.tgz",
"integrity": "sha512-274lNUDonw10kT8wHg8fCcUc1ZjZHbWv0/TbAwb0ojhBQqZYc1cQ/4yqTVTtPMDeZ//g7xVEYe/s3vURkRghPg==",
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-4.0.1.tgz",
"integrity": "sha512-k2hRcfcLRyPJjtYfJLzg404n7HZ6sUpAWAR/uNI8tf96NgatWOpw1ocdF+WFfx/trO1ivBh7ckynO1rn+xAw/Q==",
"dev": true
},
"@octokit/plugin-paginate-rest": {
"version": "2.7.0",
"resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-2.7.0.tgz",
"integrity": "sha512-+zARyncLjt9b0FjqPAbJo4ss7HOlBi1nprq+cPlw5vu2+qjy7WvlXhtXFdRHQbSL1Pt+bfAKaLADEkkvg8sP8w==",
"version": "2.9.1",
"resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-2.9.1.tgz",
"integrity": "sha512-8wnuWGjwDIEobbBet2xAjZwgiMVTgIer5wBsnGXzV3lJ4yqphLU2FEMpkhSrDx7y+WkZDfZ+V+1cFMZ1mAaFag==",
"dev": true,
"requires": {
"@octokit/types": "^6.0.1"
"@octokit/types": "^6.8.0"
}
},
"@octokit/plugin-request-log": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/@octokit/plugin-request-log/-/plugin-request-log-1.0.2.tgz",
"integrity": "sha512-oTJSNAmBqyDR41uSMunLQKMX0jmEXbwD1fpz8FG27lScV3RhtGfBa1/BBLym+PxcC16IBlF7KH9vP1BUYxA+Eg==",
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/@octokit/plugin-request-log/-/plugin-request-log-1.0.3.tgz",
"integrity": "sha512-4RFU4li238jMJAzLgAwkBAw+4Loile5haQMQr+uhFq27BmyJXcXSKvoQKqh0agsZEiUlW6iSv3FAgvmGkur7OQ==",
"dev": true
},
"@octokit/plugin-rest-endpoint-methods": {
"version": "4.4.1",
"resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-4.4.1.tgz",
"integrity": "sha512-+v5PcvrUcDeFXf8hv1gnNvNLdm4C0+2EiuWt9EatjjUmfriM1pTMM+r4j1lLHxeBQ9bVDmbywb11e3KjuavieA==",
"version": "4.10.1",
"resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-4.10.1.tgz",
"integrity": "sha512-YGMiEidTORzgUmYZu0eH4q2k8kgQSHQMuBOBYiKxUYs/nXea4q/Ze6tDzjcRAPmHNJYXrENs1bEMlcdGKT+8ug==",
"dev": true,
"requires": {
"@octokit/types": "^6.1.0",
"@octokit/types": "^6.8.2",
"deprecation": "^2.3.1"
}
},
"@octokit/request": {
"version": "5.4.12",
"resolved": "https://registry.npmjs.org/@octokit/request/-/request-5.4.12.tgz",
"integrity": "sha512-MvWYdxengUWTGFpfpefBBpVmmEYfkwMoxonIB3sUGp5rhdgwjXL1ejo6JbgzG/QD9B/NYt/9cJX1pxXeSIUCkg==",
"version": "5.4.14",
"resolved": "https://registry.npmjs.org/@octokit/request/-/request-5.4.14.tgz",
"integrity": "sha512-VkmtacOIQp9daSnBmDI92xNIeLuSRDOIuplp/CJomkvzt7M18NXgG044Cx/LFKLgjKt9T2tZR6AtJayba9GTSA==",
"dev": true,
"requires": {
"@octokit/endpoint": "^6.0.1",
"@octokit/request-error": "^2.0.0",
"@octokit/types": "^6.0.3",
"@octokit/types": "^6.7.1",
"deprecation": "^2.0.0",
"is-plain-object": "^5.0.0",
"node-fetch": "^2.6.1",

@@ -379,35 +379,35 @@
}
},
"@octokit/request-error": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-2.0.4.tgz",
"integrity": "sha512-LjkSiTbsxIErBiRh5wSZvpZqT4t0/c9+4dOe0PII+6jXR+oj/h66s7E4a/MghV7iT8W9ffoQ5Skoxzs96+gBPA==",
"version": "2.0.5",
"resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-2.0.5.tgz",
"integrity": "sha512-T/2wcCFyM7SkXzNoyVNWjyVlUwBvW3igM3Btr/eKYiPmucXTtkxt2RBsf6gn3LTzaLSLTQtNmvg+dGsOxQrjZg==",
"dev": true,
"requires": {
"@octokit/types": "^6.0.0",
"@octokit/types": "^6.0.3",
"deprecation": "^2.0.0",
"once": "^1.4.0"
}
},
"@octokit/rest": {
"version": "18.0.12",
"resolved": "https://registry.npmjs.org/@octokit/rest/-/rest-18.0.12.tgz",
"integrity": "sha512-hNRCZfKPpeaIjOVuNJzkEL6zacfZlBPV8vw8ReNeyUkVvbuCvvrrx8K8Gw2eyHHsmd4dPlAxIXIZ9oHhJfkJpw==",
"version": "18.1.0",
"resolved": "https://registry.npmjs.org/@octokit/rest/-/rest-18.1.0.tgz",
"integrity": "sha512-YQfpTzWV3jdzDPyXQVO54f5I2t1zxk/S53Vbe+Aa5vQj6MdTx6sNEWzmUzUO8lSVowbGOnjcQHzW1A8ATr+/7g==",
"dev": true,
"requires": {
"@octokit/core": "^3.2.3",
"@octokit/plugin-paginate-rest": "^2.6.2",
"@octokit/plugin-request-log": "^1.0.2",
"@octokit/plugin-rest-endpoint-methods": "4.4.1"
"@octokit/plugin-rest-endpoint-methods": "4.10.1"
}
},
"@octokit/types": {
"version": "6.2.1",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-6.2.1.tgz",
"integrity": "sha512-jHs9OECOiZxuEzxMZcXmqrEO8GYraHF+UzNVH2ACYh8e/Y7YoT+hUf9ldvVd6zIvWv4p3NdxbQ0xx3ku5BnSiA==",
"version": "6.8.2",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-6.8.2.tgz",
"integrity": "sha512-RpG0NJd7OKSkWptiFhy1xCLkThs5YoDIKM21lEtDmUvSpbaIEfrxzckWLUGDFfF8RydSyngo44gDv8m2hHruUg==",
"dev": true,
"requires": {
"@octokit/openapi-types": "^2.2.0",
"@octokit/openapi-types": "^4.0.0",
"@types/node": ">= 8"
}
},

@@ -585,9 +585,9 @@
"dev": true
},
"@types/node": {
"version": "14.14.20",
"resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.20.tgz",
"integrity": "sha512-Y93R97Ouif9JEOWPIUyU+eyIdyRqQR0I8Ez1dzku4hDx34NWh4HbtIc3WNzwB1Y9ULvNGeu5B8h8bVL5cAk4/A==",
"version": "14.14.25",
"resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.25.tgz",
"integrity": "sha512-EPpXLOVqDvisVxtlbvzfyqSsFeQxltFbluZNRndIb8tr9KiBnYNLzrc1N3pyKUCww2RNrfHDViqDWWE1LCJQtQ==",
"dev": true
},
"@types/node-fetch": {

@@ -632,12 +632,6 @@
"@types/node": "*"
}
},
"@types/travis-fold": {
"version": "0.1.0",
"resolved": "https://registry.npmjs.org/@types/travis-fold/-/travis-fold-0.1.0.tgz",
"integrity": "sha512-qrXB0Div8vIzA8P809JRlh9lD4mSOYwRBJbU1zcj0BWhULP15Zx0oQyJtjaOnkNR5RZcYQDbgimj40M1GDmhcQ==",
"dev": true
},
"@types/undertaker": {
"version": "1.2.3",
"resolved": "https://registry.npmjs.org/@types/undertaker/-/undertaker-1.2.3.tgz",

@@ -1382,9 +1376,9 @@
"dev": true
},
"before-after-hook": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.1.0.tgz",
"integrity": "sha512-IWIbu7pMqyw3EAJHzzHbWa85b6oud/yfKYg5rqB5hNE8CeMi3nX+2C2sj0HswfblST86hpVEOAb9x34NZd6P7A==",
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.1.1.tgz",
"integrity": "sha512-5ekuQOvO04MDj7kYZJaMab2S8SPjGJbotVNyv7QYFCOAwrGZs/YnoDNlh1U+m5hl7H2D/+n0taaAV/tfyd3KMA==",
"dev": true
},
"binary-extensions": {

@@ -1743,9 +1737,9 @@
"dev": true
},
"call-bind": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.1.tgz",
"integrity": "sha512-tvAvUwNcRikl3RVF20X9lsYmmepsovzTWeJiXjO0PkJp15uy/6xKFZOQtuiSULwYW+6ToZBprphCgWXC2dSgcQ==",
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz",
"integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==",
"dev": true,
"requires": {
"function-bind": "^1.1.1",

@@ -1765,9 +1759,9 @@
"dev": true
},
"chai": {
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/chai/-/chai-4.2.0.tgz",
"integrity": "sha512-XQU3bhBukrOsQCuwZndwGcCVQHyZi53fQ6Ys1Fym7E4olpIqqZZhhoFJoaKVvV17lWQoXYwgWN2nF5crA8J2jw==",
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/chai/-/chai-4.3.0.tgz",
"integrity": "sha512-/BFd2J30EcOwmdOgXvVsmM48l0Br0nmZPlO0uOW4XKh6kpsUumRXBgPV+IlaqFaqr9cYbeoZAM1Npx0i4A+aiA==",
"dev": true,
"requires": {
"assertion-error": "^1.1.0",

@@ -2565,18 +2559,18 @@
}
},
"elliptic": {
"version": "6.5.3",
"resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.3.tgz",
"integrity": "sha512-IMqzv5wNQf+E6aHeIqATs0tOLeOTwj1QKbRcS3jBbYkl5oLAserA8yJTT7/VyHUYG91PRmPyeQDObKLPpeS4dw==",
"version": "6.5.4",
"resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.4.tgz",
"integrity": "sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ==",
"dev": true,
"requires": {
"bn.js": "^4.4.0",
"brorand": "^1.0.1",
"bn.js": "^4.11.9",
"brorand": "^1.1.0",
"hash.js": "^1.0.0",
"hmac-drbg": "^1.0.0",
"inherits": "^2.0.1",
"minimalistic-assert": "^1.0.0",
"minimalistic-crypto-utils": "^1.0.0"
"hmac-drbg": "^1.0.1",
"inherits": "^2.0.4",
"minimalistic-assert": "^1.0.1",
"minimalistic-crypto-utils": "^1.0.1"
},
"dependencies": {
"bn.js": {

@@ -3757,9 +3751,9 @@
"dev": true
},
"get-intrinsic": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.0.2.tgz",
"integrity": "sha512-aeX0vrFm21ILl3+JpFFRNe9aUvp6VFZb2/CTbgLb8j75kOhvoNYjt9d8KA/tJG4gSo8nzEDedRl0h7vDmBYRVg==",
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.1.tgz",
"integrity": "sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q==",
"dev": true,
"requires": {
"function-bind": "^1.1.1",

@@ -4684,25 +4678,39 @@
},
"dependencies": {
"es-abstract": {
"version": "1.18.0-next.1",
"resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.0-next.1.tgz",
"integrity": "sha512-I4UGspA0wpZXWENrdA0uHbnhte683t3qT/1VFH9aX2dA5PPSf6QW5HHXf5HImaqPmjXaVeVk4RGWnaylmV7uAA==",
"version": "1.18.0-next.2",
"resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.0-next.2.tgz",
"integrity": "sha512-Ih4ZMFHEtZupnUh6497zEL4y2+w8+1ljnCyaTa+adcoafI1GOvMwFlDjBLfWR7y9VLfrjRJe9ocuHY1PSR9jjw==",
"dev": true,
"requires": {
"call-bind": "^1.0.2",
"es-to-primitive": "^1.2.1",
"function-bind": "^1.1.1",
"get-intrinsic": "^1.0.2",
"has": "^1.0.3",
"has-symbols": "^1.0.1",
"is-callable": "^1.2.2",
"is-negative-zero": "^2.0.0",
"is-negative-zero": "^2.0.1",
"is-regex": "^1.1.1",
"object-inspect": "^1.8.0",
"object-inspect": "^1.9.0",
"object-keys": "^1.1.1",
"object.assign": "^4.1.1",
"string.prototype.trimend": "^1.0.1",
"string.prototype.trimstart": "^1.0.1"
"object.assign": "^4.1.2",
"string.prototype.trimend": "^1.0.3",
"string.prototype.trimstart": "^1.0.3"
}
},
"is-negative-zero": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.1.tgz",
"integrity": "sha512-2z6JzQvZRa9A2Y7xC6dQQm4FSTSTNWjKIYYTt4246eMTJmIo0Q+ZyOsU66X8lxK1AbB92dFeglPLrhwpeRKO6w==",
"dev": true
},
"object-inspect": {
"version": "1.9.0",
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.9.0.tgz",
"integrity": "sha512-i3Bp9iTqwhaLZBxGkRfo5ZbE07BQRT7MGu8+nNgwW9ItGp1TzCTw2DLEoWwjClxBjOFI/hWljTAmYGCEwmtnOw==",
"dev": true
},
"object.assign": {
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.2.tgz",

@@ -4714,6 +4722,26 @@
"has-symbols": "^1.0.1",
"object-keys": "^1.1.1"
}
},
"string.prototype.trimend": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.3.tgz",
"integrity": "sha512-ayH0pB+uf0U28CtjlLvL7NaohvR1amUvVZk+y3DYb0Ey2PUV5zPkkKy9+U1ndVEIXO8hNg18eIv9Jntbii+dKw==",
"dev": true,
"requires": {
"call-bind": "^1.0.0",
"define-properties": "^1.1.3"
}
},
"string.prototype.trimstart": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.3.tgz",
"integrity": "sha512-oBIBUy5lea5tt0ovtOFiEQaBkoBBkyJhZXzJYrSmDo5IUUqbOPvVezuRs/agBIdZ2p2Eo1FD6bD9USyBLfl3xg==",
"dev": true,
"requires": {
"call-bind": "^1.0.0",
"define-properties": "^1.1.3"
}
}
}
},

@@ -6258,9 +6286,9 @@
"dev": true
},
"pathval": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.0.tgz",
"integrity": "sha1-uULm1L3mUwBe9rcTYd74cn0GReA=",
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz",
"integrity": "sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==",
"dev": true
},
"pbkdf2": {

@@ -7824,12 +7852,6 @@
}
}
},
"travis-fold": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/travis-fold/-/travis-fold-0.1.2.tgz",
"integrity": "sha1-/sAF+dyqJZo/lFnOWmkGq6TFRdo=",
"dev": true
},
"tsconfig-paths": {
"version": "3.9.0",
"resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.9.0.tgz",

@@ -7920,9 +7942,9 @@
"dev": true
},
"uglify-js": {
"version": "3.12.4",
"resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.12.4.tgz",
"integrity": "sha512-L5i5jg/SHkEqzN18gQMTWsZk3KelRsfD1wUVNqtq0kzqWQqcJjyL8yc1o8hJgRrWqrAl2mUFbhfznEIoi7zi2A==",
"version": "3.12.7",
"resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.12.7.tgz",
"integrity": "sha512-SIZhkoh+U/wjW+BHGhVwE9nt8tWJspncloBcFapkpGRwNPqcH8pzX36BXe3TPBjzHWPMUZotpCigak/udWNr1Q==",
"dev": true,
"optional": true
},

@@ -8290,25 +8312,39 @@
},
"dependencies": {
"es-abstract": {
"version": "1.18.0-next.1",
"resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.0-next.1.tgz",
"integrity": "sha512-I4UGspA0wpZXWENrdA0uHbnhte683t3qT/1VFH9aX2dA5PPSf6QW5HHXf5HImaqPmjXaVeVk4RGWnaylmV7uAA==",
"version": "1.18.0-next.2",
"resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.0-next.2.tgz",
"integrity": "sha512-Ih4ZMFHEtZupnUh6497zEL4y2+w8+1ljnCyaTa+adcoafI1GOvMwFlDjBLfWR7y9VLfrjRJe9ocuHY1PSR9jjw==",
"dev": true,
"requires": {
"call-bind": "^1.0.2",
"es-to-primitive": "^1.2.1",
"function-bind": "^1.1.1",
"get-intrinsic": "^1.0.2",
"has": "^1.0.3",
"has-symbols": "^1.0.1",
"is-callable": "^1.2.2",
"is-negative-zero": "^2.0.0",
"is-negative-zero": "^2.0.1",
"is-regex": "^1.1.1",
"object-inspect": "^1.8.0",
"object-inspect": "^1.9.0",
"object-keys": "^1.1.1",
"object.assign": "^4.1.1",
"string.prototype.trimend": "^1.0.1",
"string.prototype.trimstart": "^1.0.1"
"object.assign": "^4.1.2",
"string.prototype.trimend": "^1.0.3",
"string.prototype.trimstart": "^1.0.3"
}
},
"is-negative-zero": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.1.tgz",
"integrity": "sha512-2z6JzQvZRa9A2Y7xC6dQQm4FSTSTNWjKIYYTt4246eMTJmIo0Q+ZyOsU66X8lxK1AbB92dFeglPLrhwpeRKO6w==",
"dev": true
},
"object-inspect": {
"version": "1.9.0",
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.9.0.tgz",
"integrity": "sha512-i3Bp9iTqwhaLZBxGkRfo5ZbE07BQRT7MGu8+nNgwW9ItGp1TzCTw2DLEoWwjClxBjOFI/hWljTAmYGCEwmtnOw==",
"dev": true
},
"object.assign": {
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.2.tgz",

@@ -8320,6 +8356,26 @@
"has-symbols": "^1.0.1",
"object-keys": "^1.1.1"
}
},
"string.prototype.trimend": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.3.tgz",
"integrity": "sha512-ayH0pB+uf0U28CtjlLvL7NaohvR1amUvVZk+y3DYb0Ey2PUV5zPkkKy9+U1ndVEIXO8hNg18eIv9Jntbii+dKw==",
"dev": true,
"requires": {
"call-bind": "^1.0.0",
"define-properties": "^1.1.3"
}
},
"string.prototype.trimstart": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.3.tgz",
"integrity": "sha512-oBIBUy5lea5tt0ovtOFiEQaBkoBBkyJhZXzJYrSmDo5IUUqbOPvVezuRs/agBIdZ2p2Eo1FD6bD9USyBLfl3xg==",
"dev": true,
"requires": {
"call-bind": "^1.0.0",
"define-properties": "^1.1.3"
}
}
}
},
@@ -52,7 +52,6 @@
"@types/q": "latest",
"@types/source-map-support": "latest",
"@types/through2": "latest",
"@types/travis-fold": "latest",
"@types/xml2js": "^0.4.0",
"@typescript-eslint/eslint-plugin": "4.5.0",
"@typescript-eslint/experimental-utils": "4.5.0",

@@ -95,7 +94,6 @@
"remove-internal": "^2.9.2",
"source-map-support": "latest",
"through2": "latest",
"travis-fold": "latest",
"typescript": "^4.0.0-dev.20200624",
"vinyl": "latest",
"vinyl-sourcemaps-apply": "latest",
@@ -174,14 +174,14 @@ namespace ts {
const binder = createBinder();

export function bindSourceFile(file: SourceFile, options: CompilerOptions) {
tracing.push(tracing.Phase.Bind, "bindSourceFile", { path: file.path }, /*separateBeginAndEnd*/ true);
tracing?.push(tracing.Phase.Bind, "bindSourceFile", { path: file.path }, /*separateBeginAndEnd*/ true);
performance.mark("beforeBind");
perfLogger.logStartBindFile("" + file.fileName);
binder(file, options);
perfLogger.logStopBindFile();
performance.mark("afterBind");
performance.measure("Bind", "beforeBind", "afterBind");
tracing.pop();
tracing?.pop();
}

function createBinder(): (file: SourceFile, options: CompilerOptions) => void {

@@ -2540,6 +2540,11 @@ namespace ts {
node.flowNode = currentFlow;
}
return checkContextualIdentifier(<Identifier>node);
case SyntaxKind.QualifiedName:
if (currentFlow && parent.kind === SyntaxKind.TypeQuery) {
node.flowNode = currentFlow;
}
break;
case SyntaxKind.SuperKeyword:
node.flowNode = currentFlow;
break;

@@ -3435,7 +3440,7 @@ namespace ts {

function shouldReportErrorOnModuleDeclaration(node: ModuleDeclaration): boolean {
const instanceState = getModuleInstanceState(node);
return instanceState === ModuleInstanceState.Instantiated || (instanceState === ModuleInstanceState.ConstEnumOnly && !!options.preserveConstEnums);
return instanceState === ModuleInstanceState.Instantiated || (instanceState === ModuleInstanceState.ConstEnumOnly && shouldPreserveConstEnums(options));
}

function checkUnreachable(node: Node): boolean {
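The bindSourceFile change above (and the matching changes in the emitter and parser further down) replaces tracing.push/tracing.pop with tracing?.push/tracing?.pop, so the instrumentation silently does nothing when no tracer is active. A small sketch of that guard pattern, with a hypothetical Tracer type standing in for the compiler's richer tracing namespace:

// Hypothetical stand-ins; the real compiler exposes a richer tracing API.
interface Tracer {
    push(phase: string, name: string, args?: object): void;
    pop(): void;
}

// Stays undefined unless tracing was requested, so the optional calls below are no-ops.
let tracing: Tracer | undefined;

function bindSourceFileSketch(path: string, bind: () => void) {
    tracing?.push("bind", "bindSourceFile", { path }); // no-op when tracing is off
    try {
        bind();
    }
    finally {
        tracing?.pop();
    }
}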
@@ -216,7 +216,7 @@

// if not using old state, every file is changed
if (!useOldState ||
// File wasnt present in old state
// File wasn't present in old state
!(oldInfo = oldState!.fileInfos.get(sourceFilePath)) ||
// versions dont match
oldInfo.version !== info.version ||
File diff suppressed because it is too large
@@ -2617,14 +2617,17 @@ namespace ts {
if (ownConfig.extendedConfigPath) {
// copy the resolution stack so it is never reused between branches in potential diamond-problem scenarios.
resolutionStack = resolutionStack.concat([resolvedPath]);
const extendedConfig = getExtendedConfig(sourceFile, ownConfig.extendedConfigPath, host, basePath, resolutionStack, errors, extendedConfigCache);
const extendedConfig = getExtendedConfig(sourceFile, ownConfig.extendedConfigPath, host, resolutionStack, errors, extendedConfigCache);
if (extendedConfig && isSuccessfulParsedTsconfig(extendedConfig)) {
const baseRaw = extendedConfig.raw;
const raw = ownConfig.raw;
let relativeDifference: string | undefined ;
const setPropertyInRawIfNotUndefined = (propertyName: string) => {
const value = raw[propertyName] || baseRaw[propertyName];
if (value) {
raw[propertyName] = value;
if (!raw[propertyName] && baseRaw[propertyName]) {
raw[propertyName] = map(baseRaw[propertyName], (path: string) => isRootedDiskPath(path) ? path : combinePaths(
relativeDifference ||= convertToRelativePath(getDirectoryPath(ownConfig.extendedConfigPath!), basePath, createGetCanonicalFileName(host.useCaseSensitiveFileNames)),
path
));
}
};
setPropertyInRawIfNotUndefined("include");

@@ -2786,7 +2789,6 @@
sourceFile: TsConfigSourceFile | undefined,
extendedConfigPath: string,
host: ParseConfigHost,
basePath: string,
resolutionStack: string[],
errors: Push<Diagnostic>,
extendedConfigCache?: ESMap<string, ExtendedConfigCacheEntry>

@@ -2801,25 +2803,8 @@
else {
extendedResult = readJsonConfigFile(extendedConfigPath, path => host.readFile(path));
if (!extendedResult.parseDiagnostics.length) {
const extendedDirname = getDirectoryPath(extendedConfigPath);
extendedConfig = parseConfig(/*json*/ undefined, extendedResult, host, extendedDirname,
extendedConfig = parseConfig(/*json*/ undefined, extendedResult, host, getDirectoryPath(extendedConfigPath),
getBaseFileName(extendedConfigPath), resolutionStack, errors, extendedConfigCache);

if (isSuccessfulParsedTsconfig(extendedConfig)) {
// Update the paths to reflect base path
const relativeDifference = convertToRelativePath(extendedDirname, basePath, identity);
const updatePath = (path: string) => isRootedDiskPath(path) ? path : combinePaths(relativeDifference, path);
const mapPropertiesInRawIfNotUndefined = (propertyName: string) => {
if (raw[propertyName]) {
raw[propertyName] = map(raw[propertyName], updatePath);
}
};

const { raw } = extendedConfig;
mapPropertiesInRawIfNotUndefined("include");
mapPropertiesInRawIfNotUndefined("exclude");
mapPropertiesInRawIfNotUndefined("files");
}
}
if (extendedConfigCache) {
extendedConfigCache.set(path, { extendedResult, extendedConfig });
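The parseConfig/getExtendedConfig rework above keeps include/exclude/files from an extended tsconfig only when the extending config does not set them, and re-bases any relative patterns against the extending config's directory. A simplified sketch of that re-basing, using Node's path module instead of the compiler's own path helpers (the function and variable names are illustrative):

import * as path from "path";

function rebaseInheritedPatterns(baseConfigDir: string, extendingConfigDir: string, patterns: string[]): string[] {
    // Patterns inherited from the base config are interpreted relative to the directory
    // that declared them, so prefix them with the relative difference between the two
    // config directories; absolute patterns are left untouched.
    const relativeDifference = path.relative(extendingConfigDir, baseConfigDir);
    return patterns.map(p => path.isAbsolute(p) ? p : path.join(relativeDifference, p));
}

// e.g. a base config at /repo/configs/tsconfig.base.json declaring "include": ["src"],
// extended from /repo/tsconfig.json, effectively includes "configs/src".
console.log(rebaseInheritedPatterns("/repo/configs", "/repo", ["src"]));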
@@ -307,7 +307,7 @@
"category": "Error",
"code": 1102
},
"A 'for-await-of' statement is only allowed within an async function or async generator.": {
"'for await' loops are only allowed within async functions and at the top levels of modules.": {
"category": "Error",
"code": 1103
},

@@ -1352,6 +1352,15 @@
"category": "Message",
"code": 1430
},
"'for await' loops are only allowed at the top level of a file when that file is a module, but this file has no imports or exports. Consider adding an empty 'export {}' to make this file a module.": {
"category": "Error",
"code": 1431
},
"Top-level 'for await' loops are only allowed when the 'module' option is set to 'esnext' or 'system', and the 'target' option is set to 'es2017' or higher.": {
"category": "Error",
"code": 1432
},

"The types of '{0}' are incompatible between these types.": {
"category": "Error",
"code": 2200

@@ -3235,6 +3244,18 @@
"category": "Error",
"code": 2797
},
"The declaration was marked as deprecated here.": {
"category": "Error",
"code": 2798
},
"Type produces a tuple type that is too large to represent.": {
"category": "Error",
"code": 2799
},
"Expression produces a tuple type that is too large to represent.": {
"category": "Error",
"code": 2800
},

"Import declaration '{0}' is using private name '{1}'.": {
"category": "Error",

@@ -3524,6 +3545,10 @@
"category": "Error",
"code": 4083
},
"Exported type alias '{0}' has or is using private name '{1}' from module {2}.": {
"category": "Error",
"code": 4084
},
"Conflicting definitions for '{0}' found at '{1}' and '{2}'. Consider installing a specific version of this library to resolve the conflict.": {
"category": "Error",
"code": 4090

@@ -3813,6 +3838,10 @@
"category": "Error",
"code": 5090
},
"Option 'preserveConstEnums' cannot be disabled when 'isolatedModules' is enabled.": {
"category": "Error",
"code": 5091
},

"Generates a sourcemap for each corresponding '.d.ts' file.": {
"category": "Message",

@@ -4894,7 +4923,7 @@
"category": "Message",
"code": 6384
},
"'{0}' is deprecated": {
"'{0}' is deprecated.": {
"category": "Suggestion",
"code": 6385,
"reportsDeprecated": true

@@ -4903,6 +4932,11 @@
"category": "Message",
"code": 6386
},
"The signature '{0}' of '{1}' is deprecated.": {
"category": "Suggestion",
"code": 6387,
"reportsDeprecated": true
},

"The expected type comes from property '{0}' which is declared here on type '{1}'": {
"category": "Message",
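The new diagnostics 1431 and 1432 describe when a top-level 'for await' loop is allowed: the file must be a module, and (roughly) 'module' must be 'esnext' or 'system' with 'target' at 'es2017' or higher. An illustrative example, not taken from the diff, of the code those messages apply to:

// With "module": "esnext" (or "system") and "target": "es2017" or higher,
// a top-level `for await` is allowed in a module file.
async function* numbers() {
    yield 1;
    yield 2;
}

for await (const n of numbers()) {
    console.log(n);
}

// Without any import/export the file is a script, and message 1431 suggests adding one.
export {};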
@@ -340,17 +340,17 @@ namespace ts {
sourceFiles: sourceFileOrBundle.sourceFiles.map(file => relativeToBuildInfo(getNormalizedAbsolutePath(file.fileName, host.getCurrentDirectory())))
};
}
tracing.push(tracing.Phase.Emit, "emitJsFileOrBundle", { jsFilePath });
tracing?.push(tracing.Phase.Emit, "emitJsFileOrBundle", { jsFilePath });
emitJsFileOrBundle(sourceFileOrBundle, jsFilePath, sourceMapFilePath, relativeToBuildInfo);
tracing.pop();
tracing?.pop();

tracing.push(tracing.Phase.Emit, "emitDeclarationFileOrBundle", { declarationFilePath });
tracing?.push(tracing.Phase.Emit, "emitDeclarationFileOrBundle", { declarationFilePath });
emitDeclarationFileOrBundle(sourceFileOrBundle, declarationFilePath, declarationMapPath, relativeToBuildInfo);
tracing.pop();
tracing?.pop();

tracing.push(tracing.Phase.Emit, "emitBuildInfo", { buildInfoPath });
tracing?.push(tracing.Phase.Emit, "emitBuildInfo", { buildInfoPath });
emitBuildInfo(bundleBuildInfo, buildInfoPath);
tracing.pop();
tracing?.pop();

if (!emitSkipped && emittedFilesList) {
if (!emitOnlyDtsFiles) {
@@ -255,16 +255,8 @@ namespace ts.moduleSpecifiers {
return firstDefined(imports, ({ text }) => pathIsRelative(text) ? hasJSFileExtension(text) : undefined) || false;
}

function numberOfDirectorySeparators(str: string) {
const match = str.match(/\//g);
return match ? match.length : 0;
}

function comparePathsByRedirectAndNumberOfDirectorySeparators(a: ModulePath, b: ModulePath) {
return compareBooleans(b.isRedirect, a.isRedirect) || compareValues(
numberOfDirectorySeparators(a.path),
numberOfDirectorySeparators(b.path)
);
return compareBooleans(b.isRedirect, a.isRedirect) || compareNumberOfDirectorySeparators(a.path, b.path);
}

function getNearestAncestorDirectoryWithPackageJson(host: ModuleSpecifierResolutionHost, fileName: string) {

@@ -286,40 +278,47 @@ namespace ts.moduleSpecifiers {
const getCanonicalFileName = hostGetCanonicalFileName(host);
const cwd = host.getCurrentDirectory();
const referenceRedirect = host.isSourceOfProjectReferenceRedirect(importedFileName) ? host.getProjectReferenceRedirect(importedFileName) : undefined;
const redirects = host.redirectTargetsMap.get(toPath(importedFileName, cwd, getCanonicalFileName)) || emptyArray;
const importedPath = toPath(importedFileName, cwd, getCanonicalFileName);
const redirects = host.redirectTargetsMap.get(importedPath) || emptyArray;
const importedFileNames = [...(referenceRedirect ? [referenceRedirect] : emptyArray), importedFileName, ...redirects];
const targets = importedFileNames.map(f => getNormalizedAbsolutePath(f, cwd));
if (!preferSymlinks) {
const result = forEach(targets, p => cb(p, referenceRedirect === p));
// Symlinks inside ignored paths are already filtered out of the symlink cache,
// so we only need to remove them from the realpath filenames.
const result = forEach(targets, p => !containsIgnoredPath(p) && cb(p, referenceRedirect === p));
if (result) return result;
}
const links = host.getSymlinkCache
? host.getSymlinkCache()
: discoverProbableSymlinks(host.getSourceFiles(), getCanonicalFileName, cwd);

const symlinkedDirectories = links.getSymlinkedDirectories();
const useCaseSensitiveFileNames = !host.useCaseSensitiveFileNames || host.useCaseSensitiveFileNames();
const result = symlinkedDirectories && forEachEntry(symlinkedDirectories, (resolved, path) => {
if (resolved === false) return undefined;
if (startsWithDirectory(importingFileName, resolved.realPath, getCanonicalFileName)) {
return undefined; // Don't want to a package to globally import from itself
const symlinkedDirectories = links.getSymlinkedDirectoriesByRealpath();
const fullImportedFileName = getNormalizedAbsolutePath(importedFileName, cwd);
const result = symlinkedDirectories && forEachAncestorDirectory(getDirectoryPath(fullImportedFileName), realPathDirectory => {
const symlinkDirectories = symlinkedDirectories.get(ensureTrailingDirectorySeparator(toPath(realPathDirectory, cwd, getCanonicalFileName)));
if (!symlinkDirectories) return undefined; // Continue to ancestor directory

// Don't want to a package to globally import from itself (importNameCodeFix_symlink_own_package.ts)
if (startsWithDirectory(importingFileName, realPathDirectory, getCanonicalFileName)) {
return false; // Stop search, each ancestor directory will also hit this condition
}

return forEach(targets, target => {
if (!containsPath(resolved.real, target, !useCaseSensitiveFileNames)) {
if (!startsWithDirectory(target, realPathDirectory, getCanonicalFileName)) {
return;
}

const relative = getRelativePathFromDirectory(resolved.real, target, getCanonicalFileName);
const option = resolvePath(path, relative);
if (!host.fileExists || host.fileExists(option)) {
const relative = getRelativePathFromDirectory(realPathDirectory, target, getCanonicalFileName);
for (const symlinkDirectory of symlinkDirectories) {
const option = resolvePath(symlinkDirectory, relative);
const result = cb(option, target === referenceRedirect);
if (result) return result;
}
});
});
return result ||
(preferSymlinks ? forEach(targets, p => cb(p, p === referenceRedirect)) : undefined);
return result || (preferSymlinks
? forEach(targets, p => containsIgnoredPath(p) ? undefined : cb(p, p === referenceRedirect))
: undefined);
}

interface ModulePath {
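The rewritten symlink handling above walks ancestor directories of the imported file's path and looks each one up in a cache keyed by real directory, instead of scanning every known symlinked directory. A simplified, self-contained sketch of that lookup; the names and the cache shape here are illustrative, not the compiler's:

import * as path from "path";

// Map from a real directory path to the symlinked directories that point at it.
function findSymlinkAliases(importedFile: string, symlinksByRealDir: Map<string, string[]>): string[] | undefined {
    let dir = path.dirname(importedFile);
    while (true) {
        const aliases = symlinksByRealDir.get(dir);
        if (aliases) {
            // Re-root the remainder of the imported path under each symlink alias.
            const rest = path.relative(dir, importedFile);
            return aliases.map(alias => path.join(alias, rest));
        }
        const parent = path.dirname(dir);
        if (parent === dir) return undefined; // reached the filesystem root
        dir = parent;
    }
}

// e.g. a package reachable through a symlinked workspace directory:
const cache = new Map([["/repo/packages/lib", ["/repo/node_modules/@scope/lib"]]]);
console.log(findSymlinkAliases("/repo/packages/lib/src/index.ts", cache));
// -> ["/repo/node_modules/@scope/lib/src/index.ts"]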
@@ -558,63 +558,56 @@ namespace ts {
* and while doing so, handles traversing the structure without relying on the callstack to encode the tree structure.
*/
export function forEachChildRecursively<T>(rootNode: Node, cbNode: (node: Node, parent: Node) => T | "skip" | undefined, cbNodes?: (nodes: NodeArray<Node>, parent: Node) => T | "skip" | undefined): T | undefined {

const stack: Node[] = [rootNode];
while (stack.length) {
const parent = stack.pop()!;
const res = visitAllPossibleChildren(parent, gatherPossibleChildren(parent));
if (res) {
return res;
}
const queue: (Node | NodeArray<Node>)[] = gatherPossibleChildren(rootNode);
const parents: Node[] = []; // tracks parent references for elements in queue
while (parents.length < queue.length) {
parents.push(rootNode);
}

return;

function gatherPossibleChildren(node: Node) {
const children: (Node | NodeArray<Node>)[] = [];
forEachChild(node, addWorkItem, addWorkItem); // By using a stack above and `unshift` here, we emulate a depth-first preorder traversal
return children;

function addWorkItem(n: Node | NodeArray<Node>) {
children.unshift(n);
}
}

function visitAllPossibleChildren(parent: Node, children: readonly (Node | NodeArray<Node>)[]) {
for (const child of children) {
if (isArray(child)) {
if (cbNodes) {
const res = cbNodes(child, parent);
if (res) {
if (res === "skip") continue;
return res;
}
}

for (let i = child.length - 1; i >= 0; i--) {
const realChild = child[i];
const res = cbNode(realChild, parent);
if (res) {
if (res === "skip") continue;
return res;
}
stack.push(realChild);
}
}
else {
stack.push(child);
const res = cbNode(child, parent);
while (queue.length !== 0) {
const current = queue.pop()!;
const parent = parents.pop()!;
if (isArray(current)) {
if (cbNodes) {
const res = cbNodes(current, parent);
if (res) {
if (res === "skip") continue;
return res;
}
}
for (let i = current.length - 1; i >= 0; --i) {
queue.push(current[i]);
parents.push(parent);
}
}
else {
const res = cbNode(current, parent);
if (res) {
if (res === "skip") continue;
return res;
}
if (current.kind >= SyntaxKind.FirstNode) {
// add children in reverse order to the queue, so popping gives the first child
for (const child of gatherPossibleChildren(current)) {
queue.push(child);
parents.push(current);
}
}
}
}
}

function gatherPossibleChildren(node: Node) {
const children: (Node | NodeArray<Node>)[] = [];
forEachChild(node, addWorkItem, addWorkItem); // By using a stack above and `unshift` here, we emulate a depth-first preorder traversal
return children;

function addWorkItem(n: Node | NodeArray<Node>) {
children.unshift(n);
}
}
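The new forEachChildRecursively above drives a preorder walk from an explicit work list plus a parallel array of parent pointers, so deeply nested sources cannot overflow the call stack. A minimal standalone sketch of the same idea over a toy tree type (not the compiler's Node):

// Minimal sketch: preorder traversal with an explicit queue and parallel parent stack.
interface TreeNode {
    name: string;
    children?: TreeNode[];
}

function forEachNode<T>(root: TreeNode, cb: (node: TreeNode, parent: TreeNode) => T | "skip" | undefined): T | undefined {
    const queue: TreeNode[] = [...(root.children ?? [])].reverse(); // reverse so pop() yields the first child
    const parents: TreeNode[] = queue.map(() => root);
    while (queue.length) {
        const current = queue.pop()!;
        const parent = parents.pop()!;
        const res = cb(current, parent);
        if (res === "skip") continue;   // skip this subtree
        if (res) return res;            // early exit with a result
        const children = current.children ?? [];
        for (let i = children.length - 1; i >= 0; i--) {
            queue.push(children[i]);    // reverse order so the first child is popped next
            parents.push(current);
        }
    }
    return undefined;
}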
export function createSourceFile(fileName: string, sourceText: string, languageVersion: ScriptTarget, setParentNodes = false, scriptKind?: ScriptKind): SourceFile {
tracing.push(tracing.Phase.Parse, "createSourceFile", { path: fileName }, /*separateBeginAndEnd*/ true);
tracing?.push(tracing.Phase.Parse, "createSourceFile", { path: fileName }, /*separateBeginAndEnd*/ true);
performance.mark("beforeParse");
let result: SourceFile;

@@ -629,7 +622,7 @@ namespace ts {

performance.mark("afterParse");
performance.measure("Parse", "beforeParse", "afterParse");
tracing.pop();
tracing?.pop();
return result;
}

@@ -1644,8 +1637,8 @@ namespace ts {
// with magic property names like '__proto__'. The 'identifiers' object is used to share a single string instance for
// each identifier in order to reduce memory consumption.
function createIdentifier(isIdentifier: boolean, diagnosticMessage?: DiagnosticMessage, privateIdentifierDiagnosticMessage?: DiagnosticMessage): Identifier {
identifierCount++;
if (isIdentifier) {
identifierCount++;
const pos = getNodePos();
// Store original token kind if it is not just an Identifier so we can report appropriate error later in type checker
const originalKeywordKind = token();

@@ -1659,6 +1652,12 @@ namespace ts {
return createIdentifier(/*isIdentifier*/ true);
}

if (token() === SyntaxKind.Unknown && scanner.tryScan(() => scanner.reScanInvalidIdentifier() === SyntaxKind.Identifier)) {
// Scanner has already recorded an 'Invalid character' error, so no need to add another from the parser.
return createIdentifier(/*isIdentifier*/ true);
}

identifierCount++;
// Only for end of file because the error gets reported incorrectly on embedded script tags.
const reportAtCurrentPosition = token() === SyntaxKind.EndOfFileToken;

@@ -7539,6 +7538,7 @@ namespace ts {
function parseTagComments(indent: number, initialMargin?: string): string | undefined {
const comments: string[] = [];
let state = JSDocState.BeginningOfLine;
let previousWhitespace = true;
let margin: number | undefined;
function pushComment(text: string) {
if (!margin) {

@@ -7564,7 +7564,8 @@ namespace ts {
indent = 0;
break;
case SyntaxKind.AtToken:
if (state === JSDocState.SavingBackticks) {
if (state === JSDocState.SavingBackticks || !previousWhitespace && state === JSDocState.SavingComments) {
// @ doesn't start a new tag inside ``, and inside a comment, only after whitespace
comments.push(scanner.getTokenText());
break;
}

@@ -7621,6 +7622,7 @@ namespace ts {
pushComment(scanner.getTokenText());
break;
}
previousWhitespace = token() === SyntaxKind.WhitespaceTrivia;
tok = nextTokenJSDoc();
}

@@ -7695,13 +7697,14 @@ namespace ts {
skipWhitespaceOrAsterisk();

const { name, isBracketed } = parseBracketNameInPropertyAndParamTag();
skipWhitespace();
const indentText = skipWhitespaceOrAsterisk();

if (isNameFirst) {
typeExpression = tryParseTypeExpression();
}

const comment = parseTagComments(indent + scanner.getStartPos() - start);
const comment = parseTrailingTagComments(start, getNodePos(), indent, indentText);

const nestedTypeLiteral = target !== PropertyLikeParse.CallbackParameter && parseNestedTypeLiteral(typeExpression, name, target, indent);
if (nestedTypeLiteral) {
typeExpression = nestedTypeLiteral;
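The parseTagComments change above means an '@' that directly follows non-whitespace inside a tag comment is kept as comment text instead of starting a new tag, while an '@' after whitespace still begins one. An illustrative doc comment, not taken from the diff:

// In the comment below, the "@" inside "someone@example.com" follows non-whitespace,
// so it no longer starts a new tag; "@param" after whitespace still does.
/**
 * Contact someone@example.com for access.
 * @param name user to look up
 */
function lookup(name: string): void {
    console.log(name);
}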
@@ -762,7 +762,7 @@
* Determines whether `fileName` starts with the specified `directoryName` using the provided path canonicalization callback.
* Comparison is case-sensitive between the canonical paths.
*
* @deprecated Use `containsPath` if possible.
* Use `containsPath` if file names are not already reduced and absolute.
*/
export function startsWithDirectory(fileName: string, directoryName: string, getCanonicalFileName: GetCanonicalFileName): boolean {
const canonicalFileName = getCanonicalFileName(fileName);
@@ -2,7 +2,6 @@
/** Performance measurements for the compiler. */
namespace ts.performance {
let perfHooks: PerformanceHooks | undefined;
let perfObserver: PerformanceObserver | undefined;
// when set, indicates the implementation of `Performance` to use for user timing.
// when unset, indicates user timing is unavailable or disabled.
let performanceImpl: Performance | undefined;

@@ -41,6 +40,10 @@ namespace ts.performance {
}

export const nullTimer: Timer = { enter: noop, exit: noop };

let enabled = false;
let timeorigin = timestamp();
const marks = new Map<string, number>();
const counts = new Map<string, number>();
const durations = new Map<string, number>();

@@ -50,7 +53,12 @@ namespace ts.performance {
* @param markName The name of the mark.
*/
export function mark(markName: string) {
performanceImpl?.mark(markName);
if (enabled) {
const count = counts.get(markName) ?? 0;
counts.set(markName, count + 1);
marks.set(markName, timestamp());
performanceImpl?.mark(markName);
}
}

/**

@@ -63,7 +71,13 @@ namespace ts.performance {
* used.
*/
export function measure(measureName: string, startMarkName?: string, endMarkName?: string) {
performanceImpl?.measure(measureName, startMarkName, endMarkName);
if (enabled) {
const end = (endMarkName !== undefined ? marks.get(endMarkName) : undefined) ?? timestamp();
const start = (startMarkName !== undefined ? marks.get(startMarkName) : undefined) ?? timeorigin;
const previousDuration = durations.get(measureName) || 0;
durations.set(measureName, previousDuration + (end - start));
performanceImpl?.measure(measureName, startMarkName, endMarkName);
}
}

/**

@@ -97,35 +111,36 @@ namespace ts.performance {
* Indicates whether the performance API is enabled.
*/
export function isEnabled() {
return !!performanceImpl;
return enabled;
}

/** Enables (and resets) performance measurements for the compiler. */
export function enable() {
if (!performanceImpl) {
export function enable(system: System = sys) {
if (!enabled) {
enabled = true;
perfHooks ||= tryGetNativePerformanceHooks();
if (!perfHooks) return false;
perfObserver ||= new perfHooks.PerformanceObserver(updateStatisticsFromList);
perfObserver.observe({ entryTypes: ["mark", "measure"] });
performanceImpl = perfHooks.performance;
if (perfHooks) {
timeorigin = perfHooks.performance.timeOrigin;
// NodeJS's Web Performance API is currently slower than expected, but we'd still like
// to be able to leverage native trace events when node is run with either `--cpu-prof`
// or `--prof`, if we're running with our own `--generateCpuProfile` flag, or when
// running in debug mode (since its possible to generate a cpu profile while debugging).
if (perfHooks.shouldWriteNativeEvents || system?.cpuProfilingEnabled?.() || system?.debugMode) {
performanceImpl = perfHooks.performance;
}
}
}
return true;
}

/** Disables performance measurements for the compiler. */
export function disable() {
perfObserver?.disconnect();
performanceImpl = undefined;
counts.clear();
durations.clear();
}

function updateStatisticsFromList(list: PerformanceObserverEntryList) {
for (const mark of list.getEntriesByType("mark")) {
counts.set(mark.name, (counts.get(mark.name) || 0) + 1);
}
for (const measure of list.getEntriesByType("measure")) {
durations.set(measure.name, (durations.get(measure.name) || 0) + measure.duration);
if (enabled) {
marks.clear();
counts.clear();
durations.clear();
performanceImpl = undefined;
enabled = false;
}
}
}
@ -4,6 +4,8 @@ namespace ts {
|
||||
// between browsers and NodeJS:
|
||||
|
||||
export interface PerformanceHooks {
|
||||
/** Indicates whether we should write native performance events */
|
||||
shouldWriteNativeEvents: boolean;
|
||||
performance: Performance;
|
||||
PerformanceObserver: PerformanceObserverConstructor;
|
||||
}
|
||||
@ -37,6 +39,7 @@ namespace ts {
|
||||
export type PerformanceEntryList = PerformanceEntry[];
|
||||
|
||||
// Browser globals for the Web Performance User Timings API
|
||||
declare const process: any;
|
||||
declare const performance: Performance | undefined;
|
||||
declare const PerformanceObserver: PerformanceObserverConstructor | undefined;
|
||||
|
||||
@ -55,6 +58,10 @@ namespace ts {
|
||||
typeof PerformanceObserver === "function" &&
|
||||
hasRequiredAPI(performance, PerformanceObserver)) {
|
||||
return {
|
||||
// For now we always write native performance events when running in the browser. We may
|
||||
// make this conditional in the future if we find that native web performance hooks
|
||||
// in the browser also slow down compilation.
|
||||
shouldWriteNativeEvents: true,
|
||||
performance,
|
||||
PerformanceObserver
|
||||
};
|
||||
@ -62,10 +69,12 @@ namespace ts {
|
||||
}
|
||||
|
||||
function tryGetNodePerformanceHooks(): PerformanceHooks | undefined {
|
||||
if (typeof module === "object" && typeof require === "function") {
|
||||
if (typeof process !== "undefined" && process.nextTick && !process.browser && typeof module === "object" && typeof require === "function") {
|
||||
try {
|
||||
const { performance, PerformanceObserver } = require("perf_hooks") as typeof import("perf_hooks");
|
||||
if (hasRequiredAPI(performance, PerformanceObserver)) {
|
||||
let performance: Performance;
|
||||
const { performance: nodePerformance, PerformanceObserver } = require("perf_hooks") as typeof import("perf_hooks");
|
||||
if (hasRequiredAPI(nodePerformance, PerformanceObserver)) {
|
||||
performance = nodePerformance;
|
||||
// There is a bug in Node's performance.measure prior to 12.16.3/13.13.0 that does not
|
||||
// match the Web Performance API specification. Node's implementation did not allow
|
||||
// optional `start` and `end` arguments for `performance.measure`.
|
||||
@ -73,26 +82,25 @@ namespace ts {
|
||||
const version = new Version(process.versions.node);
|
||||
const range = new VersionRange("<12.16.3 || 13 <13.13");
|
||||
if (range.test(version)) {
|
||||
return {
|
||||
performance: {
|
||||
get timeOrigin() { return performance.timeOrigin; },
|
||||
now() { return performance.now(); },
|
||||
mark(name) { return performance.mark(name); },
|
||||
measure(name, start = "nodeStart", end?) {
|
||||
if (end === undefined) {
|
||||
end = "__performance.measure-fix__";
|
||||
performance.mark(end);
|
||||
}
|
||||
performance.measure(name, start, end);
|
||||
if (end === "__performance.measure-fix__") {
|
||||
performance.clearMarks("__performance.measure-fix__");
|
||||
}
|
||||
performance = {
|
||||
get timeOrigin() { return nodePerformance.timeOrigin; },
|
||||
now() { return nodePerformance.now(); },
|
||||
mark(name) { return nodePerformance.mark(name); },
|
||||
measure(name, start = "nodeStart", end?) {
|
||||
if (end === undefined) {
|
||||
end = "__performance.measure-fix__";
|
||||
nodePerformance.mark(end);
|
||||
}
|
||||
},
|
||||
PerformanceObserver
|
||||
nodePerformance.measure(name, start, end);
|
||||
if (end === "__performance.measure-fix__") {
|
||||
nodePerformance.clearMarks("__performance.measure-fix__");
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
return {
|
||||
// By default, only write native events when generating a cpu profile or using the v8 profiler.
|
||||
shouldWriteNativeEvents: false,
|
||||
performance,
|
||||
PerformanceObserver
|
||||
};
|
||||
|
||||
@ -407,7 +407,7 @@ namespace ts {
|
||||
const lineEnd = i < lastLineInFile ? getPositionOfLineAndCharacter(file, i + 1, 0) : file.text.length;
|
||||
let lineContent = file.text.slice(lineStart, lineEnd);
|
||||
lineContent = lineContent.replace(/\s+$/g, ""); // trim from end
|
||||
lineContent = lineContent.replace("\t", " "); // convert tabs to single spaces
|
||||
lineContent = lineContent.replace(/\t/g, " "); // convert tabs to single spaces
|
||||
|
||||
// Output the gutter and the actual contents of the line.
|
||||
context += indent + formatColorAndReset(padLeft(i + 1 + "", gutterWidth), gutterStyleSequence) + gutterSeparator;
|
||||
@ -833,7 +833,7 @@ namespace ts {
|
||||
// Track source files that are source files found by searching under node_modules, as these shouldn't be compiled.
|
||||
const sourceFilesFoundSearchingNodeModules = new Map<string, boolean>();
|
||||
|
||||
tracing.push(tracing.Phase.Program, "createProgram", { configFilePath: options.configFilePath, rootDir: options.rootDir }, /*separateBeginAndEnd*/ true);
|
||||
tracing?.push(tracing.Phase.Program, "createProgram", { configFilePath: options.configFilePath, rootDir: options.rootDir }, /*separateBeginAndEnd*/ true);
|
||||
performance.mark("beforeProgram");
|
||||
|
||||
const host = createProgramOptions.host || createCompilerHost(options);
|
||||
@ -919,15 +919,15 @@ namespace ts {
|
||||
forEachResolvedProjectReference
|
||||
});
|
||||
|
||||
tracing.push(tracing.Phase.Program, "shouldProgramCreateNewSourceFiles", { hasOldProgram: !!oldProgram });
|
||||
tracing?.push(tracing.Phase.Program, "shouldProgramCreateNewSourceFiles", { hasOldProgram: !!oldProgram });
|
||||
const shouldCreateNewSourceFile = shouldProgramCreateNewSourceFiles(oldProgram, options);
|
||||
tracing.pop();
|
||||
tracing?.pop();
|
||||
// We set `structuralIsReused` to `undefined` because `tryReuseStructureFromOldProgram` calls `tryReuseStructureFromOldProgram` which checks
|
||||
// `structuralIsReused`, which would be a TDZ violation if it was not set in advance to `undefined`.
|
||||
let structureIsReused: StructureIsReused;
|
||||
tracing.push(tracing.Phase.Program, "tryReuseStructureFromOldProgram", {});
|
||||
tracing?.push(tracing.Phase.Program, "tryReuseStructureFromOldProgram", {});
|
||||
structureIsReused = tryReuseStructureFromOldProgram(); // eslint-disable-line prefer-const
|
||||
tracing.pop();
|
||||
tracing?.pop();
|
||||
if (structureIsReused !== StructureIsReused.Completely) {
|
||||
processingDefaultLibFiles = [];
|
||||
processingOtherFiles = [];
|
||||
@ -964,15 +964,15 @@ namespace ts {
|
||||
}
|
||||
}
|
||||
|
||||
tracing.push(tracing.Phase.Program, "processRootFiles", { count: rootNames.length });
|
||||
tracing?.push(tracing.Phase.Program, "processRootFiles", { count: rootNames.length });
|
||||
forEach(rootNames, (name, index) => processRootFile(name, /*isDefaultLib*/ false, /*ignoreNoDefaultLib*/ false, { kind: FileIncludeKind.RootFile, index }));
|
||||
tracing.pop();
|
||||
tracing?.pop();
|
||||
|
||||
// load type declarations specified via 'types' argument or implicitly from types/ and node_modules/@types folders
|
||||
const typeReferences: string[] = rootNames.length ? getAutomaticTypeDirectiveNames(options, host) : emptyArray;
|
||||
|
||||
if (typeReferences.length) {
|
||||
tracing.push(tracing.Phase.Program, "processTypeReferences", { count: typeReferences.length });
|
||||
tracing?.push(tracing.Phase.Program, "processTypeReferences", { count: typeReferences.length });
|
||||
// This containingFilename needs to match with the one used in managed-side
|
||||
const containingDirectory = options.configFilePath ? getDirectoryPath(options.configFilePath) : host.getCurrentDirectory();
|
||||
const containingFilename = combinePaths(containingDirectory, inferredTypesContainingFile);
|
||||
@ -980,7 +980,7 @@ namespace ts {
|
||||
for (let i = 0; i < typeReferences.length; i++) {
|
||||
processTypeReferenceDirective(typeReferences[i], resolutions[i], { kind: FileIncludeKind.AutomaticTypeDirectiveFile, typeReference: typeReferences[i], packageId: resolutions[i]?.packageId });
|
||||
}
|
||||
tracing.pop();
|
||||
tracing?.pop();
|
||||
}
|
||||
|
||||
// Do not process the default library if:
|
||||
@ -1016,7 +1016,7 @@ namespace ts {
|
||||
for (const oldSourceFile of oldSourceFiles) {
|
||||
const newFile = getSourceFileByPath(oldSourceFile.resolvedPath);
|
||||
if (shouldCreateNewSourceFile || !newFile ||
|
||||
// old file wasnt redirect but new file is
|
||||
// old file wasn't redirect but new file is
|
||||
(oldSourceFile.resolvedPath === oldSourceFile.path && newFile.resolvedPath !== oldSourceFile.path)) {
|
||||
host.onReleaseOldSourceFile(oldSourceFile, oldProgram.getCompilerOptions(), !!getSourceFileByPath(oldSourceFile.path));
|
||||
}
|
||||
@ -1108,7 +1108,7 @@ namespace ts {
|
||||
verifyCompilerOptions();
|
||||
performance.mark("afterProgram");
|
||||
performance.measure("Program", "beforeProgram", "afterProgram");
|
||||
tracing.pop();
|
||||
tracing?.pop();
|
||||
|
||||
return program;
|
||||
|
||||
@ -1116,12 +1116,12 @@ namespace ts {
|
||||
if (!moduleNames.length) return emptyArray;
|
||||
const containingFileName = getNormalizedAbsolutePath(containingFile.originalFileName, currentDirectory);
|
||||
const redirectedReference = getRedirectReferenceForResolution(containingFile);
|
||||
tracing.push(tracing.Phase.Program, "resolveModuleNamesWorker", { containingFileName });
|
||||
tracing?.push(tracing.Phase.Program, "resolveModuleNamesWorker", { containingFileName });
|
||||
performance.mark("beforeResolveModule");
|
||||
const result = actualResolveModuleNamesWorker(moduleNames, containingFileName, reusedNames, redirectedReference);
|
||||
performance.mark("afterResolveModule");
|
||||
performance.measure("ResolveModule", "beforeResolveModule", "afterResolveModule");
|
||||
tracing.pop();
|
||||
tracing?.pop();
|
||||
return result;
|
||||
}
|
||||
|
||||
@ -1129,12 +1129,12 @@ namespace ts {
|
||||
if (!typeDirectiveNames.length) return [];
|
||||
const containingFileName = !isString(containingFile) ? getNormalizedAbsolutePath(containingFile.originalFileName, currentDirectory) : containingFile;
|
||||
const redirectedReference = !isString(containingFile) ? getRedirectReferenceForResolution(containingFile) : undefined;
|
||||
tracing.push(tracing.Phase.Program, "resolveTypeReferenceDirectiveNamesWorker", { containingFileName });
|
||||
tracing?.push(tracing.Phase.Program, "resolveTypeReferenceDirectiveNamesWorker", { containingFileName });
|
||||
performance.mark("beforeResolveTypeReference");
|
||||
const result = actualResolveTypeReferenceDirectiveNamesWorker(typeDirectiveNames, containingFileName, redirectedReference);
|
||||
performance.mark("afterResolveTypeReference");
|
||||
performance.measure("ResolveTypeReference", "beforeResolveTypeReference", "afterResolveTypeReference");
|
||||
tracing.pop();
|
||||
tracing?.pop();
|
||||
return result;
|
||||
}
|
||||
|
||||
@ -1655,7 +1655,7 @@ namespace ts {
|
||||
|
||||
function emitBuildInfo(writeFileCallback?: WriteFileCallback): EmitResult {
|
||||
Debug.assert(!outFile(options));
|
||||
tracing.push(tracing.Phase.Emit, "emitBuildInfo", {}, /*separateBeginAndEnd*/ true);
|
||||
tracing?.push(tracing.Phase.Emit, "emitBuildInfo", {}, /*separateBeginAndEnd*/ true);
|
||||
performance.mark("beforeEmit");
|
||||
const emitResult = emitFiles(
|
||||
notImplementedResolver,
|
||||
@ -1668,7 +1668,7 @@ namespace ts {
|
||||
|
||||
performance.mark("afterEmit");
|
||||
performance.measure("Emit", "beforeEmit", "afterEmit");
|
||||
tracing.pop();
|
||||
tracing?.pop();
|
||||
return emitResult;
|
||||
}
|
||||
|
||||
@ -1729,9 +1729,9 @@ namespace ts {
|
||||
}
|
||||
|
||||
function emit(sourceFile?: SourceFile, writeFileCallback?: WriteFileCallback, cancellationToken?: CancellationToken, emitOnlyDtsFiles?: boolean, transformers?: CustomTransformers, forceDtsEmit?: boolean): EmitResult {
|
||||
tracing.push(tracing.Phase.Emit, "emit", { path: sourceFile?.path }, /*separateBeginAndEnd*/ true);
|
||||
tracing?.push(tracing.Phase.Emit, "emit", { path: sourceFile?.path }, /*separateBeginAndEnd*/ true);
|
||||
const result = runWithCancellationToken(() => emitWorker(program, sourceFile, writeFileCallback, cancellationToken, emitOnlyDtsFiles, transformers, forceDtsEmit));
|
||||
tracing.pop();
|
||||
tracing?.pop();
|
||||
return result;
|
||||
}
|
||||
|
||||
@ -2485,13 +2485,13 @@ namespace ts {
|
||||
|
||||
// Get source file from normalized fileName
|
||||
function findSourceFile(fileName: string, path: Path, isDefaultLib: boolean, ignoreNoDefaultLib: boolean, reason: FileIncludeReason, packageId: PackageId | undefined): SourceFile | undefined {
|
||||
tracing.push(tracing.Phase.Program, "findSourceFile", {
|
||||
tracing?.push(tracing.Phase.Program, "findSourceFile", {
|
||||
fileName,
|
||||
isDefaultLib: isDefaultLib || undefined,
|
||||
fileIncludeKind: (FileIncludeKind as any)[reason.kind],
|
||||
});
|
||||
const result = findSourceFileWorker(fileName, path, isDefaultLib, ignoreNoDefaultLib, reason, packageId);
|
||||
tracing.pop();
|
||||
tracing?.pop();
|
||||
return result;
|
||||
}
|
||||
|
||||
@ -2792,9 +2792,9 @@ namespace ts {
|
||||
resolvedTypeReferenceDirective: ResolvedTypeReferenceDirective | undefined,
|
||||
reason: FileIncludeReason
|
||||
): void {
|
||||
tracing.push(tracing.Phase.Program, "processTypeReferenceDirective", { directive: typeReferenceDirective, hasResolved: !!resolveModuleNamesReusingOldState, refKind: reason.kind, refPath: isReferencedFile(reason) ? reason.file : undefined });
|
||||
tracing?.push(tracing.Phase.Program, "processTypeReferenceDirective", { directive: typeReferenceDirective, hasResolved: !!resolveModuleNamesReusingOldState, refKind: reason.kind, refPath: isReferencedFile(reason) ? reason.file : undefined });
|
||||
processTypeReferenceDirectiveWorker(typeReferenceDirective, resolvedTypeReferenceDirective, reason);
|
||||
tracing.pop();
|
||||
tracing?.pop();
|
||||
}
|
||||
|
||||
function processTypeReferenceDirectiveWorker(
|
||||
@ -3157,6 +3157,10 @@ namespace ts {
|
||||
createDiagnosticForOptionName(Diagnostics.Option_isolatedModules_can_only_be_used_when_either_option_module_is_provided_or_option_target_is_ES2015_or_higher, "isolatedModules", "target");
|
||||
}
|
||||
|
||||
if (options.preserveConstEnums === false) {
|
||||
createDiagnosticForOptionName(Diagnostics.Option_preserveConstEnums_cannot_be_disabled_when_isolatedModules_is_enabled, "preserveConstEnums", "isolatedModules");
|
||||
}
|
||||
|
||||
const firstNonExternalModuleSourceFile = find(files, f => !isExternalModule(f) && !isSourceFileJS(f) && !f.isDeclarationFile && f.scriptKind !== ScriptKind.JSON);
|
||||
if (firstNonExternalModuleSourceFile) {
|
||||
const span = getErrorSpanForNode(firstNonExternalModuleSourceFile, firstNonExternalModuleSourceFile);
|
||||
@ -3756,7 +3760,7 @@ namespace ts {
|
||||
}
|
||||
|
||||
function handleDirectoryCouldBeSymlink(directory: string) {
|
||||
if (!host.getResolvedProjectReferences()) return;
|
||||
if (!host.getResolvedProjectReferences() || containsIgnoredPath(directory)) return;
|
||||
|
||||
// Because we already watch node_modules, handle symlinks in there
|
||||
if (!originalRealpath || !stringContains(directory, nodeModulesPathPart)) return;
|
||||
@ -3773,7 +3777,7 @@ namespace ts {
|
||||
return;
|
||||
}
|
||||
|
||||
symlinkCache.setSymlinkedDirectory(directoryPath, {
|
||||
symlinkCache.setSymlinkedDirectory(directory, {
|
||||
real: ensureTrailingDirectorySeparator(real),
|
||||
realPath
|
||||
});
|
||||
|
||||
@ -16,6 +16,8 @@ namespace ts {
|
||||
setFilesWithInvalidatedNonRelativeUnresolvedImports(filesWithUnresolvedImports: ESMap<Path, readonly string[]>): void;
|
||||
createHasInvalidatedResolution(forceAllFilesAsInvalidated?: boolean): HasInvalidatedResolution;
|
||||
hasChangedAutomaticTypeDirectiveNames(): boolean;
|
||||
isFileWithInvalidatedNonRelativeUnresolvedImports(path: Path): boolean;
|
||||
|
||||
|
||||
startCachingPerDirectoryResolution(): void;
|
||||
finishCachingPerDirectoryResolution(): void;
|
||||
@ -208,6 +210,7 @@ namespace ts {
|
||||
invalidateResolutionsOfFailedLookupLocations,
|
||||
setFilesWithInvalidatedNonRelativeUnresolvedImports,
|
||||
createHasInvalidatedResolution,
|
||||
isFileWithInvalidatedNonRelativeUnresolvedImports,
|
||||
updateTypeRootsWatch,
|
||||
closeTypeRootsWatch,
|
||||
clear
|
||||
@ -361,7 +364,7 @@ namespace ts {
|
||||
const compilerOptions = resolutionHost.getCompilationSettings();
|
||||
const hasInvalidatedNonRelativeUnresolvedImport = logChanges && isFileWithInvalidatedNonRelativeUnresolvedImports(path);
|
||||
|
||||
// All the resolutions in this file are invalidated if this file wasnt resolved using same redirect
|
||||
// All the resolutions in this file are invalidated if this file wasn't resolved using same redirect
|
||||
const program = resolutionHost.getCurrentProgram();
|
||||
const oldRedirect = program && program.getResolvedProjectReferenceToRedirect(containingFile);
|
||||
const unmatchedRedirects = oldRedirect ?
|
||||
|
||||
@ -43,6 +43,7 @@ namespace ts {
|
||||
reScanJsxToken(): JsxTokenSyntaxKind;
|
||||
reScanLessThanToken(): SyntaxKind;
|
||||
reScanQuestionToken(): SyntaxKind;
|
||||
reScanInvalidIdentifier(): SyntaxKind;
|
||||
scanJsxToken(): JsxTokenSyntaxKind;
|
||||
scanJsDocToken(): JSDocSyntaxKind;
|
||||
scan(): SyntaxKind;
|
||||
@ -966,6 +967,7 @@ namespace ts {
|
||||
reScanJsxToken,
|
||||
reScanLessThanToken,
|
||||
reScanQuestionToken,
|
||||
reScanInvalidIdentifier,
|
||||
scanJsxToken,
|
||||
scanJsDocToken,
|
||||
scan,
|
||||
@ -2041,14 +2043,9 @@ namespace ts {
|
||||
}
|
||||
return token = SyntaxKind.PrivateIdentifier;
|
||||
default:
|
||||
if (isIdentifierStart(ch, languageVersion)) {
|
||||
pos += charSize(ch);
|
||||
while (pos < end && isIdentifierPart(ch = codePointAt(text, pos), languageVersion)) pos += charSize(ch);
|
||||
tokenValue = text.substring(tokenPos, pos);
|
||||
if (ch === CharacterCodes.backslash) {
|
||||
tokenValue += scanIdentifierParts();
|
||||
}
|
||||
return token = getIdentifierToken();
|
||||
const identifierKind = scanIdentifier(ch, languageVersion);
|
||||
if (identifierKind) {
|
||||
return token = identifierKind;
|
||||
}
|
||||
else if (isWhiteSpaceSingleLine(ch)) {
|
||||
pos += charSize(ch);
|
||||
@ -2066,6 +2063,32 @@ namespace ts {
|
||||
}
|
||||
}
|
||||
|
||||
function reScanInvalidIdentifier(): SyntaxKind {
|
||||
Debug.assert(token === SyntaxKind.Unknown, "'reScanInvalidIdentifier' should only be called when the current token is 'SyntaxKind.Unknown'.");
|
||||
pos = tokenPos = startPos;
|
||||
tokenFlags = 0;
|
||||
const ch = codePointAt(text, pos);
|
||||
const identifierKind = scanIdentifier(ch, ScriptTarget.ESNext);
|
||||
if (identifierKind) {
|
||||
return token = identifierKind;
|
||||
}
|
||||
pos += charSize(ch);
|
||||
return token; // Still `SyntaKind.Unknown`
|
||||
}
|
||||
|
||||
function scanIdentifier(startCharacter: number, languageVersion: ScriptTarget) {
|
||||
let ch = startCharacter;
|
||||
if (isIdentifierStart(ch, languageVersion)) {
|
||||
pos += charSize(ch);
|
||||
while (pos < end && isIdentifierPart(ch = codePointAt(text, pos), languageVersion)) pos += charSize(ch);
|
||||
tokenValue = text.substring(tokenPos, pos);
|
||||
if (ch === CharacterCodes.backslash) {
|
||||
tokenValue += scanIdentifierParts();
|
||||
}
|
||||
return getIdentifierToken();
|
||||
}
|
||||
}
|
||||
|
||||
function reScanGreaterToken(): SyntaxKind {
|
||||
if (token === SyntaxKind.GreaterThanToken) {
|
||||
if (text.charCodeAt(pos) === CharacterCodes.greaterThan) {
|
||||
|
||||
@ -1116,6 +1116,7 @@ namespace ts {
|
||||
exit(exitCode?: number): void;
|
||||
/*@internal*/ enableCPUProfiler?(path: string, continuation: () => void): boolean;
|
||||
/*@internal*/ disableCPUProfiler?(continuation: () => void): boolean;
|
||||
/*@internal*/ cpuProfilingEnabled?(): boolean;
|
||||
realpath?(path: string): string;
|
||||
/*@internal*/ getEnvironmentVariable(name: string): string;
|
||||
/*@internal*/ tryEnableSourceMapsForHost?(): void;
|
||||
@ -1286,6 +1287,7 @@ namespace ts {
|
||||
},
|
||||
enableCPUProfiler,
|
||||
disableCPUProfiler,
|
||||
cpuProfilingEnabled: () => !!activeSession || contains(process.execArgv, "--cpu-prof") || contains(process.execArgv, "--prof"),
|
||||
realpath,
|
||||
debugMode: !!process.env.NODE_INSPECTOR_IPC || !!process.env.VSCODE_INSPECTOR_OPTIONS || some(<string[]>process.execArgv, arg => /^--(inspect|debug)(-brk)?(=\d+)?$/i.test(arg)),
|
||||
tryEnableSourceMapsForHost() {
|
||||
@ -1558,7 +1560,7 @@ namespace ts {
|
||||
return event === "rename" &&
|
||||
(!relativeName ||
|
||||
relativeName === lastDirectoryPart ||
|
||||
relativeName.lastIndexOf(lastDirectoryPartWithDirectorySeparator!) === relativeName.length - lastDirectoryPartWithDirectorySeparator!.length) &&
|
||||
(relativeName.lastIndexOf(lastDirectoryPartWithDirectorySeparator!) !== -1 && relativeName.lastIndexOf(lastDirectoryPartWithDirectorySeparator!) === relativeName.length - lastDirectoryPartWithDirectorySeparator!.length)) &&
|
||||
!fileSystemEntryExists(fileOrDirectory, entryKind) ?
|
||||
invokeCallbackAndUpdateWatcher(watchMissingFileSystemEntry) :
|
||||
callback(event, relativeName);
|
||||
|
||||
@ -1,16 +1,25 @@
|
||||
/* Tracing events for the compiler. */
|
||||
|
||||
/*@internal*/
|
||||
/** Tracing events for the compiler. */
|
||||
namespace ts.tracing {
|
||||
namespace ts { // eslint-disable-line one-namespace-per-file
|
||||
// should be used as tracing?.___
|
||||
export let tracing: typeof tracingEnabled | undefined;
|
||||
// enable the above using startTracing()
|
||||
}
|
||||
|
||||
// `tracingEnabled` should never be used directly, only through the above
|
||||
/* @internal */
|
||||
namespace ts.tracingEnabled { // eslint-disable-line one-namespace-per-file
|
||||
export const enum Mode {
|
||||
Project,
|
||||
Build,
|
||||
Server,
|
||||
}
|
||||
|
||||
let fs: typeof import("fs") | false | undefined;
|
||||
let fs: typeof import("fs");
|
||||
|
||||
let traceCount = 0;
|
||||
let traceFd: number | undefined;
|
||||
let traceFd = 0;
|
||||
|
||||
let mode: Mode;
|
||||
|
||||
@ -22,23 +31,19 @@ namespace ts.tracing {
|
||||
[key: string]: string | number | boolean | null | undefined | Args | readonly (string | number | boolean | null | undefined | Args)[];
|
||||
};
|
||||
|
||||
/** Starts tracing for the given project (unless the `fs` module is unavailable). */
|
||||
/** Starts tracing for the given project. */
|
||||
export function startTracing(tracingMode: Mode, traceDir: string, configFilePath?: string) {
|
||||
Debug.assert(!traceFd, "Tracing already started");
|
||||
Debug.assert(!tracing, "Tracing already started");
|
||||
|
||||
if (fs === undefined) {
|
||||
try {
|
||||
fs = require("fs");
|
||||
}
|
||||
catch {
|
||||
fs = false;
|
||||
catch (e) {
|
||||
throw new Error(`tracing requires having fs\n(original error: ${e.message || e})`);
|
||||
}
|
||||
}
|
||||
|
||||
if (!fs) {
|
||||
return;
|
||||
}
|
||||
|
||||
mode = tracingMode;
|
||||
|
||||
if (legendPath === undefined) {
|
||||
@ -51,9 +56,9 @@ namespace ts.tracing {
|
||||
}
|
||||
|
||||
const countPart =
|
||||
mode === Mode.Build ? `.${process.pid}-${++traceCount}` :
|
||||
mode === Mode.Server ? `.${process.pid}` :
|
||||
``;
|
||||
mode === Mode.Build ? `.${process.pid}-${++traceCount}`
|
||||
: mode === Mode.Server ? `.${process.pid}`
|
||||
: ``;
|
||||
const tracePath = combinePaths(traceDir, `trace${countPart}.json`);
|
||||
const typesPath = combinePaths(traceDir, `types${countPart}.json`);
|
||||
|
||||
@ -64,6 +69,7 @@ namespace ts.tracing {
|
||||
});
|
||||
|
||||
traceFd = fs.openSync(tracePath, "w");
|
||||
tracing = tracingEnabled; // only when traceFd is properly set
|
||||
|
||||
// Start with a prefix that contains some metadata that the devtools profiler expects (also avoids a warning on import)
|
||||
const meta = { cat: "__metadata", ph: "M", ts: 1000 * timestamp(), pid: 1, tid: 1 };
|
||||
@ -75,19 +81,14 @@ namespace ts.tracing {
|
||||
.map(v => JSON.stringify(v)).join(",\n"));
|
||||
}
|
||||
|
||||
/** Stops tracing for the in-progress project and dumps the type catalog (unless the `fs` module is unavailable). */
|
||||
/** Stops tracing for the in-progress project and dumps the type catalog. */
|
||||
export function stopTracing(typeCatalog?: readonly Type[]) {
|
||||
if (!traceFd) {
|
||||
Debug.assert(!fs, "Tracing is not in progress");
|
||||
return;
|
||||
}
|
||||
|
||||
Debug.assert(fs);
|
||||
Debug.assert(tracing, "Tracing is not in progress");
|
||||
Debug.assert(!!typeCatalog === (mode !== Mode.Server)); // Have a type catalog iff not in server mode
|
||||
|
||||
fs.writeSync(traceFd, `\n]\n`);
|
||||
fs.closeSync(traceFd);
|
||||
traceFd = undefined;
|
||||
tracing = undefined;
|
||||
|
||||
if (typeCatalog) {
|
||||
dumpTypes(typeCatalog);
|
||||
@ -99,10 +100,6 @@ namespace ts.tracing {
|
||||
}
|
||||
}
|
||||
|
||||
export function isTracing() {
|
||||
return !!traceFd;
|
||||
}
|
||||
|
||||
export const enum Phase {
|
||||
Parse = "parse",
|
||||
Program = "program",
|
||||
@ -114,7 +111,6 @@ namespace ts.tracing {
|
||||
}
|
||||
|
||||
export function instant(phase: Phase, name: string, args?: Args) {
|
||||
if (!traceFd) return;
|
||||
writeEvent("I", phase, name, args, `"s":"g"`);
|
||||
}
|
||||
|
||||
@ -127,40 +123,38 @@ namespace ts.tracing {
|
||||
* these operations.
|
||||
*/
|
||||
export function push(phase: Phase, name: string, args?: Args, separateBeginAndEnd = false) {
|
||||
if (!traceFd) return;
|
||||
if (separateBeginAndEnd) {
|
||||
writeEvent("B", phase, name, args);
|
||||
}
|
||||
eventStack.push({ phase, name, args, time: 1000 * timestamp(), separateBeginAndEnd });
|
||||
}
|
||||
export function pop() {
|
||||
if (!traceFd) return;
|
||||
Debug.assert(eventStack.length > 0);
|
||||
writeStackEvent(eventStack.length - 1, 1000 * timestamp());
|
||||
eventStack.length--;
|
||||
}
|
||||
export function popAll() {
|
||||
if (!traceFd) return;
|
||||
const endTime = 1000 * timestamp();
|
||||
for (let i = eventStack.length - 1; i >= 0; i--) {
|
||||
writeStackEvent(i, endTime);
|
||||
}
|
||||
eventStack.length = 0;
|
||||
}
|
||||
// sample every 10ms
|
||||
const sampleInterval = 1000 * 10;
|
||||
function writeStackEvent(index: number, endTime: number) {
|
||||
const { phase, name, args, time, separateBeginAndEnd } = eventStack[index];
|
||||
if (separateBeginAndEnd) {
|
||||
writeEvent("E", phase, name, args, /*extras*/ undefined, endTime);
|
||||
}
|
||||
else {
|
||||
// test if [time,endTime) straddles a sampling point
|
||||
else if (sampleInterval - (time % sampleInterval) <= endTime - time) {
|
||||
writeEvent("X", phase, name, args, `"dur":${endTime - time}`, time);
|
||||
}
|
||||
}
|
||||
|
||||
function writeEvent(eventType: string, phase: Phase, name: string, args: Args | undefined, extras?: string,
|
||||
time: number = 1000 * timestamp()) {
|
||||
Debug.assert(traceFd);
|
||||
Debug.assert(fs);
|
||||
|
||||
// In server mode, there's no easy way to dump type information, so we drop events that would require it.
|
||||
if (mode === Mode.Server && phase === Phase.CheckTypes) return;
|
||||
@ -182,8 +176,6 @@ namespace ts.tracing {
|
||||
}
|
||||
|
||||
function dumpTypes(types: readonly Type[]) {
|
||||
Debug.assert(fs);
|
||||
|
||||
performance.mark("beginDumpTypes");
|
||||
|
||||
const typesPath = legend[legend.length - 1].typesPath!;
|
||||
@ -293,7 +285,6 @@ namespace ts.tracing {
|
||||
if (!legendPath) {
|
||||
return;
|
||||
}
|
||||
Debug.assert(fs);
|
||||
|
||||
fs.writeFileSync(legendPath, JSON.stringify(legend));
|
||||
}
|
||||
@ -304,3 +295,9 @@ namespace ts.tracing {
|
||||
typesPath?: string;
|
||||
}
|
||||
}
|
||||
|
||||
/*@internal*/
|
||||
namespace ts { // eslint-disable-line one-namespace-per-file
|
||||
// define after tracingEnabled is initialized
|
||||
export const startTracing = tracingEnabled.startTracing;
|
||||
}
|
||||
|
||||
@ -223,9 +223,9 @@ namespace ts {
|
||||
// Transform each node.
|
||||
const transformed: T[] = [];
|
||||
for (const node of nodes) {
|
||||
tracing.push(tracing.Phase.Emit, "transformNodes", node.kind === SyntaxKind.SourceFile ? { path: (node as any as SourceFile).path } : { kind: node.kind, pos: node.pos, end: node.end });
|
||||
tracing?.push(tracing.Phase.Emit, "transformNodes", node.kind === SyntaxKind.SourceFile ? { path: (node as any as SourceFile).path } : { kind: node.kind, pos: node.pos, end: node.end });
|
||||
transformed.push((allowDtsFiles ? transformation : transformRoot)(node));
|
||||
tracing.pop();
|
||||
tracing?.pop();
|
||||
}
|
||||
|
||||
// prevent modification of the lexical environment.
|
||||
|
||||
@ -337,6 +337,15 @@ namespace ts {
|
||||
if (shouldTransformPrivateFields && isPrivateIdentifierPropertyAccessExpression(node.expression)) {
|
||||
// Transform call expressions of private names to properly bind the `this` parameter.
|
||||
const { thisArg, target } = factory.createCallBinding(node.expression, hoistVariableDeclaration, languageVersion);
|
||||
if (isCallChain(node)) {
|
||||
return factory.updateCallChain(
|
||||
node,
|
||||
factory.createPropertyAccessChain(visitNode(target, visitor), node.questionDotToken, "call"),
|
||||
/*questionDotToken*/ undefined,
|
||||
/*typeArguments*/ undefined,
|
||||
[visitNode(thisArg, visitor, isExpression), ...visitNodes(node.arguments, visitor, isExpression)]
|
||||
);
|
||||
}
|
||||
return factory.updateCallExpression(
|
||||
node,
|
||||
factory.createPropertyAccessExpression(visitNode(target, visitor), "call"),
|
||||
|
||||
@ -80,6 +80,7 @@ namespace ts {
|
||||
reportNonlocalAugmentation
|
||||
};
|
||||
let errorNameNode: DeclarationName | undefined;
|
||||
let errorFallbackNode: Declaration | undefined;
|
||||
|
||||
let currentSourceFile: SourceFile;
|
||||
let refs: ESMap<NodeId, SourceFile>;
|
||||
@ -161,9 +162,9 @@ namespace ts {
|
||||
}
|
||||
|
||||
function reportPrivateInBaseOfClassExpression(propertyName: string) {
|
||||
if (errorNameNode) {
|
||||
if (errorNameNode || errorFallbackNode) {
|
||||
context.addDiagnostic(
|
||||
createDiagnosticForNode(errorNameNode, Diagnostics.Property_0_of_exported_class_expression_may_not_be_private_or_protected, propertyName));
|
||||
createDiagnosticForNode((errorNameNode || errorFallbackNode)!, Diagnostics.Property_0_of_exported_class_expression_may_not_be_private_or_protected, propertyName));
|
||||
}
|
||||
}
|
||||
|
||||
@ -199,8 +200,8 @@ namespace ts {
|
||||
}
|
||||
|
||||
function reportTruncationError() {
|
||||
if (errorNameNode) {
|
||||
context.addDiagnostic(createDiagnosticForNode(errorNameNode, Diagnostics.The_inferred_type_of_this_node_exceeds_the_maximum_length_the_compiler_will_serialize_An_explicit_type_annotation_is_needed));
|
||||
if (errorNameNode || errorFallbackNode) {
|
||||
context.addDiagnostic(createDiagnosticForNode((errorNameNode || errorFallbackNode)!, Diagnostics.The_inferred_type_of_this_node_exceeds_the_maximum_length_the_compiler_will_serialize_An_explicit_type_annotation_is_needed));
|
||||
}
|
||||
}
|
||||
|
||||
@ -217,12 +218,12 @@ namespace ts {
|
||||
|
||||
function transformDeclarationsForJS(sourceFile: SourceFile, bundled?: boolean) {
|
||||
const oldDiag = getSymbolAccessibilityDiagnostic;
|
||||
getSymbolAccessibilityDiagnostic = (s) => ({
|
||||
getSymbolAccessibilityDiagnostic = (s) => (s.errorNode && canProduceDiagnostics(s.errorNode) ? createGetSymbolAccessibilityDiagnosticForNode(s.errorNode)(s) : ({
|
||||
diagnosticMessage: s.errorModuleName
|
||||
? Diagnostics.Declaration_emit_for_this_file_requires_using_private_name_0_from_module_1_An_explicit_type_annotation_may_unblock_declaration_emit
|
||||
: Diagnostics.Declaration_emit_for_this_file_requires_using_private_name_0_An_explicit_type_annotation_may_unblock_declaration_emit,
|
||||
errorNode: s.errorNode || sourceFile
|
||||
});
|
||||
}));
|
||||
const result = resolver.getDeclarationStatementsForSourceFile(sourceFile, declarationEmitNodeBuilderFlags, symbolTracker, bundled);
|
||||
getSymbolAccessibilityDiagnostic = oldDiag;
|
||||
return result;
|
||||
@ -1102,7 +1103,9 @@ namespace ts {
|
||||
diagnosticMessage: Diagnostics.Default_export_of_the_module_has_or_is_using_private_name_0,
|
||||
errorNode: input
|
||||
});
|
||||
errorFallbackNode = input;
|
||||
const varDecl = factory.createVariableDeclaration(newId, /*exclamationToken*/ undefined, resolver.createTypeOfExpression(input.expression, input, declarationEmitNodeBuilderFlags, symbolTracker), /*initializer*/ undefined);
|
||||
errorFallbackNode = undefined;
|
||||
const statement = factory.createVariableStatement(needsDeclare ? [factory.createModifier(SyntaxKind.DeclareKeyword)] : [], factory.createVariableDeclarationList([varDecl], NodeFlags.Const));
|
||||
return [statement, factory.updateExportAssignment(input, input.decorators, input.modifiers, newId)];
|
||||
}
|
||||
@ -1326,6 +1329,8 @@ namespace ts {
|
||||
}
|
||||
}
|
||||
case SyntaxKind.ClassDeclaration: {
|
||||
errorNameNode = input.name;
|
||||
errorFallbackNode = input;
|
||||
const modifiers = factory.createNodeArray(ensureModifiers(input));
|
||||
const typeParameters = ensureTypeParams(input, input.typeParameters);
|
||||
const ctor = getFirstConstructorWithBody(input);
|
||||
@ -1462,6 +1467,8 @@ namespace ts {
|
||||
if (node as Node === input) {
|
||||
return node;
|
||||
}
|
||||
errorFallbackNode = undefined;
|
||||
errorNameNode = undefined;
|
||||
return node && setOriginalNode(preserveJsDoc(node, input), input);
|
||||
}
|
||||
}
|
||||
|
||||
@ -27,7 +27,10 @@ namespace ts {
|
||||
| TypeAliasDeclaration
|
||||
| ConstructorDeclaration
|
||||
| IndexSignatureDeclaration
|
||||
| PropertyAccessExpression;
|
||||
| PropertyAccessExpression
|
||||
| JSDocTypedefTag
|
||||
| JSDocCallbackTag
|
||||
| JSDocEnumTag;
|
||||
|
||||
export function canProduceDiagnostics(node: Node): node is DeclarationDiagnosticProducing {
|
||||
return isVariableDeclaration(node) ||
|
||||
@ -48,7 +51,8 @@ namespace ts {
|
||||
isTypeAliasDeclaration(node) ||
|
||||
isConstructorDeclaration(node) ||
|
||||
isIndexSignatureDeclaration(node) ||
|
||||
isPropertyAccessExpression(node);
|
||||
isPropertyAccessExpression(node) ||
|
||||
isJSDocTypeAlias(node);
|
||||
}
|
||||
|
||||
export function createGetSymbolAccessibilityDiagnosticForNodeName(node: DeclarationDiagnosticProducing) {
|
||||
@ -124,7 +128,7 @@ namespace ts {
|
||||
}
|
||||
}
|
||||
|
||||
export function createGetSymbolAccessibilityDiagnosticForNode(node: DeclarationDiagnosticProducing): (symbolAccessibilityResult: SymbolAccessibilityResult) => SymbolAccessibilityDiagnostic | undefined {
|
||||
export function createGetSymbolAccessibilityDiagnosticForNode(node: DeclarationDiagnosticProducing): GetSymbolAccessibilityDiagnostic {
|
||||
if (isVariableDeclaration(node) || isPropertyDeclaration(node) || isPropertySignature(node) || isPropertyAccessExpression(node) || isBindingElement(node) || isConstructorDeclaration(node)) {
|
||||
return getVariableDeclarationTypeVisibilityError;
|
||||
}
|
||||
@ -149,7 +153,7 @@ namespace ts {
|
||||
else if (isImportEqualsDeclaration(node)) {
|
||||
return getImportEntityNameVisibilityError;
|
||||
}
|
||||
else if (isTypeAliasDeclaration(node)) {
|
||||
else if (isTypeAliasDeclaration(node) || isJSDocTypeAlias(node)) {
|
||||
return getTypeAliasDeclarationVisibilityError;
|
||||
}
|
||||
else {
|
||||
@ -474,11 +478,13 @@ namespace ts {
|
||||
};
|
||||
}
|
||||
|
||||
function getTypeAliasDeclarationVisibilityError(): SymbolAccessibilityDiagnostic {
|
||||
function getTypeAliasDeclarationVisibilityError(symbolAccessibilityResult: SymbolAccessibilityResult): SymbolAccessibilityDiagnostic {
|
||||
return {
|
||||
diagnosticMessage: Diagnostics.Exported_type_alias_0_has_or_is_using_private_name_1,
|
||||
errorNode: (node as TypeAliasDeclaration).type,
|
||||
typeName: (node as TypeAliasDeclaration).name
|
||||
diagnosticMessage: symbolAccessibilityResult.errorModuleName
|
||||
? Diagnostics.Exported_type_alias_0_has_or_is_using_private_name_1_from_module_2
|
||||
: Diagnostics.Exported_type_alias_0_has_or_is_using_private_name_1,
|
||||
errorNode: isJSDocTypeAlias(node) ? Debug.checkDefined(node.typeExpression) : (node as TypeAliasDeclaration).type,
|
||||
typeName: isJSDocTypeAlias(node) ? getNameOfDeclaration(node) : (node as TypeAliasDeclaration).name,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@ -1874,9 +1874,8 @@ namespace ts {
|
||||
for (const exportName of exportedNames) {
|
||||
// Mark the node to prevent triggering this rule again.
|
||||
noSubstitution[getNodeId(expression)] = true;
|
||||
expression = createExportExpression(exportName, expression);
|
||||
expression = factory.createParenthesizedExpression(createExportExpression(exportName, expression));
|
||||
}
|
||||
|
||||
return expression;
|
||||
}
|
||||
}
|
||||
|
||||
@ -2316,8 +2316,7 @@ namespace ts {
|
||||
*/
|
||||
function shouldEmitEnumDeclaration(node: EnumDeclaration) {
|
||||
return !isEnumConst(node)
|
||||
|| compilerOptions.preserveConstEnums
|
||||
|| compilerOptions.isolatedModules;
|
||||
|| shouldPreserveConstEnums(compilerOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -2507,7 +2506,7 @@ namespace ts {
|
||||
// If we can't find a parse tree node, assume the node is instantiated.
|
||||
return true;
|
||||
}
|
||||
return isInstantiatedModule(node, !!compilerOptions.preserveConstEnums || !!compilerOptions.isolatedModules);
|
||||
return isInstantiatedModule(node, shouldPreserveConstEnums(compilerOptions));
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@ -4972,10 +4972,6 @@ namespace ts {
|
||||
Unit = Literal | UniqueESSymbol | Nullable,
|
||||
StringOrNumberLiteral = StringLiteral | NumberLiteral,
|
||||
/* @internal */
|
||||
StringLikeLiteral = StringLiteral | TemplateLiteral,
|
||||
/* @internal */
|
||||
FreshableLiteral = Literal | TemplateLiteral,
|
||||
/* @internal */
|
||||
StringOrNumberLiteralOrUnique = StringLiteral | NumberLiteral | UniqueESSymbol,
|
||||
/* @internal */
|
||||
DefinitelyFalsy = StringLiteral | NumberLiteral | BigIntLiteral | BooleanLiteral | Void | Undefined | Null,
|
||||
@ -5069,9 +5065,7 @@ namespace ts {
|
||||
}
|
||||
|
||||
/* @internal */
|
||||
export type FreshableLiteralType = LiteralType | TemplateLiteralType;
|
||||
/* @internal */
|
||||
export type FreshableType = FreshableLiteralType | FreshableIntrinsicType;
|
||||
export type FreshableType = LiteralType | FreshableIntrinsicType;
|
||||
|
||||
// String literal types (TypeFlags.StringLiteral)
|
||||
// Numeric literal types (TypeFlags.NumberLiteral)
|
||||
@ -5475,8 +5469,6 @@ namespace ts {
|
||||
export interface TemplateLiteralType extends InstantiableType {
|
||||
texts: readonly string[]; // Always one element longer than types
|
||||
types: readonly Type[]; // Always at least one element
|
||||
freshType: TemplateLiteralType; // Fresh version of type
|
||||
regularType: TemplateLiteralType; // Regular version of type
|
||||
}
|
||||
|
||||
export interface StringMappingType extends InstantiableType {
|
||||
|
||||
@ -2430,7 +2430,7 @@ namespace ts {
|
||||
}
|
||||
}
|
||||
|
||||
export function getExternalModuleName(node: AnyImportOrReExport | ImportTypeNode | ImportCall): Expression | undefined {
|
||||
export function getExternalModuleName(node: AnyImportOrReExport | ImportTypeNode | ImportCall | ModuleDeclaration): Expression | undefined {
|
||||
switch (node.kind) {
|
||||
case SyntaxKind.ImportDeclaration:
|
||||
case SyntaxKind.ExportDeclaration:
|
||||
@ -2441,6 +2441,8 @@ namespace ts {
|
||||
return isLiteralImportTypeNode(node) ? node.argument.literal : undefined;
|
||||
case SyntaxKind.CallExpression:
|
||||
return node.arguments[0];
|
||||
case SyntaxKind.ModuleDeclaration:
|
||||
return node.name.kind === SyntaxKind.StringLiteral ? node.name : undefined;
|
||||
default:
|
||||
return Debug.assertNever(node);
|
||||
}
|
||||
@ -4118,11 +4120,21 @@ namespace ts {
|
||||
return file.moduleName || getExternalModuleNameFromPath(host, file.fileName, referenceFile && referenceFile.fileName);
|
||||
}
|
||||
|
||||
function getCanonicalAbsolutePath(host: ResolveModuleNameResolutionHost, path: string) {
|
||||
return host.getCanonicalFileName(getNormalizedAbsolutePath(path, host.getCurrentDirectory()));
|
||||
}
|
||||
|
||||
export function getExternalModuleNameFromDeclaration(host: ResolveModuleNameResolutionHost, resolver: EmitResolver, declaration: ImportEqualsDeclaration | ImportDeclaration | ExportDeclaration | ModuleDeclaration | ImportTypeNode): string | undefined {
|
||||
const file = resolver.getExternalModuleFileFromDeclaration(declaration);
|
||||
if (!file || file.isDeclarationFile) {
|
||||
return undefined;
|
||||
}
|
||||
// If the declaration already uses a non-relative name, and is outside the common source directory, continue to use it
|
||||
const specifier = getExternalModuleName(declaration);
|
||||
if (specifier && isStringLiteralLike(specifier) && !pathIsRelative(specifier.text) &&
|
||||
getCanonicalAbsolutePath(host, file.path).indexOf(getCanonicalAbsolutePath(host, ensureTrailingDirectorySeparator(host.getCommonSourceDirectory()))) === -1) {
|
||||
return undefined;
|
||||
}
|
||||
return getResolvedExternalModuleName(host, file);
|
||||
}
|
||||
|
||||
@ -4868,6 +4880,12 @@ namespace ts {
|
||||
return baseStr + "." + entityNameToString(expr.name);
|
||||
}
|
||||
}
|
||||
else if (isElementAccessExpression(expr)) {
|
||||
const baseStr = tryGetPropertyAccessOrIdentifierToString(expr.expression);
|
||||
if (baseStr !== undefined && isPropertyName(expr.argumentExpression)) {
|
||||
return baseStr + "." + getPropertyNameForPropertyNameNode(expr.argumentExpression);
|
||||
}
|
||||
}
|
||||
else if (isIdentifier(expr)) {
|
||||
return unescapeLeadingUnderscores(expr.escapedText);
|
||||
}
|
||||
@ -5625,7 +5643,7 @@ namespace ts {
|
||||
|
||||
function Type(this: Type, checker: TypeChecker, flags: TypeFlags) {
|
||||
this.flags = flags;
|
||||
if (Debug.isDebugging || tracing.isTracing()) {
|
||||
if (Debug.isDebugging || tracing) {
|
||||
this.checker = checker;
|
||||
}
|
||||
}
|
||||
@ -6001,6 +6019,10 @@ namespace ts {
|
||||
return !!(compilerOptions.declaration || compilerOptions.composite);
|
||||
}
|
||||
|
||||
export function shouldPreserveConstEnums(compilerOptions: CompilerOptions): boolean {
|
||||
return !!(compilerOptions.preserveConstEnums || compilerOptions.isolatedModules);
|
||||
}
|
||||
|
||||
export function isIncrementalCompilation(options: CompilerOptions) {
|
||||
return !!(options.incremental || options.composite);
|
||||
}
|
||||
@ -6079,25 +6101,43 @@ namespace ts {
|
||||
}
|
||||
|
||||
export interface SymlinkCache {
|
||||
/** Gets a map from symlink to realpath. Keys have trailing directory separators. */
|
||||
getSymlinkedDirectories(): ReadonlyESMap<Path, SymlinkedDirectory | false> | undefined;
|
||||
/** Gets a map from realpath to symlinks. Keys have trailing directory separators. */
|
||||
getSymlinkedDirectoriesByRealpath(): MultiMap<Path, string> | undefined;
|
||||
/** Gets a map from symlink to realpath */
|
||||
getSymlinkedFiles(): ReadonlyESMap<Path, string> | undefined;
|
||||
setSymlinkedDirectory(path: Path, directory: SymlinkedDirectory | false): void;
|
||||
setSymlinkedFile(path: Path, real: string): void;
|
||||
setSymlinkedDirectory(symlink: string, real: SymlinkedDirectory | false): void;
|
||||
setSymlinkedFile(symlinkPath: Path, real: string): void;
|
||||
}
|
||||
|
||||
export function createSymlinkCache(): SymlinkCache {
|
||||
export function createSymlinkCache(cwd: string, getCanonicalFileName: GetCanonicalFileName): SymlinkCache {
|
||||
let symlinkedDirectories: ESMap<Path, SymlinkedDirectory | false> | undefined;
|
||||
let symlinkedDirectoriesByRealpath: MultiMap<Path, string> | undefined;
|
||||
let symlinkedFiles: ESMap<Path, string> | undefined;
|
||||
return {
|
||||
getSymlinkedFiles: () => symlinkedFiles,
|
||||
getSymlinkedDirectories: () => symlinkedDirectories,
|
||||
getSymlinkedDirectoriesByRealpath: () => symlinkedDirectoriesByRealpath,
|
||||
setSymlinkedFile: (path, real) => (symlinkedFiles || (symlinkedFiles = new Map())).set(path, real),
|
||||
setSymlinkedDirectory: (path, directory) => (symlinkedDirectories || (symlinkedDirectories = new Map())).set(path, directory),
|
||||
setSymlinkedDirectory: (symlink, real) => {
|
||||
// Large, interconnected dependency graphs in pnpm will have a huge number of symlinks
|
||||
// where both the realpath and the symlink path are inside node_modules/.pnpm. Since
|
||||
// this path is never a candidate for a module specifier, we can ignore it entirely.
|
||||
let symlinkPath = toPath(symlink, cwd, getCanonicalFileName);
|
||||
if (!containsIgnoredPath(symlinkPath)) {
|
||||
symlinkPath = ensureTrailingDirectorySeparator(symlinkPath);
|
||||
if (real !== false && !symlinkedDirectories?.has(symlinkPath)) {
|
||||
(symlinkedDirectoriesByRealpath ||= createMultiMap()).add(ensureTrailingDirectorySeparator(real.realPath), symlink);
|
||||
}
|
||||
(symlinkedDirectories || (symlinkedDirectories = new Map())).set(symlinkPath, real);
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
export function discoverProbableSymlinks(files: readonly SourceFile[], getCanonicalFileName: GetCanonicalFileName, cwd: string): SymlinkCache {
|
||||
const cache = createSymlinkCache();
|
||||
const cache = createSymlinkCache(cwd, getCanonicalFileName);
|
||||
const symlinks = flatten<readonly [string, string]>(mapDefined(files, sf =>
|
||||
sf.resolvedModules && compact(arrayFrom(mapIterator(sf.resolvedModules.values(), res =>
|
||||
res && res.originalPath && res.resolvedFileName !== res.originalPath ? [res.resolvedFileName, res.originalPath] as const : undefined)))));
|
||||
@ -6105,7 +6145,7 @@ namespace ts {
|
||||
const [commonResolved, commonOriginal] = guessDirectorySymlink(resolvedPath, originalPath, cwd, getCanonicalFileName) || emptyArray;
|
||||
if (commonResolved && commonOriginal) {
|
||||
cache.setSymlinkedDirectory(
|
||||
toPath(commonOriginal, cwd, getCanonicalFileName),
|
||||
commonOriginal,
|
||||
{ real: commonResolved, realPath: toPath(commonResolved, cwd, getCanonicalFileName) });
|
||||
}
|
||||
}
|
||||
@ -6113,8 +6153,8 @@ namespace ts {
|
||||
}
|
||||
|
||||
function guessDirectorySymlink(a: string, b: string, cwd: string, getCanonicalFileName: GetCanonicalFileName): [string, string] | undefined {
|
||||
const aParts = getPathComponents(toPath(a, cwd, getCanonicalFileName));
|
||||
const bParts = getPathComponents(toPath(b, cwd, getCanonicalFileName));
|
||||
const aParts = getPathComponents(getNormalizedAbsolutePath(a, cwd));
|
||||
const bParts = getPathComponents(getNormalizedAbsolutePath(b, cwd));
|
||||
let isDirectory = false;
|
||||
while (!isNodeModulesOrScopedPackageDirectory(aParts[aParts.length - 2], getCanonicalFileName) &&
|
||||
!isNodeModulesOrScopedPackageDirectory(bParts[bParts.length - 2], getCanonicalFileName) &&
|
||||
@ -6546,6 +6586,18 @@ namespace ts {
|
||||
return false;
|
||||
}
|
||||
|
||||
function numberOfDirectorySeparators(str: string) {
|
||||
const match = str.match(/\//g);
|
||||
return match ? match.length : 0;
|
||||
}
|
||||
|
||||
export function compareNumberOfDirectorySeparators(path1: string, path2: string) {
|
||||
return compareValues(
|
||||
numberOfDirectorySeparators(path1),
|
||||
numberOfDirectorySeparators(path2)
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Extension boundaries by priority. Lower numbers indicate higher priorities, and are
|
||||
* aligned to the offset of the highest priority extension in the
|
||||
@ -7066,4 +7118,8 @@ namespace ts {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
export function containsIgnoredPath(path: string) {
|
||||
return some(ignoredPaths, p => stringContains(path, p));
|
||||
}
|
||||
}
|
||||
|
||||
@ -311,13 +311,16 @@ namespace ts {
|
||||
// nodes like variable declarations and binding elements can returned a view of their flags
|
||||
// that includes the modifiers from their container. i.e. flags like export/declare aren't
|
||||
// stored on the variable declaration directly, but on the containing variable statement
|
||||
// (if it has one). Similarly, flags for let/const are store on the variable declaration
|
||||
// (if it has one). Similarly, flags for let/const are stored on the variable declaration
|
||||
// list. By calling this function, all those flags are combined so that the client can treat
|
||||
// the node as if it actually had those flags.
|
||||
export function getCombinedNodeFlags(node: Node): NodeFlags {
|
||||
return getCombinedFlags(node, n => n.flags);
|
||||
}
|
||||
|
||||
/* @internal */
|
||||
export const supportedLocaleDirectories = ["cs", "de", "es", "fr", "it", "ja", "ko", "pl", "pt-br", "ru", "tr", "zh-cn", "zh-tw"];
|
||||
|
||||
/**
|
||||
* Checks to see if the locale is in the appropriate format,
|
||||
* and if it is, attempts to set the appropriate language.
|
||||
@ -326,7 +329,8 @@ namespace ts {
|
||||
locale: string,
|
||||
sys: { getExecutingFilePath(): string, resolvePath(path: string): string, fileExists(fileName: string): boolean, readFile(fileName: string): string | undefined },
|
||||
errors?: Push<Diagnostic>) {
|
||||
const matchResult = /^([a-z]+)([_\-]([a-z]+))?$/.exec(locale.toLowerCase());
|
||||
const lowerCaseLocale = locale.toLowerCase();
|
||||
const matchResult = /^([a-z]+)([_\-]([a-z]+))?$/.exec(lowerCaseLocale);
|
||||
|
||||
if (!matchResult) {
|
||||
if (errors) {
|
||||
@ -340,7 +344,7 @@ namespace ts {
|
||||
|
||||
// First try the entire locale, then fall back to just language if that's all we have.
|
||||
// Either ways do not fail, and fallback to the English diagnostic strings.
|
||||
if (!trySetLanguageAndTerritory(language, territory, errors)) {
|
||||
if (contains(supportedLocaleDirectories, lowerCaseLocale) && !trySetLanguageAndTerritory(language, territory, errors)) {
|
||||
trySetLanguageAndTerritory(language, /*territory*/ undefined, errors);
|
||||
}
|
||||
|
||||
|
||||
@ -501,7 +501,7 @@ namespace ts {
|
||||
updateSolutionBuilderHost(sys, cb, buildHost);
|
||||
const builder = createSolutionBuilder(buildHost, projects, buildOptions);
|
||||
const exitStatus = buildOptions.clean ? builder.clean() : builder.build();
|
||||
tracing.dumpLegend();
|
||||
tracing?.dumpLegend();
|
||||
return sys.exit(exitStatus);
|
||||
}
|
||||
|
||||
@ -662,11 +662,12 @@ namespace ts {
|
||||
|
||||
function enableStatisticsAndTracing(system: System, compilerOptions: CompilerOptions, isBuildMode: boolean) {
|
||||
if (canReportDiagnostics(system, compilerOptions)) {
|
||||
performance.enable();
|
||||
performance.enable(system);
|
||||
}
|
||||
|
||||
if (canTrace(system, compilerOptions)) {
|
||||
tracing.startTracing(isBuildMode ? tracing.Mode.Build : tracing.Mode.Project, compilerOptions.generateTrace!, compilerOptions.configFilePath);
|
||||
startTracing(isBuildMode ? tracingEnabled.Mode.Build : tracingEnabled.Mode.Project,
|
||||
compilerOptions.generateTrace!, compilerOptions.configFilePath);
|
||||
}
|
||||
}
|
||||
|
||||
@ -674,7 +675,7 @@ namespace ts {
|
||||
const compilerOptions = program.getCompilerOptions();
|
||||
|
||||
if (canTrace(sys, compilerOptions)) {
|
||||
tracing.stopTracing(program.getTypeCatalog());
|
||||
tracing?.stopTracing(program.getTypeCatalog());
|
||||
}
|
||||
|
||||
let statistics: Statistic[];
|
||||
|
||||
@ -605,7 +605,7 @@ namespace ts.server {
|
||||
return notImplemented();
|
||||
}
|
||||
|
||||
getDocCommentTemplateAtPosition(_fileName: string, _position: number): TextInsertion {
|
||||
getDocCommentTemplateAtPosition(_fileName: string, _position: number, _options?: DocCommentTemplateOptions): TextInsertion {
|
||||
return notImplemented();
|
||||
}
|
||||
|
||||
|
||||
@ -1453,9 +1453,9 @@ namespace FourSlash {
|
||||
}
|
||||
|
||||
public baselineRename(marker: string, options: FourSlashInterface.RenameOptions) {
|
||||
const position = this.getMarkerByName(marker).position;
|
||||
const { fileName, position } = this.getMarkerByName(marker);
|
||||
const locations = this.languageService.findRenameLocations(
|
||||
this.activeFile.fileName,
|
||||
fileName,
|
||||
position,
|
||||
options.findInStrings ?? false,
|
||||
options.findInComments ?? false,
|
||||
@ -2802,7 +2802,7 @@ namespace FourSlash {
|
||||
const matchingName = completions?.filter(e => e.name === options.name);
|
||||
const detailMessage = matchingName?.length
|
||||
? `\n Found ${matchingName.length} with name '${options.name}' from source(s) ${matchingName.map(e => `'${e.source}'`).join(", ")}.`
|
||||
: "";
|
||||
: ` (In fact, there were no completions with name '${options.name}' at all.)`;
|
||||
return this.raiseError(`No completions were found for the given name, source, and preferences.` + detailMessage);
|
||||
}
|
||||
const codeActions = details.codeActions;
|
||||
@ -3022,7 +3022,7 @@ namespace FourSlash {
|
||||
this.editScriptAndUpdateMarkers(fileName, span.start, span.start + insertedText.length, deletedText);
|
||||
}
|
||||
if (expectedTextArray.length !== actualTextArray.length) {
|
||||
this.raiseError(`Expected ${expectedTextArray.length} import fixes, got ${actualTextArray.length}`);
|
||||
this.raiseError(`Expected ${expectedTextArray.length} import fixes, got ${actualTextArray.length}:\n\n${actualTextArray.join("\n\n" + "-".repeat(20) + "\n\n")}`);
|
||||
}
|
||||
ts.zipWith(expectedTextArray, actualTextArray, (expected, actual, index) => {
|
||||
if (expected !== actual) {
|
||||
@ -3048,9 +3048,9 @@ namespace FourSlash {
|
||||
assert.deepEqual(actualModuleSpecifiers, moduleSpecifiers);
|
||||
}
|
||||
|
||||
public verifyDocCommentTemplate(expected: ts.TextInsertion | undefined) {
|
||||
public verifyDocCommentTemplate(expected: ts.TextInsertion | undefined, options?: ts.DocCommentTemplateOptions) {
|
||||
const name = "verifyDocCommentTemplate";
|
||||
const actual = this.languageService.getDocCommentTemplateAtPosition(this.activeFile.fileName, this.currentCaretPosition)!;
|
||||
const actual = this.languageService.getDocCommentTemplateAtPosition(this.activeFile.fileName, this.currentCaretPosition, options || { generateReturnInDocTemplate: true })!;
|
||||
|
||||
if (expected === undefined) {
|
||||
if (actual) {
|
||||
|
||||
@ -432,9 +432,9 @@ namespace FourSlashInterface {
|
||||
this.state.verifyNoMatchingBracePosition(bracePosition);
|
||||
}
|
||||
|
||||
public docCommentTemplateAt(marker: string | FourSlash.Marker, expectedOffset: number, expectedText: string) {
|
||||
public docCommentTemplateAt(marker: string | FourSlash.Marker, expectedOffset: number, expectedText: string, options?: ts.DocCommentTemplateOptions) {
|
||||
this.state.goToMarker(marker);
|
||||
this.state.verifyDocCommentTemplate({ newText: expectedText.replace(/\r?\n/g, "\r\n"), caretOffset: expectedOffset });
|
||||
this.state.verifyDocCommentTemplate({ newText: expectedText.replace(/\r?\n/g, "\r\n"), caretOffset: expectedOffset }, options);
|
||||
}
|
||||
|
||||
public noDocCommentTemplateAt(marker: string | FourSlash.Marker) {
|
||||
|
||||
@ -558,8 +558,8 @@ namespace Harness.LanguageService {
|
||||
getFormattingEditsAfterKeystroke(fileName: string, position: number, key: string, options: ts.FormatCodeOptions): ts.TextChange[] {
|
||||
return unwrapJSONCallResult(this.shim.getFormattingEditsAfterKeystroke(fileName, position, key, JSON.stringify(options)));
|
||||
}
|
||||
getDocCommentTemplateAtPosition(fileName: string, position: number): ts.TextInsertion {
|
||||
return unwrapJSONCallResult(this.shim.getDocCommentTemplateAtPosition(fileName, position));
|
||||
getDocCommentTemplateAtPosition(fileName: string, position: number, options?: ts.DocCommentTemplateOptions): ts.TextInsertion {
|
||||
return unwrapJSONCallResult(this.shim.getDocCommentTemplateAtPosition(fileName, position, options));
|
||||
}
|
||||
isValidBraceCompletionAtPosition(fileName: string, position: number, openingBrace: number): boolean {
|
||||
return unwrapJSONCallResult(this.shim.isValidBraceCompletionAtPosition(fileName, position, openingBrace));
|
||||
|
||||
4
src/lib/es5.d.ts
vendored
@ -4331,8 +4331,8 @@ declare namespace Intl {

interface DateTimeFormatOptions {
localeMatcher?: "best fit" | "lookup";
weekday?: "long" | "short" | "narrow";
era?: "long" | "short" | "narrow";
weekday?: "long" | "short" | "narrow";
era?: "long" | "short" | "narrow";
year?: "numeric" | "2-digit";
month?: "numeric" | "2-digit" | "long" | "short" | "narrow";
day?: "numeric" | "2-digit";

@ -207,7 +207,7 @@ namespace ts.server {
const defaultTypeSafeList: SafeList = {
"jquery": {
// jquery files can have names like "jquery-1.10.2.min.js" (or "jquery.intellisense.js")
match: /jquery(-(\.?\d+)+)?(\.intellisense)?(\.min)?\.js$/i,
match: /jquery(-[\d\.]+)?(\.intellisense)?(\.min)?\.js$/i,
types: ["jquery"]
},
"WinJS": {
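
A quick illustrative check of the relaxed safelist pattern above against typical jQuery bundle names (plain TypeScript, not part of the commit):

const jqueryRe = /jquery(-[\d\.]+)?(\.intellisense)?(\.min)?\.js$/i;
const names = ["jquery.js", "jquery-1.10.2.min.js", "jquery-3.5.1.js", "jquery.intellisense.js", "not-jquery.ts"];
for (const name of names) {
    console.log(name, jqueryRe.test(name)); // true for the first four, false for the last
}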
@ -922,13 +922,12 @@ namespace ts.server {
|
||||
case ActionSet:
|
||||
// Update the typing files and update the project
|
||||
project.updateTypingFiles(this.typingsCache.updateTypingsForProject(response.projectName, response.compilerOptions, response.typeAcquisition, response.unresolvedImports, response.typings));
|
||||
break;
|
||||
return;
|
||||
case ActionInvalidate:
|
||||
// Do not clear resolution cache, there was changes detected in typings, so enque typing request and let it get us correct results
|
||||
this.typingsCache.enqueueInstallTypingsForProject(project, project.lastCachedUnresolvedImportsList, /*forceRefresh*/ true);
|
||||
return;
|
||||
}
|
||||
this.delayUpdateProjectGraphAndEnsureProjectStructureForOpenFiles(project);
|
||||
}
|
||||
|
||||
/*@internal*/
|
||||
@ -1982,28 +1981,17 @@ namespace ts.server {
|
||||
totalNonTsFileSize += this.host.getFileSize(fileName);
|
||||
|
||||
if (totalNonTsFileSize > maxProgramSizeForNonTsFiles || totalNonTsFileSize > availableSpace) {
|
||||
this.logger.info(getExceedLimitMessage({ propertyReader, hasTSFileExtension: ts.hasTSFileExtension, host: this.host }, totalNonTsFileSize)); // eslint-disable-line @typescript-eslint/no-unnecessary-qualifier
|
||||
const top5LargestFiles = fileNames.map(f => propertyReader.getFileName(f))
|
||||
.filter(name => !hasTSFileExtension(name))
|
||||
.map(name => ({ name, size: this.host.getFileSize!(name) }))
|
||||
.sort((a, b) => b.size - a.size)
|
||||
.slice(0, 5);
|
||||
this.logger.info(`Non TS file size exceeded limit (${totalNonTsFileSize}). Largest files: ${top5LargestFiles.map(file => `${file.name}:${file.size}`).join(", ")}`);
|
||||
// Keep the size as zero since it's disabled
|
||||
return fileName;
|
||||
}
|
||||
}
|
||||
|
||||
this.projectToSizeMap.set(name, totalNonTsFileSize);
|
||||
|
||||
return;
|
||||
|
||||
function getExceedLimitMessage(context: { propertyReader: FilePropertyReader<any>, hasTSFileExtension: (filename: string) => boolean, host: ServerHost }, totalNonTsFileSize: number) {
|
||||
const files = getTop5LargestFiles(context);
|
||||
|
||||
return `Non TS file size exceeded limit (${totalNonTsFileSize}). Largest files: ${files.map(file => `${file.name}:${file.size}`).join(", ")}`;
|
||||
}
|
||||
function getTop5LargestFiles({ propertyReader, hasTSFileExtension, host }: { propertyReader: FilePropertyReader<any>, hasTSFileExtension: (filename: string) => boolean, host: ServerHost }) {
|
||||
return fileNames.map(f => propertyReader.getFileName(f))
|
||||
.filter(name => hasTSFileExtension(name))
|
||||
.map(name => ({ name, size: host.getFileSize!(name) })) // TODO: GH#18217
|
||||
.sort((a, b) => b.size - a.size)
|
||||
.slice(0, 5);
|
||||
}
|
||||
}
|
||||
|
||||
private createExternalProject(projectFileName: string, files: protocol.ExternalFile[], options: protocol.ExternalProjectCompilerOptions, typeAcquisition: TypeAcquisition, excludedFiles: NormalizedPath[]) {
|
||||
|
||||
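
The inlined logging above amounts to a small "report the largest offenders" helper applied to the non-TS files; a self-contained sketch of that pattern (hypothetical names, simplified host):

function topLargestFiles(fileNames: string[], getFileSize: (name: string) => number, count = 5) {
    return fileNames
        .map(name => ({ name, size: getFileSize(name) }))
        .sort((a, b) => b.size - a.size)   // biggest first
        .slice(0, count);
}

// e.g. topLargestFiles(nonTsFiles, sizeOf).map(f => `${f.name}:${f.size}`).join(", ")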
@ -102,7 +102,7 @@ namespace ts.server {
|
||||
|
||||
/**
|
||||
* The project root can be script info - if root is present,
|
||||
* or it could be just normalized path if root wasnt present on the host(only for non inferred project)
|
||||
* or it could be just normalized path if root wasn't present on the host(only for non inferred project)
|
||||
*/
|
||||
/* @internal */
|
||||
export interface ProjectRootFile {
|
||||
@ -147,7 +147,8 @@ namespace ts.server {
|
||||
/*@internal*/
|
||||
private hasAddedorRemovedFiles = false;
|
||||
|
||||
private lastFileExceededProgramSize: string | undefined;
|
||||
/*@internal*/
|
||||
lastFileExceededProgramSize: string | undefined;
|
||||
|
||||
// wrapper over the real language service that will suppress all semantic operations
|
||||
protected languageService: LanguageService;
|
||||
@ -1057,13 +1058,16 @@ namespace ts.server {
|
||||
|
||||
/*@internal*/
|
||||
updateTypingFiles(typingFiles: SortedReadonlyArray<string>) {
|
||||
enumerateInsertsAndDeletes<string, string>(typingFiles, this.typingFiles, getStringComparer(!this.useCaseSensitiveFileNames()),
|
||||
if (enumerateInsertsAndDeletes<string, string>(typingFiles, this.typingFiles, getStringComparer(!this.useCaseSensitiveFileNames()),
|
||||
/*inserted*/ noop,
|
||||
removed => this.detachScriptInfoFromProject(removed)
|
||||
);
|
||||
this.typingFiles = typingFiles;
|
||||
// Invalidate files with unresolved imports
|
||||
this.resolutionCache.setFilesWithInvalidatedNonRelativeUnresolvedImports(this.cachedUnresolvedImportsPerFile);
|
||||
)) {
|
||||
// If typing files changed, then only schedule project update
|
||||
this.typingFiles = typingFiles;
|
||||
// Invalidate files with unresolved imports
|
||||
this.resolutionCache.setFilesWithInvalidatedNonRelativeUnresolvedImports(this.cachedUnresolvedImportsPerFile);
|
||||
this.projectService.delayUpdateProjectGraphAndEnsureProjectStructureForOpenFiles(this);
|
||||
}
|
||||
}
|
||||
|
||||
/* @internal */
|
||||
@ -1577,6 +1581,10 @@ namespace ts.server {
|
||||
|
||||
protected enablePlugin(pluginConfigEntry: PluginImport, searchPaths: string[], pluginConfigOverrides: Map<any> | undefined) {
|
||||
this.projectService.logger.info(`Enabling plugin ${pluginConfigEntry.name} from candidate paths: ${searchPaths.join(",")}`);
|
||||
if (!pluginConfigEntry.name || parsePackageName(pluginConfigEntry.name).rest) {
|
||||
this.projectService.logger.info(`Skipped loading plugin ${pluginConfigEntry.name || JSON.stringify(pluginConfigEntry)} because only package name is allowed plugin name`);
|
||||
return;
|
||||
}
|
||||
|
||||
const log = (message: string) => this.projectService.logger.info(message);
|
||||
let errorLogs: string[] | undefined;
|
||||
|
||||
@ -861,6 +861,11 @@ namespace ts.server.protocol {
|
||||
* Length of the span.
|
||||
*/
|
||||
length: number;
|
||||
/**
|
||||
* Optional parameter for the semantic highlighting response, if absent it
|
||||
* defaults to "original".
|
||||
*/
|
||||
format?: "original" | "2020"
|
||||
}
|
||||
|
||||
/**
|
||||
@ -3268,6 +3273,8 @@ namespace ts.server.protocol {
|
||||
readonly provideRefactorNotApplicableReason?: boolean;
|
||||
readonly allowRenameOfImportPath?: boolean;
|
||||
readonly includePackageJsonAutoImports?: "auto" | "on" | "off";
|
||||
|
||||
readonly generateReturnInDocTemplate?: boolean;
|
||||
}
|
||||
|
||||
export interface CompilerOptions {
|
||||
|
||||
@ -208,25 +208,25 @@ namespace ts.server {
|
||||
try {
|
||||
if (this.operationHost.isCancellationRequested()) {
|
||||
stop = true;
|
||||
tracing.instant(tracing.Phase.Session, "stepCanceled", { seq: this.requestId, early: true });
|
||||
tracing?.instant(tracing.Phase.Session, "stepCanceled", { seq: this.requestId, early: true });
|
||||
}
|
||||
else {
|
||||
tracing.push(tracing.Phase.Session, "stepAction", { seq: this.requestId });
|
||||
tracing?.push(tracing.Phase.Session, "stepAction", { seq: this.requestId });
|
||||
action(this);
|
||||
tracing.pop();
|
||||
tracing?.pop();
|
||||
}
|
||||
}
|
||||
catch (e) {
|
||||
// Cancellation or an error may have left incomplete events on the tracing stack.
|
||||
tracing.popAll();
|
||||
tracing?.popAll();
|
||||
|
||||
stop = true;
|
||||
// ignore cancellation request
|
||||
if (e instanceof OperationCanceledException) {
|
||||
tracing.instant(tracing.Phase.Session, "stepCanceled", { seq: this.requestId });
|
||||
tracing?.instant(tracing.Phase.Session, "stepCanceled", { seq: this.requestId });
|
||||
}
|
||||
else {
|
||||
tracing.instant(tracing.Phase.Session, "stepError", { seq: this.requestId, message: (<Error>e).message });
|
||||
tracing?.instant(tracing.Phase.Session, "stepError", { seq: this.requestId, message: (<Error>e).message });
|
||||
this.operationHost.logError(e, `delayed processing of request ${this.requestId}`);
|
||||
}
|
||||
}
|
||||
@ -947,7 +947,7 @@ namespace ts.server {
|
||||
}
|
||||
|
||||
public event<T extends object>(body: T, eventName: string): void {
|
||||
tracing.instant(tracing.Phase.Session, "event", { eventName });
|
||||
tracing?.instant(tracing.Phase.Session, "event", { eventName });
|
||||
this.send(toEvent(eventName, body));
|
||||
}
|
||||
|
||||
@ -1115,7 +1115,8 @@ namespace ts.server {
|
||||
|
||||
private getEncodedSemanticClassifications(args: protocol.EncodedSemanticClassificationsRequestArgs) {
|
||||
const { file, project } = this.getFileAndProject(args);
|
||||
return project.getLanguageService().getEncodedSemanticClassifications(file, args);
|
||||
const format = args.format === "2020" ? SemanticClassificationFormat.TwentyTwenty : SemanticClassificationFormat.Original;
|
||||
return project.getLanguageService().getEncodedSemanticClassifications(file, args, format);
|
||||
}
|
||||
|
||||
private getProject(projectFileName: string | undefined): Project | undefined {
|
||||
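
For context, a hypothetical request from an editor that opts into the newer classification encoding via the protocol's optional format field (the path and the exact set of span fields are assumptions for illustration):

const requestArgs = {
    file: "/home/user/project/src/app.ts",  // hypothetical path
    start: 0,
    length: 200,
    format: "2020" as const,                // omit (or use "original") to keep the legacy encoding
};
// The session maps "2020" to SemanticClassificationFormat.TwentyTwenty before
// calling getEncodedSemanticClassifications, as shown in the hunk above.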
@ -1640,7 +1641,7 @@ namespace ts.server {
|
||||
private getDocCommentTemplate(args: protocol.FileLocationRequestArgs) {
|
||||
const { file, languageService } = this.getFileAndLanguageServiceForSyntacticOperation(args);
|
||||
const position = this.getPositionInFile(args, file);
|
||||
return languageService.getDocCommentTemplateAtPosition(file, position);
|
||||
return languageService.getDocCommentTemplateAtPosition(file, position, this.getPreferences(file));
|
||||
}
|
||||
|
||||
private getSpanOfEnclosingComment(args: protocol.SpanOfEnclosingCommentRequestArgs) {
|
||||
@ -2961,12 +2962,12 @@ namespace ts.server {
|
||||
request = this.parseMessage(message);
|
||||
relevantFile = request.arguments && (request as protocol.FileRequest).arguments.file ? (request as protocol.FileRequest).arguments : undefined;
|
||||
|
||||
tracing.instant(tracing.Phase.Session, "request", { seq: request.seq, command: request.command });
|
||||
tracing?.instant(tracing.Phase.Session, "request", { seq: request.seq, command: request.command });
|
||||
perfLogger.logStartCommand("" + request.command, this.toStringMessage(message).substring(0, 100));
|
||||
|
||||
tracing.push(tracing.Phase.Session, "executeCommand", { seq: request.seq, command: request.command }, /*separateBeginAndEnd*/ true);
|
||||
tracing?.push(tracing.Phase.Session, "executeCommand", { seq: request.seq, command: request.command }, /*separateBeginAndEnd*/ true);
|
||||
const { response, responseRequired } = this.executeCommand(request);
|
||||
tracing.pop();
|
||||
tracing?.pop();
|
||||
|
||||
if (this.logger.hasLevel(LogLevel.requestTime)) {
|
||||
const elapsedTime = hrTimeToMilliseconds(this.hrtime(start)).toFixed(4);
|
||||
@ -2980,7 +2981,7 @@ namespace ts.server {
|
||||
|
||||
// Note: Log before writing the response, else the editor can complete its activity before the server does
|
||||
perfLogger.logStopCommand("" + request.command, "Success");
|
||||
tracing.instant(tracing.Phase.Session, "response", { seq: request.seq, command: request.command, success: !!response });
|
||||
tracing?.instant(tracing.Phase.Session, "response", { seq: request.seq, command: request.command, success: !!response });
|
||||
if (response) {
|
||||
this.doOutput(response, request.command, request.seq, /*success*/ true);
|
||||
}
|
||||
@ -2990,19 +2991,19 @@ namespace ts.server {
|
||||
}
|
||||
catch (err) {
|
||||
// Cancellation or an error may have left incomplete events on the tracing stack.
|
||||
tracing.popAll();
|
||||
tracing?.popAll();
|
||||
|
||||
if (err instanceof OperationCanceledException) {
|
||||
// Handle cancellation exceptions
|
||||
perfLogger.logStopCommand("" + (request && request.command), "Canceled: " + err);
|
||||
tracing.instant(tracing.Phase.Session, "commandCanceled", { seq: request?.seq, command: request?.command });
|
||||
tracing?.instant(tracing.Phase.Session, "commandCanceled", { seq: request?.seq, command: request?.command });
|
||||
this.doOutput({ canceled: true }, request!.command, request!.seq, /*success*/ true);
|
||||
return;
|
||||
}
|
||||
|
||||
this.logErrorWorker(err, this.toStringMessage(message), relevantFile);
|
||||
perfLogger.logStopCommand("" + (request && request.command), "Error: " + err);
|
||||
tracing.instant(tracing.Phase.Session, "commandError", { seq: request?.seq, command: request?.command, message: (<Error>err).message });
|
||||
tracing?.instant(tracing.Phase.Session, "commandError", { seq: request?.seq, command: request?.command, message: (<Error>err).message });
|
||||
|
||||
this.doOutput(
|
||||
/*info*/ undefined,
|
||||
|
||||
@ -1,7 +1,10 @@
|
||||
/* @internal */
|
||||
namespace ts.codefix {
|
||||
registerCodeFix({
|
||||
errorCodes: [Diagnostics.await_expressions_are_only_allowed_at_the_top_level_of_a_file_when_that_file_is_a_module_but_this_file_has_no_imports_or_exports_Consider_adding_an_empty_export_to_make_this_file_a_module.code],
|
||||
errorCodes: [
|
||||
Diagnostics.await_expressions_are_only_allowed_at_the_top_level_of_a_file_when_that_file_is_a_module_but_this_file_has_no_imports_or_exports_Consider_adding_an_empty_export_to_make_this_file_a_module.code,
|
||||
Diagnostics.for_await_loops_are_only_allowed_at_the_top_level_of_a_file_when_that_file_is_a_module_but_this_file_has_no_imports_or_exports_Consider_adding_an_empty_export_to_make_this_file_a_module.code,
|
||||
],
|
||||
getCodeActions: context => {
|
||||
const { sourceFile } = context;
|
||||
const changes = textChanges.ChangeTracker.with(context, changes => {
|
||||
|
||||
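
An illustrative file that now also triggers this codefix: a top-level for await loop in a file with no imports or exports gets the same "add an empty export" suggestion as a top-level await expression (example input only; it is intentionally an error before the fix):

async function* ticks() { yield 1; yield 2; }

for await (const t of ticks()) {   // error: for-await at the top level of a non-module file
    console.log(t);
}

// suggested fix appends:
// export {};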
@ -65,7 +65,7 @@ namespace ts.codefix {
|
||||
const isInJavascript = isInJSFile(functionToConvert);
|
||||
const setOfExpressionsToReturn = getAllPromiseExpressionsToReturn(functionToConvert, checker);
|
||||
const functionToConvertRenamed = renameCollidingVarNames(functionToConvert, checker, synthNamesMap);
|
||||
const returnStatements = functionToConvertRenamed.body && isBlock(functionToConvertRenamed.body) ? getReturnStatementsWithPromiseHandlers(functionToConvertRenamed.body) : emptyArray;
|
||||
const returnStatements = functionToConvertRenamed.body && isBlock(functionToConvertRenamed.body) ? getReturnStatementsWithPromiseHandlers(functionToConvertRenamed.body, checker) : emptyArray;
|
||||
const transformer: Transformer = { checker, synthNamesMap, setOfExpressionsToReturn, isInJSFile: isInJavascript };
|
||||
if (!returnStatements.length) {
|
||||
return;
|
||||
@ -90,10 +90,10 @@ namespace ts.codefix {
|
||||
}
|
||||
}
|
||||
|
||||
function getReturnStatementsWithPromiseHandlers(body: Block): readonly ReturnStatement[] {
|
||||
function getReturnStatementsWithPromiseHandlers(body: Block, checker: TypeChecker): readonly ReturnStatement[] {
|
||||
const res: ReturnStatement[] = [];
|
||||
forEachReturnStatement(body, ret => {
|
||||
if (isReturnStatementWithFixablePromiseHandler(ret)) res.push(ret);
|
||||
if (isReturnStatementWithFixablePromiseHandler(ret, checker)) res.push(ret);
|
||||
});
|
||||
return res;
|
||||
}
|
||||
@ -374,13 +374,14 @@ namespace ts.codefix {
|
||||
case SyntaxKind.NullKeyword:
|
||||
// do not produce a transformed statement for a null argument
|
||||
break;
|
||||
case SyntaxKind.PropertyAccessExpression:
|
||||
case SyntaxKind.Identifier: // identifier includes undefined
|
||||
if (!argName) {
|
||||
// undefined was argument passed to promise handler
|
||||
break;
|
||||
}
|
||||
|
||||
const synthCall = factory.createCallExpression(getSynthesizedDeepClone(func as Identifier), /*typeArguments*/ undefined, isSynthIdentifier(argName) ? [argName.identifier] : []);
|
||||
const synthCall = factory.createCallExpression(getSynthesizedDeepClone(func as Identifier | PropertyAccessExpression), /*typeArguments*/ undefined, isSynthIdentifier(argName) ? [argName.identifier] : []);
|
||||
if (shouldReturn(parent, transformer)) {
|
||||
return maybeAnnotateAndReturn(synthCall, parent.typeArguments?.[0]);
|
||||
}
|
||||
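
Sketch of the new case handled here: a .then handler that is a property access (for example console.log) rather than a bare identifier can now be converted (the before/after below is illustrative, not taken from the test suite):

// before
function loadData(): Promise<void> {
    return fetch("/data").then(console.log);
}

// after conversion (sketch)
// async function loadData(): Promise<void> {
//     const result = await fetch("/data");
//     return console.log(result);
// }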
@ -410,7 +411,7 @@ namespace ts.codefix {
|
||||
for (const statement of funcBody.statements) {
|
||||
if (isReturnStatement(statement)) {
|
||||
seenReturnStatement = true;
|
||||
if (isReturnStatementWithFixablePromiseHandler(statement)) {
|
||||
if (isReturnStatementWithFixablePromiseHandler(statement, transformer.checker)) {
|
||||
refactoredStmts = refactoredStmts.concat(getInnerTransformationBody(transformer, [statement], prevArgName));
|
||||
}
|
||||
else {
|
||||
@ -432,7 +433,7 @@ namespace ts.codefix {
|
||||
seenReturnStatement);
|
||||
}
|
||||
else {
|
||||
const innerRetStmts = isFixablePromiseHandler(funcBody) ? [factory.createReturnStatement(funcBody)] : emptyArray;
|
||||
const innerRetStmts = isFixablePromiseHandler(funcBody, transformer.checker) ? [factory.createReturnStatement(funcBody)] : emptyArray;
|
||||
const innerCbBody = getInnerTransformationBody(transformer, innerRetStmts, prevArgName);
|
||||
|
||||
if (innerCbBody.length > 0) {
|
||||
@ -536,6 +537,9 @@ namespace ts.codefix {
|
||||
else if (isIdentifier(funcNode)) {
|
||||
name = getMapEntryOrDefault(funcNode);
|
||||
}
|
||||
else if (isPropertyAccessExpression(funcNode) && isIdentifier(funcNode.name)) {
|
||||
name = getMapEntryOrDefault(funcNode.name);
|
||||
}
|
||||
|
||||
// return undefined argName when arg is null or undefined
|
||||
// eslint-disable-next-line no-in-operator
|
||||
|
||||
@ -3,7 +3,7 @@ namespace ts.codefix {
|
||||
const fixId = "fixAwaitInSyncFunction";
|
||||
const errorCodes = [
|
||||
Diagnostics.await_expressions_are_only_allowed_within_async_functions_and_at_the_top_levels_of_modules.code,
|
||||
Diagnostics.A_for_await_of_statement_is_only_allowed_within_an_async_function_or_async_generator.code,
|
||||
Diagnostics.for_await_loops_are_only_allowed_within_async_functions_and_at_the_top_levels_of_modules.code,
|
||||
];
|
||||
registerCodeFix({
|
||||
errorCodes,
|
||||
|
||||
@ -1,7 +1,10 @@
|
||||
/* @internal */
|
||||
namespace ts.codefix {
|
||||
registerCodeFix({
|
||||
errorCodes: [Diagnostics.Top_level_await_expressions_are_only_allowed_when_the_module_option_is_set_to_esnext_or_system_and_the_target_option_is_set_to_es2017_or_higher.code],
|
||||
errorCodes: [
|
||||
Diagnostics.Top_level_await_expressions_are_only_allowed_when_the_module_option_is_set_to_esnext_or_system_and_the_target_option_is_set_to_es2017_or_higher.code,
|
||||
Diagnostics.Top_level_for_await_loops_are_only_allowed_when_the_module_option_is_set_to_esnext_or_system_and_the_target_option_is_set_to_es2017_or_higher.code,
|
||||
],
|
||||
getCodeActions: context => {
|
||||
const compilerOptions = context.program.getCompilerOptions();
|
||||
const { configFile } = compilerOptions;
|
||||
|
||||
@ -321,7 +321,7 @@ namespace ts.codefix {
|
||||
return typeNode;
|
||||
}
|
||||
|
||||
function createDummyParameters(argCount: number, names: (string | undefined)[] | undefined, types: (TypeNode | undefined)[] | undefined, minArgumentCount: number | undefined, inJs: boolean): ParameterDeclaration[] {
|
||||
function createDummyParameters(argCount: number, names: (string | undefined)[] | undefined, types: (TypeNode | undefined)[] | undefined, minArgumentCount: number | undefined, inJs: boolean): ParameterDeclaration[] {
|
||||
const parameters: ParameterDeclaration[] = [];
|
||||
for (let i = 0; i < argCount; i++) {
|
||||
const newParameter = factory.createParameterDeclaration(
|
||||
|
||||
@ -215,7 +215,7 @@ namespace ts.codefix {
|
||||
: getAllReExportingModules(sourceFile, exportedSymbol, moduleSymbol, symbolName, host, program, /*useAutoImportProvider*/ true);
|
||||
const useRequire = shouldUseRequire(sourceFile, program);
|
||||
const preferTypeOnlyImport = compilerOptions.importsNotUsedAsValues === ImportsNotUsedAsValues.Error && !isSourceFileJS(sourceFile) && isValidTypeOnlyAliasUseSite(getTokenAtPosition(sourceFile, position));
|
||||
const moduleSpecifier = first(getNewImportInfos(program, sourceFile, position, preferTypeOnlyImport, useRequire, exportInfos, host, preferences)).moduleSpecifier;
|
||||
const moduleSpecifier = getBestFix(getNewImportInfos(program, sourceFile, position, preferTypeOnlyImport, useRequire, exportInfos, host, preferences), sourceFile, program, host).moduleSpecifier;
|
||||
const fix = getImportFixForSymbol(sourceFile, exportInfos, moduleSymbol, symbolName, program, position, preferTypeOnlyImport, useRequire, host, preferences);
|
||||
return { moduleSpecifier, codeAction: codeFixActionToCodeAction(codeActionForFix({ host, formatContext, preferences }, sourceFile, symbolName, fix, getQuotePreference(sourceFile, preferences))) };
|
||||
}
|
||||
@ -223,7 +223,7 @@ namespace ts.codefix {
|
||||
function getImportFixForSymbol(sourceFile: SourceFile, exportInfos: readonly SymbolExportInfo[], moduleSymbol: Symbol, symbolName: string, program: Program, position: number | undefined, preferTypeOnlyImport: boolean, useRequire: boolean, host: LanguageServiceHost, preferences: UserPreferences) {
|
||||
Debug.assert(exportInfos.some(info => info.moduleSymbol === moduleSymbol), "Some exportInfo should match the specified moduleSymbol");
|
||||
// We sort the best codefixes first, so taking `first` is best.
|
||||
return first(getFixForImport(exportInfos, symbolName, position, preferTypeOnlyImport, useRequire, program, sourceFile, host, preferences));
|
||||
return getBestFix(getFixForImport(exportInfos, symbolName, position, preferTypeOnlyImport, useRequire, program, sourceFile, host, preferences), sourceFile, program, host);
|
||||
}
|
||||
|
||||
function codeFixActionToCodeAction({ description, changes, commands }: CodeFixAction): CodeAction {
|
||||
@ -424,28 +424,13 @@ namespace ts.codefix {
|
||||
): readonly (FixAddNewImport | FixUseImportType)[] {
|
||||
const isJs = isSourceFileJS(sourceFile);
|
||||
const compilerOptions = program.getCompilerOptions();
|
||||
const { allowsImportingSpecifier } = createAutoImportFilter(sourceFile, program, host);
|
||||
|
||||
const choicesForEachExportingModule = flatMap(moduleSymbols, ({ moduleSymbol, importKind, exportedSymbolIsTypeOnly }) =>
|
||||
return flatMap(moduleSymbols, ({ moduleSymbol, importKind, exportedSymbolIsTypeOnly }) =>
|
||||
moduleSpecifiers.getModuleSpecifiers(moduleSymbol, program.getTypeChecker(), compilerOptions, sourceFile, createModuleSpecifierResolutionHost(program, host), preferences)
|
||||
.map((moduleSpecifier): FixAddNewImport | FixUseImportType =>
|
||||
// `position` should only be undefined at a missing jsx namespace, in which case we shouldn't be looking for pure types.
|
||||
exportedSymbolIsTypeOnly && isJs
|
||||
? { kind: ImportFixKind.ImportType, moduleSpecifier, position: Debug.checkDefined(position, "position should be defined") }
|
||||
: { kind: ImportFixKind.AddNew, moduleSpecifier, importKind, useRequire, typeOnly: preferTypeOnlyImport }));
|
||||
|
||||
// Sort by presence in package.json, then shortest paths first
|
||||
return sort(choicesForEachExportingModule, (a, b) => {
|
||||
const allowsImportingA = allowsImportingSpecifier(a.moduleSpecifier);
|
||||
const allowsImportingB = allowsImportingSpecifier(b.moduleSpecifier);
|
||||
if (allowsImportingA && !allowsImportingB) {
|
||||
return -1;
|
||||
}
|
||||
if (allowsImportingB && !allowsImportingA) {
|
||||
return 1;
|
||||
}
|
||||
return a.moduleSpecifier.length - b.moduleSpecifier.length;
|
||||
});
|
||||
}
|
||||
|
||||
function getFixesForAddImport(
|
||||
@ -479,7 +464,31 @@ namespace ts.codefix {
|
||||
const info = errorCode === Diagnostics._0_refers_to_a_UMD_global_but_the_current_file_is_a_module_Consider_adding_an_import_instead.code
|
||||
? getFixesInfoForUMDImport(context, symbolToken)
|
||||
: isIdentifier(symbolToken) ? getFixesInfoForNonUMDImport(context, symbolToken, useAutoImportProvider) : undefined;
|
||||
return info && { ...info, fixes: sort(info.fixes, (a, b) => a.kind - b.kind) };
|
||||
return info && { ...info, fixes: sortFixes(info.fixes, context.sourceFile, context.program, context.host) };
|
||||
}
|
||||
|
||||
function sortFixes(fixes: readonly ImportFix[], sourceFile: SourceFile, program: Program, host: LanguageServiceHost): readonly ImportFix[] {
|
||||
const { allowsImportingSpecifier } = createAutoImportFilter(sourceFile, program, host);
|
||||
return sort(fixes, (a, b) => compareValues(a.kind, b.kind) || compareModuleSpecifiers(a, b, allowsImportingSpecifier));
|
||||
}
|
||||
|
||||
function getBestFix<T extends ImportFix>(fixes: readonly T[], sourceFile: SourceFile, program: Program, host: LanguageServiceHost): T {
|
||||
// These will always be placed first if available, and are better than other kinds
|
||||
if (fixes[0].kind === ImportFixKind.UseNamespace || fixes[0].kind === ImportFixKind.AddToExisting) {
|
||||
return fixes[0];
|
||||
}
|
||||
const { allowsImportingSpecifier } = createAutoImportFilter(sourceFile, program, host);
|
||||
return fixes.reduce((best, fix) =>
|
||||
compareModuleSpecifiers(fix, best, allowsImportingSpecifier) === Comparison.LessThan ? fix : best
|
||||
);
|
||||
}
|
||||
|
||||
function compareModuleSpecifiers(a: ImportFix, b: ImportFix, allowsImportingSpecifier: (specifier: string) => boolean): Comparison {
|
||||
if (a.kind !== ImportFixKind.UseNamespace && b.kind !== ImportFixKind.UseNamespace) {
|
||||
return compareBooleans(allowsImportingSpecifier(a.moduleSpecifier), allowsImportingSpecifier(b.moduleSpecifier))
|
||||
|| compareNumberOfDirectorySeparators(a.moduleSpecifier, b.moduleSpecifier);
|
||||
}
|
||||
return Comparison.EqualTo;
|
||||
}
|
||||
|
||||
function getFixesInfoForUMDImport({ sourceFile, program, host, preferences }: CodeFixContextBase, token: Node): FixesInfo | undefined {
|
||||
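
The selection above follows a generic "pick the best element by a comparator" shape; a minimal self-contained sketch of that pattern with illustrative names (not the codefix API):

enum Cmp { LessThan = -1, EqualTo = 0, GreaterThan = 1 }

function pickBest<T>(items: readonly T[], compare: (a: T, b: T) => Cmp): T {
    return items.reduce((best, item) => compare(item, best) === Cmp.LessThan ? item : best);
}

// Prefer specifiers with fewer directory separators, mirroring the tie-breaker above.
const bySeparators = (a: string, b: string): Cmp =>
    a.split("/").length < b.split("/").length ? Cmp.LessThan :
    a.split("/").length > b.split("/").length ? Cmp.GreaterThan : Cmp.EqualTo;

console.log(pickBest(["../../lib/util", "./util"], bySeparators)); // "./util"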
@ -600,7 +609,7 @@ namespace ts.codefix {
|
||||
const exported = getDefaultLikeExportWorker(importingFile, moduleSymbol, checker, compilerOptions);
|
||||
if (!exported) return undefined;
|
||||
const { symbol, kind } = exported;
|
||||
const info = getDefaultExportInfoWorker(symbol, moduleSymbol, checker, compilerOptions);
|
||||
const info = getDefaultExportInfoWorker(symbol, checker, compilerOptions);
|
||||
return info && { symbol, kind, ...info };
|
||||
}
|
||||
|
||||
@ -636,7 +645,7 @@ namespace ts.codefix {
|
||||
return allowSyntheticDefaults ? ImportKind.Default : ImportKind.CommonJS;
|
||||
}
|
||||
|
||||
function getDefaultExportInfoWorker(defaultExport: Symbol, moduleSymbol: Symbol, checker: TypeChecker, compilerOptions: CompilerOptions): { readonly symbolForMeaning: Symbol, readonly name: string } | undefined {
|
||||
function getDefaultExportInfoWorker(defaultExport: Symbol, checker: TypeChecker, compilerOptions: CompilerOptions): { readonly symbolForMeaning: Symbol, readonly name: string } | undefined {
|
||||
const localSymbol = getLocalSymbolForExportDefault(defaultExport);
|
||||
if (localSymbol) return { symbolForMeaning: localSymbol, name: localSymbol.name };
|
||||
|
||||
@ -650,7 +659,7 @@ namespace ts.codefix {
|
||||
// but we can still offer completions for it.
|
||||
// - `aliased.parent` will be undefined if the module is exporting `globalThis.something`,
|
||||
// or another expression that resolves to a global.
|
||||
return getDefaultExportInfoWorker(aliased, aliased.parent, checker, compilerOptions);
|
||||
return getDefaultExportInfoWorker(aliased, checker, compilerOptions);
|
||||
}
|
||||
}
|
||||
|
||||
@ -658,15 +667,13 @@ namespace ts.codefix {
|
||||
defaultExport.escapedName !== InternalSymbolName.ExportEquals) {
|
||||
return { symbolForMeaning: defaultExport, name: defaultExport.getName() };
|
||||
}
|
||||
return { symbolForMeaning: defaultExport, name: moduleSymbolToValidIdentifier(moduleSymbol, compilerOptions.target) };
|
||||
return { symbolForMeaning: defaultExport, name: getNameForExportedSymbol(defaultExport, compilerOptions.target) };
|
||||
}
|
||||
|
||||
function getNameForExportDefault(symbol: Symbol): string | undefined {
|
||||
return symbol.declarations && firstDefined(symbol.declarations, declaration => {
|
||||
if (isExportAssignment(declaration)) {
|
||||
if (isIdentifier(declaration.expression)) {
|
||||
return declaration.expression.text;
|
||||
}
|
||||
return tryCast(skipOuterExpressions(declaration.expression), isIdentifier)?.text;
|
||||
}
|
||||
else if (isExportSpecifier(declaration)) {
|
||||
Debug.assert(declaration.name.text === InternalSymbolName.Default, "Expected the specifier to be a default export");
|
||||
|
||||
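
An illustrative module the skipOuterExpressions change accounts for: a parenthesized (or otherwise wrapped) default export still resolves to the inner identifier's text, so the export can be named foo (sketch, not from the tests):

// wrapped.ts (example input)
const foo = { answer: 42 };
export default (foo);   // outer parentheses/assertions are skipped to find the identifier `foo`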
@ -732,7 +732,7 @@ namespace ts.Completions {
|
||||
exportedSymbol,
|
||||
moduleSymbol,
|
||||
sourceFile,
|
||||
getNameForExportedSymbol(symbol, compilerOptions.target!),
|
||||
getNameForExportedSymbol(symbol, compilerOptions.target),
|
||||
host,
|
||||
program,
|
||||
formatContext,
|
||||
@ -855,6 +855,7 @@ namespace ts.Completions {
|
||||
host: LanguageServiceHost
|
||||
): CompletionData | Request | undefined {
|
||||
const typeChecker = program.getTypeChecker();
|
||||
const compilerOptions = program.getCompilerOptions();
|
||||
|
||||
let start = timestamp();
|
||||
let currentToken = getTokenAtPosition(sourceFile, position); // TODO: GH#15853
|
||||
@ -1386,7 +1387,7 @@ namespace ts.Completions {
|
||||
|
||||
// Get all entities in the current scope.
|
||||
completionKind = CompletionKind.Global;
|
||||
isNewIdentifierLocation = isNewIdentifierDefinitionLocation(contextToken);
|
||||
isNewIdentifierLocation = isNewIdentifierDefinitionLocation();
|
||||
|
||||
if (previousToken !== contextToken) {
|
||||
Debug.assert(!!previousToken, "Expected 'contextToken' to be defined when different from 'previousToken'.");
|
||||
@ -1504,6 +1505,8 @@ namespace ts.Completions {
|
||||
: KeywordCompletionFilters.TypeKeywords;
|
||||
}
|
||||
|
||||
const variableDeclaration = getVariableDeclaration(location);
|
||||
|
||||
filterMutate(symbols, symbol => {
|
||||
if (!isSourceFile(location)) {
|
||||
// export = /**/ here we want to get all meanings, so any symbol is ok
|
||||
@ -1511,7 +1514,28 @@ namespace ts.Completions {
|
||||
return true;
|
||||
}
|
||||
|
||||
symbol = skipAlias(symbol, typeChecker);
|
||||
// Filter out variables from their own initializers
|
||||
// `const a = /* no 'a' here */`
|
||||
if (variableDeclaration && symbol.valueDeclaration === variableDeclaration) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// External modules can have global export declarations that will be
|
||||
// available as global keywords in all scopes. But if the external module
|
||||
// already has an explicit export and user only wants to user explicit
|
||||
// module imports then the global keywords will be filtered out so auto
|
||||
// import suggestions will win in the completion
|
||||
const symbolOrigin = skipAlias(symbol, typeChecker);
|
||||
// We only want to filter out the global keywords
|
||||
// Auto Imports are not available for scripts so this conditional is always false
|
||||
if (!!sourceFile.externalModuleIndicator
|
||||
&& !compilerOptions.allowUmdGlobalAccess
|
||||
&& symbolToSortTextMap[getSymbolId(symbol)] === SortText.GlobalsOrKeywords
|
||||
&& symbolToSortTextMap[getSymbolId(symbolOrigin)] === SortText.AutoImportSuggestions) {
|
||||
return false;
|
||||
}
|
||||
// Continue with origin symbol
|
||||
symbol = symbolOrigin;
|
||||
|
||||
// import m = /**/ <-- It can only access namespace (if typing import = x. this would get member symbols and not namespace)
|
||||
if (isInRightSideOfInternalImportEqualsDeclaration(location)) {
|
||||
@ -1529,6 +1553,19 @@ namespace ts.Completions {
|
||||
});
|
||||
}
|
||||
|
||||
function getVariableDeclaration(property: Node): VariableDeclaration | undefined {
|
||||
const variableDeclaration = findAncestor(property, node =>
|
||||
isFunctionBlock(node) || isArrowFunctionBody(node) || isBindingPattern(node)
|
||||
? "quit"
|
||||
: isVariableDeclaration(node));
|
||||
|
||||
return variableDeclaration as VariableDeclaration | undefined;
|
||||
}
|
||||
|
||||
function isArrowFunctionBody(node: Node) {
|
||||
return node.parent && isArrowFunction(node.parent) && node.parent.body === node;
|
||||
};
|
||||
|
||||
function isTypeOnlyCompletion(): boolean {
|
||||
return insideJsDocTagTypeExpression
|
||||
|| !isContextTokenValueLocation(contextToken) &&
|
||||
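
Illustrative completion scenario the new getVariableDeclaration filter covers: while typing a variable's own initializer, that variable is no longer suggested, and the ancestor walk stops at function bodies and binding patterns (simplified example):

const a = 1;                    // while typing this initializer, completions exclude `a` itself
const b = [1, 2].map(x => x);   // inside the arrow body the ancestor walk quits, so no filtering applies there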
@ -1812,18 +1849,19 @@ namespace ts.Completions {
|
||||
return false;
|
||||
}
|
||||
|
||||
function isNewIdentifierDefinitionLocation(previousToken: Node | undefined): boolean {
|
||||
if (previousToken) {
|
||||
const containingNodeKind = previousToken.parent.kind;
|
||||
function isNewIdentifierDefinitionLocation(): boolean {
|
||||
if (contextToken) {
|
||||
const containingNodeKind = contextToken.parent.kind;
|
||||
// Previous token may have been a keyword that was converted to an identifier.
|
||||
switch (keywordForNode(previousToken)) {
|
||||
switch (keywordForNode(contextToken)) {
|
||||
case SyntaxKind.CommaToken:
|
||||
return containingNodeKind === SyntaxKind.CallExpression // func( a, |
|
||||
|| containingNodeKind === SyntaxKind.Constructor // constructor( a, | /* public, protected, private keywords are allowed here, so show completion */
|
||||
|| containingNodeKind === SyntaxKind.NewExpression // new C(a, |
|
||||
|| containingNodeKind === SyntaxKind.ArrayLiteralExpression // [a, |
|
||||
|| containingNodeKind === SyntaxKind.BinaryExpression // const x = (a, |
|
||||
|| containingNodeKind === SyntaxKind.FunctionType; // var x: (s: string, list|
|
||||
|| containingNodeKind === SyntaxKind.FunctionType // var x: (s: string, list|
|
||||
|| containingNodeKind === SyntaxKind.ObjectLiteralExpression; // const obj = { x, |
|
||||
|
||||
case SyntaxKind.OpenParenToken:
|
||||
return containingNodeKind === SyntaxKind.CallExpression // func( |
|
||||
@ -1845,7 +1883,8 @@ namespace ts.Completions {
|
||||
return containingNodeKind === SyntaxKind.ModuleDeclaration; // module A.|
|
||||
|
||||
case SyntaxKind.OpenBraceToken:
|
||||
return containingNodeKind === SyntaxKind.ClassDeclaration; // class A{ |
|
||||
return containingNodeKind === SyntaxKind.ClassDeclaration // class A { |
|
||||
|| containingNodeKind === SyntaxKind.ObjectLiteralExpression; // const obj = { |
|
||||
|
||||
case SyntaxKind.EqualsToken:
|
||||
return containingNodeKind === SyntaxKind.VariableDeclaration // const x = a|
|
||||
|
||||
@ -390,7 +390,8 @@ namespace ts.formatting {
|
||||
sourceFile));
|
||||
}
|
||||
|
||||
function formatSpanWorker(originalRange: TextRange,
|
||||
function formatSpanWorker(
|
||||
originalRange: TextRange,
|
||||
enclosingNode: Node,
|
||||
initialIndentation: number,
|
||||
delta: number,
|
||||
@ -424,16 +425,20 @@ namespace ts.formatting {
|
||||
}
|
||||
|
||||
if (!formattingScanner.isOnToken()) {
|
||||
const indentation = SmartIndenter.nodeWillIndentChild(options, enclosingNode, /*child*/ undefined, sourceFile, /*indentByDefault*/ false)
|
||||
? initialIndentation + options.indentSize!
|
||||
: initialIndentation;
|
||||
const leadingTrivia = formattingScanner.getCurrentLeadingTrivia();
|
||||
if (leadingTrivia) {
|
||||
indentTriviaItems(leadingTrivia, initialIndentation, /*indentNextTokenOrTrivia*/ false,
|
||||
indentTriviaItems(leadingTrivia, indentation, /*indentNextTokenOrTrivia*/ false,
|
||||
item => processRange(item, sourceFile.getLineAndCharacterOfPosition(item.pos), enclosingNode, enclosingNode, /*dynamicIndentation*/ undefined!));
|
||||
if (options.trimTrailingWhitespace !== false) {
|
||||
trimTrailingWhitespacesForRemainingRange();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (options.trimTrailingWhitespace !== false) {
|
||||
trimTrailingWhitespacesForRemainingRange();
|
||||
}
|
||||
|
||||
return edits;
|
||||
|
||||
// local functions
|
||||
@ -498,10 +503,11 @@ namespace ts.formatting {
|
||||
// - we need to get the indentation on last line and the delta of parent
|
||||
return { indentation: indentationOnLastIndentedLine, delta: parentDynamicIndentation.getDelta(node) };
|
||||
}
|
||||
else if (SmartIndenter.childStartsOnTheSameLineWithElseInIfStatement(parent, node, startLine, sourceFile)) {
|
||||
return { indentation: parentDynamicIndentation.getIndentation(), delta };
|
||||
}
|
||||
else if (SmartIndenter.argumentStartsOnSameLineAsPreviousArgument(parent, node, startLine, sourceFile)) {
|
||||
else if (
|
||||
SmartIndenter.childStartsOnTheSameLineWithElseInIfStatement(parent, node, startLine, sourceFile) ||
|
||||
SmartIndenter.childIsUnindentedBranchOfConditionalExpression(parent, node, startLine, sourceFile) ||
|
||||
SmartIndenter.argumentStartsOnSameLineAsPreviousArgument(parent, node, startLine, sourceFile)
|
||||
) {
|
||||
return { indentation: parentDynamicIndentation.getIndentation(), delta };
|
||||
}
|
||||
else {
|
||||
@ -592,6 +598,7 @@ namespace ts.formatting {
|
||||
case SyntaxKind.JsxOpeningElement:
|
||||
case SyntaxKind.JsxClosingElement:
|
||||
case SyntaxKind.JsxSelfClosingElement:
|
||||
case SyntaxKind.ExpressionWithTypeArguments:
|
||||
return false;
|
||||
}
|
||||
break;
|
||||
|
||||
@ -182,7 +182,25 @@ namespace ts.formatting {
|
||||
|
||||
if (useActualIndentation) {
|
||||
// check if current node is a list item - if yes, take indentation from it
|
||||
let actualIndentation = getActualIndentationForListItem(current, sourceFile, options, !parentAndChildShareLine);
|
||||
const firstListChild = getContainingList(current, sourceFile)?.[0];
|
||||
// A list indents its children if the children begin on a later line than the list itself:
|
||||
//
|
||||
// f1( L0 - List start
|
||||
// { L1 - First child start: indented, along with all other children
|
||||
// prop: 0
|
||||
// },
|
||||
// {
|
||||
// prop: 1
|
||||
// }
|
||||
// )
|
||||
//
|
||||
// f2({ L0 - List start and first child start: children are not indented.
|
||||
// prop: 0 Object properties are indented only one level, because the list
|
||||
// }, { itself contributes nothing.
|
||||
// prop: 1 L3 - The indentation of the second object literal is best understood by
|
||||
// }) looking at the relationship between the list and *first* list item.
|
||||
const listIndentsChild = !!firstListChild && getStartLineAndCharacterForNode(firstListChild, sourceFile).line > containingListOrParentStart.line;
|
||||
let actualIndentation = getActualIndentationForListItem(current, sourceFile, options, listIndentsChild);
|
||||
if (actualIndentation !== Value.Unknown) {
|
||||
return actualIndentation + indentationDelta;
|
||||
}
|
||||
@ -323,6 +341,49 @@ namespace ts.formatting {
|
||||
return false;
|
||||
}
|
||||
|
||||
// A multiline conditional typically increases the indentation of its whenTrue and whenFalse children:
|
||||
//
|
||||
// condition
|
||||
// ? whenTrue
|
||||
// : whenFalse;
|
||||
//
|
||||
// However, that indentation does not apply if the subexpressions themselves span multiple lines,
|
||||
// applying their own indentation:
|
||||
//
|
||||
// (() => {
|
||||
// return complexCalculationForCondition();
|
||||
// })() ? {
|
||||
// whenTrue: 'multiline object literal'
|
||||
// } : (
|
||||
// whenFalse('multiline parenthesized expression')
|
||||
// );
|
||||
//
|
||||
// In these cases, we must discard the indentation increase that would otherwise be applied to the
|
||||
// whenTrue and whenFalse children to avoid double-indenting their contents. To identify this scenario,
|
||||
// we check for the whenTrue branch beginning on the line that the condition ends, and the whenFalse
|
||||
// branch beginning on the line that the whenTrue branch ends.
|
||||
export function childIsUnindentedBranchOfConditionalExpression(parent: Node, child: TextRangeWithKind, childStartLine: number, sourceFile: SourceFileLike): boolean {
|
||||
if (isConditionalExpression(parent) && (child === parent.whenTrue || child === parent.whenFalse)) {
|
||||
const conditionEndLine = getLineAndCharacterOfPosition(sourceFile, parent.condition.end).line;
|
||||
if (child === parent.whenTrue) {
|
||||
return childStartLine === conditionEndLine;
|
||||
}
|
||||
else {
|
||||
// On the whenFalse side, we have to look at the whenTrue side, because if that one was
|
||||
// indented, whenFalse must also be indented:
|
||||
//
|
||||
// const y = true
|
||||
// ? 1 : ( L1: whenTrue indented because it's on a new line
|
||||
// 0 L2: indented two stops, one because whenTrue was indented
|
||||
// ); and one because of the parentheses spanning multiple lines
|
||||
const trueStartLine = getStartLineAndCharacterForNode(parent.whenTrue, sourceFile).line;
|
||||
const trueEndLine = getLineAndCharacterOfPosition(sourceFile, parent.whenTrue.end).line;
|
||||
return conditionEndLine === trueStartLine && trueEndLine === childStartLine;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
export function argumentStartsOnSameLineAsPreviousArgument(parent: Node, child: TextRangeWithKind, childStartLine: number, sourceFile: SourceFileLike): boolean {
|
||||
if (isCallOrNewExpression(parent)) {
|
||||
if (!parent.arguments) return false;
|
||||
@ -581,7 +642,7 @@ namespace ts.formatting {
|
||||
if (childKind === SyntaxKind.TypeLiteral || childKind === SyntaxKind.TupleType) {
|
||||
return false;
|
||||
}
|
||||
// falls through
|
||||
break;
|
||||
}
|
||||
// No explicit rule for given nodes so the result will follow the default value argument
|
||||
return indentByDefault;
|
||||
|
||||
@ -251,7 +251,7 @@ namespace ts.JsDoc {
|
||||
* @param position The (character-indexed) position in the file where the check should
|
||||
* be performed.
|
||||
*/
|
||||
export function getDocCommentTemplateAtPosition(newLine: string, sourceFile: SourceFile, position: number): TextInsertion | undefined {
|
||||
export function getDocCommentTemplateAtPosition(newLine: string, sourceFile: SourceFile, position: number, options?: DocCommentTemplateOptions): TextInsertion | undefined {
|
||||
const tokenAtPos = getTokenAtPosition(sourceFile, position);
|
||||
const existingDocComment = findAncestor(tokenAtPos, isJSDoc);
|
||||
if (existingDocComment && (existingDocComment.comment !== undefined || length(existingDocComment.tags))) {
|
||||
@ -265,7 +265,7 @@ namespace ts.JsDoc {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const commentOwnerInfo = getCommentOwnerInfo(tokenAtPos);
|
||||
const commentOwnerInfo = getCommentOwnerInfo(tokenAtPos, options);
|
||||
if (!commentOwnerInfo) {
|
||||
return undefined;
|
||||
}
|
||||
@ -325,10 +325,10 @@ namespace ts.JsDoc {
|
||||
readonly parameters?: readonly ParameterDeclaration[];
|
||||
readonly hasReturn?: boolean;
|
||||
}
|
||||
function getCommentOwnerInfo(tokenAtPos: Node): CommentOwnerInfo | undefined {
|
||||
return forEachAncestor(tokenAtPos, getCommentOwnerInfoWorker);
|
||||
function getCommentOwnerInfo(tokenAtPos: Node, options: DocCommentTemplateOptions | undefined): CommentOwnerInfo | undefined {
|
||||
return forEachAncestor(tokenAtPos, n => getCommentOwnerInfoWorker(n, options));
|
||||
}
|
||||
function getCommentOwnerInfoWorker(commentOwner: Node): CommentOwnerInfo | undefined | "quit" {
|
||||
function getCommentOwnerInfoWorker(commentOwner: Node, options: DocCommentTemplateOptions | undefined): CommentOwnerInfo | undefined | "quit" {
|
||||
switch (commentOwner.kind) {
|
||||
case SyntaxKind.FunctionDeclaration:
|
||||
case SyntaxKind.FunctionExpression:
|
||||
@ -337,10 +337,10 @@ namespace ts.JsDoc {
|
||||
case SyntaxKind.MethodSignature:
|
||||
case SyntaxKind.ArrowFunction:
|
||||
const host = commentOwner as ArrowFunction | FunctionDeclaration | MethodDeclaration | ConstructorDeclaration | MethodSignature;
|
||||
return { commentOwner, parameters: host.parameters, hasReturn: hasReturn(host) };
|
||||
return { commentOwner, parameters: host.parameters, hasReturn: hasReturn(host, options) };
|
||||
|
||||
case SyntaxKind.PropertyAssignment:
|
||||
return getCommentOwnerInfoWorker((commentOwner as PropertyAssignment).initializer);
|
||||
return getCommentOwnerInfoWorker((commentOwner as PropertyAssignment).initializer, options);
|
||||
|
||||
case SyntaxKind.ClassDeclaration:
|
||||
case SyntaxKind.InterfaceDeclaration:
|
||||
@ -357,7 +357,7 @@ namespace ts.JsDoc {
|
||||
? getRightHandSideOfAssignment(varDeclarations[0].initializer)
|
||||
: undefined;
|
||||
return host
|
||||
? { commentOwner, parameters: host.parameters, hasReturn: hasReturn(host) }
|
||||
? { commentOwner, parameters: host.parameters, hasReturn: hasReturn(host, options) }
|
||||
: { commentOwner };
|
||||
}
|
||||
|
||||
@ -371,27 +371,28 @@ namespace ts.JsDoc {
|
||||
return commentOwner.parent.kind === SyntaxKind.ModuleDeclaration ? undefined : { commentOwner };
|
||||
|
||||
case SyntaxKind.ExpressionStatement:
|
||||
return getCommentOwnerInfoWorker((commentOwner as ExpressionStatement).expression);
|
||||
return getCommentOwnerInfoWorker((commentOwner as ExpressionStatement).expression, options);
|
||||
case SyntaxKind.BinaryExpression: {
|
||||
const be = commentOwner as BinaryExpression;
|
||||
if (getAssignmentDeclarationKind(be) === AssignmentDeclarationKind.None) {
|
||||
return "quit";
|
||||
}
|
||||
return isFunctionLike(be.right)
|
||||
? { commentOwner, parameters: be.right.parameters, hasReturn: hasReturn(be.right) }
|
||||
? { commentOwner, parameters: be.right.parameters, hasReturn: hasReturn(be.right, options) }
|
||||
: { commentOwner };
|
||||
}
|
||||
case SyntaxKind.PropertyDeclaration:
|
||||
const init = (commentOwner as PropertyDeclaration).initializer;
|
||||
if (init && (isFunctionExpression(init) || isArrowFunction(init))) {
|
||||
return { commentOwner, parameters: init.parameters, hasReturn: hasReturn(init) };
|
||||
return { commentOwner, parameters: init.parameters, hasReturn: hasReturn(init, options) };
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function hasReturn(node: Node) {
|
||||
return isArrowFunction(node) && isExpression(node.body)
|
||||
|| isFunctionLikeDeclaration(node) && node.body && isBlock(node.body) && !!forEachReturnStatement(node.body, n => n);
|
||||
function hasReturn(node: Node, options: DocCommentTemplateOptions | undefined) {
|
||||
return !!options?.generateReturnInDocTemplate &&
|
||||
(isArrowFunction(node) && isExpression(node.body)
|
||||
|| isFunctionLikeDeclaration(node) && node.body && isBlock(node.body) && !!forEachReturnStatement(node.body, n => n));
|
||||
}
|
||||
|
||||
function getRightHandSideOfAssignment(rightHandSide: Expression): FunctionExpression | ArrowFunction | ConstructorDeclaration | undefined {
|
||||
|
||||
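
Illustrative effect of the new generateReturnInDocTemplate preference on the JSDoc scaffold produced for a function with a return statement (exact template text may differ; shown here as comments):

function add(a: number, b: number) {
    return a + b;
}
// generateReturnInDocTemplate: true  ->  template includes @param a, @param b and a returns line
// generateReturnInDocTemplate: false ->  template includes only the @param lines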
@ -183,6 +183,17 @@ namespace ts.NavigationBar {
|
||||
endNode();
|
||||
}
|
||||
|
||||
function addNodeWithRecursiveInitializer(node: VariableDeclaration | PropertyAssignment | BindingElement | PropertyDeclaration): void {
|
||||
if (node.initializer && isFunctionOrClassExpression(node.initializer)) {
|
||||
startNode(node);
|
||||
forEachChild(node.initializer, addChildrenRecursively);
|
||||
endNode();
|
||||
}
|
||||
else {
|
||||
addNodeWithRecursiveChild(node, node.initializer);
|
||||
}
|
||||
}
|
||||
|
||||
/** Look for navigation bar items in node's subtree, adding them to the current `parent`. */
|
||||
function addChildrenRecursively(node: Node | undefined): void {
|
||||
curCancellationToken.throwIfCancellationRequested();
|
||||
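
An illustrative class that benefits from the new PropertyDeclaration branch: a property initialized with a function or class expression now gets its own navigation-bar entry with the initializer's children nested under it (simplified example, not from the tests):

class Widget {
    onClick = () => {
        function helper() { /* shows up nested under `onClick` in the navigation bar */ }
        helper();
    };
}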
@ -215,8 +226,12 @@ namespace ts.NavigationBar {
|
||||
break;
|
||||
|
||||
case SyntaxKind.PropertyDeclaration:
|
||||
if (!hasDynamicName(<ClassElement>node)) {
|
||||
addNodeWithRecursiveInitializer(<PropertyDeclaration>node);
|
||||
}
|
||||
break;
|
||||
case SyntaxKind.PropertySignature:
|
||||
if (!hasDynamicName((<ClassElement | TypeElement>node))) {
|
||||
if (!hasDynamicName(<TypeElement>node)) {
|
||||
addLeafNode(node);
|
||||
}
|
||||
break;
|
||||
@ -255,22 +270,16 @@ namespace ts.NavigationBar {
|
||||
break;
|
||||
case SyntaxKind.BindingElement:
|
||||
case SyntaxKind.PropertyAssignment:
|
||||
case SyntaxKind.VariableDeclaration:
|
||||
const { name, initializer } = <VariableDeclaration | PropertyAssignment | BindingElement>node;
|
||||
if (isBindingPattern(name)) {
|
||||
addChildrenRecursively(name);
|
||||
}
|
||||
else if (initializer && isFunctionOrClassExpression(initializer)) {
|
||||
// Add a node for the VariableDeclaration, but not for the initializer.
|
||||
startNode(node);
|
||||
forEachChild(initializer, addChildrenRecursively);
|
||||
endNode();
|
||||
case SyntaxKind.VariableDeclaration: {
|
||||
const child = <VariableDeclaration | PropertyAssignment | BindingElement>node;
|
||||
if (isBindingPattern(child.name)) {
|
||||
addChildrenRecursively(child.name);
|
||||
}
|
||||
else {
|
||||
addNodeWithRecursiveChild(node, initializer);
|
||||
addNodeWithRecursiveInitializer(child);
|
||||
}
|
||||
break;
|
||||
|
||||
}
|
||||
case SyntaxKind.FunctionDeclaration:
|
||||
const nameNode = (<FunctionLikeDeclaration>node).name;
|
||||
// If we see a function declaration track as a possible ES5 class
|
||||
|
||||
@ -51,18 +51,14 @@ namespace ts.refactor.convertParamsToDestructuredObject {
|
||||
changes: textChanges.ChangeTracker,
|
||||
functionDeclaration: ValidFunctionDeclaration,
|
||||
groupedReferences: GroupedReferences): void {
|
||||
const newParamDeclaration = map(createNewParameters(functionDeclaration, program, host), param => getSynthesizedDeepClone(param));
|
||||
changes.replaceNodeRangeWithNodes(
|
||||
sourceFile,
|
||||
first(functionDeclaration.parameters),
|
||||
last(functionDeclaration.parameters),
|
||||
newParamDeclaration,
|
||||
{ joiner: ", ",
|
||||
// indentation is set to 0 because otherwise the object parameter will be indented if there is a `this` parameter
|
||||
indentation: 0,
|
||||
leadingTriviaOption: textChanges.LeadingTriviaOption.IncludeAll,
|
||||
trailingTriviaOption: textChanges.TrailingTriviaOption.Include
|
||||
});
|
||||
const signature = groupedReferences.signature;
|
||||
const newFunctionDeclarationParams = map(createNewParameters(functionDeclaration, program, host), param => getSynthesizedDeepClone(param));
|
||||
|
||||
if (signature) {
|
||||
const newSignatureParams = map(createNewParameters(signature, program, host), param => getSynthesizedDeepClone(param));
|
||||
replaceParameters(signature, newSignatureParams);
|
||||
}
|
||||
replaceParameters(functionDeclaration, newFunctionDeclarationParams);
|
||||
|
||||
const functionCalls = sortAndDeduplicate(groupedReferences.functionCalls, /*comparer*/ (a, b) => compareValues(a.pos, b.pos));
|
||||
for (const call of functionCalls) {
|
||||
@ -76,6 +72,21 @@ namespace ts.refactor.convertParamsToDestructuredObject {
|
||||
{ leadingTriviaOption: textChanges.LeadingTriviaOption.IncludeAll, trailingTriviaOption: textChanges.TrailingTriviaOption.Include });
|
||||
}
|
||||
}
|
||||
|
||||
function replaceParameters(declarationOrSignature: ValidFunctionDeclaration | ValidMethodSignature, parameterDeclarations: ParameterDeclaration[]) {
|
||||
changes.replaceNodeRangeWithNodes(
|
||||
sourceFile,
|
||||
first(declarationOrSignature.parameters),
|
||||
last(declarationOrSignature.parameters),
|
||||
parameterDeclarations,
|
||||
{
|
||||
joiner: ", ",
|
||||
// indentation is set to 0 because otherwise the object parameter will be indented if there is a `this` parameter
|
||||
indentation: 0,
|
||||
leadingTriviaOption: textChanges.LeadingTriviaOption.IncludeAll,
|
||||
trailingTriviaOption: textChanges.TrailingTriviaOption.Include
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function getGroupedReferences(functionDeclaration: ValidFunctionDeclaration, program: Program, cancellationToken: CancellationToken): GroupedReferences {
|
||||
@ -99,13 +110,41 @@ namespace ts.refactor.convertParamsToDestructuredObject {
|
||||
const functionSymbols = map(functionNames, getSymbolTargetAtLocation);
|
||||
const classSymbols = map(classNames, getSymbolTargetAtLocation);
|
||||
const isConstructor = isConstructorDeclaration(functionDeclaration);
|
||||
const contextualSymbols = map(functionNames, name => getSymbolForContextualType(name, checker));
|
||||
|
||||
for (const entry of referenceEntries) {
|
||||
if (entry.kind !== FindAllReferences.EntryKind.Node) {
|
||||
if (entry.kind === FindAllReferences.EntryKind.Span) {
|
||||
groupedReferences.valid = false;
|
||||
continue;
|
||||
}
|
||||
|
||||
/* Declarations in object literals may be implementations of method signatures which have a different symbol from the declaration
|
||||
For example:
|
||||
interface IFoo { m(a: number): void }
|
||||
const foo: IFoo = { m(a: number): void {} }
|
||||
In these cases we get the symbol for the signature from the contextual type.
|
||||
*/
|
||||
if (contains(contextualSymbols, getSymbolTargetAtLocation(entry.node))) {
|
||||
if (isValidMethodSignature(entry.node.parent)) {
|
||||
groupedReferences.signature = entry.node.parent;
|
||||
continue;
|
||||
}
|
||||
const call = entryToFunctionCall(entry);
|
||||
if (call) {
|
||||
groupedReferences.functionCalls.push(call);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
const contextualSymbol = getSymbolForContextualType(entry.node, checker);
|
||||
if (contextualSymbol && contains(contextualSymbols, contextualSymbol)) {
|
||||
const decl = entryToDeclaration(entry);
|
||||
if (decl) {
|
||||
groupedReferences.declarations.push(decl);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
/* We compare symbols because in some cases find all references wil return a reference that may or may not be to the refactored function.
|
||||
Example from the refactorConvertParamsToDestructuredObject_methodCallUnion.ts test:
|
||||
class A { foo(a: number, b: number) { return a + b; } }
|
||||
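
A sketch of what the new signature handling enables: when the refactor is applied to an object-literal method that implements an interface signature, the interface's method signature is rewritten as well (before/after is illustrative; the generated parameter shape comes from the original parameters):

// before
interface IFoo { m(a: number, b: number): void; }
const foo: IFoo = { m(a: number, b: number): void { } };

// after (sketch)
// interface IFoo { m({ a, b }: { a: number; b: number }): void; }
// const foo: IFoo = { m({ a, b }: { a: number; b: number }): void { } };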
@ -175,6 +214,20 @@ namespace ts.refactor.convertParamsToDestructuredObject {
}
}

/**
* Gets the symbol for the contextual type of the node if it is not a union or intersection.
*/
function getSymbolForContextualType(node: Node, checker: TypeChecker): Symbol | undefined {
const element = getContainingObjectLiteralElement(node);
if (element) {
const contextualType = checker.getContextualTypeForObjectLiteralElement(<ObjectLiteralElementLike>element);
const symbol = contextualType?.getSymbol();
if (symbol && !(getCheckFlags(symbol) & CheckFlags.Synthetic)) {
return symbol;
}
}
}

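// Illustrative sketch, not part of this change: for an object-literal method that implements
// an interface member, the symbol comes from the contextual type rather than the declaration.
//
//     interface IFoo { m(a: number): void }      // hypothetical example
//     const foo: IFoo = { m(a) {} };
//
// Calling getSymbolForContextualType on the object-literal `m` name is expected to yield the
// symbol of IFoo's `m` member; for a union or intersection contextual type the synthetic
// symbol is filtered out and undefined is returned.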
function entryToImportOrExport(entry: FindAllReferences.NodeEntry): Node | undefined {
|
||||
const node = entry.node;
|
||||
|
||||
@ -292,6 +345,10 @@ namespace ts.refactor.convertParamsToDestructuredObject {
|
||||
return false;
|
||||
}
|
||||
|
||||
function isValidMethodSignature(node: Node): node is ValidMethodSignature {
|
||||
return isMethodSignature(node) && (isInterfaceDeclaration(node.parent) || isTypeLiteralNode(node.parent));
|
||||
}
|
||||
|
||||
function isValidFunctionDeclaration(
|
||||
functionDeclaration: FunctionLikeDeclaration,
|
||||
checker: TypeChecker): functionDeclaration is ValidFunctionDeclaration {
|
||||
@ -300,6 +357,11 @@ namespace ts.refactor.convertParamsToDestructuredObject {
|
||||
case SyntaxKind.FunctionDeclaration:
|
||||
return hasNameOrDefault(functionDeclaration) && isSingleImplementation(functionDeclaration, checker);
|
||||
case SyntaxKind.MethodDeclaration:
|
||||
if (isObjectLiteralExpression(functionDeclaration.parent)) {
|
||||
const contextualSymbol = getSymbolForContextualType(functionDeclaration.name, checker);
|
||||
// don't offer the refactor when there are multiple signatures since we won't know which ones the user wants to change
|
||||
return contextualSymbol?.declarations.length === 1 && isSingleImplementation(functionDeclaration, checker);
|
||||
}
|
||||
return isSingleImplementation(functionDeclaration, checker);
|
||||
case SyntaxKind.Constructor:
|
||||
if (isClassDeclaration(functionDeclaration.parent)) {
|
||||
@ -398,7 +460,7 @@ namespace ts.refactor.convertParamsToDestructuredObject {
|
||||
return objectLiteral;
|
||||
}
|
||||
|
||||
function createNewParameters(functionDeclaration: ValidFunctionDeclaration, program: Program, host: LanguageServiceHost): NodeArray<ParameterDeclaration> {
|
||||
function createNewParameters(functionDeclaration: ValidFunctionDeclaration | ValidMethodSignature, program: Program, host: LanguageServiceHost): NodeArray<ParameterDeclaration> {
|
||||
const checker = program.getTypeChecker();
|
||||
const refactorableParameters = getRefactorableParameters(functionDeclaration.parameters);
|
||||
const bindingElements = map(refactorableParameters, createBindingElementFromParameterDeclaration);
|
||||
@ -584,6 +646,10 @@ namespace ts.refactor.convertParamsToDestructuredObject {
|
||||
parameters: NodeArray<ValidParameterDeclaration>;
|
||||
}
|
||||
|
||||
interface ValidMethodSignature extends MethodSignature {
|
||||
parameters: NodeArray<ValidParameterDeclaration>;
|
||||
}
|
||||
|
||||
type ValidFunctionDeclaration = ValidConstructor | ValidFunction | ValidMethod | ValidArrowFunction | ValidFunctionExpression;
|
||||
|
||||
interface ValidParameterDeclaration extends ParameterDeclaration {
|
||||
@ -595,6 +661,7 @@ namespace ts.refactor.convertParamsToDestructuredObject {
|
||||
interface GroupedReferences {
|
||||
functionCalls: (CallExpression | NewExpression)[];
|
||||
declarations: Node[];
|
||||
signature?: ValidMethodSignature;
|
||||
classReferences?: ClassReferences;
|
||||
valid: boolean;
|
||||
}
|
||||
|
||||
@ -1143,7 +1143,7 @@ namespace ts {
|
||||
|
||||
public throwIfCancellationRequested(): void {
|
||||
if (this.isCancellationRequested()) {
|
||||
tracing.instant(tracing.Phase.Session, "cancellationThrown", { kind: "CancellationTokenObject" });
|
||||
tracing?.instant(tracing.Phase.Session, "cancellationThrown", { kind: "CancellationTokenObject" });
|
||||
throw new OperationCanceledException();
|
||||
}
|
||||
}
|
||||
@ -1174,7 +1174,7 @@ namespace ts {
|
||||
|
||||
public throwIfCancellationRequested(): void {
|
||||
if (this.isCancellationRequested()) {
|
||||
tracing.instant(tracing.Phase.Session, "cancellationThrown", { kind: "ThrottledCancellationToken" });
|
||||
tracing?.instant(tracing.Phase.Session, "cancellationThrown", { kind: "ThrottledCancellationToken" });
|
||||
throw new OperationCanceledException();
|
||||
}
|
||||
}
|
||||
@ -2004,8 +2004,8 @@ namespace ts {
|
||||
: Promise.reject("Host does not implement `installPackage`");
|
||||
}
|
||||
|
||||
function getDocCommentTemplateAtPosition(fileName: string, position: number): TextInsertion | undefined {
|
||||
return JsDoc.getDocCommentTemplateAtPosition(getNewLineOrDefaultFromHost(host), syntaxTreeCache.getCurrentSourceFile(fileName), position);
|
||||
function getDocCommentTemplateAtPosition(fileName: string, position: number, options?: DocCommentTemplateOptions): TextInsertion | undefined {
|
||||
return JsDoc.getDocCommentTemplateAtPosition(getNewLineOrDefaultFromHost(host), syntaxTreeCache.getCurrentSourceFile(fileName), position, options);
|
||||
}
|
||||
|
||||
function isValidBraceCompletionAtPosition(fileName: string, position: number, openingBrace: number): boolean {
|
||||
|
||||
@ -263,7 +263,7 @@ namespace ts {
|
||||
/**
|
||||
* Returns JSON-encoded value of the type TextInsertion.
|
||||
*/
|
||||
getDocCommentTemplateAtPosition(fileName: string, position: number): string;
|
||||
getDocCommentTemplateAtPosition(fileName: string, position: number, options?: DocCommentTemplateOptions): string;
|
||||
|
||||
/**
|
||||
* Returns JSON-encoded boolean to indicate whether we should support brace location
|
||||
@ -999,10 +999,10 @@ namespace ts {
|
||||
});
|
||||
}
|
||||
|
||||
public getDocCommentTemplateAtPosition(fileName: string, position: number): string {
|
||||
public getDocCommentTemplateAtPosition(fileName: string, position: number, options?: DocCommentTemplateOptions): string {
|
||||
return this.forwardJSONCall(
|
||||
`getDocCommentTemplateAtPosition('${fileName}', ${position})`,
|
||||
() => this.languageService.getDocCommentTemplateAtPosition(fileName, position)
|
||||
() => this.languageService.getDocCommentTemplateAtPosition(fileName, position, options)
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@ -517,7 +517,6 @@ namespace ts.SignatureHelp {
|
||||
if (argumentIndex !== 0) {
|
||||
Debug.assertLessThan(argumentIndex, argumentCount);
|
||||
}
|
||||
|
||||
let selectedItemIndex = 0;
|
||||
let itemsSeen = 0;
|
||||
for (let i = 0; i < items.length; i++) {
|
||||
@ -541,8 +540,19 @@ namespace ts.SignatureHelp {
|
||||
}
|
||||
|
||||
Debug.assert(selectedItemIndex !== -1); // If candidates is non-empty it should always include bestSignature. We check for an empty candidates before calling this function.

return { items: flatMapToMutable(items, identity), applicableSpan, selectedItemIndex, argumentIndex, argumentCount };
const help = { items: flatMapToMutable(items, identity), applicableSpan, selectedItemIndex, argumentIndex, argumentCount };
const selected = help.items[selectedItemIndex];
if (selected.isVariadic) {
const firstRest = findIndex(selected.parameters, p => !!p.isRest);
if (-1 < firstRest && firstRest < selected.parameters.length - 1) {
// We don't have any code to get this correct; instead, don't highlight a current parameter AT ALL
help.argumentIndex = selected.parameters.length;
}
else {
help.argumentIndex = Math.min(help.argumentIndex, selected.parameters.length - 1);
}
}
return help;
}
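// Illustrative sketch, not part of this change, of how the clamping above behaves for a
// hypothetical variadic signature:
//
//     declare function f(a: number, ...rest: string[]): void;
//     f(1, "x", "y", "z");   // with the cursor on "z", argumentIndex starts at 3
//
// The trailing parameter is a rest parameter, so argumentIndex is clamped to
// selected.parameters.length - 1 (here 1) and the rest parameter stays highlighted.
// If a rest parameter were not in the last position, argumentIndex would instead be set past
// the end of the list so that no parameter is highlighted.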
|
||||
|
||||
function createTypeHelpItems(
|
||||
@ -638,8 +648,9 @@ namespace ts.SignatureHelp {
|
||||
const param = checker.symbolToParameterDeclaration(parameter, enclosingDeclaration, signatureHelpNodeBuilderFlags)!;
|
||||
printer.writeNode(EmitHint.Unspecified, param, sourceFile, writer);
|
||||
});
|
||||
const isOptional = checker.isOptionalParameter(<ParameterDeclaration>parameter.valueDeclaration);
|
||||
return { name: parameter.name, documentation: parameter.getDocumentationComment(checker), displayParts, isOptional };
|
||||
const isOptional = checker.isOptionalParameter(parameter.valueDeclaration as ParameterDeclaration);
|
||||
const isRest = !!((parameter as TransientSymbol).checkFlags & CheckFlags.RestParameter);
|
||||
return { name: parameter.name, documentation: parameter.getDocumentationComment(checker), displayParts, isOptional, isRest };
|
||||
}
|
||||
|
||||
function createSignatureHelpParameterForTypeParameter(typeParameter: TypeParameter, checker: TypeChecker, enclosingDeclaration: Node, sourceFile: SourceFile, printer: Printer): SignatureHelpParameter {
|
||||
@ -647,6 +658,6 @@ namespace ts.SignatureHelp {
|
||||
const param = checker.typeParameterToDeclaration(typeParameter, enclosingDeclaration, signatureHelpNodeBuilderFlags)!;
|
||||
printer.writeNode(EmitHint.Unspecified, param, sourceFile, writer);
|
||||
});
|
||||
return { name: typeParameter.symbol.name, documentation: typeParameter.symbol.getDocumentationComment(checker), displayParts, isOptional: false };
|
||||
return { name: typeParameter.symbol.name, documentation: typeParameter.symbol.getDocumentationComment(checker), displayParts, isOptional: false, isRest: false };
|
||||
}
|
||||
}
|
||||
|
||||
@ -114,7 +114,7 @@ namespace ts {
|
||||
return !isAsyncFunction(node) &&
|
||||
node.body &&
|
||||
isBlock(node.body) &&
|
||||
hasReturnStatementWithPromiseHandler(node.body) &&
|
||||
hasReturnStatementWithPromiseHandler(node.body, checker) &&
|
||||
returnsPromise(node, checker);
|
||||
}
|
||||
|
||||
@ -129,25 +129,25 @@ namespace ts {
|
||||
return isBinaryExpression(commonJsModuleIndicator) ? commonJsModuleIndicator.left : commonJsModuleIndicator;
|
||||
}
|
||||
|
||||
function hasReturnStatementWithPromiseHandler(body: Block): boolean {
|
||||
return !!forEachReturnStatement(body, isReturnStatementWithFixablePromiseHandler);
|
||||
function hasReturnStatementWithPromiseHandler(body: Block, checker: TypeChecker): boolean {
|
||||
return !!forEachReturnStatement(body, statement => isReturnStatementWithFixablePromiseHandler(statement, checker));
|
||||
}
|
||||
|
||||
export function isReturnStatementWithFixablePromiseHandler(node: Node): node is ReturnStatement & { expression: CallExpression } {
|
||||
return isReturnStatement(node) && !!node.expression && isFixablePromiseHandler(node.expression);
|
||||
export function isReturnStatementWithFixablePromiseHandler(node: Node, checker: TypeChecker): node is ReturnStatement & { expression: CallExpression } {
|
||||
return isReturnStatement(node) && !!node.expression && isFixablePromiseHandler(node.expression, checker);
|
||||
}
|
||||
|
||||
// Should be kept up to date with transformExpression in convertToAsyncFunction.ts
|
||||
export function isFixablePromiseHandler(node: Node): boolean {
|
||||
export function isFixablePromiseHandler(node: Node, checker: TypeChecker): boolean {
|
||||
// ensure outermost call exists and is a promise handler
|
||||
if (!isPromiseHandler(node) || !node.arguments.every(isFixablePromiseArgument)) {
|
||||
if (!isPromiseHandler(node) || !node.arguments.every(arg => isFixablePromiseArgument(arg, checker))) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// ensure all chained calls are valid
|
||||
let currentNode = node.expression;
|
||||
while (isPromiseHandler(currentNode) || isPropertyAccessExpression(currentNode)) {
|
||||
if (isCallExpression(currentNode) && !currentNode.arguments.every(isFixablePromiseArgument)) {
|
||||
if (isCallExpression(currentNode) && !currentNode.arguments.every(arg => isFixablePromiseArgument(arg, checker))) {
|
||||
return false;
|
||||
}
|
||||
currentNode = currentNode.expression;
|
||||
@ -171,7 +171,7 @@ namespace ts {
|
||||
}
|
||||
|
||||
// should be kept up to date with getTransformationBody in convertToAsyncFunction.ts
|
||||
function isFixablePromiseArgument(arg: Expression): boolean {
function isFixablePromiseArgument(arg: Expression, checker: TypeChecker): boolean {
switch (arg.kind) {
case SyntaxKind.FunctionDeclaration:
case SyntaxKind.FunctionExpression:
@ -179,8 +179,16 @@ namespace ts {
visitedNestedConvertibleFunctions.set(getKeyFromNode(arg as FunctionLikeDeclaration), true);
// falls through
case SyntaxKind.NullKeyword:
case SyntaxKind.Identifier: // identifier includes undefined
return true;
case SyntaxKind.Identifier:
case SyntaxKind.PropertyAccessExpression: {
const symbol = checker.getSymbolAtLocation(arg);
if (!symbol) {
return false;
}
return checker.isUndefinedSymbol(symbol) ||
some(skipAlias(symbol, checker).declarations, d => isFunctionLike(d) || hasInitializer(d) && !!d.initializer && isFunctionLike(d.initializer));
}
default:
return false;
}
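// Illustrative sketch, not part of this change: with the checker available, identifier and
// property-access arguments are resolved before a handler is treated as fixable.
//
//     const ok = (result: Response) => result.ok;
//     fetch('https://typescriptlang.org').then(ok);           // `ok` resolves to a function, so it is fixable
//
//     const notCallable = 1;
//     fetch('https://typescriptlang.org').then(notCallable);  // resolves to a non-function, not fixable
//
// Identifiers that resolve to `undefined` (checker.isUndefinedSymbol) are still accepted.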
|
||||
|
||||
@ -490,7 +490,7 @@ namespace ts {
|
||||
getFormattingEditsForDocument(fileName: string, options: FormatCodeOptions | FormatCodeSettings): TextChange[];
|
||||
getFormattingEditsAfterKeystroke(fileName: string, position: number, key: string, options: FormatCodeOptions | FormatCodeSettings): TextChange[];
|
||||
|
||||
getDocCommentTemplateAtPosition(fileName: string, position: number): TextInsertion | undefined;
|
||||
getDocCommentTemplateAtPosition(fileName: string, position: number, options?: DocCommentTemplateOptions): TextInsertion | undefined;
|
||||
|
||||
isValidBraceCompletionAtPosition(fileName: string, position: number, openingBrace: number): boolean;
|
||||
/**
|
||||
@ -1073,11 +1073,16 @@ namespace ts {
readonly allowRenameOfImportPath?: boolean;
}

export interface DocCommentTemplateOptions {
readonly generateReturnInDocTemplate?: boolean;
}
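// Illustrative usage sketch, not part of this change; the exact effect of the flag is an
// assumption here, but a language-service consumer would pass it through the new optional
// parameter, e.g.:
//
//     languageService.getDocCommentTemplateAtPosition(fileName, position, { generateReturnInDocTemplate: false });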
|
||||
|
||||
export interface SignatureHelpParameter {
|
||||
name: string;
|
||||
documentation: SymbolDisplayPart[];
|
||||
displayParts: SymbolDisplayPart[];
|
||||
isOptional: boolean;
|
||||
isRest?: boolean;
|
||||
}
|
||||
|
||||
export interface SelectionRange {
|
||||
|
||||
@ -2495,7 +2495,7 @@ namespace ts {
// Editors can pass in undefined or empty string - we want to infer the preference in those cases.
const quotePreference = getQuotePreference(sourceFile, preferences);
const quoted = JSON.stringify(text);
return quotePreference === QuotePreference.Single ? `'${stripQuotes(quoted).replace("'", "\\'").replace('\\"', '"')}'` : quoted;
return quotePreference === QuotePreference.Single ? `'${stripQuotes(quoted).replace(/'/g, "\\'").replace(/\\"/g, '"')}'` : quoted;
}
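// Illustrative note, not part of this change: String.prototype.replace with a string pattern
// only replaces the first occurrence, which is why the global regex form matters here.
// Hypothetical input:
//
//     "it's a 'test'".replace("'", "\\'")    // only the first quote is escaped
//     "it's a 'test'".replace(/'/g, "\\'")   // every quote is escaped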
|
||||
|
||||
export function isEqualityOperatorKind(kind: SyntaxKind): kind is EqualityOperator {
|
||||
@ -2880,10 +2880,10 @@ namespace ts {
return isArray(valueOrArray) ? first(valueOrArray) : valueOrArray;
}

export function getNameForExportedSymbol(symbol: Symbol, scriptTarget: ScriptTarget) {
if (symbol.escapedName === InternalSymbolName.ExportEquals || symbol.escapedName === InternalSymbolName.Default) {
export function getNameForExportedSymbol(symbol: Symbol, scriptTarget: ScriptTarget | undefined) {
if (!(symbol.flags & SymbolFlags.Transient) && (symbol.escapedName === InternalSymbolName.ExportEquals || symbol.escapedName === InternalSymbolName.Default)) {
// Name of "export default foo;" is "foo". Name of "export default 0" is the filename converted to camelCase.
return firstDefined(symbol.declarations, d => isExportAssignment(d) && isIdentifier(d.expression) ? d.expression.text : undefined)
return firstDefined(symbol.declarations, d => isExportAssignment(d) ? tryCast(skipOuterExpressions(d.expression), isIdentifier)?.text : undefined)
|| codefix.moduleSymbolToValidIdentifier(getSymbolParentOrFail(symbol), scriptTarget);
}
return symbol.name;
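// Illustrative sketch, not part of this change, of how the export-name lookup above behaves
// (hypothetical module contents):
//
//     export default foo;       // resolved name: "foo"
//     export default (foo);     // outer parentheses are now skipped, still "foo"
//     export default 0;         // falls back to a name derived from the module file name
//
// The added SymbolFlags.Transient guard keeps transient export symbols on symbol.name instead
// of the default/export= handling.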
|
||||
|
||||
@ -68,6 +68,9 @@ namespace Harness {
|
||||
|
||||
cwd = config.path ? path.join(cwd, config.path) : submoduleDir;
|
||||
}
|
||||
const npmVersionText = exec("npm", ["--version"], { cwd, stdio: "pipe" })?.trim();
|
||||
const npmVersion = npmVersionText ? ts.Version.tryParse(npmVersionText.trim()) : undefined;
|
||||
const isV7OrLater = !!npmVersion && npmVersion.major >= 7;
|
||||
if (fs.existsSync(path.join(cwd, "package.json"))) {
|
||||
if (fs.existsSync(path.join(cwd, "package-lock.json"))) {
|
||||
fs.unlinkSync(path.join(cwd, "package-lock.json"));
|
||||
@ -75,24 +78,25 @@ namespace Harness {
|
||||
if (fs.existsSync(path.join(cwd, "node_modules"))) {
|
||||
del.sync(path.join(cwd, "node_modules"), { force: true });
|
||||
}
|
||||
exec("npm", ["i", "--ignore-scripts"], { cwd, timeout: timeout / 2 }); // NPM shouldn't take the entire timeout - if it takes a long time, it should be terminated and we should log the failure
|
||||
exec("npm", ["i", "--ignore-scripts", ...(isV7OrLater ? ["--legacy-peer-deps"] : [])], { cwd, timeout: timeout / 2 }); // NPM shouldn't take the entire timeout - if it takes a long time, it should be terminated and we should log the failure
|
||||
}
|
||||
const args = [path.join(IO.getWorkspaceRoot(), "built/local/tsc.js")];
|
||||
if (types) {
|
||||
args.push("--types", types.join(","));
|
||||
// Also actually install those types (e.g., for the JS projects which need node)
|
||||
if (types.length) {
|
||||
exec("npm", ["i", ...types.map(t => `@types/${t}`), "--no-save", "--ignore-scripts"], { cwd: originalCwd, timeout: timeout / 2 }); // NPM shouldn't take the entire timeout - if it takes a long time, it should be terminated and we should log the failure
|
||||
exec("npm", ["i", ...types.map(t => `@types/${t}`), "--no-save", "--ignore-scripts", ...(isV7OrLater ? ["--legacy-peer-deps"] : [])], { cwd: originalCwd, timeout: timeout / 2 }); // NPM shouldn't take the entire timeout - if it takes a long time, it should be terminated and we should log the failure
|
||||
}
|
||||
}
|
||||
args.push("--noEmit");
|
||||
Baseline.runBaseline(`${cls.kind()}/${directoryName}.log`, cls.report(cp.spawnSync(`node`, args, { cwd, timeout, shell: true }), cwd));
|
||||
|
||||
function exec(command: string, args: string[], options: { cwd: string, timeout?: number }): void {
|
||||
function exec(command: string, args: string[], options: { cwd: string, timeout?: number, stdio?: import("child_process").StdioOptions }): string | undefined {
|
||||
const res = cp.spawnSync(isWorker ? `${command} 2>&1` : command, args, { shell: true, stdio, ...options });
|
||||
if (res.status !== 0) {
|
||||
throw new Error(`${command} ${args.join(" ")} for ${directoryName} failed: ${res.stdout && res.stdout.toString()}`);
|
||||
}
|
||||
return options.stdio === "pipe" ? res.stdout.toString("utf8") : undefined;
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
@ -262,4 +262,4 @@ namespace Harness {
|
||||
}
|
||||
|
||||
startTestEnvironment();
|
||||
}
|
||||
}
|
||||
|
||||
@ -112,6 +112,7 @@
|
||||
"unittests/services/transpile.ts",
|
||||
"unittests/tsbuild/amdModulesWithOut.ts",
|
||||
"unittests/tsbuild/configFileErrors.ts",
|
||||
"unittests/tsbuild/configFileExtends.ts",
|
||||
"unittests/tsbuild/containerOnlyReferenced.ts",
|
||||
"unittests/tsbuild/declarationEmit.ts",
|
||||
"unittests/tsbuild/demo.ts",
|
||||
@ -183,6 +184,7 @@
|
||||
"unittests/tsserver/openFile.ts",
|
||||
"unittests/tsserver/packageJsonInfo.ts",
|
||||
"unittests/tsserver/partialSemanticServer.ts",
|
||||
"unittests/tsserver/plugins.ts",
|
||||
"unittests/tsserver/projectErrors.ts",
|
||||
"unittests/tsserver/projectReferenceCompileOnSave.ts",
|
||||
"unittests/tsserver/projectReferenceErrors.ts",
|
||||
|
||||
@ -331,7 +331,7 @@ namespace ts {
|
||||
* Comments
|
||||
* @author Early Close Caret > <a@b>
|
||||
* @author No Line Breaks:
|
||||
* <the.email@address> must be on the same line to parse
|
||||
* <the email @address> must be on the same line to parse
|
||||
* @author Long Comment <long@comment.org> I
|
||||
* want to keep commenting down here, I dunno.
|
||||
*/`);
|
||||
@ -340,6 +340,22 @@ namespace ts {
|
||||
`/**
|
||||
* @example
|
||||
* Some\n\n * text\r\n * with newlines.
|
||||
*/`);
|
||||
parsesCorrectly("Chained tags, no leading whitespace", `/**@a @b @c@d*/`);
|
||||
parsesCorrectly("Initial star is not a tag", `/***@a*/`);
|
||||
parsesCorrectly("Initial star space is not a tag", `/*** @a*/`);
|
||||
parsesCorrectly("Initial email address is not a tag", `/**bill@example.com*/`);
|
||||
parsesCorrectly("no space before @ is not a new tag",
|
||||
`/**
|
||||
* @param this (@is@)
|
||||
* @param fine its@fine
|
||||
@zerowidth
|
||||
*@singlestar
|
||||
**@doublestar
|
||||
*/`);
|
||||
parsesCorrectly("@@ does not start a new tag",
|
||||
`/**
|
||||
* @param this is (@@fine@@and) is one comment
|
||||
*/`);
|
||||
});
|
||||
});
|
||||
|
||||
@ -124,3 +124,30 @@ describe("unittests:: Public APIs:: getTypeAtLocation", () => {
|
||||
assert.equal(type.flags, ts.TypeFlags.Any);
|
||||
});
|
||||
});
|
||||
|
||||
describe("unittests:: Public APIs:: validateLocaleAndSetLanguage", () => {
|
||||
let savedUILocale: string | undefined;
|
||||
beforeEach(() => savedUILocale = ts.getUILocale());
|
||||
afterEach(() => ts.setUILocale(savedUILocale));
|
||||
|
||||
function verifyValidateLocale(locale: string, expectedToReadFile: boolean) {
|
||||
it(`Verifying ${locale} ${expectedToReadFile ? "reads" : "does not read"} file`, () => {
|
||||
const errors: ts.Diagnostic[] = [];
|
||||
ts.validateLocaleAndSetLanguage(locale, {
|
||||
getExecutingFilePath: () => "/tsc.js",
|
||||
resolvePath: ts.identity,
|
||||
fileExists: fileName => {
|
||||
assert.isTrue(expectedToReadFile, `Locale : ${locale} ${expectedToReadFile ? "should" : "should not"} check if ${fileName} exists.`);
|
||||
return expectedToReadFile;
|
||||
},
|
||||
readFile: fileName => {
|
||||
assert.isTrue(expectedToReadFile, `Locale : ${locale} ${expectedToReadFile ? "should" : "should not"} read ${fileName}.`);
|
||||
// Throw an error here so that an actual change to localized diagnostics messages doesn't take place
|
||||
throw new Error("cannot read file");
|
||||
}
|
||||
}, errors);
|
||||
});
|
||||
}
|
||||
ts.supportedLocaleDirectories.forEach(locale => verifyValidateLocale(locale, /*expectedToReadFile*/ true));
["en", "en-us"].forEach(locale => verifyValidateLocale(locale, /*expectedToReadFile*/ false));
|
||||
});
|
||||
|
||||
@ -539,6 +539,7 @@ function [#|f|]():Promise<void | Response> {
|
||||
}
|
||||
`
|
||||
);
|
||||
|
||||
_testConvertToAsyncFunction("convertToAsyncFunction_NoRes3", `
|
||||
function [#|f|]():Promise<void | Response> {
|
||||
return fetch('https://typescriptlang.org').catch(rej => console.log(rej));
|
||||
@ -600,6 +601,7 @@ function [#|f|]():Promise<void> {
|
||||
}
|
||||
`
|
||||
);
|
||||
|
||||
_testConvertToAsyncFunction("convertToAsyncFunction_ResRef", `
|
||||
function [#|f|]():Promise<boolean> {
|
||||
return fetch('https://typescriptlang.org').then(res);
|
||||
@ -609,6 +611,75 @@ function res(result){
|
||||
}
|
||||
`
|
||||
);
|
||||
|
||||
_testConvertToAsyncFunction("convertToAsyncFunction_ResRef1", `
|
||||
class Foo {
|
||||
public [#|method|](): Promise<boolean> {
|
||||
return fetch('a').then(this.foo);
|
||||
}
|
||||
|
||||
private foo(res) {
|
||||
return res.ok;
|
||||
}
|
||||
}
|
||||
`);
|
||||
|
||||
_testConvertToAsyncFunction("convertToAsyncFunction_ResRef2", `
|
||||
class Foo {
|
||||
public [#|method|](): Promise<Response> {
|
||||
return fetch('a').then(this.foo);
|
||||
}
|
||||
|
||||
private foo = res => res;
|
||||
}
|
||||
`);
|
||||
|
||||
_testConvertToAsyncFunction("convertToAsyncFunction_ResRef3", `
|
||||
const res = (result) => {
|
||||
return result.ok;
|
||||
}
|
||||
function [#|f|](): Promise<boolean> {
|
||||
return fetch('https://typescriptlang.org').then(res);
|
||||
}
|
||||
`
|
||||
);
|
||||
|
||||
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_NoSuggestionResRef1", `
|
||||
const res = 1;
|
||||
function [#|f|]() {
|
||||
return fetch('https://typescriptlang.org').then(res);
|
||||
}
|
||||
`
|
||||
);
|
||||
|
||||
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_NoSuggestionResRef2", `
|
||||
class Foo {
|
||||
private foo = 1;
|
||||
public [#|method|](): Promise<boolean> {
|
||||
return fetch('a').then(this.foo);
|
||||
}
|
||||
}
|
||||
`
|
||||
);
|
||||
|
||||
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_NoSuggestionResRef3", `
|
||||
const res = undefined;
|
||||
function [#|f|]() {
|
||||
return fetch('https://typescriptlang.org').then(res);
|
||||
}
|
||||
`
|
||||
);
|
||||
|
||||
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_NoSuggestionResRef4", `
|
||||
class Foo {
|
||||
private foo = undefined;
|
||||
public [#|method|](): Promise<boolean> {
|
||||
return fetch('a').then(this.foo);
|
||||
}
|
||||
}
|
||||
`
|
||||
);
|
||||
|
||||
_testConvertToAsyncFunction("convertToAsyncFunction_ResRefNoReturnVal", `
|
||||
function [#|f|]():Promise<void> {
|
||||
return fetch('https://typescriptlang.org').then(res);
|
||||
@ -618,6 +689,19 @@ function res(result){
|
||||
}
|
||||
`
|
||||
);
|
||||
|
||||
_testConvertToAsyncFunction("convertToAsyncFunction_ResRefNoReturnVal1", `
|
||||
class Foo {
|
||||
public [#|method|](): Promise<void> {
|
||||
return fetch('a').then(this.foo);
|
||||
}
|
||||
|
||||
private foo(res) {
|
||||
console.log(res);
|
||||
}
|
||||
}
|
||||
`);
|
||||
|
||||
_testConvertToAsyncFunction("convertToAsyncFunction_NoBrackets", `
|
||||
function [#|f|]():Promise<void> {
|
||||
return fetch('https://typescriptlang.org').then(result => console.log(result));
|
||||
|
||||
52
src/testRunner/unittests/tsbuild/configFileExtends.ts
Normal file
@ -0,0 +1,52 @@
|
||||
namespace ts {
|
||||
describe("unittests:: tsbuild:: configFileExtends:: when tsconfig extends another config", () => {
|
||||
function getConfigExtendsWithIncludeFs() {
|
||||
return loadProjectFromFiles({
|
||||
"/src/tsconfig.json": JSON.stringify({
|
||||
references: [
|
||||
{ path: "./shared/tsconfig.json" },
|
||||
{ path: "./webpack/tsconfig.json" }
|
||||
],
|
||||
files: []
|
||||
}),
|
||||
"/src/shared/tsconfig-base.json": JSON.stringify({
|
||||
include: ["./typings-base/"]
|
||||
}),
|
||||
"/src/shared/typings-base/globals.d.ts": `type Unrestricted = any;`,
|
||||
"/src/shared/tsconfig.json": JSON.stringify({
|
||||
extends: "./tsconfig-base.json",
|
||||
compilerOptions: {
|
||||
composite: true,
|
||||
outDir: "../target-tsc-build/",
|
||||
rootDir: ".."
|
||||
},
|
||||
files: ["./index.ts"]
|
||||
}),
|
||||
"/src/shared/index.ts": `export const a: Unrestricted = 1;`,
|
||||
"/src/webpack/tsconfig.json": JSON.stringify({
|
||||
extends: "../shared/tsconfig-base.json",
|
||||
compilerOptions: {
|
||||
composite: true,
|
||||
outDir: "../target-tsc-build/",
|
||||
rootDir: ".."
|
||||
},
|
||||
files: ["./index.ts"],
|
||||
references: [{ path: "../shared/tsconfig.json" }]
|
||||
}),
|
||||
"/src/webpack/index.ts": `export const b: Unrestricted = 1;`,
|
||||
});
|
||||
}
|
||||
verifyTsc({
|
||||
scenario: "configFileExtends",
|
||||
subScenario: "when building solution with projects extends config with include",
|
||||
fs: getConfigExtendsWithIncludeFs,
|
||||
commandLineArgs: ["--b", "/src/tsconfig.json", "--v", "--listFiles"],
|
||||
});
|
||||
verifyTsc({
|
||||
scenario: "configFileExtends",
|
||||
subScenario: "when building project uses reference and both extend config with include",
|
||||
fs: getConfigExtendsWithIncludeFs,
|
||||
commandLineArgs: ["--b", "/src/webpack/tsconfig.json", "--v", "--listFiles"],
|
||||
});
|
||||
});
|
||||
}
|
||||
@ -7,7 +7,7 @@ namespace ts.projectSystem {
|
||||
};
|
||||
const f2 = {
|
||||
path: "/a/largefile.js",
|
||||
content: ""
|
||||
content: "",
|
||||
};
|
||||
const config = {
|
||||
path: "/a/jsconfig.json",
|
||||
@ -47,5 +47,39 @@ namespace ts.projectSystem {
|
||||
assert.equal(events[1].data.project.getProjectName(), config.path, "config path");
|
||||
assert.isTrue(events[1].data.languageServiceEnabled, "Language service state");
|
||||
});
|
||||
|
||||
it("Large file size is determined correctly", () => {
|
||||
const f1: File = {
|
||||
path: "/a/app.js",
|
||||
content: "let x = 1;"
|
||||
};
|
||||
const f2: File = {
|
||||
path: "/a/largefile.js",
|
||||
content: "",
|
||||
fileSize: server.maxProgramSizeForNonTsFiles + 1
|
||||
};
|
||||
const f3: File = {
|
||||
path: "/a/extremlylarge.d.ts",
|
||||
content: "",
|
||||
fileSize: server.maxProgramSizeForNonTsFiles + 100
|
||||
};
|
||||
const config = {
|
||||
path: "/a/jsconfig.json",
|
||||
content: "{}"
|
||||
};
|
||||
const host = createServerHost([f1, f2, f3, libFile, config]);
|
||||
const logs: string[] = [];
|
||||
const logger: server.Logger = {
|
||||
...nullLogger,
|
||||
info: s => logs.push(s)
|
||||
};
|
||||
const service = createProjectService(host, { logger });
|
||||
service.openClientFile(f1.path);
|
||||
checkNumberOfProjects(service, { configuredProjects: 1 });
|
||||
const project = service.configuredProjects.get(config.path)!;
|
||||
assert.isFalse(project.languageServiceEnabled, "Language service enabled");
|
||||
assert.equal(project.lastFileExceededProgramSize, f2.path);
|
||||
assert.isTrue(contains(logs, `Non TS file size exceeded limit (${f1.content.length + f2.fileSize!}). Largest files: ${f2.path}:${f2.fileSize}, ${f1.path}:${f1.content.length}`));
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
55
src/testRunner/unittests/tsserver/plugins.ts
Normal file
@ -0,0 +1,55 @@
|
||||
namespace ts.projectSystem {
|
||||
describe("unittests:: tsserver:: plugins loading", () => {
|
||||
function createHostWithPlugin(files: readonly File[]) {
|
||||
const host = createServerHost(files);
|
||||
const pluginsLoaded: string[] = [];
|
||||
host.require = (_initialPath, moduleName) => {
|
||||
pluginsLoaded.push(moduleName);
|
||||
return {
|
||||
module: () => ({
|
||||
create(info: server.PluginCreateInfo) {
|
||||
return Harness.LanguageService.makeDefaultProxy(info);
|
||||
}
|
||||
}),
|
||||
error: undefined
|
||||
};
|
||||
};
|
||||
return { host, pluginsLoaded };
|
||||
}
|
||||
|
||||
it("With local plugins", () => {
|
||||
const expectedToLoad = ["@myscoped/plugin", "unscopedPlugin"];
|
||||
const notToLoad = ["../myPlugin", "myPlugin/../malicious"];
|
||||
const aTs: File = { path: "/a.ts", content: `class c { prop = "hello"; foo() { return this.prop; } }` };
|
||||
const tsconfig: File = {
|
||||
path: "/tsconfig.json",
|
||||
content: JSON.stringify({
|
||||
compilerOptions: {
|
||||
plugins: [
|
||||
...[...expectedToLoad, ...notToLoad].map(name => ({ name })),
|
||||
{ transform: "some-transform" }
|
||||
]
|
||||
}
|
||||
})
|
||||
};
|
||||
const { host, pluginsLoaded } = createHostWithPlugin([aTs, tsconfig, libFile]);
|
||||
const service = createProjectService(host);
|
||||
service.openClientFile(aTs.path);
|
||||
assert.deepEqual(pluginsLoaded, expectedToLoad);
|
||||
});
|
||||
|
||||
it("With global plugins", () => {
|
||||
const expectedToLoad = ["@myscoped/plugin", "unscopedPlugin"];
|
||||
const notToLoad = ["../myPlugin", "myPlugin/../malicious"];
|
||||
const aTs: File = { path: "/a.ts", content: `class c { prop = "hello"; foo() { return this.prop; } }` };
|
||||
const tsconfig: File = {
|
||||
path: "/tsconfig.json",
|
||||
content: "{}"
|
||||
};
|
||||
const { host, pluginsLoaded } = createHostWithPlugin([aTs, tsconfig, libFile]);
|
||||
const service = createProjectService(host, { globalPlugins: [...expectedToLoad, ...notToLoad] });
|
||||
service.openClientFile(aTs.path);
|
||||
assert.deepEqual(pluginsLoaded, expectedToLoad);
|
||||
});
|
||||
});
|
||||
}
|
||||
@ -409,7 +409,7 @@ namespace ts.projectSystem {
|
||||
checkErrors([serverUtilities.path, app.path]);
|
||||
|
||||
function checkErrors(openFiles: [string, string]) {
|
||||
verifyGetErrRequestNoErrors({ session, host, files: openFiles, existingTimeouts: 2 });
|
||||
verifyGetErrRequestNoErrors({ session, host, files: openFiles });
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
@ -419,7 +419,7 @@ namespace ts.projectSystem {
|
||||
unresolvedImports: response.unresolvedImports,
|
||||
});
|
||||
|
||||
host.checkTimeoutQueueLengthAndRun(1);
|
||||
host.checkTimeoutQueueLength(0);
|
||||
assert.isUndefined(request);
|
||||
});
|
||||
|
||||
|
||||
@ -198,10 +198,7 @@ namespace ts.projectSystem {
|
||||
|
||||
checkNumberOfProjects(service, { inferredProjects: 1 });
|
||||
session.clearMessages();
|
||||
host.checkTimeoutQueueLengthAndRun(2);
|
||||
|
||||
checkProjectUpdatedInBackgroundEvent(session, [file.path]);
|
||||
|
||||
host.checkTimeoutQueueLength(0);
|
||||
verifyGetErrRequest({
|
||||
session,
|
||||
host,
|
||||
@ -240,10 +237,7 @@ namespace ts.projectSystem {
|
||||
|
||||
checkNumberOfProjects(service, { inferredProjects: 1 });
|
||||
session.clearMessages();
|
||||
host.checkTimeoutQueueLengthAndRun(2);
|
||||
|
||||
checkProjectUpdatedInBackgroundEvent(session, [file.path]);
|
||||
|
||||
host.checkTimeoutQueueLength(0);
|
||||
verifyGetErrRequest({
|
||||
session,
|
||||
host,
|
||||
|
||||
@ -244,7 +244,7 @@ namespace ts.projectSystem {
|
||||
checkProjectActualFiles(p, [jqueryJs.path]);
|
||||
|
||||
installer.installAll(/*expectedCount*/ 0);
|
||||
host.checkTimeoutQueueLengthAndRun(2);
|
||||
host.checkTimeoutQueueLength(0);
|
||||
checkNumberOfProjects(projectService, { inferredProjects: 1 });
|
||||
// files should not be removed from project if ATA is skipped
|
||||
checkProjectActualFiles(p, [jqueryJs.path]);
|
||||
@ -1024,9 +1024,8 @@ namespace ts.projectSystem {
|
||||
service.openClientFile(f.path);
|
||||
|
||||
installer.checkPendingCommands(/*expectedCount*/ 0);
|
||||
|
||||
host.writeFile(fixedPackageJson.path, fixedPackageJson.content);
|
||||
host.checkTimeoutQueueLengthAndRun(2); // To refresh the project and refresh inferred projects
|
||||
host.checkTimeoutQueueLength(0);
|
||||
// expected install request
|
||||
installer.installAll(/*expectedCount*/ 1);
|
||||
host.checkTimeoutQueueLengthAndRun(2);
|
||||
@ -1212,7 +1211,8 @@ namespace ts.projectSystem {
|
||||
}
|
||||
};
|
||||
session.executeCommand(changeRequest);
|
||||
host.checkTimeoutQueueLengthAndRun(2); // This enqueues the updategraph and refresh inferred projects
|
||||
host.checkTimeoutQueueLength(0);
|
||||
proj.updateGraph();
|
||||
const version2 = proj.lastCachedUnresolvedImportsList;
|
||||
assert.strictEqual(version1, version2, "set of unresolved imports should change");
|
||||
});
|
||||
@ -1837,6 +1837,7 @@ namespace ts.projectSystem {
|
||||
const appPath = "/a/b/app.js" as Path;
|
||||
const foooPath = "/a/b/node_modules/fooo/index.d.ts";
|
||||
function verifyResolvedModuleOfFooo(project: server.Project) {
|
||||
server.updateProjectIfDirty(project);
|
||||
const foooResolution = project.getLanguageService().getProgram()!.getSourceFileByPath(appPath)!.resolvedModules!.get("fooo")!;
|
||||
assert.equal(foooResolution.resolvedFileName, foooPath);
|
||||
return foooResolution;
|
||||
@ -1851,6 +1852,7 @@ namespace ts.projectSystem {
|
||||
path: foooPath,
|
||||
content: `export var x: string;`
|
||||
};
|
||||
|
||||
const host = createServerHost([app, fooo]);
|
||||
const installer = new (class extends Installer {
|
||||
constructor() {
|
||||
@ -1873,6 +1875,17 @@ namespace ts.projectSystem {
|
||||
checkProjectActualFiles(proj, typingFiles.map(f => f.path).concat(app.path, fooo.path));
|
||||
const foooResolution2 = verifyResolvedModuleOfFooo(proj);
|
||||
assert.strictEqual(foooResolution1, foooResolution2);
|
||||
projectService.applyChangesInOpenFiles(/*openFiles*/ undefined, arrayIterator([{
|
||||
fileName: app.path,
|
||||
changes: arrayIterator([{
|
||||
span: { start: 0, length: 0 },
|
||||
newText: `import * as bar from "bar";`
|
||||
}])
|
||||
}]));
|
||||
host.runQueuedTimeoutCallbacks(); // Update the graph
|
||||
// Update the typing
|
||||
host.checkTimeoutQueueLength(0);
|
||||
assert.isFalse(proj.resolutionCache.isFileWithInvalidatedNonRelativeUnresolvedImports(app.path as Path));
|
||||
}
|
||||
|
||||
it("correctly invalidate the resolutions with typing names", () => {
|
||||
@ -1883,6 +1896,10 @@ namespace ts.projectSystem {
|
||||
});
|
||||
|
||||
it("correctly invalidate the resolutions with typing names that are trimmed", () => {
|
||||
const fooIndex: File = {
|
||||
path: `${globalTypingsCacheLocation}/node_modules/foo/index.d.ts`,
|
||||
content: "export function aa(): void;"
|
||||
};
|
||||
const fooAA: File = {
|
||||
path: `${globalTypingsCacheLocation}/node_modules/foo/a/a.d.ts`,
|
||||
content: "export function a (): void;"
|
||||
@ -1899,7 +1916,7 @@ namespace ts.projectSystem {
|
||||
import * as a from "foo/a/a";
|
||||
import * as b from "foo/a/b";
|
||||
import * as c from "foo/a/c";
|
||||
`, ["foo"], [fooAA, fooAB, fooAC]);
|
||||
`, ["foo"], [fooIndex, fooAA, fooAB, fooAC]);
|
||||
});
|
||||
|
||||
it("should handle node core modules", () => {
|
||||
@ -1958,12 +1975,21 @@ declare module "stream" {
|
||||
host.checkTimeoutQueueLengthAndRun(2);
|
||||
checkProjectActualFiles(proj, [file.path, libFile.path, nodeTyping.path]);
|
||||
|
||||
// Here, since the typings don't contain node typings and resolution fails,
// node typings would go out of the project,
// but because we handle core node modules when resolving from the typings cache
// node typings are included in the project
host.checkTimeoutQueueLengthAndRun(2);
// Here, since the typings don't change, there is no timeout scheduled
|
||||
host.checkTimeoutQueueLength(0);
|
||||
checkProjectActualFiles(proj, [file.path, libFile.path, nodeTyping.path]);
|
||||
projectService.applyChangesInOpenFiles(/*openFiles*/ undefined, arrayIterator([{
|
||||
fileName: file.path,
|
||||
changes: arrayIterator([{
|
||||
span: { start: file.content.indexOf("const"), length: 0 },
|
||||
newText: `const bar = require("bar");`
|
||||
}])
|
||||
}]));
|
||||
proj.updateGraph(); // Update the graph
|
||||
checkProjectActualFiles(proj, [file.path, libFile.path, nodeTyping.path]);
|
||||
// Update the typing
|
||||
host.checkTimeoutQueueLength(0);
|
||||
assert.isFalse(proj.resolutionCache.isFileWithInvalidatedNonRelativeUnresolvedImports(file.path as Path));
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@ -833,9 +833,7 @@ namespace ts.server {
|
||||
exit() {
|
||||
this.logger.info("Exiting...");
|
||||
this.projectService.closeLog();
|
||||
if (traceDir) {
|
||||
tracing.stopTracing(ts.emptyArray);
|
||||
}
|
||||
tracing?.stopTracing(ts.emptyArray);
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
@ -863,7 +861,7 @@ namespace ts.server {
|
||||
? stripQuotes(commandLineTraceDir)
|
||||
: process.env.TSS_TRACE;
|
||||
if (traceDir) {
|
||||
tracing.startTracing(tracing.Mode.Server, traceDir);
|
||||
startTracing(tracingEnabled.Mode.Server, traceDir);
|
||||
}
|
||||
|
||||
const ioSession = new IOSession();
|
||||
|
||||
@ -0,0 +1,42 @@
|
||||
{
|
||||
"kind": "JSDocComment",
|
||||
"pos": 0,
|
||||
"end": 54,
|
||||
"flags": "JSDoc",
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
"tags": {
|
||||
"0": {
|
||||
"kind": "JSDocParameterTag",
|
||||
"pos": 7,
|
||||
"end": 52,
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
"tagName": {
|
||||
"kind": "Identifier",
|
||||
"pos": 8,
|
||||
"end": 13,
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
"escapedText": "param"
|
||||
},
|
||||
"comment": "is (@@fine@@and) is one comment",
|
||||
"name": {
|
||||
"kind": "Identifier",
|
||||
"pos": 14,
|
||||
"end": 18,
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
"originalKeywordKind": "ThisKeyword",
|
||||
"escapedText": "this"
|
||||
},
|
||||
"isNameFirst": true,
|
||||
"isBracketed": false
|
||||
},
|
||||
"length": 1,
|
||||
"pos": 7,
|
||||
"end": 52,
|
||||
"hasTrailingComma": false,
|
||||
"transformFlags": 0
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,75 @@
|
||||
{
|
||||
"kind": "JSDocComment",
|
||||
"pos": 0,
|
||||
"end": 15,
|
||||
"flags": "JSDoc",
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
"tags": {
|
||||
"0": {
|
||||
"kind": "JSDocTag",
|
||||
"pos": 3,
|
||||
"end": 6,
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
"tagName": {
|
||||
"kind": "Identifier",
|
||||
"pos": 4,
|
||||
"end": 5,
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
"escapedText": "a"
|
||||
}
|
||||
},
|
||||
"1": {
|
||||
"kind": "JSDocTag",
|
||||
"pos": 6,
|
||||
"end": 9,
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
"tagName": {
|
||||
"kind": "Identifier",
|
||||
"pos": 7,
|
||||
"end": 8,
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
"escapedText": "b"
|
||||
}
|
||||
},
|
||||
"2": {
|
||||
"kind": "JSDocTag",
|
||||
"pos": 9,
|
||||
"end": 11,
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
"tagName": {
|
||||
"kind": "Identifier",
|
||||
"pos": 10,
|
||||
"end": 11,
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
"escapedText": "c"
|
||||
}
|
||||
},
|
||||
"3": {
|
||||
"kind": "JSDocTag",
|
||||
"pos": 11,
|
||||
"end": 13,
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
"tagName": {
|
||||
"kind": "Identifier",
|
||||
"pos": 12,
|
||||
"end": 13,
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
"escapedText": "d"
|
||||
}
|
||||
},
|
||||
"length": 4,
|
||||
"pos": 3,
|
||||
"end": 13,
|
||||
"hasTrailingComma": false,
|
||||
"transformFlags": 0
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,9 @@
|
||||
{
|
||||
"kind": "JSDocComment",
|
||||
"pos": 0,
|
||||
"end": 21,
|
||||
"flags": "JSDoc",
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
"comment": "bill@example.com"
|
||||
}
|
||||
@ -0,0 +1,9 @@
|
||||
{
|
||||
"kind": "JSDocComment",
|
||||
"pos": 0,
|
||||
"end": 8,
|
||||
"flags": "JSDoc",
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
"comment": "*@a"
|
||||
}
|
||||
@ -0,0 +1,9 @@
|
||||
{
|
||||
"kind": "JSDocComment",
|
||||
"pos": 0,
|
||||
"end": 8,
|
||||
"flags": "JSDoc",
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
"comment": "*@a"
|
||||
}
|
||||
@ -0,0 +1,9 @@
|
||||
{
|
||||
"kind": "JSDocComment",
|
||||
"pos": 0,
|
||||
"end": 9,
|
||||
"flags": "JSDoc",
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
"comment": "* @a"
|
||||
}
|
||||
@ -0,0 +1,9 @@
|
||||
{
|
||||
"kind": "JSDocComment",
|
||||
"pos": 0,
|
||||
"end": 9,
|
||||
"flags": "JSDoc",
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
"comment": "* @a"
|
||||
}
|
||||
@ -1,7 +1,7 @@
|
||||
{
|
||||
"kind": "JSDocComment",
|
||||
"pos": 0,
|
||||
"end": 738,
|
||||
"end": 739,
|
||||
"flags": "JSDoc",
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
@ -262,7 +262,7 @@
|
||||
"16": {
|
||||
"kind": "JSDocAuthorTag",
|
||||
"pos": 559,
|
||||
"end": 598,
|
||||
"end": 599,
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
"tagName": {
|
||||
@ -273,18 +273,18 @@
|
||||
"transformFlags": 0,
|
||||
"escapedText": "author"
|
||||
},
|
||||
"comment": "No Line Breaks:<the.email"
|
||||
"comment": "No Line Breaks:<the email"
|
||||
},
|
||||
"17": {
|
||||
"kind": "JSDocTag",
|
||||
"pos": 598,
|
||||
"end": 606,
|
||||
"pos": 599,
|
||||
"end": 607,
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
"tagName": {
|
||||
"kind": "Identifier",
|
||||
"pos": 599,
|
||||
"end": 606,
|
||||
"pos": 600,
|
||||
"end": 607,
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
"escapedText": "address"
|
||||
@ -293,14 +293,14 @@
|
||||
},
|
||||
"18": {
|
||||
"kind": "JSDocAuthorTag",
|
||||
"pos": 645,
|
||||
"end": 736,
|
||||
"pos": 646,
|
||||
"end": 737,
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
"tagName": {
|
||||
"kind": "Identifier",
|
||||
"pos": 646,
|
||||
"end": 652,
|
||||
"pos": 647,
|
||||
"end": 653,
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
"escapedText": "author"
|
||||
@ -309,7 +309,7 @@
|
||||
},
|
||||
"length": 19,
|
||||
"pos": 7,
|
||||
"end": 736,
|
||||
"end": 737,
|
||||
"hasTrailingComma": false,
|
||||
"transformFlags": 0
|
||||
}
|
||||
|
||||
@ -0,0 +1,99 @@
|
||||
{
|
||||
"kind": "JSDocComment",
|
||||
"pos": 0,
|
||||
"end": 91,
|
||||
"flags": "JSDoc",
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
"tags": {
|
||||
"0": {
|
||||
"kind": "JSDocParameterTag",
|
||||
"pos": 7,
|
||||
"end": 29,
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
"tagName": {
|
||||
"kind": "Identifier",
|
||||
"pos": 8,
|
||||
"end": 13,
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
"escapedText": "param"
|
||||
},
|
||||
"comment": "(@is@)",
|
||||
"name": {
|
||||
"kind": "Identifier",
|
||||
"pos": 14,
|
||||
"end": 18,
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
"originalKeywordKind": "ThisKeyword",
|
||||
"escapedText": "this"
|
||||
},
|
||||
"isNameFirst": true,
|
||||
"isBracketed": false
|
||||
},
|
||||
"1": {
|
||||
"kind": "JSDocParameterTag",
|
||||
"pos": 29,
|
||||
"end": 50,
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
"tagName": {
|
||||
"kind": "Identifier",
|
||||
"pos": 30,
|
||||
"end": 35,
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
"escapedText": "param"
|
||||
},
|
||||
"comment": "its@fine",
|
||||
"name": {
|
||||
"kind": "Identifier",
|
||||
"pos": 36,
|
||||
"end": 40,
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
"escapedText": "fine"
|
||||
},
|
||||
"isNameFirst": true,
|
||||
"isBracketed": false
|
||||
},
|
||||
"2": {
|
||||
"kind": "JSDocTag",
|
||||
"pos": 50,
|
||||
"end": 62,
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
"tagName": {
|
||||
"kind": "Identifier",
|
||||
"pos": 51,
|
||||
"end": 60,
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
"escapedText": "zerowidth"
|
||||
}
|
||||
},
|
||||
"3": {
|
||||
"kind": "JSDocTag",
|
||||
"pos": 62,
|
||||
"end": 75,
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
"tagName": {
|
||||
"kind": "Identifier",
|
||||
"pos": 63,
|
||||
"end": 73,
|
||||
"modifierFlagsCache": 0,
|
||||
"transformFlags": 0,
|
||||
"escapedText": "singlestar"
|
||||
},
|
||||
"comment": "*@doublestar"
|
||||
},
|
||||
"length": 4,
|
||||
"pos": 7,
|
||||
"end": 75,
|
||||
"hasTrailingComma": false,
|
||||
"transformFlags": 0
|
||||
}
|
||||
}
|
||||
@ -1,6 +1,6 @@
|
||||
=== tests/cases/conformance/es6/templates/TemplateExpression1.ts ===
|
||||
var v = `foo ${ a
|
||||
>v : string
|
||||
>`foo ${ a : `foo ${any}`
|
||||
>`foo ${ a : string
|
||||
>a : any
|
||||
|
||||
|
||||
@ -22,7 +22,7 @@ class Derived extends Base {
|
||||
>console.log : (...data: any[]) => void
|
||||
>console : Console
|
||||
>log : (...data: any[]) => void
|
||||
>`x was set to ${value}` : `x was set to ${number}`
|
||||
>`x was set to ${value}` : string
|
||||
>value : number
|
||||
}
|
||||
|
||||
|
||||
@ -2656,8 +2656,6 @@ declare namespace ts {
|
||||
export interface TemplateLiteralType extends InstantiableType {
|
||||
texts: readonly string[];
|
||||
types: readonly Type[];
|
||||
freshType: TemplateLiteralType;
|
||||
regularType: TemplateLiteralType;
|
||||
}
|
||||
export interface StringMappingType extends InstantiableType {
|
||||
symbol: Symbol;
|
||||
@ -3970,6 +3968,7 @@ declare namespace ts {
|
||||
reScanJsxToken(): JsxTokenSyntaxKind;
|
||||
reScanLessThanToken(): SyntaxKind;
|
||||
reScanQuestionToken(): SyntaxKind;
|
||||
reScanInvalidIdentifier(): SyntaxKind;
|
||||
scanJsxToken(): JsxTokenSyntaxKind;
|
||||
scanJsDocToken(): JSDocSyntaxKind;
|
||||
scan(): SyntaxKind;
|
||||
@ -5556,7 +5555,7 @@ declare namespace ts {
|
||||
getFormattingEditsForRange(fileName: string, start: number, end: number, options: FormatCodeOptions | FormatCodeSettings): TextChange[];
|
||||
getFormattingEditsForDocument(fileName: string, options: FormatCodeOptions | FormatCodeSettings): TextChange[];
|
||||
getFormattingEditsAfterKeystroke(fileName: string, position: number, key: string, options: FormatCodeOptions | FormatCodeSettings): TextChange[];
|
||||
getDocCommentTemplateAtPosition(fileName: string, position: number): TextInsertion | undefined;
|
||||
getDocCommentTemplateAtPosition(fileName: string, position: number, options?: DocCommentTemplateOptions): TextInsertion | undefined;
|
||||
isValidBraceCompletionAtPosition(fileName: string, position: number, openingBrace: number): boolean;
|
||||
/**
|
||||
* This will return a defined result if the position is after the `>` of the opening tag, or somewhere in the text, of a JSXElement with no closing tag.
|
||||
@ -6021,11 +6020,15 @@ declare namespace ts {
|
||||
interface RenameInfoOptions {
|
||||
readonly allowRenameOfImportPath?: boolean;
|
||||
}
|
||||
interface DocCommentTemplateOptions {
|
||||
readonly generateReturnInDocTemplate?: boolean;
|
||||
}
|
||||
interface SignatureHelpParameter {
|
||||
name: string;
|
||||
documentation: SymbolDisplayPart[];
|
||||
displayParts: SymbolDisplayPart[];
|
||||
isOptional: boolean;
|
||||
isRest?: boolean;
|
||||
}
|
||||
interface SelectionRange {
|
||||
textSpan: TextSpan;
|
||||
@ -9046,6 +9049,7 @@ declare namespace ts.server.protocol {
|
||||
readonly provideRefactorNotApplicableReason?: boolean;
|
||||
readonly allowRenameOfImportPath?: boolean;
|
||||
readonly includePackageJsonAutoImports?: "auto" | "on" | "off";
|
||||
readonly generateReturnInDocTemplate?: boolean;
|
||||
}
|
||||
interface CompilerOptions {
|
||||
allowJs?: boolean;
|
||||
@ -9267,7 +9271,6 @@ declare namespace ts.server {
|
||||
private missingFilesMap;
|
||||
private generatedFilesMap;
|
||||
private plugins;
|
||||
private lastFileExceededProgramSize;
|
||||
protected languageService: LanguageService;
|
||||
languageServiceEnabled: boolean;
|
||||
readonly trace?: (s: string) => void;
|
||||
|
||||
@ -2656,8 +2656,6 @@ declare namespace ts {
|
||||
export interface TemplateLiteralType extends InstantiableType {
|
||||
texts: readonly string[];
|
||||
types: readonly Type[];
|
||||
freshType: TemplateLiteralType;
|
||||
regularType: TemplateLiteralType;
|
||||
}
|
||||
export interface StringMappingType extends InstantiableType {
|
||||
symbol: Symbol;
|
||||
@ -3970,6 +3968,7 @@ declare namespace ts {
|
||||
reScanJsxToken(): JsxTokenSyntaxKind;
|
||||
reScanLessThanToken(): SyntaxKind;
|
||||
reScanQuestionToken(): SyntaxKind;
|
||||
reScanInvalidIdentifier(): SyntaxKind;
|
||||
scanJsxToken(): JsxTokenSyntaxKind;
|
||||
scanJsDocToken(): JSDocSyntaxKind;
|
||||
scan(): SyntaxKind;
|
||||
@ -5556,7 +5555,7 @@ declare namespace ts {
|
||||
getFormattingEditsForRange(fileName: string, start: number, end: number, options: FormatCodeOptions | FormatCodeSettings): TextChange[];
|
||||
getFormattingEditsForDocument(fileName: string, options: FormatCodeOptions | FormatCodeSettings): TextChange[];
|
||||
getFormattingEditsAfterKeystroke(fileName: string, position: number, key: string, options: FormatCodeOptions | FormatCodeSettings): TextChange[];
|
||||
getDocCommentTemplateAtPosition(fileName: string, position: number): TextInsertion | undefined;
|
||||
getDocCommentTemplateAtPosition(fileName: string, position: number, options?: DocCommentTemplateOptions): TextInsertion | undefined;
|
||||
isValidBraceCompletionAtPosition(fileName: string, position: number, openingBrace: number): boolean;
|
||||
/**
|
||||
* This will return a defined result if the position is after the `>` of the opening tag, or somewhere in the text, of a JSXElement with no closing tag.
|
||||
@ -6021,11 +6020,15 @@ declare namespace ts {
|
||||
interface RenameInfoOptions {
|
||||
readonly allowRenameOfImportPath?: boolean;
|
||||
}
|
||||
interface DocCommentTemplateOptions {
|
||||
readonly generateReturnInDocTemplate?: boolean;
|
||||
}
|
||||
interface SignatureHelpParameter {
|
||||
name: string;
|
||||
documentation: SymbolDisplayPart[];
|
||||
displayParts: SymbolDisplayPart[];
|
||||
isOptional: boolean;
|
||||
isRest?: boolean;
|
||||
}
|
||||
interface SelectionRange {
|
||||
textSpan: TextSpan;
|
||||
|
||||
@ -5,7 +5,7 @@ declare function tag(...x: any[]): any;
|
||||
|
||||
var a = `${123 + 456 as number}`;
|
||||
>a : string
|
||||
>`${123 + 456 as number}` : `${number}`
|
||||
>`${123 + 456 as number}` : string
|
||||
>123 + 456 as number : number
|
||||
>123 + 456 : number
|
||||
>123 : 123
|
||||
@ -13,7 +13,7 @@ var a = `${123 + 456 as number}`;
|
||||
|
||||
var b = `leading ${123 + 456 as number}`;
|
||||
>b : string
|
||||
>`leading ${123 + 456 as number}` : `leading ${number}`
|
||||
>`leading ${123 + 456 as number}` : string
|
||||
>123 + 456 as number : number
|
||||
>123 + 456 : number
|
||||
>123 : 123
|
||||
@ -21,7 +21,7 @@ var b = `leading ${123 + 456 as number}`;
|
||||
|
||||
var c = `${123 + 456 as number} trailing`;
|
||||
>c : string
|
||||
>`${123 + 456 as number} trailing` : `${number} trailing`
|
||||
>`${123 + 456 as number} trailing` : string
|
||||
>123 + 456 as number : number
|
||||
>123 + 456 : number
|
||||
>123 : 123
|
||||
@ -30,7 +30,7 @@ var c = `${123 + 456 as number} trailing`;
|
||||
var d = `Hello ${123} World` as string;
|
||||
>d : string
|
||||
>`Hello ${123} World` as string : string
|
||||
>`Hello ${123} World` : "Hello 123 World"
|
||||
>`Hello ${123} World` : string
|
||||
>123 : 123
|
||||
|
||||
var e = `Hello` as string;
|
||||
@ -43,7 +43,7 @@ var f = 1 + `${1} end of string` as string;
|
||||
>1 + `${1} end of string` as string : string
|
||||
>1 + `${1} end of string` : string
|
||||
>1 : 1
|
||||
>`${1} end of string` : "1 end of string"
|
||||
>`${1} end of string` : string
|
||||
>1 : 1
|
||||
|
||||
var g = tag `Hello ${123} World` as string;
|
||||
@ -51,7 +51,7 @@ var g = tag `Hello ${123} World` as string;
|
||||
>tag `Hello ${123} World` as string : string
|
||||
>tag `Hello ${123} World` : any
|
||||
>tag : (...x: any[]) => any
|
||||
>`Hello ${123} World` : "Hello 123 World"
|
||||
>`Hello ${123} World` : string
|
||||
>123 : 123
|
||||
|
||||
var h = tag `Hello` as string;
|
||||
|
||||
@ -1,18 +1,18 @@
tests/cases/compiler/awaitInNonAsyncFunction.ts(4,7): error TS1103: A 'for-await-of' statement is only allowed within an async function or async generator.
tests/cases/compiler/awaitInNonAsyncFunction.ts(4,7): error TS1103: 'for await' loops are only allowed within async functions and at the top levels of modules.
tests/cases/compiler/awaitInNonAsyncFunction.ts(5,10): error TS1308: 'await' expressions are only allowed within async functions and at the top levels of modules.
tests/cases/compiler/awaitInNonAsyncFunction.ts(9,7): error TS1103: A 'for-await-of' statement is only allowed within an async function or async generator.
tests/cases/compiler/awaitInNonAsyncFunction.ts(9,7): error TS1103: 'for await' loops are only allowed within async functions and at the top levels of modules.
tests/cases/compiler/awaitInNonAsyncFunction.ts(10,10): error TS1308: 'await' expressions are only allowed within async functions and at the top levels of modules.
tests/cases/compiler/awaitInNonAsyncFunction.ts(14,7): error TS1103: A 'for-await-of' statement is only allowed within an async function or async generator.
tests/cases/compiler/awaitInNonAsyncFunction.ts(14,7): error TS1103: 'for await' loops are only allowed within async functions and at the top levels of modules.
tests/cases/compiler/awaitInNonAsyncFunction.ts(15,3): error TS1308: 'await' expressions are only allowed within async functions and at the top levels of modules.
tests/cases/compiler/awaitInNonAsyncFunction.ts(19,7): error TS1103: A 'for-await-of' statement is only allowed within an async function or async generator.
tests/cases/compiler/awaitInNonAsyncFunction.ts(19,7): error TS1103: 'for await' loops are only allowed within async functions and at the top levels of modules.
tests/cases/compiler/awaitInNonAsyncFunction.ts(20,10): error TS1308: 'await' expressions are only allowed within async functions and at the top levels of modules.
tests/cases/compiler/awaitInNonAsyncFunction.ts(24,7): error TS1103: A 'for-await-of' statement is only allowed within an async function or async generator.
tests/cases/compiler/awaitInNonAsyncFunction.ts(24,7): error TS1103: 'for await' loops are only allowed within async functions and at the top levels of modules.
tests/cases/compiler/awaitInNonAsyncFunction.ts(25,9): error TS1308: 'await' expressions are only allowed within async functions and at the top levels of modules.
tests/cases/compiler/awaitInNonAsyncFunction.ts(30,9): error TS1103: A 'for-await-of' statement is only allowed within an async function or async generator.
tests/cases/compiler/awaitInNonAsyncFunction.ts(30,9): error TS1103: 'for await' loops are only allowed within async functions and at the top levels of modules.
tests/cases/compiler/awaitInNonAsyncFunction.ts(31,5): error TS1308: 'await' expressions are only allowed within async functions and at the top levels of modules.
tests/cases/compiler/awaitInNonAsyncFunction.ts(34,7): error TS1103: A 'for-await-of' statement is only allowed within an async function or async generator.
tests/cases/compiler/awaitInNonAsyncFunction.ts(34,7): error TS1103: 'for await' loops are only allowed within async functions and at the top levels of modules.
tests/cases/compiler/awaitInNonAsyncFunction.ts(35,5): error TS1308: 'await' expressions are only allowed within async functions and at the top levels of modules.
tests/cases/compiler/awaitInNonAsyncFunction.ts(39,5): error TS1103: A 'for-await-of' statement is only allowed within an async function or async generator.
tests/cases/compiler/awaitInNonAsyncFunction.ts(39,5): error TS1432: Top-level 'for await' loops are only allowed when the 'module' option is set to 'esnext' or 'system', and the 'target' option is set to 'es2017' or higher.
tests/cases/compiler/awaitInNonAsyncFunction.ts(40,1): error TS1378: Top-level 'await' expressions are only allowed when the 'module' option is set to 'esnext' or 'system', and the 'target' option is set to 'es2017' or higher.

@ -22,7 +22,7 @@ tests/cases/compiler/awaitInNonAsyncFunction.ts(40,1): error TS1378: Top-level '
function normalFunc(p: Promise<number>) {
for await (const _ of []);
~~~~~
!!! error TS1103: A 'for-await-of' statement is only allowed within an async function or async generator.
!!! error TS1103: 'for await' loops are only allowed within async functions and at the top levels of modules.
!!! related TS1356 tests/cases/compiler/awaitInNonAsyncFunction.ts:3:10: Did you mean to mark this function as 'async'?
return await p;
~~~~~
@ -33,7 +33,7 @@ tests/cases/compiler/awaitInNonAsyncFunction.ts(40,1): error TS1378: Top-level '
export function exportedFunc(p: Promise<number>) {
for await (const _ of []);
~~~~~
!!! error TS1103: A 'for-await-of' statement is only allowed within an async function or async generator.
!!! error TS1103: 'for await' loops are only allowed within async functions and at the top levels of modules.
!!! related TS1356 tests/cases/compiler/awaitInNonAsyncFunction.ts:8:17: Did you mean to mark this function as 'async'?
return await p;
~~~~~
@ -44,7 +44,7 @@ tests/cases/compiler/awaitInNonAsyncFunction.ts(40,1): error TS1378: Top-level '
const functionExpression = function(p: Promise<number>) {
for await (const _ of []);
~~~~~
!!! error TS1103: A 'for-await-of' statement is only allowed within an async function or async generator.
!!! error TS1103: 'for await' loops are only allowed within async functions and at the top levels of modules.
!!! related TS1356 tests/cases/compiler/awaitInNonAsyncFunction.ts:13:28: Did you mean to mark this function as 'async'?
await p;
~~~~~
@ -55,7 +55,7 @@ tests/cases/compiler/awaitInNonAsyncFunction.ts(40,1): error TS1378: Top-level '
const arrowFunc = (p: Promise<number>) => {
for await (const _ of []);
~~~~~
!!! error TS1103: A 'for-await-of' statement is only allowed within an async function or async generator.
!!! error TS1103: 'for await' loops are only allowed within async functions and at the top levels of modules.
!!! related TS1356 tests/cases/compiler/awaitInNonAsyncFunction.ts:18:19: Did you mean to mark this function as 'async'?
return await p;
~~~~~
@ -66,7 +66,7 @@ tests/cases/compiler/awaitInNonAsyncFunction.ts(40,1): error TS1378: Top-level '
function* generatorFunc(p: Promise<number>) {
for await (const _ of []);
~~~~~
!!! error TS1103: A 'for-await-of' statement is only allowed within an async function or async generator.
!!! error TS1103: 'for await' loops are only allowed within async functions and at the top levels of modules.
!!! related TS1356 tests/cases/compiler/awaitInNonAsyncFunction.ts:23:11: Did you mean to mark this function as 'async'?
yield await p;
~~~~~
@ -78,7 +78,7 @@ tests/cases/compiler/awaitInNonAsyncFunction.ts(40,1): error TS1378: Top-level '
constructor(p: Promise<number>) {
for await (const _ of []);
~~~~~
!!! error TS1103: A 'for-await-of' statement is only allowed within an async function or async generator.
!!! error TS1103: 'for await' loops are only allowed within async functions and at the top levels of modules.
await p;
~~~~~
!!! error TS1308: 'await' expressions are only allowed within async functions and at the top levels of modules.
@ -86,7 +86,7 @@ tests/cases/compiler/awaitInNonAsyncFunction.ts(40,1): error TS1378: Top-level '
method(p: Promise<number>) {
for await (const _ of []);
~~~~~
!!! error TS1103: A 'for-await-of' statement is only allowed within an async function or async generator.
!!! error TS1103: 'for await' loops are only allowed within async functions and at the top levels of modules.
!!! related TS1356 tests/cases/compiler/awaitInNonAsyncFunction.ts:33:3: Did you mean to mark this function as 'async'?
await p;
~~~~~
@ -97,7 +97,7 @@ tests/cases/compiler/awaitInNonAsyncFunction.ts(40,1): error TS1378: Top-level '

for await (const _ of []);
~~~~~
!!! error TS1103: A 'for-await-of' statement is only allowed within an async function or async generator.
!!! error TS1432: Top-level 'for await' loops are only allowed when the 'module' option is set to 'esnext' or 'system', and the 'target' option is set to 'es2017' or higher.
await null;
~~~~~
!!! error TS1378: Top-level 'await' expressions are only allowed when the 'module' option is set to 'esnext' or 'system', and the 'target' option is set to 'es2017' or higher.
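For orientation, the placements that the updated TS1103/TS1308 messages allow (and the top-level variants that TS1378/TS1432 gate on compiler options) can be sketched as follows. This is only an illustration of what the error text states, not part of the diff, and the top-level forms assume 'module' is set to 'esnext' or 'system' and 'target' to 'es2017' or higher:

export {};                                   // make this file a module

async function drain(p: Promise<number>) {
    for await (const x of [p]) {             // allowed: 'for await' inside an async function
        console.log(x);
    }
    return await p;                          // allowed: 'await' inside an async function
}

// Allowed at the top level of a module under the options named by TS1378/TS1432:
await drain(Promise.resolve(42));
for await (const n of [Promise.resolve(1)]) {
    console.log(n);
}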
@ -8,7 +8,7 @@ var loop2 = loop1;
>loop1 : Symbol(loop1, Decl(loop.js, 0, 3))

module.exports = loop2;
>module.exports : Symbol("tests/cases/conformance/salsa/loop", Decl(loop.js, 0, 0))
>module.exports : Symbol(module.exports, Decl(loop.js, 0, 0))
>module : Symbol(export=, Decl(loop.js, 1, 18))
>exports : Symbol(export=, Decl(loop.js, 1, 18))
>loop2 : Symbol(loop2, Decl(loop.js, 1, 3))

@ -10,7 +10,7 @@ var loop2 = loop1;
module.exports = loop2;
>module.exports = loop2 : any
>module.exports : any
>module : { "\"tests/cases/conformance/salsa/loop\"": any; }
>module : { exports: any; }
>exports : any
>loop2 : any

@ -260,7 +260,7 @@ declare const o5: <T>() => undefined | (() => void);
>o5 : <T>() => undefined | (() => void)

o5<number>()?.();
>o5<number>()?.() : void | undefined
>o5<number>()?.() : void
>o5<number>() : (() => void) | undefined
>o5 : <T>() => (() => void) | undefined

25
tests/baselines/reference/callChainInference.js
Normal file
@ -0,0 +1,25 @@
//// [callChainInference.ts]
// Repro from #42404

interface Y {
    foo<T>(this: T, arg: keyof T): void;
    a: number;
    b: string;
}

declare const value: Y | undefined;

if (value) {
    value?.foo("a");
}

value?.foo("a");


//// [callChainInference.js]
"use strict";
// Repro from #42404
if (value) {
    value === null || value === void 0 ? void 0 : value.foo("a");
}
value === null || value === void 0 ? void 0 : value.foo("a");
39
tests/baselines/reference/callChainInference.symbols
Normal file
@ -0,0 +1,39 @@
=== tests/cases/conformance/expressions/optionalChaining/callChain/callChainInference.ts ===
// Repro from #42404

interface Y {
>Y : Symbol(Y, Decl(callChainInference.ts, 0, 0))

    foo<T>(this: T, arg: keyof T): void;
>foo : Symbol(Y.foo, Decl(callChainInference.ts, 2, 13))
>T : Symbol(T, Decl(callChainInference.ts, 3, 8))
>this : Symbol(this, Decl(callChainInference.ts, 3, 11))
>T : Symbol(T, Decl(callChainInference.ts, 3, 8))
>arg : Symbol(arg, Decl(callChainInference.ts, 3, 19))
>T : Symbol(T, Decl(callChainInference.ts, 3, 8))

    a: number;
>a : Symbol(Y.a, Decl(callChainInference.ts, 3, 40))

    b: string;
>b : Symbol(Y.b, Decl(callChainInference.ts, 4, 14))
}

declare const value: Y | undefined;
>value : Symbol(value, Decl(callChainInference.ts, 8, 13))
>Y : Symbol(Y, Decl(callChainInference.ts, 0, 0))

if (value) {
>value : Symbol(value, Decl(callChainInference.ts, 8, 13))

    value?.foo("a");
>value?.foo : Symbol(Y.foo, Decl(callChainInference.ts, 2, 13))
>value : Symbol(value, Decl(callChainInference.ts, 8, 13))
>foo : Symbol(Y.foo, Decl(callChainInference.ts, 2, 13))
}

value?.foo("a");
>value?.foo : Symbol(Y.foo, Decl(callChainInference.ts, 2, 13))
>value : Symbol(value, Decl(callChainInference.ts, 8, 13))
>foo : Symbol(Y.foo, Decl(callChainInference.ts, 2, 13))

37
tests/baselines/reference/callChainInference.types
Normal file
@ -0,0 +1,37 @@
=== tests/cases/conformance/expressions/optionalChaining/callChain/callChainInference.ts ===
// Repro from #42404

interface Y {
    foo<T>(this: T, arg: keyof T): void;
>foo : <T>(this: T, arg: keyof T) => void
>this : T
>arg : keyof T

    a: number;
>a : number

    b: string;
>b : string
}

declare const value: Y | undefined;
>value : Y | undefined

if (value) {
>value : Y | undefined

    value?.foo("a");
>value?.foo("a") : void
>value?.foo : <T>(this: T, arg: keyof T) => void
>value : Y
>foo : <T>(this: T, arg: keyof T) => void
>"a" : "a"
}

value?.foo("a");
>value?.foo("a") : void
>value?.foo : (<T>(this: T, arg: keyof T) => void) | undefined
>value : Y | undefined
>foo : (<T>(this: T, arg: keyof T) => void) | undefined
>"a" : "a"

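The new callChainInference baselines (the #42404 repro) record that inference from the receiver works through an optional call: T is inferred as Y for value?.foo, so keyof T covers "a" and "b" and the argument "a" checks both inside and outside the truthiness guard. A minimal restatement follows, with the constant renamed to make clear it is illustrative rather than the test's own code:

interface Y {
    foo<T>(this: T, arg: keyof T): void;
    a: number;
    b: string;
}

declare const maybe: Y | undefined;          // illustrative name; the test uses 'value'

maybe?.foo("a");                             // ok: T is inferred as Y, and "a" is assignable to keyof Y
// maybe?.foo("c");                          // would be rejected: "c" is not in keyof Y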
@ -4,7 +4,7 @@
* @return {any} I don't even know what this should return
*/
module.exports = C
>module.exports : Symbol("tests/cases/conformance/jsdoc/mod1", Decl(mod1.js, 0, 0))
>module.exports : Symbol(module.exports, Decl(mod1.js, 0, 0))
>module : Symbol(export=, Decl(mod1.js, 0, 0))
>exports : Symbol(export=, Decl(mod1.js, 0, 0))
>C : Symbol(C, Decl(mod1.js, 4, 18))

@ -6,7 +6,7 @@
module.exports = C
>module.exports = C : typeof C
>module.exports : typeof C
>module : { "\"tests/cases/conformance/jsdoc/mod1\"": typeof C; }
>module : { exports: typeof C; }
>exports : typeof C
>C : typeof C

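The loop.js and mod1.js hunks above show module.exports displayed as a member of a '{ exports: ... }' object (and as Symbol(module.exports, ...)) rather than as an object keyed by the quoted module name. Purely as an illustration of that shape, and not the checker's actual declarations, the view resembles:

export {};                                   // keep the illustrative 'module' out of the global scope

// Rough shape only, echoing the '{ exports: typeof C }' / '{ exports: any }' display above.
interface CommonJsModule<T> {
    exports: T;
}

declare const module: CommonJsModule<any>;
module.exports = {};                         // mirrors 'module.exports = loop2' / 'module.exports = C'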