mirror of
https://github.com/microsoft/TypeScript.git
synced 2026-02-09 20:51:43 -06:00
Merge branch 'master' into attach_property_to_default_export
This commit is contained in:
commit
35ded510bb
2
.github/ISSUE_TEMPLATE/Bug_report.md
vendored
2
.github/ISSUE_TEMPLATE/Bug_report.md
vendored
@ -16,7 +16,7 @@ Please fill in the *entire* template below.
|
||||
-->
|
||||
|
||||
<!-- Please try to reproduce the issue with `typescript@next`. It may have already been fixed. -->
|
||||
**TypeScript Version:** 3.3.0-dev.201xxxxx
|
||||
**TypeScript Version:** 3.4.0-dev.201xxxxx
|
||||
|
||||
<!-- Search terms you tried before logging this (so others can find this issue more easily) -->
|
||||
**Search Terms:**
|
||||
|
||||
6
.github/pull_request_template.md
vendored
6
.github/pull_request_template.md
vendored
@ -2,11 +2,9 @@
|
||||
Thank you for submitting a pull request!
|
||||
|
||||
Here's a checklist you might find useful.
|
||||
* [ ] There is an associated issue that is labeled
|
||||
'Bug' or 'help wanted' or is in the Community milestone
|
||||
* [ ] There is an associated issue that is labeled 'Bug' or 'help wanted'
|
||||
* [ ] Code is up-to-date with the `master` branch
|
||||
* [ ] You've successfully run `jake runtests` locally
|
||||
* [ ] You've signed the CLA
|
||||
* [ ] You've successfully run `gulp runtests` locally
|
||||
* [ ] There are new or updated unit tests validating the change
|
||||
|
||||
Refer to CONTRIBUTING.MD for more details.
|
||||
|
||||
1
.mailmap
1
.mailmap
@ -121,6 +121,7 @@ Ken Howard <ken@simplicatedweb.com>
|
||||
Kevin Lang <klang2012@gmail.com>
|
||||
kimamula <kenji.imamula@gmail.com> # Kenji Imamula
|
||||
Kitson Kelly <me@kitsonkelly.com>
|
||||
Krishnadas Babu <krishnadas100033@gmail.com>
|
||||
Klaus Meinhardt <klaus.meinhardt1@gmail.com>
|
||||
Kyle Kelley <rgbkrk@gmail.com>
|
||||
Lorant Pinter <lorant.pinter@prezi.com>
|
||||
|
||||
@ -12,7 +12,11 @@ tests
|
||||
tslint.json
|
||||
Jakefile.js
|
||||
.editorconfig
|
||||
.failed-tests
|
||||
.git
|
||||
.git/
|
||||
.gitattributes
|
||||
.github/
|
||||
.gitmodules
|
||||
.settings/
|
||||
.travis.yml
|
||||
@ -23,6 +27,5 @@ Jakefile.js
|
||||
test.config
|
||||
package-lock.json
|
||||
yarn.lock
|
||||
.github/
|
||||
CONTRIBUTING.md
|
||||
TEST-results.xml
|
||||
|
||||
@ -3,9 +3,7 @@ language: node_js
|
||||
node_js:
|
||||
- 'node'
|
||||
- '10'
|
||||
- '6'
|
||||
|
||||
sudo: false
|
||||
- '8'
|
||||
|
||||
env:
|
||||
- workerCount=3 timeout=600000
|
||||
|
||||
@ -47,6 +47,16 @@ In general, things we find useful when reviewing suggestions are:
|
||||
|
||||
# Instructions for Contributing Code
|
||||
|
||||
## Tips
|
||||
|
||||
### Faster clones
|
||||
|
||||
The TypeScript repository is relatively large. To save some time, you might want to clone it without the repo's full history using `git clone --depth=1`.
|
||||
|
||||
### Using local builds
|
||||
|
||||
Run `gulp` to build a version of the compiler/language service that reflects changes you've made. You can then run `node <repo-root>/built/local/tsc.js` in place of `tsc` in your project. For example, to run `tsc --watch` from within the root of the repository on a file called `test.ts`, you can run `node ./built/local/tsc.js --watch test.ts`.
|
||||
|
||||
## Contributing bug fixes
|
||||
|
||||
TypeScript is currently accepting contributions in the form of bug fixes. A bug must have an issue tracking it in the issue tracker that has been approved ("Milestone == Community") by the TypeScript team. Your pull request should include a link to the bug that you are fixing. If you've submitted a PR for a bug, please post a comment in the bug to avoid duplication of effort.
|
||||
@ -94,7 +104,7 @@ Any changes should be made to [src/lib](https://github.com/Microsoft/TypeScript/
|
||||
Library files in `built/local/` are updated automatically by running the standard build task:
|
||||
|
||||
```sh
|
||||
jake
|
||||
gulp
|
||||
```
|
||||
|
||||
The files in `lib/` are used to bootstrap compilation and usually **should not** be updated unless publishing a new version or updating the LKG.
|
||||
@ -105,49 +115,49 @@ The files `src/lib/dom.generated.d.ts` and `src/lib/webworker.generated.d.ts` bo
|
||||
|
||||
## Running the Tests
|
||||
|
||||
To run all tests, invoke the `runtests-parallel` target using jake:
|
||||
To run all tests, invoke the `runtests-parallel` target using gulp:
|
||||
|
||||
```Shell
|
||||
jake runtests-parallel
|
||||
gulp runtests-parallel
|
||||
```
|
||||
|
||||
This will run all tests; to run only a specific subset of tests, use:
|
||||
|
||||
```Shell
|
||||
jake runtests tests=<regex>
|
||||
gulp runtests --tests=<regex>
|
||||
```
|
||||
|
||||
e.g. to run all compiler baseline tests:
|
||||
|
||||
```Shell
|
||||
jake runtests tests=compiler
|
||||
gulp runtests --tests=compiler
|
||||
```
|
||||
|
||||
or to run a specific test: `tests\cases\compiler\2dArrays.ts`
|
||||
|
||||
```Shell
|
||||
jake runtests tests=2dArrays
|
||||
gulp runtests --tests=2dArrays
|
||||
```
|
||||
|
||||
## Debugging the tests
|
||||
|
||||
To debug the tests, invoke the `runtests-browser` task from jake.
|
||||
To debug the tests, invoke the `runtests-browser` task from gulp.
|
||||
You will probably only want to debug one test at a time:
|
||||
|
||||
```Shell
|
||||
jake runtests-browser tests=2dArrays
|
||||
gulp runtests-browser --tests=2dArrays
|
||||
```
|
||||
|
||||
You can specify which browser to use for debugging. Currently Chrome and IE are supported:
|
||||
|
||||
```Shell
|
||||
jake runtests-browser tests=2dArrays browser=chrome
|
||||
gulp runtests-browser --tests=2dArrays --browser=chrome
|
||||
```
|
||||
|
||||
You can debug with VS Code or Node instead with `jake runtests inspect=true`:
|
||||
You can debug with VS Code or Node instead with `gulp runtests --inspect=true`:
|
||||
|
||||
```Shell
|
||||
jake runtests tests=2dArrays inspect=true
|
||||
gulp runtests --tests=2dArrays --inspect=true
|
||||
```
|
||||
|
||||
## Adding a Test
|
||||
@ -187,13 +197,13 @@ Compiler testcases generate baselines that track the emitted `.js`, the errors p
|
||||
When a change in the baselines is detected, the test will fail. To inspect changes vs the expected baselines, use
|
||||
|
||||
```Shell
|
||||
jake diff
|
||||
gulp diff
|
||||
```
|
||||
|
||||
After verifying that the changes in the baselines are correct, run
|
||||
|
||||
```Shell
|
||||
jake baseline-accept
|
||||
gulp baseline-accept
|
||||
```
|
||||
|
||||
to establish the new baselines as the desired behavior. This will change the files in `tests\baselines\reference`, which should be included as part of your commit. It's important to carefully validate changes in the baselines.
|
||||
@ -201,6 +211,6 @@ to establish the new baselines as the desired behavior. This will change the fil
|
||||
## Localization
|
||||
|
||||
All strings the user may see are stored in [`diagnosticMessages.json`](./src/compiler/diagnosticMessages.json).
|
||||
If you make changes to it, run `jake generate-diagnostics` to push them to the `Diagnostic` interface in `diagnosticInformationMap.generated.ts`.
|
||||
If you make changes to it, run `gulp generate-diagnostics` to push them to the `Diagnostic` interface in `diagnosticInformationMap.generated.ts`.
|
||||
|
||||
See [coding guidelines on diagnostic messages](https://github.com/Microsoft/TypeScript/wiki/Coding-guidelines#diagnostic-messages).
|
||||
|
||||
1044
Gulpfile.js
1044
Gulpfile.js
File diff suppressed because it is too large
Load Diff
858
Jakefile.js
858
Jakefile.js
@ -1,858 +0,0 @@
|
||||
// This file contains the build logic for the public repo
|
||||
// @ts-check
|
||||
/// <reference types="jake" />
|
||||
|
||||
const fs = require("fs");
|
||||
const os = require("os");
|
||||
const path = require("path");
|
||||
const fold = require("travis-fold");
|
||||
const ts = require("./lib/typescript");
|
||||
const del = require("del");
|
||||
const getDirSize = require("./scripts/build/getDirSize");
|
||||
const { base64VLQFormatEncode } = require("./scripts/build/sourcemaps");
|
||||
const needsUpdate = require("./scripts/build/needsUpdate");
|
||||
const { flatten } = require("./scripts/build/project");
|
||||
|
||||
// add node_modules to path so we don't need global modules, prefer the modules by adding them first
|
||||
var nodeModulesPathPrefix = path.resolve("./node_modules/.bin/") + path.delimiter;
|
||||
if (process.env.path !== undefined) {
|
||||
process.env.path = nodeModulesPathPrefix + process.env.path;
|
||||
}
|
||||
else if (process.env.PATH !== undefined) {
|
||||
process.env.PATH = nodeModulesPathPrefix + process.env.PATH;
|
||||
}
|
||||
|
||||
const host = process.env.TYPESCRIPT_HOST || process.env.host || "node";
|
||||
|
||||
const defaultTestTimeout = 40000;
|
||||
|
||||
let useDebugMode = true;
|
||||
|
||||
const TaskNames = {
|
||||
local: "local",
|
||||
runtests: "runtests",
|
||||
runtestsParallel: "runtests-parallel",
|
||||
buildRules: "build-rules",
|
||||
clean: "clean",
|
||||
lib: "lib",
|
||||
buildFoldStart: "build-fold-start",
|
||||
buildFoldEnd: "build-fold-end",
|
||||
generateDiagnostics: "generate-diagnostics",
|
||||
coreBuild: "core-build",
|
||||
tsc: "tsc",
|
||||
lkg: "LKG",
|
||||
release: "release",
|
||||
lssl: "lssl",
|
||||
lint: "lint",
|
||||
scripts: "scripts",
|
||||
localize: "localize",
|
||||
configureInsiders: "configure-insiders",
|
||||
publishInsiders: "publish-insiders",
|
||||
configureNightly: "configure-nightly",
|
||||
publishNightly: "publish-nightly",
|
||||
help: "help"
|
||||
};
|
||||
|
||||
const Paths = {};
|
||||
Paths.lkg = "lib";
|
||||
Paths.lkgCompiler = "lib/tsc.js";
|
||||
Paths.built = "built";
|
||||
Paths.builtLocal = "built/local";
|
||||
Paths.builtLocalCompiler = "built/local/tsc.js";
|
||||
Paths.builtLocalTSServer = "built/local/tsserver.js";
|
||||
Paths.builtLocalRun = "built/local/run.js";
|
||||
Paths.releaseCompiler = "built/local/tsc.release.js";
|
||||
Paths.typesMapOutput = "built/local/typesMap.json";
|
||||
Paths.typescriptFile = "built/local/typescript.js";
|
||||
Paths.servicesFile = "built/local/typescriptServices.js";
|
||||
Paths.servicesDefinitionFile = "built/local/typescriptServices.d.ts";
|
||||
Paths.servicesOutFile = "built/local/typescriptServices.out.js";
|
||||
Paths.servicesDefinitionOutFile = "built/local/typescriptServices.out.d.ts";
|
||||
Paths.typescriptDefinitionFile = "built/local/typescript.d.ts";
|
||||
Paths.typescriptStandaloneDefinitionFile = "built/local/typescript_standalone.d.ts";
|
||||
Paths.tsserverLibraryFile = "built/local/tsserverlibrary.js";
|
||||
Paths.tsserverLibraryDefinitionFile = "built/local/tsserverlibrary.d.ts";
|
||||
Paths.tsserverLibraryOutFile = "built/local/tsserverlibrary.out.js";
|
||||
Paths.tsserverLibraryDefinitionOutFile = "built/local/tsserverlibrary.out.d.ts";
|
||||
Paths.baselines = {};
|
||||
Paths.baselines.local = "tests/baselines/local";
|
||||
Paths.baselines.localTest262 = "tests/baselines/test262/local";
|
||||
Paths.baselines.localRwc = "internal/baselines/rwc/local";
|
||||
Paths.baselines.reference = "tests/baselines/reference";
|
||||
Paths.baselines.referenceTest262 = "tests/baselines/test262/reference";
|
||||
Paths.baselines.referenceRwc = "internal/baselines/rwc/reference";
|
||||
Paths.copyright = "CopyrightNotice.txt";
|
||||
Paths.thirdParty = "ThirdPartyNoticeText.txt";
|
||||
Paths.processDiagnosticMessagesJs = "scripts/processDiagnosticMessages.js";
|
||||
Paths.diagnosticInformationMap = "src/compiler/diagnosticInformationMap.generated.ts";
|
||||
Paths.diagnosticMessagesJson = "src/compiler/diagnosticMessages.json";
|
||||
Paths.diagnosticGeneratedJson = "src/compiler/diagnosticMessages.generated.json";
|
||||
Paths.builtDiagnosticGeneratedJson = "built/local/diagnosticMessages.generated.json";
|
||||
Paths.lcl = "src/loc/lcl"
|
||||
Paths.locLcg = "built/local/enu/diagnosticMessages.generated.json.lcg";
|
||||
Paths.generatedLCGFile = path.join(Paths.builtLocal, "enu", "diagnosticMessages.generated.json.lcg");
|
||||
Paths.library = "src/lib";
|
||||
Paths.srcServer = "src/server";
|
||||
Paths.scripts = {};
|
||||
Paths.scripts.generateLocalizedDiagnosticMessages = "scripts/generateLocalizedDiagnosticMessages.js";
|
||||
Paths.scripts.processDiagnosticMessages = "scripts/processDiagnosticMessages.js";
|
||||
Paths.scripts.produceLKG = "scripts/produceLKG.js";
|
||||
Paths.scripts.configurePrerelease = "scripts/configurePrerelease.js";
|
||||
Paths.packageJson = "package.json";
|
||||
Paths.versionFile = "src/compiler/core.ts";
|
||||
|
||||
const ConfigFileFor = {
|
||||
tsc: "src/tsc",
|
||||
tscRelease: "src/tsc/tsconfig.release.json",
|
||||
tsserver: "src/tsserver",
|
||||
runjs: "src/testRunner",
|
||||
lint: "scripts/tslint",
|
||||
scripts: "scripts",
|
||||
all: "src",
|
||||
typescriptServices: "built/local/typescriptServices.tsconfig.json",
|
||||
tsserverLibrary: "built/local/tsserverlibrary.tsconfig.json",
|
||||
};
|
||||
|
||||
const ExpectedLKGFiles = [
|
||||
"tsc.js",
|
||||
"tsserver.js",
|
||||
"typescriptServices.js",
|
||||
"typescriptServices.d.ts",
|
||||
"typescript.js",
|
||||
"typescript.d.ts",
|
||||
"cancellationToken.js",
|
||||
"typingsInstaller.js",
|
||||
"protocol.d.ts",
|
||||
"watchGuard.js"
|
||||
];
|
||||
|
||||
directory(Paths.builtLocal);
|
||||
|
||||
// Local target to build the compiler and services
|
||||
desc("Builds the full compiler and services");
|
||||
task(TaskNames.local, [
|
||||
TaskNames.buildFoldStart,
|
||||
TaskNames.coreBuild,
|
||||
Paths.servicesDefinitionFile,
|
||||
Paths.typescriptFile,
|
||||
Paths.typescriptDefinitionFile,
|
||||
Paths.typescriptStandaloneDefinitionFile,
|
||||
Paths.tsserverLibraryDefinitionFile,
|
||||
TaskNames.localize,
|
||||
TaskNames.buildFoldEnd
|
||||
]);
|
||||
|
||||
task("default", [TaskNames.local]);
|
||||
|
||||
const RunTestsPrereqs = [TaskNames.lib, Paths.servicesDefinitionFile, Paths.typescriptDefinitionFile, Paths.tsserverLibraryDefinitionFile];
|
||||
desc("Runs all the tests in parallel using the built run.js file. Optional arguments are: t[ests]=category1|category2|... i[nspect]=true.");
|
||||
task(TaskNames.runtestsParallel, RunTestsPrereqs, function () {
|
||||
tsbuild([ConfigFileFor.runjs], true, () => {
|
||||
runConsoleTests("min", /*parallel*/ true);
|
||||
});
|
||||
}, { async: true });
|
||||
|
||||
desc("Runs all the tests in parallel using the built run.js file. Optional arguments are: t[ests]=category1|category2|... i[nspect]=true.");
|
||||
task(TaskNames.runtests, RunTestsPrereqs, function () {
|
||||
tsbuild([ConfigFileFor.runjs], true, () => {
|
||||
runConsoleTests('mocha-fivemat-progress-reporter', /*runInParallel*/ false);
|
||||
});
|
||||
}, { async: true });
|
||||
|
||||
desc("Generates a diagnostic file in TypeScript based on an input JSON file");
|
||||
task(TaskNames.generateDiagnostics, [Paths.diagnosticInformationMap]);
|
||||
|
||||
const libraryTargets = getLibraryTargets();
|
||||
desc("Builds the library targets");
|
||||
task(TaskNames.lib, libraryTargets);
|
||||
|
||||
desc("Builds internal scripts");
|
||||
task(TaskNames.scripts, [TaskNames.coreBuild], function() {
|
||||
tsbuild([ConfigFileFor.scripts], true, () => {
|
||||
complete();
|
||||
});
|
||||
}, { async: true });
|
||||
|
||||
task(Paths.releaseCompiler, function () {
|
||||
tsbuild([ConfigFileFor.tscRelease], true, () => {
|
||||
complete();
|
||||
});
|
||||
}, { async: true });
|
||||
|
||||
// Makes a new LKG. This target does not build anything, but errors if not all the outputs are present in the built/local directory
|
||||
desc("Makes a new LKG out of the built js files");
|
||||
task(TaskNames.lkg, [
|
||||
TaskNames.scripts,
|
||||
TaskNames.release,
|
||||
TaskNames.local,
|
||||
Paths.servicesDefinitionFile,
|
||||
Paths.typescriptFile,
|
||||
Paths.typescriptDefinitionFile,
|
||||
Paths.typescriptStandaloneDefinitionFile,
|
||||
Paths.tsserverLibraryDefinitionFile,
|
||||
Paths.releaseCompiler,
|
||||
...libraryTargets
|
||||
], () => {
|
||||
const sizeBefore = getDirSize(Paths.lkg);
|
||||
|
||||
exec(`${host} ${Paths.scripts.produceLKG}`, () => {
|
||||
const sizeAfter = getDirSize(Paths.lkg);
|
||||
if (sizeAfter > (sizeBefore * 1.10)) {
|
||||
throw new Error("The lib folder increased by 10% or more. This likely indicates a bug.");
|
||||
}
|
||||
|
||||
complete();
|
||||
});
|
||||
}, { async: true });
|
||||
|
||||
desc("Makes the most recent test results the new baseline, overwriting the old baseline");
|
||||
task("baseline-accept", function () {
|
||||
acceptBaseline(Paths.baselines.local, Paths.baselines.reference);
|
||||
});
|
||||
|
||||
desc("Makes the most recent rwc test results the new baseline, overwriting the old baseline");
|
||||
task("baseline-accept-rwc", function () {
|
||||
acceptBaseline(Paths.baselines.localRwc, Paths.baselines.referenceRwc);
|
||||
});
|
||||
|
||||
desc("Makes the most recent test262 test results the new baseline, overwriting the old baseline");
|
||||
task("baseline-accept-test262", function () {
|
||||
acceptBaseline(Paths.baselines.localTest262, Paths.baselines.referenceTest262);
|
||||
});
|
||||
|
||||
desc("Runs tslint on the compiler sources. Optional arguments are: f[iles]=regex");
|
||||
task(TaskNames.lint, [TaskNames.buildRules], () => {
|
||||
if (fold.isTravis()) console.log(fold.start("lint"));
|
||||
function lint(project, cb) {
|
||||
const fix = process.env.fix || process.env.f;
|
||||
const cmd = `node node_modules/tslint/bin/tslint --project ${project} --formatters-dir ./built/local/tslint/formatters --format autolinkableStylish${fix ? " --fix" : ""}`;
|
||||
exec(cmd, cb);
|
||||
}
|
||||
lint("scripts/tslint/tsconfig.json", () => lint("src/tsconfig-base.json", () => {
|
||||
if (fold.isTravis()) console.log(fold.end("lint"));
|
||||
complete();
|
||||
}));
|
||||
}, { async: true });
|
||||
|
||||
desc("Diffs the compiler baselines using the diff tool specified by the 'DIFF' environment variable");
|
||||
task('diff', function () {
|
||||
var cmd = `"${getDiffTool()}" ${Paths.baselines.reference} ${Paths.baselines.local}`;
|
||||
exec(cmd);
|
||||
}, { async: true });
|
||||
|
||||
desc("Diffs the RWC baselines using the diff tool specified by the 'DIFF' environment variable");
|
||||
task('diff-rwc', function () {
|
||||
var cmd = `"${getDiffTool()}" ${Paths.baselines.referenceRwc} ${Paths.baselines.localRwc}`;
|
||||
exec(cmd);
|
||||
}, { async: true });
|
||||
|
||||
task(TaskNames.configureNightly, [TaskNames.scripts], function () {
|
||||
const cmd = `${host} ${Paths.scripts.configurePrerelease} dev ${Paths.packageJson} ${Paths.versionFile}`;
|
||||
exec(cmd, () => complete());
|
||||
}, { async: true });
|
||||
|
||||
desc("Configure, build, test, and publish the nightly release.");
|
||||
task(TaskNames.publishNightly, [TaskNames.coreBuild, TaskNames.configureNightly, TaskNames.lkg, "setDebugMode", "runtests-parallel"], function () {
|
||||
var cmd = "npm publish --tag next";
|
||||
exec(cmd, () => complete());
|
||||
}, { async: true });
|
||||
|
||||
task(TaskNames.help, function() {
|
||||
var cmd = "jake --tasks";
|
||||
exec(cmd, () => complete());
|
||||
})
|
||||
|
||||
task(TaskNames.configureInsiders, [TaskNames.scripts], function () {
|
||||
const cmd = `${host} ${Paths.scripts.configurePrerelease} insiders ${Paths.packageJson} ${Paths.versionFile}`;
|
||||
exec(cmd, () => complete());
|
||||
}, { async: true });
|
||||
|
||||
desc("Configure, build, test, and publish the insiders release.");
|
||||
task(TaskNames.publishInsiders, [TaskNames.coreBuild, TaskNames.configureInsiders, TaskNames.lkg, "setDebugMode", "runtests-parallel"], function () {
|
||||
var cmd = "npm publish --tag insiders";
|
||||
exec(cmd, () => complete());
|
||||
}, { async: true });
|
||||
|
||||
desc("Sets the release mode flag");
|
||||
task("release", function () {
|
||||
useDebugMode = false;
|
||||
});
|
||||
|
||||
desc("Clears the release mode flag");
|
||||
task("setDebugMode", function () {
|
||||
useDebugMode = true;
|
||||
});
|
||||
|
||||
desc("Generates localized diagnostic messages");
|
||||
task(TaskNames.localize, [Paths.generatedLCGFile]);
|
||||
|
||||
desc("Emit the start of the build fold");
|
||||
task(TaskNames.buildFoldStart, [], function () {
|
||||
if (fold.isTravis()) console.log(fold.start("build"));
|
||||
});
|
||||
|
||||
desc("Emit the end of the build fold");
|
||||
task(TaskNames.buildFoldEnd, [], function () {
|
||||
if (fold.isTravis()) console.log(fold.end("build"));
|
||||
});
|
||||
|
||||
desc("Compiles tslint rules to js");
|
||||
task(TaskNames.buildRules, [], function () {
|
||||
tsbuild(ConfigFileFor.lint, false, () => complete());
|
||||
}, { async: true });
|
||||
|
||||
desc("Cleans the compiler output, declare files, and tests");
|
||||
task(TaskNames.clean, function () {
|
||||
jake.rmRf(Paths.built);
|
||||
});
|
||||
|
||||
desc("Generates the LCG file for localization");
|
||||
task("localize", [Paths.generatedLCGFile]);
|
||||
|
||||
task(TaskNames.tsc, [Paths.diagnosticInformationMap, TaskNames.lib], function () {
|
||||
tsbuild(ConfigFileFor.tsc, true, () => {
|
||||
complete();
|
||||
});
|
||||
}, { async: true });
|
||||
|
||||
task(TaskNames.coreBuild, [Paths.diagnosticInformationMap, TaskNames.lib], function () {
|
||||
tsbuild(ConfigFileFor.all, true, () => {
|
||||
complete();
|
||||
});
|
||||
}, { async: true });
|
||||
|
||||
file(Paths.diagnosticMessagesJson);
|
||||
|
||||
file(Paths.typesMapOutput, /** @type {*} */(function () {
|
||||
var content = readFileSync(path.join(Paths.srcServer, 'typesMap.json'));
|
||||
// Validate that it's valid JSON
|
||||
try {
|
||||
JSON.parse(content);
|
||||
} catch (e) {
|
||||
console.log("Parse error in typesMap.json: " + e);
|
||||
}
|
||||
fs.writeFileSync(Paths.typesMapOutput, content);
|
||||
}));
|
||||
|
||||
file(Paths.builtDiagnosticGeneratedJson, [Paths.diagnosticGeneratedJson], function () {
|
||||
if (fs.existsSync(Paths.builtLocal)) {
|
||||
jake.cpR(Paths.diagnosticGeneratedJson, Paths.builtDiagnosticGeneratedJson);
|
||||
}
|
||||
});
|
||||
|
||||
// Localized diagnostics
|
||||
file(Paths.generatedLCGFile, [TaskNames.scripts, Paths.diagnosticInformationMap, Paths.diagnosticGeneratedJson], function () {
|
||||
const cmd = `${host} ${Paths.scripts.generateLocalizedDiagnosticMessages} ${Paths.lcl} ${Paths.builtLocal} ${Paths.diagnosticGeneratedJson}`
|
||||
exec(cmd, complete);
|
||||
}, { async: true });
|
||||
|
||||
|
||||
// The generated diagnostics map; built for the compiler and for the 'generate-diagnostics' task
|
||||
file(Paths.diagnosticInformationMap, [Paths.diagnosticMessagesJson], function () {
|
||||
tsbuild(ConfigFileFor.scripts, true, () => {
|
||||
const cmd = `${host} ${Paths.scripts.processDiagnosticMessages} ${Paths.diagnosticMessagesJson}`;
|
||||
exec(cmd, complete);
|
||||
});
|
||||
}, { async: true });
|
||||
|
||||
file(ConfigFileFor.tsserverLibrary, [], function () {
|
||||
flatten("src/tsserver/tsconfig.json", ConfigFileFor.tsserverLibrary, {
|
||||
exclude: ["src/tsserver/server.ts"],
|
||||
compilerOptions: {
|
||||
"removeComments": false,
|
||||
"stripInternal": true,
|
||||
"declarationMap": false,
|
||||
"outFile": "tsserverlibrary.out.js"
|
||||
}
|
||||
})
|
||||
});
|
||||
|
||||
// tsserverlibrary.js
|
||||
// tsserverlibrary.d.ts
|
||||
file(Paths.tsserverLibraryFile, [TaskNames.coreBuild, ConfigFileFor.tsserverLibrary], function() {
|
||||
tsbuild(ConfigFileFor.tsserverLibrary, false, () => {
|
||||
if (needsUpdate([Paths.tsserverLibraryOutFile, Paths.tsserverLibraryDefinitionOutFile], [Paths.tsserverLibraryFile, Paths.tsserverLibraryDefinitionFile])) {
|
||||
const copyright = readFileSync(Paths.copyright);
|
||||
|
||||
let libraryDefinitionContent = readFileSync(Paths.tsserverLibraryDefinitionOutFile);
|
||||
libraryDefinitionContent = copyright + removeConstModifierFromEnumDeclarations(libraryDefinitionContent);
|
||||
libraryDefinitionContent += "\nexport = ts;\nexport as namespace ts;";
|
||||
fs.writeFileSync(Paths.tsserverLibraryDefinitionFile, libraryDefinitionContent, "utf8");
|
||||
|
||||
let libraryContent = readFileSync(Paths.tsserverLibraryOutFile);
|
||||
libraryContent = copyright + libraryContent;
|
||||
fs.writeFileSync(Paths.tsserverLibraryFile, libraryContent, "utf8");
|
||||
|
||||
// adjust source map for tsserverlibrary.js
|
||||
let libraryMapContent = readFileSync(Paths.tsserverLibraryOutFile + ".map");
|
||||
const map = JSON.parse(libraryMapContent);
|
||||
const lineStarts = /**@type {*}*/(ts).computeLineStarts(copyright);
|
||||
let prependMappings = "";
|
||||
for (let i = 1; i < lineStarts.length; i++) {
|
||||
prependMappings += ";";
|
||||
}
|
||||
|
||||
const offset = copyright.length - lineStarts[lineStarts.length - 1];
|
||||
if (offset > 0) {
|
||||
prependMappings += base64VLQFormatEncode(offset) + ",";
|
||||
}
|
||||
|
||||
const outputMap = {
|
||||
version: map.version,
|
||||
file: map.file,
|
||||
sources: map.sources,
|
||||
sourceRoot: map.sourceRoot,
|
||||
mappings: prependMappings + map.mappings,
|
||||
names: map.names,
|
||||
sourcesContent: map.sourcesContent
|
||||
};
|
||||
|
||||
libraryMapContent = JSON.stringify(outputMap);
|
||||
fs.writeFileSync(Paths.tsserverLibraryFile + ".map", libraryMapContent);
|
||||
}
|
||||
complete();
|
||||
});
|
||||
}, { async: true });
|
||||
task(Paths.tsserverLibraryDefinitionFile, [Paths.tsserverLibraryFile]);
|
||||
|
||||
file(ConfigFileFor.typescriptServices, [], function () {
|
||||
flatten("src/services/tsconfig.json", ConfigFileFor.typescriptServices, {
|
||||
compilerOptions: {
|
||||
"removeComments": false,
|
||||
"stripInternal": true,
|
||||
"declarationMap": false,
|
||||
"outFile": "typescriptServices.out.js"
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// typescriptServices.js
|
||||
// typescriptServices.d.ts
|
||||
file(Paths.servicesFile, [TaskNames.coreBuild, ConfigFileFor.typescriptServices], function() {
|
||||
tsbuild(ConfigFileFor.typescriptServices, false, () => {
|
||||
if (needsUpdate([Paths.servicesOutFile, Paths.servicesDefinitionOutFile], [Paths.servicesFile, Paths.servicesDefinitionFile])) {
|
||||
const copyright = readFileSync(Paths.copyright);
|
||||
|
||||
let servicesDefinitionContent = readFileSync(Paths.servicesDefinitionOutFile);
|
||||
servicesDefinitionContent = copyright + removeConstModifierFromEnumDeclarations(servicesDefinitionContent);
|
||||
fs.writeFileSync(Paths.servicesDefinitionFile, servicesDefinitionContent, "utf8");
|
||||
|
||||
let servicesContent = readFileSync(Paths.servicesOutFile);
|
||||
servicesContent = copyright + servicesContent;
|
||||
fs.writeFileSync(Paths.servicesFile, servicesContent, "utf8");
|
||||
|
||||
// adjust source map for typescriptServices.js
|
||||
let servicesMapContent = readFileSync(Paths.servicesOutFile + ".map");
|
||||
const map = JSON.parse(servicesMapContent);
|
||||
const lineStarts = /**@type {*}*/(ts).computeLineStarts(copyright);
|
||||
let prependMappings = "";
|
||||
for (let i = 1; i < lineStarts.length; i++) {
|
||||
prependMappings += ";";
|
||||
}
|
||||
|
||||
const offset = copyright.length - lineStarts[lineStarts.length - 1];
|
||||
if (offset > 0) {
|
||||
prependMappings += base64VLQFormatEncode(offset) + ",";
|
||||
}
|
||||
|
||||
const outputMap = {
|
||||
version: map.version,
|
||||
file: map.file,
|
||||
sources: map.sources,
|
||||
sourceRoot: map.sourceRoot,
|
||||
mappings: prependMappings + map.mappings,
|
||||
names: map.names,
|
||||
sourcesContent: map.sourcesContent
|
||||
};
|
||||
|
||||
servicesMapContent = JSON.stringify(outputMap);
|
||||
fs.writeFileSync(Paths.servicesFile + ".map", servicesMapContent);
|
||||
}
|
||||
|
||||
complete();
|
||||
});
|
||||
}, { async: true });
|
||||
task(Paths.servicesDefinitionFile, [Paths.servicesFile]);
|
||||
|
||||
// typescript.js
|
||||
// typescript.d.ts
|
||||
file(Paths.typescriptFile, [Paths.servicesFile], function() {
|
||||
if (needsUpdate([Paths.servicesFile, Paths.servicesDefinitionFile], [Paths.typescriptFile, Paths.typescriptDefinitionFile])) {
|
||||
jake.cpR(Paths.servicesFile, Paths.typescriptFile);
|
||||
if (fs.existsSync(Paths.servicesFile + ".map")) {
|
||||
jake.cpR(Paths.servicesFile + ".map", Paths.typescriptFile + ".map");
|
||||
}
|
||||
const content = readFileSync(Paths.servicesDefinitionFile);
|
||||
fs.writeFileSync(Paths.typescriptDefinitionFile, content + "\r\nexport = ts;", { encoding: "utf-8" });
|
||||
}
|
||||
});
|
||||
task(Paths.typescriptDefinitionFile, [Paths.typescriptFile]);
|
||||
|
||||
// typescript_standalone.d.ts
|
||||
file(Paths.typescriptStandaloneDefinitionFile, [Paths.servicesDefinitionFile], function() {
|
||||
if (needsUpdate(Paths.servicesDefinitionFile, Paths.typescriptStandaloneDefinitionFile)) {
|
||||
const content = readFileSync(Paths.servicesDefinitionFile);
|
||||
fs.writeFileSync(Paths.typescriptStandaloneDefinitionFile, content.replace(/declare (namespace|module) ts(\..+)? \{/g, 'declare module "typescript" {'), { encoding: "utf-8"});
|
||||
}
|
||||
});
|
||||
|
||||
function getLibraryTargets() {
|
||||
/** @type {{ libs: string[], paths?: Record<string, string>, sources?: Record<string, string[]> }} */
|
||||
const libraries = readJson("./src/lib/libs.json");
|
||||
return libraries.libs.map(function (lib) {
|
||||
const relativeSources = ["header.d.ts"].concat(libraries.sources && libraries.sources[lib] || [lib + ".d.ts"]);
|
||||
const relativeTarget = libraries.paths && libraries.paths[lib] || ("lib." + lib + ".d.ts");
|
||||
const sources = [Paths.copyright].concat(relativeSources.map(s => path.join(Paths.library, s)));
|
||||
const target = path.join(Paths.builtLocal, relativeTarget);
|
||||
file(target, [Paths.builtLocal].concat(sources), function () {
|
||||
concatenateFiles(target, sources);
|
||||
});
|
||||
return target;
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * Runs the test suite in the console via mocha (or scripts/run-failed-tests.js).
 * Reads its many knobs from environment variables (dirty, timeout, inspect,
 * runners, tests/t, light, failed, keepFailed, stackTraceLimit, colors,
 * reporter, bail, lint, workerCount), writes them to 'test.config' for the
 * test harness to pick up, spawns the test process, and afterwards runs the
 * linter and/or cleans up temporary project output.
 * @param {string} defaultReporter - mocha reporter used when none is given via env
 * @param {boolean} runInParallel - when true, shard the tests across worker processes
 */
function runConsoleTests(defaultReporter, runInParallel) {
    var dirty = process.env.dirty;
    // "dirty" skips cleaning the baseline directories (and, below, skips linting).
    if (!dirty) {
        cleanTestDirs();
    }

    let testTimeout = process.env.timeout || defaultTestTimeout;
    const inspect = process.env.inspect || process.env["inspect-brk"] || process.env.i;
    const runners = process.env.runners || process.env.runner || process.env.ru;
    const tests = process.env.test || process.env.tests || process.env.t;
    // light defaults to true; only the explicit string "false" disables it.
    const light = process.env.light === undefined || process.env.light !== "false";
    const failed = process.env.failed;
    // Re-running failed tests implies keeping the failed-test list around.
    const keepFailed = process.env.keepFailed || failed;
    const stackTraceLimit = process.env.stackTraceLimit;
    const colorsFlag = process.env.color || process.env.colors;
    // Colors are on unless explicitly disabled with "false" or "0".
    const colors = colorsFlag !== "false" && colorsFlag !== "0";
    const reporter = process.env.reporter || process.env.r || defaultReporter;
    const bail = process.env.bail || process.env.b;
    const lintFlag = process.env.lint !== 'false';
    const testConfigFile = 'test.config';

    // Remove any stale config from a previous run before (possibly) rewriting it.
    if (fs.existsSync(testConfigFile)) {
        fs.unlinkSync(testConfigFile);
    }

    let workerCount, taskConfigsFolder;
    if (runInParallel) {
        // generate name to store task configuration files
        const prefix = os.tmpdir() + "/ts-tests";
        let i = 1;
        do {
            taskConfigsFolder = prefix + i;
            i++;
        } while (fs.existsSync(taskConfigsFolder));
        fs.mkdirSync(taskConfigsFolder);

        workerCount = process.env.workerCount || process.env.p || os.cpus().length;
    }

    // RWC tests are much slower than the rest; give them a much larger timeout.
    if (tests && tests.toLocaleLowerCase() === "rwc") {
        testTimeout = 800000;
    }

    if (tests || runners || light || testTimeout || taskConfigsFolder || keepFailed) {
        writeTestConfigFile(tests, runners, light, taskConfigsFolder, workerCount, stackTraceLimit, colors, testTimeout, keepFailed);
    }

    // timeout normally isn't necessary but Travis-CI has been timing out on compiler baselines occasionally
    // default timeout is 2sec which really should be enough, but maybe we just need a small amount longer
    if (!runInParallel) {
        var startTime = Travis.mark();
        var args = [];
        args.push("-R", "scripts/failed-tests");
        args.push("-O", '"reporter=' + reporter + (keepFailed ? ",keepFailed=true" : "") + '"');
        if (tests) args.push("-g", `"${tests}"`);
        args.push(colors ? "--colors" : "--no-colors");
        if (bail) args.push("--bail");
        // Under the debugger mocha must not enforce a timeout.
        if (inspect) {
            args.unshift("--inspect-brk");
        } else {
            args.push("-t", testTimeout);
        }
        args.push(Paths.builtLocalRun);

        var cmd;
        if (failed) {
            // Re-run only previously failed tests via the helper script.
            args.unshift("scripts/run-failed-tests.js");
            cmd = host + " " + args.join(" ");
        }
        else {
            cmd = "mocha " + args.join(" ");
        }
        // Tests run with NODE_ENV=development; restore the caller's value afterwards.
        var savedNodeEnv = process.env.NODE_ENV;
        process.env.NODE_ENV = "development";
        exec(cmd, function () {
            process.env.NODE_ENV = savedNodeEnv;
            Travis.measure(startTime);
            runLinterAndComplete();
        }, function (e, status) {
            process.env.NODE_ENV = savedNodeEnv;
            Travis.measure(startTime);
            finish(status);
        });
    }
    else {
        var savedNodeEnv = process.env.NODE_ENV;
        process.env.NODE_ENV = "development";
        var startTime = Travis.mark();
        // In parallel mode the harness (run.js) reads test.config and spawns workers itself.
        const cmd = `${host} ${Paths.builtLocalRun}`;
        exec(cmd, function () {
            // Tests succeeded; run 'lint' task
            process.env.NODE_ENV = savedNodeEnv;
            Travis.measure(startTime);
            runLinterAndComplete();
        }, function (e, status) {
            // Tests failed
            process.env.NODE_ENV = savedNodeEnv;
            Travis.measure(startTime);
            finish(status);
        });
    }

    // Cleans up temporary output, then completes or fails the jake task
    // depending on whether an exit status was supplied.
    function finish(errorStatus) {
        deleteTemporaryProjectOutput();
        if (errorStatus !== undefined) {
            fail("Process exited with code " + errorStatus);
        }
        else {
            complete();
        }
    }

    // Runs the 'lint' jake task (unless linting is disabled or the run is
    // "dirty"), then finishes the task.
    function runLinterAndComplete() {
        if (!lintFlag || dirty) {
            return finish();
        }
        var lint = jake.Task['lint'];
        lint.once('complete', function () {
            finish();
        });
        lint.invoke();
    }

    // Removes the projectOutput folder some tests write under the local baselines.
    function deleteTemporaryProjectOutput() {
        if (fs.existsSync(path.join(Paths.baselines.local, "projectOutput/"))) {
            jake.rmRf(path.join(Paths.baselines.local, "projectOutput/"));
        }
    }
}
|
||||
|
||||
/**
 * Used to pass data from the jake command line directly to run.js: serializes
 * the chosen test-run options as JSON into 'test.config' in the working
 * directory, where the test harness reads them back.
 * @param {string=} tests - single test pattern (wrapped in an array when present)
 * @param {string=} runners - comma-separated runner names
 * @param {boolean} light
 * @param {string=} taskConfigsFolder - folder for per-worker task configs (parallel mode)
 * @param {number=} workerCount
 * @param {string=} stackTraceLimit
 * @param {boolean} colors - inverted into the config's noColor flag
 * @param {number|string} testTimeout
 * @param {boolean=} keepFailed
 */
function writeTestConfigFile(tests, runners, light, taskConfigsFolder, workerCount, stackTraceLimit, colors, testTimeout, keepFailed) {
    const config = {
        runners: runners ? runners.split(",") : undefined,
        test: tests ? [tests] : undefined,
        light,
        workerCount,
        taskConfigsFolder,
        stackTraceLimit,
        noColor: !colors,
        timeout: testTimeout,
        keepFailed
    };
    fs.writeFileSync('test.config', JSON.stringify(config), { encoding: "utf-8" });
}
|
||||
|
||||
/**
 * Deletes the local baseline output directories left by previous test runs
 * and recreates the (empty) directories the next run writes into.
 */
function cleanTestDirs() {
    // Clean the local baselines directory
    if (fs.existsSync(Paths.baselines.local)) {
        del.sync(Paths.baselines.local);
    }

    // Clean the local Rwc baselines directory
    if (fs.existsSync(Paths.baselines.localRwc)) {
        del.sync(Paths.baselines.localRwc);
    }

    jake.mkdirP(Paths.baselines.local);
    jake.mkdirP(Paths.baselines.localTest262);
}
|
||||
|
||||
/**
 * Runs `tsc -b` over one or more tsconfig paths, reporting timing to Travis.
 * @param {string|string[]} tsconfigPath - project(s) to build; an array is joined into multiple -b arguments
 * @param {boolean} [useLkg=true] - build with the last-known-good compiler instead of the locally built one
 * @param {function=} done - completion callback; when omitted, jake's complete() is used
 */
function tsbuild(tsconfigPath, useLkg = true, done = undefined) {
    const startCompileTime = Travis.mark();
    const compilerPath = useLkg ? Paths.lkgCompiler : Paths.builtLocalCompiler;
    const cmd = `${host} ${compilerPath} -b ${Array.isArray(tsconfigPath) ? tsconfigPath.join(" ") : tsconfigPath}`;

    exec(cmd, () => {
        // Success
        Travis.measure(startCompileTime);
        done ? done() : complete();
    }, () => {
        // Fail
        Travis.measure(startCompileTime);
        fail(`Compilation of ${tsconfigPath} unsuccessful`);
    });
}
|
||||
|
||||
/**
 * Helpers for wrapping long-running build steps in Travis CI timing folds.
 * Both methods are no-ops when not running on Travis.
 */
const Travis = {
    /**
     * Starts a timed region: logs a travis_time:start line with a random id
     * and returns a marker { stamp, id } to hand back to measure().
     */
    mark() {
        if (!fold.isTravis()) return;
        const stamp = process.hrtime();
        const id = Math.floor(Math.random() * 0xFFFFFFFF).toString(16);
        console.log(`travis_time:start:${id}\r`);
        return { stamp, id };
    },
    /**
     * Closes the timed region started by mark(), logging start/finish/duration
     * in nanoseconds in the format Travis expects.
     */
    measure(marker) {
        if (!fold.isTravis()) return;
        const diff = process.hrtime(marker.stamp);
        const total = [marker.stamp[0] + diff[0], marker.stamp[1] + diff[1]];
        console.log(`travis_time:end:${marker.id}:start=${toNs(marker.stamp)},finish=${toNs(total)},duration=${toNs(diff)}\r`);
    }
};
|
||||
|
||||
/**
 * Converts a process.hrtime() [seconds, nanoseconds] pair into a single
 * nanosecond count.
 * @param {[number, number]} diff
 * @returns {number} total nanoseconds
 */
function toNs(diff) {
    const [seconds, nanoseconds] = diff;
    return seconds * 1e9 + nanoseconds;
}
|
||||
|
||||
/**
 * Runs a shell command through jake's exec facility, streaming its
 * stdout/stderr through this process, and dispatches to the given callbacks
 * when the command finishes.
 * @param {string} cmd - the full command line to run
 * @param {function=} successHandler - invoked (with no arguments) when the command exits cleanly
 * @param {function=} errorHandler - invoked with (error, exitStatus) on failure;
 *                                   when omitted, a failing command fails the jake build
 */
function exec(cmd, successHandler, errorHandler) {
    var ex = jake.createExec([cmd], /** @type {jake.ExecOptions} */({ windowsVerbatimArguments: true, interactive: true }));
    // Add listeners for output and error
    ex.addListener("stdout", function (output) {
        process.stdout.write(output);
    });
    ex.addListener("stderr", function (error) {
        process.stderr.write(error);
    });
    ex.addListener("cmdEnd", function () {
        if (successHandler) {
            successHandler();
        }
    });
    ex.addListener("error", function (e, status) {
        if (errorHandler) {
            errorHandler(e, status);
        }
        else {
            fail("Process exited with code " + status);
        }
    });

    // Echo the command before running so build logs show exactly what executed.
    console.log(cmd);
    ex.run();
}
|
||||
|
||||
/**
 * Moves newly produced test baselines from sourceFolder into targetFolder,
 * recursing into subdirectories. A file whose name ends with ".delete" acts
 * as a tombstone: instead of being copied, the corresponding file (the name
 * minus the ".delete" suffix) is removed from the target.
 * @param sourceFolder - folder holding the local baselines to accept
 * @param targetFolder - folder holding the reference baselines to update
 */
function acceptBaseline(sourceFolder, targetFolder) {
    console.log('Accept baselines from ' + sourceFolder + ' to ' + targetFolder);
    var deleteEnding = '.delete';

    jake.mkdirP(targetFolder);
    acceptBaselineFolder(sourceFolder, targetFolder);

    // Processes a single directory level: moves files, applies ".delete"
    // tombstones, and recurses into subdirectories.
    function acceptBaselineFolder(sourceFolder, targetFolder) {
        var files = fs.readdirSync(sourceFolder);

        for (var i in files) {
            var filename = files[i];
            var fullLocalPath = path.join(sourceFolder, filename);
            var stat = fs.statSync(fullLocalPath);
            if (stat.isFile()) {
                if (filename.substr(filename.length - deleteEnding.length) === deleteEnding) {
                    // Tombstone: strip the ".delete" suffix and remove the
                    // matching file from the target side.
                    filename = filename.substr(0, filename.length - deleteEnding.length);
                    fs.unlinkSync(path.join(targetFolder, filename));
                }
                else {
                    // Replace any existing target file with the new baseline.
                    var target = path.join(targetFolder, filename);
                    if (fs.existsSync(target)) {
                        fs.unlinkSync(target);
                    }
                    jake.mkdirP(path.dirname(target));
                    fs.renameSync(path.join(sourceFolder, filename), target);
                }
            }
            else if (stat.isDirectory()) {
                acceptBaselineFolder(fullLocalPath, path.join(targetFolder, filename));
            }
        }
    }
}
|
||||
|
||||
/**
 * Reads and parses a JSON config file using the TypeScript compiler's own
 * (comment-tolerant) config parser; reports diagnostics and throws on error.
 * @param jsonPath {string}
 */
function readJson(jsonPath) {
    const { config, error } = ts.parseConfigFileTextToJson(jsonPath, readFileSync(jsonPath));
    if (error) {
        reportDiagnostics([error]);
        throw new Error("An error occurred during parse.");
    }
    return config;
}
|
||||
|
||||
/**
 * Prints formatted compiler diagnostics to stdout; colored, contextual output
 * is used when stdout is a TTY.
 * @param diagnostics {ts.Diagnostic[]}
 */
function reportDiagnostics(diagnostics) {
    console.log(diagnosticsToString(diagnostics, process.stdout.isTTY));
}
|
||||
|
||||
/**
 * Formats a list of compiler diagnostics into one printable string.
 * @param diagnostics {ts.Diagnostic[]}
 * @param [pretty] {boolean} - when true, include colors and source context
 */
function diagnosticsToString(diagnostics, pretty) {
    // Minimal FormatDiagnosticsHost for the formatting APIs.
    const formatHost = {
        getCurrentDirectory: () => process.cwd(),
        getCanonicalFileName: fileName => fileName,
        getNewLine: () => os.EOL
    };
    return pretty
        ? ts.formatDiagnosticsWithColorAndContext(diagnostics, formatHost)
        : ts.formatDiagnostics(diagnostics, formatHost);
}
|
||||
|
||||
/**
 * Concatenate a list of sourceFiles to a destinationFile, joining them with a
 * blank line and normalizing CRLF to LF. The text is written to a temp file
 * first and then renamed into place.
 * @param {string} destinationFile
 * @param {string[]} sourceFiles
 * @param {string=} extraContent - appended verbatim after the last file
 */
function concatenateFiles(destinationFile, sourceFiles, extraContent) {
    const tempName = "temptemp";
    // Read every input (failing the build on a missing one) with normalized newlines.
    const pieces = [];
    for (const sourceFile of sourceFiles) {
        if (!fs.existsSync(sourceFile)) {
            fail(sourceFile + " does not exist!");
        }
        pieces.push(readFileSync(sourceFile).replace(/\r?\n/g, "\n"));
    }
    let text = pieces.join("\n\n");
    if (extraContent) {
        text += extraContent;
    }
    fs.writeFileSync(tempName, text);
    // Move the file to the final destination
    fs.renameSync(tempName, destinationFile);
}
|
||||
|
||||
/**
 * Appends content to the file at path, separated from the existing text by a CRLF.
 * @param {string} path
 * @param {string} content
 */
function appendToFile(path, content) {
    fs.writeFileSync(path, readFileSync(path) + "\r\n" + content);
}
|
||||
|
||||
/**
 * Reads the file at path from disk as UTF-8 text.
 * @param {string} path
 * @returns string
 */
function readFileSync(path) {
    const contents = fs.readFileSync(path, { encoding: "utf-8" });
    return contents;
}
|
||||
|
||||
/**
 * Returns the diff program named by the DIFF environment variable,
 * failing the build when it is unset.
 * @returns {string|undefined}
 */
function getDiffTool() {
    const program = process.env['DIFF'];
    if (!program) {
        fail("Add the 'DIFF' environment variable to the path of the program you want to use.");
    }
    return program;
}
|
||||
|
||||
/**
 * Replaces const enum declarations with non-const enums. Only lines of the
 * exact shape `[indent][export ]const enum Name {` are rewritten; indentation
 * and the optional `export` modifier are preserved.
 * @param {string} text
 */
function removeConstModifierFromEnumDeclarations(text) {
    const constEnumDeclaration = /^(\s*)(export )?const enum (\S+) {(\s*)$/gm;
    return text.replace(constEnumDeclaration, '$1$2enum $3 {$4');
}
|
||||
28
README.md
28
README.md
@ -1,5 +1,5 @@
|
||||
[](https://travis-ci.org/Microsoft/TypeScript)
|
||||
[](https://typescript.visualstudio.com/TypeScript/_build/latest?definitionId=4&view=logs)
|
||||
[](https://dev.azure.com/typescript/TypeScript/_build/latest?definitionId=4&view=logs)
|
||||
[](https://www.npmjs.com/package/typescript)
|
||||
[](https://www.npmjs.com/package/typescript)
|
||||
|
||||
@ -61,29 +61,29 @@ Change to the TypeScript directory:
|
||||
cd TypeScript
|
||||
```
|
||||
|
||||
Install [Jake](http://jakejs.com/) tools and dev dependencies:
|
||||
Install [Gulp](https://gulpjs.com/) tools and dev dependencies:
|
||||
|
||||
```bash
|
||||
npm install -g jake
|
||||
npm install -g gulp
|
||||
npm install
|
||||
```
|
||||
|
||||
Use one of the following to build and test:
|
||||
|
||||
```
|
||||
jake local # Build the compiler into built/local
|
||||
jake clean # Delete the built compiler
|
||||
jake LKG # Replace the last known good with the built one.
|
||||
gulp local # Build the compiler into built/local
|
||||
gulp clean # Delete the built compiler
|
||||
gulp LKG # Replace the last known good with the built one.
|
||||
# Bootstrapping step to be executed when the built compiler reaches a stable state.
|
||||
jake tests # Build the test infrastructure using the built compiler.
|
||||
jake runtests # Run tests using the built compiler and test infrastructure.
|
||||
gulp tests # Build the test infrastructure using the built compiler.
|
||||
gulp runtests # Run tests using the built compiler and test infrastructure.
|
||||
# You can override the host or specify a test for this command.
|
||||
# Use host=<hostName> or tests=<testPath>.
|
||||
jake runtests-browser # Runs the tests using the built run.js file. Syntax is jake runtests. Optional
|
||||
parameters 'host=', 'tests=[regex], reporter=[list|spec|json|<more>]'.
|
||||
jake baseline-accept # This replaces the baseline test results with the results obtained from jake runtests.
|
||||
jake lint # Runs tslint on the TypeScript source.
|
||||
jake help # List the above commands.
|
||||
# Use --host=<hostName> or --tests=<testPath>.
|
||||
gulp runtests-browser # Runs the tests using the built run.js file. Syntax is gulp runtests. Optional
|
||||
parameters '--host=', '--tests=[regex], --reporter=[list|spec|json|<more>]'.
|
||||
gulp baseline-accept # This replaces the baseline test results with the results obtained from gulp runtests.
|
||||
gulp lint # Runs tslint on the TypeScript source.
|
||||
gulp help # List the above commands.
|
||||
```
|
||||
|
||||
|
||||
|
||||
12
doc/spec.md
12
doc/spec.md
@ -239,7 +239,7 @@ TypeScript is a trademark of Microsoft Corporation.
|
||||
|
||||
# <a name="1"/>1 Introduction
|
||||
|
||||
JavaScript applications such as web e-mail, maps, document editing, and collaboration tools are becoming an increasingly important part of the everyday computing. We designed TypeScript to meet the needs of the JavaScript programming teams that build and maintain large JavaScript programs. TypeScript helps programming teams to define interfaces between software components and to gain insight into the behavior of existing JavaScript libraries. TypeScript also enables teams to reduce naming conflicts by organizing their code into dynamically-loadable modules. TypeScript's optional type system enables JavaScript programmers to use highly-productive development tools and practices: static checking, symbol-based navigation, statement completion, and code re-factoring.
|
||||
JavaScript applications such as web e-mail, maps, document editing, and collaboration tools are becoming an increasingly important part of the everyday computing. We designed TypeScript to meet the needs of the JavaScript programming teams that build and maintain large JavaScript programs. TypeScript helps programming teams to define interfaces between software components and to gain insight into the behavior of existing JavaScript libraries. TypeScript also enables teams to reduce naming conflicts by organizing their code into dynamically-loadable modules. TypeScript's optional type system enables JavaScript programmers to use highly-productive development tools and practices: static checking, symbol-based navigation, statement completion, and code refactoring.
|
||||
|
||||
TypeScript is a syntactic sugar for JavaScript. TypeScript syntax is a superset of ECMAScript 2015 (ES2015) syntax. Every JavaScript program is also a TypeScript program. The TypeScript compiler performs only file-local transformations on TypeScript programs and does not re-order variables declared in TypeScript. This leads to JavaScript output that closely matches the TypeScript input. TypeScript does not transform variable names, making tractable the direct debugging of emitted JavaScript. TypeScript optionally provides source maps, enabling source-level debugging. TypeScript tools typically emit JavaScript upon file save, preserving the test, edit, refresh cycle commonly used in JavaScript development.
|
||||
|
||||
@ -263,7 +263,7 @@ function f() {
|
||||
}
|
||||
```
|
||||
|
||||
To benefit from this inference, a programmer can use the TypeScript language service. For example, a code editor can incorporate the TypeScript language service and use the service to find the members of a string object as in the following screen shot.
|
||||
To benefit from this inference, a programmer can use the TypeScript language service. For example, a code editor can incorporate the TypeScript language service and use the service to find the members of a string object as in the following screenshot.
|
||||
|
||||
  
|
||||
|
||||
@ -411,7 +411,7 @@ We mentioned above that the '$' function behaves differently depending on the ty
|
||||
|
||||
This signature denotes that a function may be passed as the parameter of the '$' function. When a function is passed to '$', the jQuery library will invoke that function when a DOM document is ready. Because TypeScript supports overloading, tools can use TypeScript to show all available function signatures with their documentation tips and to give the correct documentation once a function has been called with a particular signature.
|
||||
|
||||
A typical client would not need to add any additional typing but could just use a community-supplied typing to discover (through statement completion with documentation tips) and verify (through static checking) correct use of the library, as in the following screen shot.
|
||||
A typical client would not need to add any additional typing but could just use a community-supplied typing to discover (through statement completion with documentation tips) and verify (through static checking) correct use of the library, as in the following screenshot.
|
||||
|
||||
  
|
||||
|
||||
@ -628,7 +628,7 @@ JavaScript implementations can use these explicit constants to generate efficien
|
||||
|
||||
An important goal of TypeScript is to provide accurate and straightforward types for existing JavaScript programming patterns. To that end, TypeScript includes generic types, discussed in the next section, and *overloading on string parameters*, the topic of this section.
|
||||
|
||||
JavaScript programming interfaces often include functions whose behavior is discriminated by a string constant passed to the function. The Document Object Model makes heavy use of this pattern. For example, the following screen shot shows that the 'createElement' method of the 'document' object has multiple signatures, some of which identify the types returned when specific strings are passed into the method.
|
||||
JavaScript programming interfaces often include functions whose behavior is discriminated by a string constant passed to the function. The Document Object Model makes heavy use of this pattern. For example, the following screenshot shows that the 'createElement' method of the 'document' object has multiple signatures, some of which identify the types returned when specific strings are passed into the method.
|
||||
|
||||
  
|
||||
|
||||
@ -639,7 +639,7 @@ var span = document.createElement("span");
|
||||
span.isMultiLine = false; // OK: HTMLSpanElement has isMultiline property
|
||||
```
|
||||
|
||||
In the following screen shot, a programming tool combines information from overloading on string parameters with contextual typing to infer that the type of the variable 'e' is 'MouseEvent' and that therefore 'e' has a 'clientX' property.
|
||||
In the following screenshot, a programming tool combines information from overloading on string parameters with contextual typing to infer that the type of the variable 'e' is 'MouseEvent' and that therefore 'e' has a 'clientX' property.
|
||||
|
||||
  
|
||||
|
||||
@ -3715,7 +3715,7 @@ the array literal initializer expression is contextually typed by the implied ty
|
||||
|
||||
## <a name="5.3"/>5.3 Let and Const Declarations
|
||||
|
||||
Let and const declarations are exended to include optional type annotations.
|
||||
Let and const declarations are extended to include optional type annotations.
|
||||
|
||||
  *LexicalBinding:* *( Modified )*
|
||||
   *SimpleLexicalBinding*
|
||||
|
||||
@ -2,4 +2,4 @@
|
||||
|
||||
**These files are not meant to be edited by hand.**
|
||||
If you need to make modifications, the respective files should be changed within the repository's top-level `src` directory.
|
||||
Running `jake LKG` will then appropriately update the files in this directory.
|
||||
Running `gulp LKG` will then appropriately update the files in this directory.
|
||||
|
||||
@ -27,6 +27,18 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";A_bigint_literal_cannot_use_exponential_notation_1352" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[A bigint literal cannot use exponential notation.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";A_bigint_literal_must_be_an_integer_1353" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[A bigint literal must be an integer.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";A_binding_pattern_parameter_cannot_be_optional_in_an_implementation_signature_2463" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[A binding pattern parameter cannot be optional in an implementation signature.]]></Val>
|
||||
@ -129,6 +141,12 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";A_const_assertion_can_only_be_applied_to_a_string_number_boolean_array_or_object_literal_1355" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[A 'const' assertion can only be applied to a string, number, boolean, array, or object literal.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";A_const_enum_member_can_only_be_accessed_using_a_string_literal_2476" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[A const enum member can only be accessed using a string literal.]]></Val>
|
||||
@ -321,6 +339,18 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";A_non_dry_build_would_update_output_of_project_0_6375" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[A non-dry build would update output of project '{0}']]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";A_non_dry_build_would_update_timestamps_for_output_of_project_0_6374" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[A non-dry build would update timestamps for output of project '{0}']]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";A_parameter_initializer_is_only_allowed_in_a_function_or_constructor_implementation_2371" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[A parameter initializer is only allowed in a function or constructor implementation.]]></Val>
|
||||
@ -849,12 +879,6 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Ambient_const_enums_are_not_allowed_when_the_isolatedModules_flag_is_provided_1209" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Ambient const enums are not allowed when the '--isolatedModules' flag is provided.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Ambient_module_declaration_cannot_specify_relative_module_name_2436" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Ambient module declaration cannot specify relative module name.]]></Val>
|
||||
@ -1005,6 +1029,12 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";An_identifier_or_keyword_cannot_immediately_follow_a_numeric_literal_1351" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[An identifier or keyword cannot immediately follow a numeric literal.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";An_implementation_cannot_be_declared_in_ambient_contexts_1183" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[An implementation cannot be declared in ambient contexts.]]></Val>
|
||||
@ -1329,6 +1359,12 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Cannot_access_ambient_const_enums_when_the_isolatedModules_flag_is_provided_2748" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Cannot access ambient const enums when the '--isolatedModules' flag is provided.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Cannot_assign_a_0_constructor_type_to_a_1_constructor_type_2672" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Cannot assign a '{0}' constructor type to a '{1}' constructor type.]]></Val>
|
||||
@ -1487,17 +1523,35 @@
|
||||
</Item>
|
||||
<Item ItemId=";Cannot_find_name_0_Do_you_need_to_install_type_definitions_for_a_test_runner_Try_npm_i_types_Slashje_2582" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Cannot find name '{0}'. Do you need to install type definitions for a test runner? Try `npm i @types/jest` or `npm i @types/mocha` and then add `jest` or `mocha` to the types field in your tsconfig.]]></Val>
|
||||
<Val><![CDATA[Cannot find name '{0}'. Do you need to install type definitions for a test runner? Try `npm i @types/jest` or `npm i @types/mocha`.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Cannot_find_name_0_Do_you_need_to_install_type_definitions_for_jQuery_Try_npm_i_types_Slashjquery_an_2581" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Item ItemId=";Cannot_find_name_0_Do_you_need_to_install_type_definitions_for_a_test_runner_Try_npm_i_types_Slashje_2593" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Cannot find name '{0}'. Do you need to install type definitions for a test runner? Try `npm i @types/jest` or `npm i @types/mocha` and then add `jest` or `mocha` to the types field in your tsconfig.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Cannot_find_name_0_Do_you_need_to_install_type_definitions_for_jQuery_Try_npm_i_types_Slashjquery_2581" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Cannot find name '{0}'. Do you need to install type definitions for jQuery? Try `npm i @types/jquery`.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Cannot_find_name_0_Do_you_need_to_install_type_definitions_for_jQuery_Try_npm_i_types_Slashjquery_an_2592" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Cannot find name '{0}'. Do you need to install type definitions for jQuery? Try `npm i @types/jquery` and then add `jquery` to the types field in your tsconfig.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Cannot_find_name_0_Do_you_need_to_install_type_definitions_for_node_Try_npm_i_types_Slashnode_and_th_2580" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Item ItemId=";Cannot_find_name_0_Do_you_need_to_install_type_definitions_for_node_Try_npm_i_types_Slashnode_2580" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Cannot find name '{0}'. Do you need to install type definitions for node? Try `npm i @types/node`.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Cannot_find_name_0_Do_you_need_to_install_type_definitions_for_node_Try_npm_i_types_Slashnode_and_th_2591" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Cannot find name '{0}'. Do you need to install type definitions for node? Try `npm i @types/node` and then add `node` to the types field in your tsconfig.]]></Val>
|
||||
</Str>
|
||||
@ -1599,6 +1653,12 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Cannot_update_output_of_project_0_because_there_was_error_reading_file_1_6376" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Cannot update output of project '{0}' because there was error reading file '{1}']]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Cannot_use_JSX_unless_the_jsx_flag_is_provided_17004" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Cannot use JSX unless the '--jsx' flag is provided.]]></Val>
|
||||
@ -1629,6 +1689,12 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Cannot_write_file_0_because_it_will_overwrite_tsbuildinfo_file_generated_by_referenced_project_1_6377" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Cannot write file '{0}' because it will overwrite '.tsbuildinfo' file generated by referenced project '{1}']]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Cannot_write_file_0_because_it_would_be_overwritten_by_multiple_input_files_5056" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Cannot write file '{0}' because it would be overwritten by multiple input files.]]></Val>
|
||||
@ -1707,6 +1773,12 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Circularity_originates_in_type_at_this_location_2751" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Circularity originates in type at this location.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Class_0_defines_instance_member_accessor_1_but_extended_class_2_defines_it_as_instance_member_functi_2426" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Class '{0}' defines instance member accessor '{1}', but extended class '{2}' defines it as instance member function.]]></Val>
|
||||
@ -1821,6 +1893,12 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Composite_projects_may_not_disable_incremental_compilation_6379" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Composite projects may not disable incremental compilation.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Computed_property_names_are_not_allowed_in_enums_1164" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Computed property names are not allowed in enums.]]></Val>
|
||||
@ -1983,6 +2061,12 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Convert_to_named_parameters_95075" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Convert to named parameters]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Corrupted_locale_file_0_6051" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Corrupted locale file {0}.]]></Val>
|
||||
@ -2235,12 +2319,6 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Duplicate_declaration_0_2718" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Duplicate declaration '{0}'.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Duplicate_function_implementation_2393" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Duplicate function implementation.]]></Val>
|
||||
@ -2307,6 +2385,12 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Duplicate_property_0_2718" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Duplicate property '{0}'.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Duplicate_string_index_signature_2374" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Duplicate string index signature.]]></Val>
|
||||
@ -2373,6 +2457,12 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Enable_incremental_compilation_6378" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Enable incremental compilation]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Enable_project_compilation_6302" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Enable project compilation]]></Val>
|
||||
@ -2403,6 +2493,12 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Enable_the_experimentalDecorators_option_in_your_configuration_file_95074" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Enable the 'experimentalDecorators' option in your configuration file]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Enable_tracing_of_the_name_resolution_process_6085" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Enable tracing of the name resolution process.]]></Val>
|
||||
@ -2655,6 +2751,12 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Expression_produces_a_union_type_that_is_too_complex_to_represent_2590" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Expression produces a union type that is too complex to represent.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Expression_resolves_to_super_that_compiler_uses_to_capture_base_class_reference_2402" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Expression resolves to '_super' that compiler uses to capture base class reference.]]></Val>
|
||||
@ -3189,12 +3291,6 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";In_const_enum_declarations_member_initializer_must_be_constant_expression_2474" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[In 'const' enum declarations member initializer must be constant expression.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Include_modules_imported_with_json_extension_6197" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Include modules imported with '.json' extension]]></Val>
|
||||
@ -3741,12 +3837,6 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Module_0_resolves_to_a_non_module_entity_and_cannot_be_imported_using_this_construct_2497" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Module '{0}' resolves to a non-module entity and cannot be imported using this construct.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Module_0_uses_export_and_cannot_be_used_with_export_Asterisk_2498" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Module '{0}' uses 'export =' and cannot be used with 'export *'.]]></Val>
|
||||
@ -3867,6 +3957,12 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";No_overload_expects_0_type_arguments_but_overloads_do_exist_that_expect_either_1_or_2_type_arguments_2743" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[No overload expects {0} type arguments, but overloads do exist that expect either {1} or {2} type arguments.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Non_abstract_class_0_does_not_implement_inherited_abstract_member_1_from_class_2_2515" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Non-abstract class '{0}' does not implement inherited abstract member '{1}' from class '{2}'.]]></Val>
|
||||
@ -4137,12 +4233,6 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Overload_signature_is_not_compatible_with_function_implementation_2394" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Overload signature is not compatible with function implementation.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Overload_signatures_must_all_be_abstract_or_non_abstract_2512" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Overload signatures must all be abstract or non-abstract.]]></Val>
|
||||
@ -4437,6 +4527,18 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Project_0_is_out_of_date_because_output_for_it_was_generated_with_version_1_that_differs_with_curren_6381" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Project '{0}' is out of date because output for it was generated with version '{1}' that differs with current version '{2}']]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Project_0_is_out_of_date_because_output_of_its_dependency_1_has_changed_6372" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Project '{0}' is out of date because output of its dependency '{1}' has changed]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Project_0_is_up_to_date_6361" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Project '{0}' is up to date]]></Val>
|
||||
@ -4671,6 +4773,12 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Property_0_was_also_declared_here_2733" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Property '{0}' was also declared here.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Property_assignment_expected_1136" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Property assignment expected.]]></Val>
|
||||
@ -4773,6 +4881,12 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Qualified_name_0_is_not_allowed_without_a_leading_param_object_1_8032" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Qualified name '{0}' is not allowed without a leading '@param {object} {1}'.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Raise_error_on_expressions_and_declarations_with_an_implied_any_type_6052" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Raise error on expressions and declarations with an implied 'any' type.]]></Val>
|
||||
@ -5271,9 +5385,9 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Specify_ECMAScript_target_version_Colon_ES3_default_ES5_ES2015_ES2016_ES2017_ES2018_or_ESNEXT_6015" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Item ItemId=";Specify_ECMAScript_target_version_Colon_ES3_default_ES5_ES2015_ES2016_ES2017_ES2018_ES2019_or_ESNEXT_6015" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017','ES2018' or 'ESNEXT'.]]></Val>
|
||||
<Val><![CDATA[Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
@ -5283,6 +5397,12 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Specify_file_to_store_incremental_compilation_information_6380" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Specify file to store incremental compilation information]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Specify_library_files_to_be_included_in_the_compilation_6079" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Specify library files to be included in the compilation.]]></Val>
|
||||
@ -5511,9 +5631,9 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";The_containing_arrow_function_captures_the_global_value_of_this_which_implicitly_has_type_any_7041" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Item ItemId=";The_containing_arrow_function_captures_the_global_value_of_this_7041" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[The containing arrow function captures the global value of 'this' which implicitly has type 'any'.]]></Val>
|
||||
<Val><![CDATA[The containing arrow function captures the global value of 'this'.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
@ -5571,6 +5691,12 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";The_implementation_signature_is_declared_here_2750" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[The implementation signature is declared here.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";The_import_meta_meta_property_is_only_allowed_using_ESNext_for_the_target_and_module_compiler_option_1343" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[The 'import.meta' meta-property is only allowed using 'ESNext' for the 'target' and 'module' compiler options.]]></Val>
|
||||
@ -5805,6 +5931,18 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";This_JSX_tag_s_0_prop_expects_a_single_child_of_type_1_but_multiple_children_were_provided_2746" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[This JSX tag's '{0}' prop expects a single child of type '{1}', but multiple children were provided.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";This_JSX_tag_s_0_prop_expects_type_1_which_requires_multiple_children_but_only_a_single_child_was_pr_2745" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[This JSX tag's '{0}' prop expects type '{1}' which requires multiple children, but only a single child was provided.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";This_condition_will_always_return_0_since_the_types_1_and_2_have_no_overlap_2367" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[This condition will always return '{0}' since the types '{1}' and '{2}' have no overlap.]]></Val>
|
||||
@ -5823,6 +5961,18 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";This_module_can_only_be_referenced_with_ECMAScript_imports_Slashexports_by_turning_on_the_0_flag_and_2497" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[This module can only be referenced with ECMAScript imports/exports by turning on the '{0}' flag and referencing its default export.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";This_overload_signature_is_not_compatible_with_its_implementation_signature_2394" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[This overload signature is not compatible with its implementation signature.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";This_parameter_is_not_allowed_with_use_strict_directive_1346" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[This parameter is not allowed with 'use strict' directive.]]></Val>
|
||||
@ -5871,9 +6021,9 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Tuple_type_0_with_length_1_cannot_be_assigned_to_tuple_with_length_2_2493" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Item ItemId=";Tuple_type_0_of_length_1_has_no_element_at_index_2_2493" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Tuple type '{0}' with length '{1}' cannot be assigned to tuple with length '{2}'.]]></Val>
|
||||
<Val><![CDATA[Tuple type '{0}' of length '{1}' has no element at index '{2}'.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
@ -5907,18 +6057,6 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Type_0_has_no_property_1_2460" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Type '{0}' has no property '{1}'.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Type_0_has_no_property_1_and_no_string_index_signature_2459" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Type '{0}' has no property '{1}' and no string index signature.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Type_0_is_missing_the_following_properties_from_type_1_Colon_2_2739" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Type '{0}' is missing the following properties from type '{1}': {2}]]></Val>
|
||||
@ -5949,12 +6087,6 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Type_0_is_not_an_array_type_Use_compiler_option_downlevelIteration_to_allow_iterating_of_iterators_2568" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Type '{0}' is not an array type. Use compiler option '--downlevelIteration' to allow iterating of iterators.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Type_0_is_not_an_array_type_or_a_string_type_2495" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Type '{0}' is not an array type or a string type.]]></Val>
|
||||
@ -6081,6 +6213,12 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Type_instantiation_is_excessively_deep_and_possibly_infinite_2589" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Type instantiation is excessively deep and possibly infinite.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Type_is_referenced_directly_or_indirectly_in_the_fulfillment_callback_of_its_own_then_method_1062" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Type is referenced directly or indirectly in the fulfillment callback of its own 'then' method.]]></Val>
|
||||
@ -6159,6 +6297,12 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Type_parameter_0_of_exported_mapped_object_type_is_using_private_name_1_4103" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Type parameter '{0}' of exported mapped object type is using private name '{1}'.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Type_parameter_0_of_exported_type_alias_has_or_is_using_private_name_1_4083" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Type parameter '{0}' of exported type alias has or is using private name '{1}'.]]></Val>
|
||||
@ -6189,6 +6333,12 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Type_parameter_defaults_can_only_reference_previously_declared_type_parameters_2744" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Type parameter defaults can only reference previously declared type parameters.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Type_parameter_list_cannot_be_empty_1098" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Type parameter list cannot be empty.]]></Val>
|
||||
@ -6381,12 +6531,24 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Updating_output_of_project_0_6373" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Updating output of project '{0}'...]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Updating_output_timestamps_of_project_0_6359" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Updating output timestamps of project '{0}'...]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Updating_unchanged_output_timestamps_of_project_0_6371" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Updating unchanged output timestamps of project '{0}'...]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";Use_synthetic_default_member_95016" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[Use synthetic 'default' member.]]></Val>
|
||||
@ -6519,6 +6681,12 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";_0_components_don_t_accept_text_as_child_elements_Text_in_JSX_has_the_type_string_but_the_expected_t_2747" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA['{0}' components don't accept text as child elements. Text in JSX has the type 'string', but the expected type of '{1}' is '{2}'.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";_0_expected_1005" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA['{0}' expected.]]></Val>
|
||||
@ -6693,6 +6861,12 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";_0_refers_to_a_value_but_is_being_used_as_a_type_here_2749" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA['{0}' refers to a value, but is being used as a type here.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";_0_tag_already_specified_1223" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA['{0}' tag already specified.]]></Val>
|
||||
@ -6789,6 +6963,12 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";const_enum_member_initializers_can_only_contain_literal_values_and_other_computed_enum_values_2474" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[const enum member initializers can only contain literal values and other computed enum values.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";const_enums_can_only_be_used_in_property_or_index_access_expressions_or_the_right_hand_side_of_an_im_2475" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA['const' enums can only be used in property or index access expressions or the right hand side of an import declaration or export assignment or type query.]]></Val>
|
||||
@ -6999,6 +7179,12 @@
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";readonly_type_modifier_is_only_permitted_on_array_and_tuple_literal_types_1354" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA['readonly' type modifier is only permitted on array and tuple literal types.]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>
|
||||
<Item ItemId=";require_call_may_be_converted_to_an_import_80005" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA['require' call may be converted to an import.]]></Val>
|
||||
|
||||
872
lib/lib.dom.d.ts
vendored
872
lib/lib.dom.d.ts
vendored
File diff suppressed because it is too large
Load Diff
7
lib/lib.dom.iterable.d.ts
vendored
7
lib/lib.dom.iterable.d.ts
vendored
@ -116,6 +116,13 @@ interface Headers {
|
||||
values(): IterableIterator<string>;
|
||||
}
|
||||
|
||||
interface MediaKeyStatusMap {
|
||||
[Symbol.iterator](): IterableIterator<[BufferSource, MediaKeyStatus]>;
|
||||
entries(): IterableIterator<[BufferSource, MediaKeyStatus]>;
|
||||
keys(): IterableIterator<BufferSource>;
|
||||
values(): IterableIterator<MediaKeyStatus>;
|
||||
}
|
||||
|
||||
interface MediaList {
|
||||
[Symbol.iterator](): IterableIterator<string>;
|
||||
}
|
||||
|
||||
4
lib/lib.es2017.sharedmemory.d.ts
vendored
4
lib/lib.es2017.sharedmemory.d.ts
vendored
@ -123,7 +123,7 @@ interface Atomics {
|
||||
* Wakes up sleeping agents that are waiting on the given index of the array, returning the
|
||||
* number of agents that were awoken.
|
||||
*/
|
||||
wake(typedArray: Int32Array, index: number, count: number): number;
|
||||
notify(typedArray: Int32Array, index: number, count: number): number;
|
||||
|
||||
/**
|
||||
* Stores the bitwise XOR of a value with the value at the given position in the array,
|
||||
@ -135,4 +135,4 @@ interface Atomics {
|
||||
readonly [Symbol.toStringTag]: "Atomics";
|
||||
}
|
||||
|
||||
declare var Atomics: Atomics;
|
||||
declare var Atomics: Atomics;
|
||||
|
||||
44
lib/lib.es2018.asynciterable.d.ts
vendored
Normal file
44
lib/lib.es2018.asynciterable.d.ts
vendored
Normal file
@ -0,0 +1,44 @@
|
||||
/*! *****************************************************************************
|
||||
Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
this file except in compliance with the License. You may obtain a copy of the
|
||||
License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
|
||||
WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
|
||||
MERCHANTABLITY OR NON-INFRINGEMENT.
|
||||
|
||||
See the Apache Version 2.0 License for specific language governing permissions
|
||||
and limitations under the License.
|
||||
***************************************************************************** */
|
||||
|
||||
|
||||
|
||||
/// <reference no-default-lib="true"/>
|
||||
|
||||
|
||||
/// <reference lib="es2015.symbol" />
|
||||
/// <reference lib="es2015.iterable" />
|
||||
|
||||
interface SymbolConstructor {
|
||||
/**
|
||||
* A method that returns the default async iterator for an object. Called by the semantics of
|
||||
* the for-await-of statement.
|
||||
*/
|
||||
readonly asyncIterator: symbol;
|
||||
}
|
||||
|
||||
interface AsyncIterator<T> {
|
||||
next(value?: any): Promise<IteratorResult<T>>;
|
||||
return?(value?: any): Promise<IteratorResult<T>>;
|
||||
throw?(e?: any): Promise<IteratorResult<T>>;
|
||||
}
|
||||
|
||||
interface AsyncIterable<T> {
|
||||
[Symbol.asyncIterator](): AsyncIterator<T>;
|
||||
}
|
||||
|
||||
interface AsyncIterableIterator<T> extends AsyncIterator<T> {
|
||||
[Symbol.asyncIterator](): AsyncIterableIterator<T>;
|
||||
}
|
||||
1
lib/lib.es2018.d.ts
vendored
1
lib/lib.es2018.d.ts
vendored
@ -19,6 +19,7 @@ and limitations under the License.
|
||||
|
||||
|
||||
/// <reference lib="es2017" />
|
||||
/// <reference lib="es2018.asynciterable" />
|
||||
/// <reference lib="es2018.promise" />
|
||||
/// <reference lib="es2018.regexp" />
|
||||
/// <reference lib="es2018.intl" />
|
||||
|
||||
223
lib/lib.es2019.array.d.ts
vendored
Normal file
223
lib/lib.es2019.array.d.ts
vendored
Normal file
@ -0,0 +1,223 @@
|
||||
/*! *****************************************************************************
|
||||
Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
this file except in compliance with the License. You may obtain a copy of the
|
||||
License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
|
||||
WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
|
||||
MERCHANTABLITY OR NON-INFRINGEMENT.
|
||||
|
||||
See the Apache Version 2.0 License for specific language governing permissions
|
||||
and limitations under the License.
|
||||
***************************************************************************** */
|
||||
|
||||
|
||||
|
||||
/// <reference no-default-lib="true"/>
|
||||
|
||||
|
||||
interface ReadonlyArray<T> {
|
||||
|
||||
/**
|
||||
* Calls a defined callback function on each element of an array. Then, flattens the result into
|
||||
* a new array.
|
||||
* This is identical to a map followed by flat with depth 1.
|
||||
*
|
||||
* @param callback A function that accepts up to three arguments. The flatMap method calls the
|
||||
* callback function one time for each element in the array.
|
||||
* @param thisArg An object to which the this keyword can refer in the callback function. If
|
||||
* thisArg is omitted, undefined is used as the this value.
|
||||
*/
|
||||
flatMap<U, This = undefined> (
|
||||
callback: (this: This, value: T, index: number, array: T[]) => U|ReadonlyArray<U>,
|
||||
thisArg?: This
|
||||
): U[]
|
||||
|
||||
|
||||
/**
|
||||
* Returns a new array with all sub-array elements concatenated into it recursively up to the
|
||||
* specified depth.
|
||||
*
|
||||
* @param depth The maximum recursion depth
|
||||
*/
|
||||
flat<U>(this:
|
||||
ReadonlyArray<U[][][][]> |
|
||||
|
||||
ReadonlyArray<ReadonlyArray<U[][][]>> |
|
||||
ReadonlyArray<ReadonlyArray<U[][]>[]> |
|
||||
ReadonlyArray<ReadonlyArray<U[]>[][]> |
|
||||
ReadonlyArray<ReadonlyArray<U>[][][]> |
|
||||
|
||||
ReadonlyArray<ReadonlyArray<ReadonlyArray<U[][]>>> |
|
||||
ReadonlyArray<ReadonlyArray<ReadonlyArray<U>[][]>> |
|
||||
ReadonlyArray<ReadonlyArray<ReadonlyArray<U>>[][]> |
|
||||
ReadonlyArray<ReadonlyArray<ReadonlyArray<U>[]>[]> |
|
||||
ReadonlyArray<ReadonlyArray<ReadonlyArray<U[]>>[]> |
|
||||
ReadonlyArray<ReadonlyArray<ReadonlyArray<U[]>[]>> |
|
||||
|
||||
ReadonlyArray<ReadonlyArray<ReadonlyArray<ReadonlyArray<U[]>>>> |
|
||||
ReadonlyArray<ReadonlyArray<ReadonlyArray<ReadonlyArray<U>[]>>> |
|
||||
ReadonlyArray<ReadonlyArray<ReadonlyArray<ReadonlyArray<U>>[]>> |
|
||||
ReadonlyArray<ReadonlyArray<ReadonlyArray<ReadonlyArray<U>>>[]> |
|
||||
|
||||
ReadonlyArray<ReadonlyArray<ReadonlyArray<ReadonlyArray<ReadonlyArray<U>>>>>,
|
||||
depth: 4): U[];
|
||||
|
||||
/**
|
||||
* Returns a new array with all sub-array elements concatenated into it recursively up to the
|
||||
* specified depth.
|
||||
*
|
||||
* @param depth The maximum recursion depth
|
||||
*/
|
||||
flat<U>(this:
|
||||
ReadonlyArray<U[][][]> |
|
||||
|
||||
ReadonlyArray<ReadonlyArray<U>[][]> |
|
||||
ReadonlyArray<ReadonlyArray<U[]>[]> |
|
||||
ReadonlyArray<ReadonlyArray<U[][]>> |
|
||||
|
||||
ReadonlyArray<ReadonlyArray<ReadonlyArray<U[]>>> |
|
||||
ReadonlyArray<ReadonlyArray<ReadonlyArray<U>[]>> |
|
||||
ReadonlyArray<ReadonlyArray<ReadonlyArray<U>>[]> |
|
||||
|
||||
ReadonlyArray<ReadonlyArray<ReadonlyArray<ReadonlyArray<U>>>>,
|
||||
depth: 3): U[];
|
||||
|
||||
/**
|
||||
* Returns a new array with all sub-array elements concatenated into it recursively up to the
|
||||
* specified depth.
|
||||
*
|
||||
* @param depth The maximum recursion depth
|
||||
*/
|
||||
flat<U>(this:
|
||||
ReadonlyArray<U[][]> |
|
||||
|
||||
ReadonlyArray<ReadonlyArray<U[]>> |
|
||||
ReadonlyArray<ReadonlyArray<U>[]> |
|
||||
|
||||
ReadonlyArray<ReadonlyArray<ReadonlyArray<U>>>,
|
||||
depth: 2): U[];
|
||||
|
||||
/**
|
||||
* Returns a new array with all sub-array elements concatenated into it recursively up to the
|
||||
* specified depth.
|
||||
*
|
||||
* @param depth The maximum recursion depth
|
||||
*/
|
||||
flat<U>(this:
|
||||
ReadonlyArray<U[]> |
|
||||
ReadonlyArray<ReadonlyArray<U>>,
|
||||
depth?: 1
|
||||
): U[];
|
||||
|
||||
/**
|
||||
* Returns a new array with all sub-array elements concatenated into it recursively up to the
|
||||
* specified depth.
|
||||
*
|
||||
* @param depth The maximum recursion depth
|
||||
*/
|
||||
flat<U>(this:
|
||||
ReadonlyArray<U>,
|
||||
depth: 0
|
||||
): U[];
|
||||
|
||||
/**
|
||||
* Returns a new array with all sub-array elements concatenated into it recursively up to the
|
||||
* specified depth. If no depth is provided, flat method defaults to the depth of 1.
|
||||
*
|
||||
* @param depth The maximum recursion depth
|
||||
*/
|
||||
flat<U>(depth?: number): any[];
|
||||
}
|
||||
|
||||
interface Array<T> {
|
||||
|
||||
/**
|
||||
* Calls a defined callback function on each element of an array. Then, flattens the result into
|
||||
* a new array.
|
||||
* This is identical to a map followed by flat with depth 1.
|
||||
*
|
||||
* @param callback A function that accepts up to three arguments. The flatMap method calls the
|
||||
* callback function one time for each element in the array.
|
||||
* @param thisArg An object to which the this keyword can refer in the callback function. If
|
||||
* thisArg is omitted, undefined is used as the this value.
|
||||
*/
|
||||
flatMap<U, This = undefined> (
|
||||
callback: (this: This, value: T, index: number, array: T[]) => U|ReadonlyArray<U>,
|
||||
thisArg?: This
|
||||
): U[]
|
||||
|
||||
/**
|
||||
* Returns a new array with all sub-array elements concatenated into it recursively up to the
|
||||
* specified depth.
|
||||
*
|
||||
* @param depth The maximum recursion depth
|
||||
*/
|
||||
flat<U>(this: U[][][][][][][][], depth: 7): U[];
|
||||
|
||||
/**
|
||||
* Returns a new array with all sub-array elements concatenated into it recursively up to the
|
||||
* specified depth.
|
||||
*
|
||||
* @param depth The maximum recursion depth
|
||||
*/
|
||||
flat<U>(this: U[][][][][][][], depth: 6): U[];
|
||||
|
||||
/**
|
||||
* Returns a new array with all sub-array elements concatenated into it recursively up to the
|
||||
* specified depth.
|
||||
*
|
||||
* @param depth The maximum recursion depth
|
||||
*/
|
||||
flat<U>(this: U[][][][][][], depth: 5): U[];
|
||||
|
||||
/**
|
||||
* Returns a new array with all sub-array elements concatenated into it recursively up to the
|
||||
* specified depth.
|
||||
*
|
||||
* @param depth The maximum recursion depth
|
||||
*/
|
||||
flat<U>(this: U[][][][][], depth: 4): U[];
|
||||
|
||||
/**
|
||||
* Returns a new array with all sub-array elements concatenated into it recursively up to the
|
||||
* specified depth.
|
||||
*
|
||||
* @param depth The maximum recursion depth
|
||||
*/
|
||||
flat<U>(this: U[][][][], depth: 3): U[];
|
||||
|
||||
/**
|
||||
* Returns a new array with all sub-array elements concatenated into it recursively up to the
|
||||
* specified depth.
|
||||
*
|
||||
* @param depth The maximum recursion depth
|
||||
*/
|
||||
flat<U>(this: U[][][], depth: 2): U[];
|
||||
|
||||
/**
|
||||
* Returns a new array with all sub-array elements concatenated into it recursively up to the
|
||||
* specified depth.
|
||||
*
|
||||
* @param depth The maximum recursion depth
|
||||
*/
|
||||
flat<U>(this: U[][], depth?: 1): U[];
|
||||
|
||||
/**
|
||||
* Returns a new array with all sub-array elements concatenated into it recursively up to the
|
||||
* specified depth.
|
||||
*
|
||||
* @param depth The maximum recursion depth
|
||||
*/
|
||||
flat<U>(this: U[], depth: 0): U[];
|
||||
|
||||
/**
|
||||
* Returns a new array with all sub-array elements concatenated into it recursively up to the
|
||||
* specified depth. If no depth is provided, flat method defaults to the depth of 1.
|
||||
*
|
||||
* @param depth The maximum recursion depth
|
||||
*/
|
||||
flat<U>(depth?: number): any[];
|
||||
}
|
||||
24
lib/lib.es2019.d.ts
vendored
Normal file
24
lib/lib.es2019.d.ts
vendored
Normal file
@ -0,0 +1,24 @@
|
||||
/*! *****************************************************************************
|
||||
Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
this file except in compliance with the License. You may obtain a copy of the
|
||||
License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
|
||||
WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
|
||||
MERCHANTABLITY OR NON-INFRINGEMENT.
|
||||
|
||||
See the Apache Version 2.0 License for specific language governing permissions
|
||||
and limitations under the License.
|
||||
***************************************************************************** */
|
||||
|
||||
|
||||
|
||||
/// <reference no-default-lib="true"/>
|
||||
|
||||
|
||||
/// <reference lib="es2018" />
|
||||
/// <reference lib="es2019.array" />
|
||||
/// <reference lib="es2019.string" />
|
||||
/// <reference lib="es2019.symbol" />
|
||||
25
lib/lib.es2019.full.d.ts
vendored
Normal file
25
lib/lib.es2019.full.d.ts
vendored
Normal file
@ -0,0 +1,25 @@
|
||||
/*! *****************************************************************************
|
||||
Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
this file except in compliance with the License. You may obtain a copy of the
|
||||
License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
|
||||
WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
|
||||
MERCHANTABLITY OR NON-INFRINGEMENT.
|
||||
|
||||
See the Apache Version 2.0 License for specific language governing permissions
|
||||
and limitations under the License.
|
||||
***************************************************************************** */
|
||||
|
||||
|
||||
|
||||
/// <reference no-default-lib="true"/>
|
||||
|
||||
|
||||
/// <reference lib="es2019" />
|
||||
/// <reference lib="dom" />
|
||||
/// <reference lib="webworker.importscripts" />
|
||||
/// <reference lib="scripthost" />
|
||||
/// <reference lib="dom.iterable" />
|
||||
33
lib/lib.es2019.string.d.ts
vendored
Normal file
33
lib/lib.es2019.string.d.ts
vendored
Normal file
@ -0,0 +1,33 @@
|
||||
/*! *****************************************************************************
|
||||
Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
this file except in compliance with the License. You may obtain a copy of the
|
||||
License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
|
||||
WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
|
||||
MERCHANTABLITY OR NON-INFRINGEMENT.
|
||||
|
||||
See the Apache Version 2.0 License for specific language governing permissions
|
||||
and limitations under the License.
|
||||
***************************************************************************** */
|
||||
|
||||
|
||||
|
||||
/// <reference no-default-lib="true"/>
|
||||
|
||||
|
||||
interface String {
|
||||
/** Removes the trailing white space and line terminator characters from a string. */
|
||||
trimEnd(): string;
|
||||
|
||||
/** Removes the leading white space and line terminator characters from a string. */
|
||||
trimStart(): string;
|
||||
|
||||
/** Removes the trailing white space and line terminator characters from a string. */
|
||||
trimLeft(): string;
|
||||
|
||||
/** Removes the leading white space and line terminator characters from a string. */
|
||||
trimRight(): string;
|
||||
}
|
||||
26
lib/lib.es2019.symbol.d.ts
vendored
Normal file
26
lib/lib.es2019.symbol.d.ts
vendored
Normal file
@ -0,0 +1,26 @@
|
||||
/*! *****************************************************************************
|
||||
Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
Licensed under the Apache License, Version 2.0 (the "License"); you may not use
|
||||
this file except in compliance with the License. You may obtain a copy of the
|
||||
License at http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
|
||||
WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
|
||||
MERCHANTABLITY OR NON-INFRINGEMENT.
|
||||
|
||||
See the Apache Version 2.0 License for specific language governing permissions
|
||||
and limitations under the License.
|
||||
***************************************************************************** */
|
||||
|
||||
|
||||
|
||||
/// <reference no-default-lib="true"/>
|
||||
|
||||
|
||||
interface Symbol {
|
||||
/**
|
||||
* expose the [[Description]] internal slot of a symbol directly
|
||||
*/
|
||||
readonly description: string;
|
||||
}
|
||||
106
lib/lib.es5.d.ts
vendored
106
lib/lib.es5.d.ts
vendored
@ -315,6 +315,16 @@ interface FunctionConstructor {
|
||||
|
||||
declare const Function: FunctionConstructor;
|
||||
|
||||
/**
|
||||
* Extracts the type of the 'this' parameter of a function type, or 'unknown' if the function type has no 'this' parameter.
|
||||
*/
|
||||
type ThisParameterType<T> = T extends (this: unknown, ...args: any[]) => any ? unknown : T extends (this: infer U, ...args: any[]) => any ? U : unknown;
|
||||
|
||||
/**
|
||||
* Removes the 'this' parameter from a function type.
|
||||
*/
|
||||
type OmitThisParameter<T> = unknown extends ThisParameterType<T> ? T : T extends (...args: infer A) => infer R ? (...args: A) => R : T;
|
||||
|
||||
interface CallableFunction extends Function {
|
||||
/**
|
||||
* Calls the function with the specified object as the this value and the elements of specified array as the arguments.
|
||||
@ -337,7 +347,7 @@ interface CallableFunction extends Function {
|
||||
* @param thisArg The object to be used as the this object.
|
||||
* @param args Arguments to bind to the parameters of the function.
|
||||
*/
|
||||
bind<T, A extends any[], R>(this: (this: T, ...args: A) => R, thisArg: T): (...args: A) => R;
|
||||
bind<T>(this: T, thisArg: ThisParameterType<T>): OmitThisParameter<T>;
|
||||
bind<T, A0, A extends any[], R>(this: (this: T, arg0: A0, ...args: A) => R, thisArg: T, arg0: A0): (...args: A) => R;
|
||||
bind<T, A0, A1, A extends any[], R>(this: (this: T, arg0: A0, arg1: A1, ...args: A) => R, thisArg: T, arg0: A0, arg1: A1): (...args: A) => R;
|
||||
bind<T, A0, A1, A2, A extends any[], R>(this: (this: T, arg0: A0, arg1: A1, arg2: A2, ...args: A) => R, thisArg: T, arg0: A0, arg1: A1, arg2: A2): (...args: A) => R;
|
||||
@ -367,7 +377,7 @@ interface NewableFunction extends Function {
|
||||
* @param thisArg The object to be used as the this object.
|
||||
* @param args Arguments to bind to the parameters of the function.
|
||||
*/
|
||||
bind<A extends any[], R>(this: new (...args: A) => R, thisArg: any): new (...args: A) => R;
|
||||
bind<T>(this: T, thisArg: any): T;
|
||||
bind<A0, A extends any[], R>(this: new (arg0: A0, ...args: A) => R, thisArg: any, arg0: A0): new (...args: A) => R;
|
||||
bind<A0, A1, A extends any[], R>(this: new (arg0: A0, arg1: A1, ...args: A) => R, thisArg: any, arg0: A0, arg1: A1): new (...args: A) => R;
|
||||
bind<A0, A1, A2, A extends any[], R>(this: new (arg0: A0, arg1: A1, arg2: A2, ...args: A) => R, thisArg: any, arg0: A0, arg1: A1, arg2: A2): new (...args: A) => R;
|
||||
@ -597,7 +607,7 @@ interface TemplateStringsArray extends ReadonlyArray<string> {
|
||||
|
||||
/**
|
||||
* The type of `import.meta`.
|
||||
*
|
||||
*
|
||||
* If you need to declare that a given property exists on `import.meta`,
|
||||
* this type may be augmented via interface merging.
|
||||
*/
|
||||
@ -1046,14 +1056,14 @@ interface JSON {
|
||||
* @param reviver A function that transforms the results. This function is called for each member of the object.
|
||||
* If a member contains nested objects, the nested objects are transformed before the parent object is.
|
||||
*/
|
||||
parse(text: string, reviver?: (key: any, value: any) => any): any;
|
||||
parse(text: string, reviver?: (this: any, key: string, value: any) => any): any;
|
||||
/**
|
||||
* Converts a JavaScript value to a JavaScript Object Notation (JSON) string.
|
||||
* @param value A JavaScript value, usually an object or array, to be converted.
|
||||
* @param replacer A function that transforms the results.
|
||||
* @param space Adds indentation, white space, and line break characters to the return-value JSON text to make it easier to read.
|
||||
*/
|
||||
stringify(value: any, replacer?: (key: string, value: any) => any, space?: string | number): string;
|
||||
stringify(value: any, replacer?: (this: any, key: string, value: any) => any, space?: string | number): string;
|
||||
/**
|
||||
* Converts a JavaScript value to a JavaScript Object Notation (JSON) string.
|
||||
* @param value A JavaScript value, usually an object or array, to be converted.
|
||||
@ -1124,13 +1134,13 @@ interface ReadonlyArray<T> {
|
||||
* @param callbackfn A function that accepts up to three arguments. The every method calls the callbackfn function for each element in array1 until the callbackfn returns false, or until the end of the array.
|
||||
* @param thisArg An object to which the this keyword can refer in the callbackfn function. If thisArg is omitted, undefined is used as the this value.
|
||||
*/
|
||||
every(callbackfn: (value: T, index: number, array: ReadonlyArray<T>) => boolean, thisArg?: any): boolean;
|
||||
every(callbackfn: (value: T, index: number, array: ReadonlyArray<T>) => unknown, thisArg?: any): boolean;
|
||||
/**
|
||||
* Determines whether the specified callback function returns true for any element of an array.
|
||||
* @param callbackfn A function that accepts up to three arguments. The some method calls the callbackfn function for each element in array1 until the callbackfn returns true, or until the end of the array.
|
||||
* @param thisArg An object to which the this keyword can refer in the callbackfn function. If thisArg is omitted, undefined is used as the this value.
|
||||
*/
|
||||
some(callbackfn: (value: T, index: number, array: ReadonlyArray<T>) => boolean, thisArg?: any): boolean;
|
||||
some(callbackfn: (value: T, index: number, array: ReadonlyArray<T>) => unknown, thisArg?: any): boolean;
|
||||
/**
|
||||
* Performs the specified action for each element in an array.
|
||||
* @param callbackfn A function that accepts up to three arguments. forEach calls the callbackfn function one time for each element in the array.
|
||||
@ -1154,7 +1164,7 @@ interface ReadonlyArray<T> {
|
||||
* @param callbackfn A function that accepts up to three arguments. The filter method calls the callbackfn function one time for each element in the array.
|
||||
* @param thisArg An object to which the this keyword can refer in the callbackfn function. If thisArg is omitted, undefined is used as the this value.
|
||||
*/
|
||||
filter(callbackfn: (value: T, index: number, array: ReadonlyArray<T>) => any, thisArg?: any): T[];
|
||||
filter(callbackfn: (value: T, index: number, array: ReadonlyArray<T>) => unknown, thisArg?: any): T[];
|
||||
/**
|
||||
* Calls the specified callback function for all the elements in an array. The return value of the callback function is the accumulated result, and is provided as an argument in the next call to the callback function.
|
||||
* @param callbackfn A function that accepts up to four arguments. The reduce method calls the callbackfn function one time for each element in the array.
|
||||
@ -1461,22 +1471,22 @@ type NonNullable<T> = T extends null | undefined ? never : T;
|
||||
/**
|
||||
* Obtain the parameters of a function type in a tuple
|
||||
*/
|
||||
type Parameters<T extends (...args: any[]) => any> = T extends (...args: infer P) => any ? P : never;
|
||||
type Parameters<T extends (...args: any) => any> = T extends (...args: infer P) => any ? P : never;
|
||||
|
||||
/**
|
||||
* Obtain the parameters of a constructor function type in a tuple
|
||||
*/
|
||||
type ConstructorParameters<T extends new (...args: any[]) => any> = T extends new (...args: infer P) => any ? P : never;
|
||||
type ConstructorParameters<T extends new (...args: any) => any> = T extends new (...args: infer P) => any ? P : never;
|
||||
|
||||
/**
|
||||
* Obtain the return type of a function type
|
||||
*/
|
||||
type ReturnType<T extends (...args: any[]) => any> = T extends (...args: any[]) => infer R ? R : any;
|
||||
type ReturnType<T extends (...args: any) => any> = T extends (...args: any) => infer R ? R : any;
|
||||
|
||||
/**
|
||||
* Obtain the return type of a constructor function type
|
||||
*/
|
||||
type InstanceType<T extends new (...args: any[]) => any> = T extends new (...args: any[]) => infer R ? R : any;
|
||||
type InstanceType<T extends new (...args: any) => any> = T extends new (...args: any) => infer R ? R : any;
|
||||
|
||||
/**
|
||||
* Marker for contextual 'this' type
|
||||
@ -1923,13 +1933,19 @@ interface Int8ArrayConstructor {
|
||||
*/
|
||||
of(...items: number[]): Int8Array;
|
||||
|
||||
/**
|
||||
* Creates an array from an array-like or iterable object.
|
||||
* @param arrayLike An array-like or iterable object to convert to an array.
|
||||
*/
|
||||
from(arrayLike: ArrayLike<number>): Int8Array;
|
||||
|
||||
/**
|
||||
* Creates an array from an array-like or iterable object.
|
||||
* @param arrayLike An array-like or iterable object to convert to an array.
|
||||
* @param mapfn A mapping function to call on every element of the array.
|
||||
* @param thisArg Value of 'this' used to invoke the mapfn.
|
||||
*/
|
||||
from(arrayLike: ArrayLike<number>, mapfn?: (v: number, k: number) => number, thisArg?: any): Int8Array;
|
||||
from<T>(arrayLike: ArrayLike<T>, mapfn: (v: T, k: number) => number, thisArg?: any): Int8Array;
|
||||
|
||||
|
||||
}
|
||||
@ -2193,13 +2209,19 @@ interface Uint8ArrayConstructor {
|
||||
*/
|
||||
of(...items: number[]): Uint8Array;
|
||||
|
||||
/**
|
||||
* Creates an array from an array-like or iterable object.
|
||||
* @param arrayLike An array-like or iterable object to convert to an array.
|
||||
*/
|
||||
from(arrayLike: ArrayLike<number>): Uint8Array;
|
||||
|
||||
/**
|
||||
* Creates an array from an array-like or iterable object.
|
||||
* @param arrayLike An array-like or iterable object to convert to an array.
|
||||
* @param mapfn A mapping function to call on every element of the array.
|
||||
* @param thisArg Value of 'this' used to invoke the mapfn.
|
||||
*/
|
||||
from(arrayLike: ArrayLike<number>, mapfn?: (v: number, k: number) => number, thisArg?: any): Uint8Array;
|
||||
from<T>(arrayLike: ArrayLike<T>, mapfn: (v: T, k: number) => number, thisArg?: any): Uint8Array;
|
||||
|
||||
}
|
||||
declare const Uint8Array: Uint8ArrayConstructor;
|
||||
@ -2462,13 +2484,19 @@ interface Uint8ClampedArrayConstructor {
|
||||
*/
|
||||
of(...items: number[]): Uint8ClampedArray;
|
||||
|
||||
/**
|
||||
* Creates an array from an array-like or iterable object.
|
||||
* @param arrayLike An array-like or iterable object to convert to an array.
|
||||
*/
|
||||
from(arrayLike: ArrayLike<number>): Uint8ClampedArray;
|
||||
|
||||
/**
|
||||
* Creates an array from an array-like or iterable object.
|
||||
* @param arrayLike An array-like or iterable object to convert to an array.
|
||||
* @param mapfn A mapping function to call on every element of the array.
|
||||
* @param thisArg Value of 'this' used to invoke the mapfn.
|
||||
*/
|
||||
from(arrayLike: ArrayLike<number>, mapfn?: (v: number, k: number) => number, thisArg?: any): Uint8ClampedArray;
|
||||
from<T>(arrayLike: ArrayLike<T>, mapfn: (v: T, k: number) => number, thisArg?: any): Uint8ClampedArray;
|
||||
}
|
||||
declare const Uint8ClampedArray: Uint8ClampedArrayConstructor;
|
||||
|
||||
@ -2729,13 +2757,19 @@ interface Int16ArrayConstructor {
|
||||
*/
|
||||
of(...items: number[]): Int16Array;
|
||||
|
||||
/**
|
||||
* Creates an array from an array-like or iterable object.
|
||||
* @param arrayLike An array-like or iterable object to convert to an array.
|
||||
*/
|
||||
from(arrayLike: ArrayLike<number>): Int16Array;
|
||||
|
||||
/**
|
||||
* Creates an array from an array-like or iterable object.
|
||||
* @param arrayLike An array-like or iterable object to convert to an array.
|
||||
* @param mapfn A mapping function to call on every element of the array.
|
||||
* @param thisArg Value of 'this' used to invoke the mapfn.
|
||||
*/
|
||||
from(arrayLike: ArrayLike<number>, mapfn?: (v: number, k: number) => number, thisArg?: any): Int16Array;
|
||||
from<T>(arrayLike: ArrayLike<T>, mapfn: (v: T, k: number) => number, thisArg?: any): Int16Array;
|
||||
|
||||
|
||||
}
|
||||
@ -2999,13 +3033,19 @@ interface Uint16ArrayConstructor {
|
||||
*/
|
||||
of(...items: number[]): Uint16Array;
|
||||
|
||||
/**
|
||||
* Creates an array from an array-like or iterable object.
|
||||
* @param arrayLike An array-like or iterable object to convert to an array.
|
||||
*/
|
||||
from(arrayLike: ArrayLike<number>): Uint16Array;
|
||||
|
||||
/**
|
||||
* Creates an array from an array-like or iterable object.
|
||||
* @param arrayLike An array-like or iterable object to convert to an array.
|
||||
* @param mapfn A mapping function to call on every element of the array.
|
||||
* @param thisArg Value of 'this' used to invoke the mapfn.
|
||||
*/
|
||||
from(arrayLike: ArrayLike<number>, mapfn?: (v: number, k: number) => number, thisArg?: any): Uint16Array;
|
||||
from<T>(arrayLike: ArrayLike<T>, mapfn: (v: T, k: number) => number, thisArg?: any): Uint16Array;
|
||||
|
||||
|
||||
}
|
||||
@ -3268,13 +3308,19 @@ interface Int32ArrayConstructor {
|
||||
*/
|
||||
of(...items: number[]): Int32Array;
|
||||
|
||||
/**
|
||||
* Creates an array from an array-like or iterable object.
|
||||
* @param arrayLike An array-like or iterable object to convert to an array.
|
||||
*/
|
||||
from(arrayLike: ArrayLike<number>): Int32Array;
|
||||
|
||||
/**
|
||||
* Creates an array from an array-like or iterable object.
|
||||
* @param arrayLike An array-like or iterable object to convert to an array.
|
||||
* @param mapfn A mapping function to call on every element of the array.
|
||||
* @param thisArg Value of 'this' used to invoke the mapfn.
|
||||
*/
|
||||
from(arrayLike: ArrayLike<number>, mapfn?: (v: number, k: number) => number, thisArg?: any): Int32Array;
|
||||
from<T>(arrayLike: ArrayLike<T>, mapfn: (v: T, k: number) => number, thisArg?: any): Int32Array;
|
||||
|
||||
}
|
||||
declare const Int32Array: Int32ArrayConstructor;
|
||||
@ -3536,13 +3582,19 @@ interface Uint32ArrayConstructor {
|
||||
*/
|
||||
of(...items: number[]): Uint32Array;
|
||||
|
||||
/**
|
||||
* Creates an array from an array-like or iterable object.
|
||||
* @param arrayLike An array-like or iterable object to convert to an array.
|
||||
*/
|
||||
from(arrayLike: ArrayLike<number>): Uint32Array;
|
||||
|
||||
/**
|
||||
* Creates an array from an array-like or iterable object.
|
||||
* @param arrayLike An array-like or iterable object to convert to an array.
|
||||
* @param mapfn A mapping function to call on every element of the array.
|
||||
* @param thisArg Value of 'this' used to invoke the mapfn.
|
||||
*/
|
||||
from(arrayLike: ArrayLike<number>, mapfn?: (v: number, k: number) => number, thisArg?: any): Uint32Array;
|
||||
from<T>(arrayLike: ArrayLike<T>, mapfn: (v: T, k: number) => number, thisArg?: any): Uint32Array;
|
||||
|
||||
}
|
||||
declare const Uint32Array: Uint32ArrayConstructor;
|
||||
@ -3805,13 +3857,19 @@ interface Float32ArrayConstructor {
|
||||
*/
|
||||
of(...items: number[]): Float32Array;
|
||||
|
||||
/**
|
||||
* Creates an array from an array-like or iterable object.
|
||||
* @param arrayLike An array-like or iterable object to convert to an array.
|
||||
*/
|
||||
from(arrayLike: ArrayLike<number>): Float32Array;
|
||||
|
||||
/**
|
||||
* Creates an array from an array-like or iterable object.
|
||||
* @param arrayLike An array-like or iterable object to convert to an array.
|
||||
* @param mapfn A mapping function to call on every element of the array.
|
||||
* @param thisArg Value of 'this' used to invoke the mapfn.
|
||||
*/
|
||||
from(arrayLike: ArrayLike<number>, mapfn?: (v: number, k: number) => number, thisArg?: any): Float32Array;
|
||||
from<T>(arrayLike: ArrayLike<T>, mapfn: (v: T, k: number) => number, thisArg?: any): Float32Array;
|
||||
|
||||
|
||||
}
|
||||
@ -4075,13 +4133,19 @@ interface Float64ArrayConstructor {
|
||||
*/
|
||||
of(...items: number[]): Float64Array;
|
||||
|
||||
/**
|
||||
* Creates an array from an array-like or iterable object.
|
||||
* @param arrayLike An array-like or iterable object to convert to an array.
|
||||
*/
|
||||
from(arrayLike: ArrayLike<number>): Float64Array;
|
||||
|
||||
/**
|
||||
* Creates an array from an array-like or iterable object.
|
||||
* @param arrayLike An array-like or iterable object to convert to an array.
|
||||
* @param mapfn A mapping function to call on every element of the array.
|
||||
* @param thisArg Value of 'this' used to invoke the mapfn.
|
||||
*/
|
||||
from(arrayLike: ArrayLike<number>, mapfn?: (v: number, k: number) => number, thisArg?: any): Float64Array;
|
||||
from<T>(arrayLike: ArrayLike<T>, mapfn: (v: T, k: number) => number, thisArg?: any): Float64Array;
|
||||
|
||||
}
|
||||
declare const Float64Array: Float64ArrayConstructor;
|
||||
|
||||
5
lib/lib.esnext.d.ts
vendored
5
lib/lib.esnext.d.ts
vendored
@ -18,9 +18,6 @@ and limitations under the License.
|
||||
/// <reference no-default-lib="true"/>
|
||||
|
||||
|
||||
/// <reference lib="es2018" />
|
||||
/// <reference lib="esnext.asynciterable" />
|
||||
/// <reference lib="esnext.array" />
|
||||
/// <reference lib="es2019" />
|
||||
/// <reference lib="esnext.bigint" />
|
||||
/// <reference lib="esnext.symbol" />
|
||||
/// <reference lib="esnext.intl" />
|
||||
|
||||
184
lib/lib.webworker.d.ts
vendored
184
lib/lib.webworker.d.ts
vendored
@ -174,6 +174,10 @@ interface EventListenerOptions {
|
||||
capture?: boolean;
|
||||
}
|
||||
|
||||
interface EventSourceInit {
|
||||
withCredentials?: boolean;
|
||||
}
|
||||
|
||||
interface ExtendableEventInit extends EventInit {
|
||||
}
|
||||
|
||||
@ -475,6 +479,7 @@ interface EventListener {
|
||||
(evt: Event): void;
|
||||
}
|
||||
|
||||
/** The ANGLE_instanced_arrays extension is part of the WebGL API and allows to draw the same object, or groups of similar objects multiple times, if they share the same vertex data, primitive count and type. */
|
||||
interface ANGLE_instanced_arrays {
|
||||
drawArraysInstancedANGLE(mode: GLenum, first: GLint, count: GLsizei, primcount: GLsizei): void;
|
||||
drawElementsInstancedANGLE(mode: GLenum, count: GLsizei, type: GLenum, offset: GLintptr, primcount: GLsizei): void;
|
||||
@ -482,6 +487,7 @@ interface ANGLE_instanced_arrays {
|
||||
readonly VERTEX_ATTRIB_ARRAY_DIVISOR_ANGLE: GLenum;
|
||||
}
|
||||
|
||||
/** The AbortController interface represents a controller object that allows you to abort one or more DOM requests as and when desired. */
|
||||
interface AbortController {
|
||||
/**
|
||||
* Returns the AbortSignal object associated with this object.
|
||||
@ -500,16 +506,17 @@ declare var AbortController: {
|
||||
};
|
||||
|
||||
interface AbortSignalEventMap {
|
||||
"abort": ProgressEvent;
|
||||
"abort": Event;
|
||||
}
|
||||
|
||||
/** The AbortSignal interface represents a signal object that allows you to communicate with a DOM request (such as a Fetch) and abort it if required via an AbortController object. */
|
||||
interface AbortSignal extends EventTarget {
|
||||
/**
|
||||
* Returns true if this AbortSignal's AbortController has signaled to abort, and false
|
||||
* otherwise.
|
||||
*/
|
||||
readonly aborted: boolean;
|
||||
onabort: ((this: AbortSignal, ev: ProgressEvent) => any) | null;
|
||||
onabort: ((this: AbortSignal, ev: Event) => any) | null;
|
||||
addEventListener<K extends keyof AbortSignalEventMap>(type: K, listener: (this: AbortSignal, ev: AbortSignalEventMap[K]) => any, options?: boolean | AddEventListenerOptions): void;
|
||||
addEventListener(type: string, listener: EventListenerOrEventListenerObject, options?: boolean | AddEventListenerOptions): void;
|
||||
removeEventListener<K extends keyof AbortSignalEventMap>(type: K, listener: (this: AbortSignal, ev: AbortSignalEventMap[K]) => any, options?: boolean | EventListenerOptions): void;
|
||||
@ -541,6 +548,7 @@ interface AesCmacParams extends Algorithm {
|
||||
length: number;
|
||||
}
|
||||
|
||||
/** A Blob object represents a file-like object of immutable, raw data. Blobs represent data that isn't necessarily in a JavaScript-native format. The File interface is based on Blob, inheriting blob functionality and expanding it to support files on the user's system. */
|
||||
interface Blob {
|
||||
readonly size: number;
|
||||
readonly type: string;
|
||||
@ -598,6 +606,7 @@ interface BroadcastChannelEventMap {
|
||||
messageerror: MessageEvent;
|
||||
}
|
||||
|
||||
/** The ByteLengthQueuingStrategy interface of the the Streams API provides a built-in byte length queuing strategy that can be used when constructing streams. */
|
||||
interface ByteLengthQueuingStrategy extends QueuingStrategy<ArrayBufferView> {
|
||||
highWaterMark: number;
|
||||
size(chunk: ArrayBufferView): number;
|
||||
@ -608,6 +617,7 @@ declare var ByteLengthQueuingStrategy: {
|
||||
new(options: { highWaterMark: number }): ByteLengthQueuingStrategy;
|
||||
};
|
||||
|
||||
/** The Cache interface provides a storage mechanism for Request / Response object pairs that are cached, for example as part of the ServiceWorker life cycle. Note that the Cache interface is exposed to windowed scopes as well as workers. You don't have to use it in conjunction with service workers, even though it is defined in the service worker spec. */
|
||||
interface Cache {
|
||||
add(request: RequestInfo): Promise<void>;
|
||||
addAll(requests: RequestInfo[]): Promise<void>;
|
||||
@ -623,6 +633,7 @@ declare var Cache: {
|
||||
new(): Cache;
|
||||
};
|
||||
|
||||
/** The CacheStorage interface represents the storage for Cache objects. */
|
||||
interface CacheStorage {
|
||||
delete(cacheName: string): Promise<boolean>;
|
||||
has(cacheName: string): Promise<boolean>;
|
||||
@ -636,6 +647,7 @@ declare var CacheStorage: {
|
||||
new(): CacheStorage;
|
||||
};
|
||||
|
||||
/** The CanvasGradient interface represents an opaque object describing a gradient. It is returned by the methods CanvasRenderingContext2D.createLinearGradient() or CanvasRenderingContext2D.createRadialGradient(). */
|
||||
interface CanvasGradient {
|
||||
/**
|
||||
* Adds a color stop with the given color to the gradient at the given offset. 0.0 is the offset
|
||||
@ -664,6 +676,7 @@ interface CanvasPath {
|
||||
rect(x: number, y: number, w: number, h: number): void;
|
||||
}
|
||||
|
||||
/** The CanvasPattern interface represents an opaque object describing a pattern, based on an image, a canvas, or a video, created by the CanvasRenderingContext2D.createPattern() method. */
|
||||
interface CanvasPattern {
|
||||
/**
|
||||
* Sets the transformation matrix that will be used when rendering the pattern during a fill or
|
||||
@ -677,6 +690,7 @@ declare var CanvasPattern: {
|
||||
new(): CanvasPattern;
|
||||
};
|
||||
|
||||
/** The Client interface represents an executable context such as a Worker, or a SharedWorker. Window clients are represented by the more-specific WindowClient. You can get Client/WindowClient objects from methods such as Clients.matchAll() and Clients.get(). */
|
||||
interface Client {
|
||||
readonly id: string;
|
||||
readonly type: ClientTypes;
|
||||
@ -689,6 +703,7 @@ declare var Client: {
|
||||
new(): Client;
|
||||
};
|
||||
|
||||
/** The Clients interface provides access to Client objects. Access it via self.clients within a service worker. */
|
||||
interface Clients {
|
||||
claim(): Promise<void>;
|
||||
get(id: string): Promise<any>;
|
||||
@ -701,6 +716,7 @@ declare var Clients: {
|
||||
new(): Clients;
|
||||
};
|
||||
|
||||
/** A CloseEvent is sent to clients using WebSockets when the connection is closed. This is delivered to the listener indicated by the WebSocket object's onclose attribute. */
|
||||
interface CloseEvent extends Event {
|
||||
readonly code: number;
|
||||
readonly reason: string;
|
||||
@ -723,6 +739,7 @@ interface ConcatParams extends Algorithm {
|
||||
publicInfo?: Uint8Array;
|
||||
}
|
||||
|
||||
/** The Console object provides access to the browser's debugging console (e.g. the Web Console in Firefox). The specifics of how it works varies from browser to browser, but there is a de facto set of features that are typically provided. */
|
||||
interface Console {
|
||||
memory: any;
|
||||
assert(condition?: boolean, message?: string, ...data: any[]): void;
|
||||
@ -756,6 +773,7 @@ declare var Console: {
|
||||
new(): Console;
|
||||
};
|
||||
|
||||
/** The CountQueuingStrategy interface of the the Streams API provides a built-in byte length queuing strategy that can be used when constructing streams. */
|
||||
interface CountQueuingStrategy extends QueuingStrategy {
|
||||
highWaterMark: number;
|
||||
size(chunk: any): 1;
|
||||
@ -766,6 +784,7 @@ declare var CountQueuingStrategy: {
|
||||
new(options: { highWaterMark: number }): CountQueuingStrategy;
|
||||
};
|
||||
|
||||
/** The Crypto interface represents basic cryptography features available in the current context. It allows access to a cryptographically strong random number generator and to cryptographic primitives. */
|
||||
interface Crypto {
|
||||
readonly subtle: SubtleCrypto;
|
||||
getRandomValues<T extends Int8Array | Int16Array | Int32Array | Uint8Array | Uint16Array | Uint32Array | Uint8ClampedArray | Float32Array | Float64Array | DataView | null>(array: T): T;
|
||||
@ -776,6 +795,7 @@ declare var Crypto: {
|
||||
new(): Crypto;
|
||||
};
|
||||
|
||||
/** The CryptoKey interface represents a cryptographic key derived from a specific key algorithm. */
|
||||
interface CryptoKey {
|
||||
readonly algorithm: KeyAlgorithm;
|
||||
readonly extractable: boolean;
|
||||
@ -802,6 +822,7 @@ declare var CustomEvent: {
|
||||
new<T>(typeArg: string, eventInitDict?: CustomEventInit<T>): CustomEvent<T>;
|
||||
};
|
||||
|
||||
/** The DOMException interface represents an abnormal event (called an exception) which occurs as a result of calling a method or accessing a property of a web API. */
|
||||
interface DOMException {
|
||||
readonly code: number;
|
||||
readonly message: string;
|
||||
@ -941,6 +962,8 @@ interface DOMMatrixReadOnly {
|
||||
rotateFromVector(x?: number, y?: number): DOMMatrix;
|
||||
scale(scaleX?: number, scaleY?: number, scaleZ?: number, originX?: number, originY?: number, originZ?: number): DOMMatrix;
|
||||
scale3d(scale?: number, originX?: number, originY?: number, originZ?: number): DOMMatrix;
|
||||
/** @deprecated */
|
||||
scaleNonUniform(scaleX?: number, scaleY?: number): DOMMatrix;
|
||||
skewX(sx?: number): DOMMatrix;
|
||||
skewY(sy?: number): DOMMatrix;
|
||||
toFloat32Array(): Float32Array;
|
||||
@ -1033,6 +1056,7 @@ declare var DOMRectReadOnly: {
|
||||
fromRect(other?: DOMRectInit): DOMRectReadOnly;
|
||||
};
|
||||
|
||||
/** A type returned by some APIs which contains a list of DOMString (strings). */
|
||||
interface DOMStringList {
|
||||
/**
|
||||
* Returns the number of strings in strings.
|
||||
@ -1059,6 +1083,7 @@ interface DedicatedWorkerGlobalScopeEventMap extends WorkerGlobalScopeEventMap {
|
||||
"message": MessageEvent;
|
||||
}
|
||||
|
||||
/** The DedicatedWorkerGlobalScope object (the Worker global scope) is accessible through the self keyword. Some additional global functions, namespaces objects, and constructors, not typically associated with the worker global scope, but available on it, are listed in the JavaScript Reference. See also: Functions available to workers. */
|
||||
interface DedicatedWorkerGlobalScope extends WorkerGlobalScope {
|
||||
onmessage: ((this: DedicatedWorkerGlobalScope, ev: MessageEvent) => any) | null;
|
||||
close(): void;
|
||||
@ -1098,6 +1123,7 @@ interface EXT_blend_minmax {
|
||||
readonly MIN_EXT: GLenum;
|
||||
}
|
||||
|
||||
/** The EXT_frag_depth extension is part of the WebGL API and enables to set a depth value of a fragment from within the fragment shader. */
|
||||
interface EXT_frag_depth {
|
||||
}
|
||||
|
||||
@ -1111,11 +1137,13 @@ interface EXT_sRGB {
|
||||
interface EXT_shader_texture_lod {
|
||||
}
|
||||
|
||||
/** The EXT_texture_filter_anisotropic extension is part of the WebGL API and exposes two constants for anisotropic filtering (AF). */
|
||||
interface EXT_texture_filter_anisotropic {
|
||||
readonly MAX_TEXTURE_MAX_ANISOTROPY_EXT: GLenum;
|
||||
readonly TEXTURE_MAX_ANISOTROPY_EXT: GLenum;
|
||||
}
|
||||
|
||||
/** The ErrorEvent interface represents events providing information related to errors in scripts or in files. */
|
||||
interface ErrorEvent extends Event {
|
||||
readonly colno: number;
|
||||
readonly error: any;
|
||||
@ -1129,6 +1157,7 @@ declare var ErrorEvent: {
|
||||
new(type: string, eventInitDict?: ErrorEventInit): ErrorEvent;
|
||||
};
|
||||
|
||||
/** The Event interface represents any event which takes place in the DOM; some are user-generated (such as mouse or keyboard events), while others are generated by APIs (such as events that indicate an animation has finished running, a video has been paused, and so forth). While events are usually triggered by such "external" sources, they can also be triggered programmatically, such as by calling the HTMLElement.click() method of an element, or by defining the event, then sending it to a specified target using EventTarget.dispatchEvent(). There are many types of events, some of which use other interfaces based on the main Event interface. Event itself contains the properties and methods which are common to all events. */
|
||||
interface Event {
|
||||
/**
|
||||
* Returns true or false depending on how event was initialized. True if event goes through its target's ancestors in reverse tree order, and false otherwise.
|
||||
@ -1153,6 +1182,8 @@ interface Event {
|
||||
*/
|
||||
readonly isTrusted: boolean;
|
||||
returnValue: boolean;
|
||||
/** @deprecated */
|
||||
readonly srcElement: EventTarget | null;
|
||||
/**
|
||||
* Returns the object to which event is dispatched (its target).
|
||||
*/
|
||||
@ -1200,28 +1231,50 @@ interface EventListenerObject {
|
||||
handleEvent(evt: Event): void;
|
||||
}
|
||||
|
||||
interface EventSourceEventMap {
|
||||
"error": Event;
|
||||
"message": MessageEvent;
|
||||
"open": Event;
|
||||
}
|
||||
|
||||
interface EventSource extends EventTarget {
|
||||
onerror: ((this: EventSource, ev: Event) => any) | null;
|
||||
onmessage: ((this: EventSource, ev: MessageEvent) => any) | null;
|
||||
onopen: ((this: EventSource, ev: Event) => any) | null;
|
||||
/**
|
||||
* Returns the state of this EventSource object's connection. It can have the
|
||||
* values described below.
|
||||
*/
|
||||
readonly readyState: number;
|
||||
/**
|
||||
* Returns the URL providing the event stream.
|
||||
*/
|
||||
readonly url: string;
|
||||
/**
|
||||
* Returns true if the credentials mode
|
||||
* for connection requests to the URL providing the
|
||||
* event stream is set to "include", and false otherwise.
|
||||
*/
|
||||
readonly withCredentials: boolean;
|
||||
close(): void;
|
||||
readonly CLOSED: number;
|
||||
readonly CONNECTING: number;
|
||||
readonly OPEN: number;
|
||||
onerror: (evt: MessageEvent) => any;
|
||||
onmessage: (evt: MessageEvent) => any;
|
||||
onopen: (evt: MessageEvent) => any;
|
||||
readonly readyState: number;
|
||||
readonly url: string;
|
||||
readonly withCredentials: boolean;
|
||||
close(): void;
|
||||
addEventListener<K extends keyof EventSourceEventMap>(type: K, listener: (this: EventSource, ev: EventSourceEventMap[K]) => any, options?: boolean | AddEventListenerOptions): void;
|
||||
addEventListener(type: string, listener: EventListenerOrEventListenerObject, options?: boolean | AddEventListenerOptions): void;
|
||||
removeEventListener<K extends keyof EventSourceEventMap>(type: K, listener: (this: EventSource, ev: EventSourceEventMap[K]) => any, options?: boolean | EventListenerOptions): void;
|
||||
removeEventListener(type: string, listener: EventListenerOrEventListenerObject, options?: boolean | EventListenerOptions): void;
|
||||
}
|
||||
|
||||
declare var EventSource: {
|
||||
prototype: EventSource;
|
||||
new(url: string, eventSourceInitDict?: EventSourceInit): EventSource;
|
||||
readonly CLOSED: number;
|
||||
readonly CONNECTING: number;
|
||||
readonly OPEN: number;
|
||||
};
|
||||
|
||||
interface EventSourceInit {
|
||||
readonly withCredentials: boolean;
|
||||
}
|
||||
|
||||
/** EventTarget is an interface implemented by objects that can receive events and may have listeners for them. */
|
||||
interface EventTarget {
|
||||
/**
|
||||
* Appends an event listener for events whose type attribute value is type. The callback argument sets the callback that will be invoked when the event is dispatched.
|
||||
@ -1250,8 +1303,9 @@ declare var EventTarget: {
|
||||
new(): EventTarget;
|
||||
};
|
||||
|
||||
/** The ExtendableEvent interface extends the lifetime of the install and activate events dispatched on the global scope as part of the service worker lifecycle. This ensures that any functional events (like FetchEvent) are not dispatched until it upgrades database schemas and deletes the outdated cache entries. */
|
||||
interface ExtendableEvent extends Event {
|
||||
waitUntil(f: Promise<any>): void;
|
||||
waitUntil(f: any): void;
|
||||
}
|
||||
|
||||
declare var ExtendableEvent: {
|
||||
@ -1259,6 +1313,7 @@ declare var ExtendableEvent: {
|
||||
new(type: string, eventInitDict?: ExtendableEventInit): ExtendableEvent;
|
||||
};
|
||||
|
||||
/** The ExtendableMessageEvent interface of the ServiceWorker API represents the event object of a message event fired on a service worker (when a channel message is received on the ServiceWorkerGlobalScope from another context) — extends the lifetime of such events. */
|
||||
interface ExtendableMessageEvent extends ExtendableEvent {
|
||||
readonly data: any;
|
||||
readonly lastEventId: string;
|
||||
@ -1272,13 +1327,14 @@ declare var ExtendableMessageEvent: {
|
||||
new(type: string, eventInitDict?: ExtendableMessageEventInit): ExtendableMessageEvent;
|
||||
};
|
||||
|
||||
/** This is the event type for fetch events dispatched on the service worker global scope. It contains information about the fetch, including the request and how the receiver will treat the response. It provides the event.respondWith() method, which allows us to provide a response to this fetch. */
|
||||
interface FetchEvent extends ExtendableEvent {
|
||||
readonly clientId: string;
|
||||
readonly preloadResponse: Promise<any>;
|
||||
readonly request: Request;
|
||||
readonly resultingClientId: string;
|
||||
readonly targetClientId: string;
|
||||
respondWith(r: Promise<Response>): void;
|
||||
respondWith(r: Response | Promise<Response>): void;
|
||||
}
|
||||
|
||||
declare var FetchEvent: {
|
||||
@ -1286,6 +1342,7 @@ declare var FetchEvent: {
|
||||
new(type: string, eventInitDict: FetchEventInit): FetchEvent;
|
||||
};
|
||||
|
||||
/** The File interface provides information about files and allows JavaScript in a web page to access their content. */
|
||||
interface File extends Blob {
|
||||
readonly lastModified: number;
|
||||
readonly name: string;
|
||||
@ -1296,6 +1353,7 @@ declare var File: {
|
||||
new(fileBits: BlobPart[], fileName: string, options?: FilePropertyBag): File;
|
||||
};
|
||||
|
||||
/** An object of this type is returned by the files property of the HTML <input> element; this lets you access the list of files selected with the <input type="file"> element. It's also used for a list of files dropped into web content when using the drag and drop API; see the DataTransfer object for details on this usage. */
|
||||
interface FileList {
|
||||
readonly length: number;
|
||||
item(index: number): File | null;
|
||||
@ -1316,6 +1374,7 @@ interface FileReaderEventMap {
|
||||
"progress": ProgressEvent;
|
||||
}
|
||||
|
||||
/** The FileReader object lets web applications asynchronously read the contents of files (or raw data buffers) stored on the user's computer, using File or Blob objects to specify the file or data to read. */
|
||||
interface FileReader extends EventTarget {
|
||||
readonly error: DOMException | null;
|
||||
onabort: ((this: FileReader, ev: ProgressEvent) => any) | null;
|
||||
@ -1348,6 +1407,7 @@ declare var FileReader: {
|
||||
readonly LOADING: number;
|
||||
};
|
||||
|
||||
/** The FileReaderSync interface allows to read File or Blob objects in a synchronous way. */
|
||||
interface FileReaderSync {
|
||||
readAsArrayBuffer(blob: Blob): ArrayBuffer;
|
||||
readAsBinaryString(blob: Blob): string;
|
||||
@ -1360,6 +1420,7 @@ declare var FileReaderSync: {
|
||||
new(): FileReaderSync;
|
||||
};
|
||||
|
||||
/** The FormData interface provides a way to easily construct a set of key/value pairs representing form fields and their values, which can then be easily sent using the XMLHttpRequest.send() method. It uses the same format a form would use if the encoding type were set to "multipart/form-data". */
|
||||
interface FormData {
|
||||
append(name: string, value: string | Blob, fileName?: string): void;
|
||||
delete(name: string): void;
|
||||
@ -1379,6 +1440,7 @@ interface GlobalFetch {
|
||||
fetch(input: RequestInfo, init?: RequestInit): Promise<Response>;
|
||||
}
|
||||
|
||||
/** The Headers interface of the Fetch API allows you to perform various actions on HTTP request and response headers. These actions include retrieving, setting, adding to, and removing. A Headers object has an associated header list, which is initially empty and consists of zero or more name and value pairs. You can add to this using methods like append() (see Examples.) In all methods of this interface, header names are matched by case-insensitive byte sequence. */
|
||||
interface Headers {
|
||||
append(name: string, value: string): void;
|
||||
delete(name: string): void;
|
||||
@ -1402,6 +1464,7 @@ interface HkdfCtrParams extends Algorithm {
|
||||
interface IDBArrayKey extends Array<IDBValidKey> {
|
||||
}
|
||||
|
||||
/** The IDBCursor interface of the IndexedDB API represents a cursor for traversing or iterating over multiple records in a database. */
|
||||
interface IDBCursor {
|
||||
/**
|
||||
* Returns the direction ("next", "nextunique", "prev" or "prevunique")
|
||||
@ -1412,12 +1475,12 @@ interface IDBCursor {
|
||||
* Returns the key of the cursor.
|
||||
* Throws a "InvalidStateError" DOMException if the cursor is advancing or is finished.
|
||||
*/
|
||||
readonly key: IDBValidKey | IDBKeyRange;
|
||||
readonly key: IDBValidKey;
|
||||
/**
|
||||
* Returns the effective key of the cursor.
|
||||
* Throws a "InvalidStateError" DOMException if the cursor is advancing or is finished.
|
||||
*/
|
||||
readonly primaryKey: IDBValidKey | IDBKeyRange;
|
||||
readonly primaryKey: IDBValidKey;
|
||||
/**
|
||||
* Returns the IDBObjectStore or IDBIndex the cursor was opened from.
|
||||
*/
|
||||
@ -1431,12 +1494,12 @@ interface IDBCursor {
|
||||
* Advances the cursor to the next record in range matching or
|
||||
* after key.
|
||||
*/
|
||||
continue(key?: IDBValidKey | IDBKeyRange): void;
|
||||
continue(key?: IDBValidKey): void;
|
||||
/**
|
||||
* Advances the cursor to the next record in range matching
|
||||
* or after key and primaryKey. Throws an "InvalidAccessError" DOMException if the source is not an index.
|
||||
*/
|
||||
continuePrimaryKey(key: IDBValidKey | IDBKeyRange, primaryKey: IDBValidKey | IDBKeyRange): void;
|
||||
continuePrimaryKey(key: IDBValidKey, primaryKey: IDBValidKey): void;
|
||||
/**
|
||||
* Delete the record pointed at by the cursor with a new value.
|
||||
* If successful, request's result will be undefined.
|
||||
@ -1455,6 +1518,7 @@ declare var IDBCursor: {
|
||||
new(): IDBCursor;
|
||||
};
|
||||
|
||||
/** The IDBCursorWithValue interface of the IndexedDB API represents a cursor for traversing or iterating over multiple records in a database. It is the same as the IDBCursor, except that it includes the value property. */
|
||||
interface IDBCursorWithValue extends IDBCursor {
|
||||
/**
|
||||
* Returns the cursor's current value.
|
||||
@ -1474,6 +1538,7 @@ interface IDBDatabaseEventMap {
|
||||
"versionchange": IDBVersionChangeEvent;
|
||||
}
|
||||
|
||||
/** The IDBDatabase interface of the IndexedDB API provides a connection to a database; you can use an IDBDatabase object to open a transaction on your database then create, manipulate, and delete objects (data) in that database. The interface provides the only way to get and manage versions of the database. */
|
||||
interface IDBDatabase extends EventTarget {
|
||||
/**
|
||||
* Returns the name of the database.
|
||||
@ -1521,6 +1586,7 @@ declare var IDBDatabase: {
|
||||
new(): IDBDatabase;
|
||||
};
|
||||
|
||||
/** In the following code snippet, we make a request to open a database, and include handlers for the success and error cases. For a full working example, see our To-do Notifications app (view example live.) */
|
||||
interface IDBFactory {
|
||||
/**
|
||||
* Compares two values as keys. Returns -1 if key1 precedes key2, 1 if key2 precedes key1, and 0 if
|
||||
@ -1550,6 +1616,7 @@ declare var IDBFactory: {
|
||||
new(): IDBFactory;
|
||||
};
|
||||
|
||||
/** IDBIndex interface of the IndexedDB API provides asynchronous access to an index in a database. An index is a kind of object store for looking up records in another object store, called the referenced object store. You use this interface to retrieve data. */
|
||||
interface IDBIndex {
|
||||
readonly keyPath: string | string[];
|
||||
readonly multiEntry: boolean;
|
||||
@ -1610,6 +1677,7 @@ declare var IDBIndex: {
|
||||
new(): IDBIndex;
|
||||
};
|
||||
|
||||
/** A key range can be a single value or a range with upper and lower bounds or endpoints. If the key range has both upper and lower bounds, then it is bounded; if it has no bounds, it is unbounded. A bounded key range can either be open (the endpoints are excluded) or closed (the endpoints are included). To retrieve all keys within a certain range, you can use the following code constructs: */
|
||||
interface IDBKeyRange {
|
||||
/**
|
||||
* Returns lower bound, or undefined if none.
|
||||
@ -1658,6 +1726,7 @@ declare var IDBKeyRange: {
|
||||
upperBound(upper: any, open?: boolean): IDBKeyRange;
|
||||
};
|
||||
|
||||
/** This example shows a variety of different uses of object stores, from updating the data structure with IDBObjectStore.createIndex inside an onupgradeneeded function, to adding a new item to our object store with IDBObjectStore.add. For a full working example, see our To-do Notifications app (view example live.) */
|
||||
interface IDBObjectStore {
|
||||
/**
|
||||
* Returns true if the store has a key generator, and false otherwise.
|
||||
@ -1681,7 +1750,7 @@ interface IDBObjectStore {
|
||||
* Returns the associated transaction.
|
||||
*/
|
||||
readonly transaction: IDBTransaction;
|
||||
add(value: any, key?: IDBValidKey | IDBKeyRange): IDBRequest<IDBValidKey>;
|
||||
add(value: any, key?: IDBValidKey): IDBRequest<IDBValidKey>;
|
||||
/**
|
||||
* Deletes all records in store.
|
||||
* If successful, request's result will
|
||||
@ -1754,7 +1823,7 @@ interface IDBObjectStore {
|
||||
* null if there were no matching records.
|
||||
*/
|
||||
openKeyCursor(query?: IDBValidKey | IDBKeyRange, direction?: IDBCursorDirection): IDBRequest<IDBCursor | null>;
|
||||
put(value: any, key?: IDBValidKey | IDBKeyRange): IDBRequest<IDBValidKey>;
|
||||
put(value: any, key?: IDBValidKey): IDBRequest<IDBValidKey>;
|
||||
}
|
||||
|
||||
declare var IDBObjectStore: {
|
||||
@ -1767,6 +1836,7 @@ interface IDBOpenDBRequestEventMap extends IDBRequestEventMap {
|
||||
"upgradeneeded": IDBVersionChangeEvent;
|
||||
}
|
||||
|
||||
/** Also inherits methods from its parents IDBRequest and EventTarget. */
|
||||
interface IDBOpenDBRequest extends IDBRequest<IDBDatabase> {
|
||||
onblocked: ((this: IDBOpenDBRequest, ev: Event) => any) | null;
|
||||
onupgradeneeded: ((this: IDBOpenDBRequest, ev: IDBVersionChangeEvent) => any) | null;
|
||||
@ -1786,6 +1856,7 @@ interface IDBRequestEventMap {
|
||||
"success": Event;
|
||||
}
|
||||
|
||||
/** The request object does not initially contain any information about the result of the operation, but once information becomes available, an event is fired on the request, and the information becomes available through the properties of the IDBRequest instance. */
|
||||
interface IDBRequest<T = any> extends EventTarget {
|
||||
/**
|
||||
* When a request is completed, returns the error (a DOMException), or null if the request succeeded. Throws
|
||||
@ -1877,6 +1948,7 @@ declare var IDBTransaction: {
|
||||
new(): IDBTransaction;
|
||||
};
|
||||
|
||||
/** The IDBVersionChangeEvent interface of the IndexedDB API indicates that the version of the database has changed, as the result of an IDBOpenDBRequest.onupgradeneeded event handler function. */
|
||||
interface IDBVersionChangeEvent extends Event {
|
||||
readonly newVersion: number | null;
|
||||
readonly oldVersion: number;
|
||||
@ -1918,6 +1990,7 @@ interface ImageBitmapOptions {
|
||||
resizeWidth?: number;
|
||||
}
|
||||
|
||||
/** The ImageData interface represents the underlying pixel data of an area of a <canvas> element. It is created using the ImageData() constructor or creator methods on the CanvasRenderingContext2D object associated with a canvas: createImageData() and getImageData(). It can also be used to set a part of the canvas by using putImageData(). */
|
||||
interface ImageData {
|
||||
/**
|
||||
* Returns the one-dimensional array containing the data in RGBA order, as integers in the
|
||||
@ -1938,6 +2011,7 @@ declare var ImageData: {
|
||||
new(array: Uint8ClampedArray, width: number, height: number): ImageData;
|
||||
};
|
||||
|
||||
/** The MessageChannel interface of the Channel Messaging API allows us to create a new message channel and send data through it via its two MessagePort properties. */
|
||||
interface MessageChannel {
|
||||
readonly port1: MessagePort;
|
||||
readonly port2: MessagePort;
|
||||
@ -1948,6 +2022,7 @@ declare var MessageChannel: {
|
||||
new(): MessageChannel;
|
||||
};
|
||||
|
||||
/** The MessageEvent interface represents a message received by a target object. */
|
||||
interface MessageEvent extends Event {
|
||||
/**
|
||||
* Returns the data of the message.
|
||||
@ -1986,6 +2061,7 @@ interface MessagePortEventMap {
|
||||
"messageerror": MessageEvent;
|
||||
}
|
||||
|
||||
/** The MessagePort interface of the Channel Messaging API represents one of the two ports of a MessageChannel, allowing messages to be sent from one port and listening out for them arriving at the other. */
|
||||
interface MessagePort extends EventTarget {
|
||||
onmessage: ((this: MessagePort, ev: MessageEvent) => any) | null;
|
||||
onmessageerror: ((this: MessagePort, ev: MessageEvent) => any) | null;
|
||||
@ -2063,6 +2139,7 @@ interface NotificationEventMap {
|
||||
"show": Event;
|
||||
}
|
||||
|
||||
/** The Notification interface of the Notifications API is used to configure and display desktop notifications to the user. */
|
||||
interface Notification extends EventTarget {
|
||||
readonly actions: ReadonlyArray<NotificationAction>;
|
||||
readonly badge: string;
|
||||
@ -2097,6 +2174,7 @@ declare var Notification: {
|
||||
readonly permission: NotificationPermission;
|
||||
};
|
||||
|
||||
/** The parameter passed into the onnotificationclick handler, the NotificationEvent interface represents a notification click event that is dispatched on the ServiceWorkerGlobalScope of a ServiceWorker. */
|
||||
interface NotificationEvent extends ExtendableEvent {
|
||||
readonly action: string;
|
||||
readonly notification: Notification;
|
||||
@ -2107,23 +2185,29 @@ declare var NotificationEvent: {
|
||||
new(type: string, eventInitDict: NotificationEventInit): NotificationEvent;
|
||||
};
|
||||
|
||||
/** The OES_element_index_uint extension is part of the WebGL API and adds support for gl.UNSIGNED_INT types to WebGLRenderingContext.drawElements(). */
|
||||
interface OES_element_index_uint {
|
||||
}
|
||||
|
||||
/** The OES_standard_derivatives extension is part of the WebGL API and adds the GLSL derivative functions dFdx, dFdy, and fwidth. */
|
||||
interface OES_standard_derivatives {
|
||||
readonly FRAGMENT_SHADER_DERIVATIVE_HINT_OES: GLenum;
|
||||
}
|
||||
|
||||
/** The OES_texture_float extension is part of the WebGL API and exposes floating-point pixel types for textures. */
|
||||
interface OES_texture_float {
|
||||
}
|
||||
|
||||
/** The OES_texture_float_linear extension is part of the WebGL API and allows linear filtering with floating-point pixel types for textures. */
|
||||
interface OES_texture_float_linear {
|
||||
}
|
||||
|
||||
/** The OES_texture_half_float extension is part of the WebGL API and adds texture formats with 16- (aka half float) and 32-bit floating-point components. */
|
||||
interface OES_texture_half_float {
|
||||
readonly HALF_FLOAT_OES: GLenum;
|
||||
}
|
||||
|
||||
/** The OES_texture_half_float_linear extension is part of the WebGL API and allows linear filtering with half floating-point pixel types for textures. */
|
||||
interface OES_texture_half_float_linear {
|
||||
}
|
||||
|
||||
@ -2135,6 +2219,7 @@ interface OES_vertex_array_object {
|
||||
readonly VERTEX_ARRAY_BINDING_OES: GLenum;
|
||||
}
|
||||
|
||||
/** The Path2D interface of the Canvas 2D API is used to declare a path that can then be used on a CanvasRenderingContext2D object. The path methods of the CanvasRenderingContext2D interface are also present on this interface, which gives you the convenience of being able to retain and replay your path whenever desired. */
|
||||
interface Path2D extends CanvasPath {
|
||||
addPath(path: Path2D, transform?: DOMMatrix2DInit): void;
|
||||
}
|
||||
@ -2148,6 +2233,7 @@ interface PerformanceEventMap {
|
||||
"resourcetimingbufferfull": Event;
|
||||
}
|
||||
|
||||
/** The Performance interface provides access to performance-related information for the current page. It's part of the High Resolution Time API, but is enhanced by the Performance Timeline API, the Navigation Timing API, the User Timing API, and the Resource Timing API. */
|
||||
interface Performance extends EventTarget {
|
||||
onresourcetimingbufferfull: ((this: Performance, ev: Event) => any) | null;
|
||||
readonly timeOrigin: number;
|
||||
@ -2173,6 +2259,7 @@ declare var Performance: {
|
||||
new(): Performance;
|
||||
};
|
||||
|
||||
/** The PerformanceEntry object encapsulates a single performance metric that is part of the performance timeline. A performance entry can be directly created by making a performance mark or measure (for example by calling the mark() method) at an explicit point in an application. Performance entries are also created in indirect ways such as loading a resource (such as an image). */
|
||||
interface PerformanceEntry {
|
||||
readonly duration: number;
|
||||
readonly entryType: string;
|
||||
@ -2186,6 +2273,7 @@ declare var PerformanceEntry: {
|
||||
new(): PerformanceEntry;
|
||||
};
|
||||
|
||||
/** PerformanceMark is an abstract interface for PerformanceEntry objects with an entryType of "mark". Entries of this type are created by calling performance.mark() to add a named DOMHighResTimeStamp (the mark) to the browser's performance timeline. */
|
||||
interface PerformanceMark extends PerformanceEntry {
|
||||
}
|
||||
|
||||
@ -2194,6 +2282,7 @@ declare var PerformanceMark: {
|
||||
new(): PerformanceMark;
|
||||
};
|
||||
|
||||
/** PerformanceMeasure is an abstract interface for PerformanceEntry objects with an entryType of "measure". Entries of this type are created by calling performance.measure() to add a named DOMHighResTimeStamp (the measure) between two marks to the browser's performance timeline. */
|
||||
interface PerformanceMeasure extends PerformanceEntry {
|
||||
}
|
||||
|
||||
@ -2224,6 +2313,7 @@ declare var PerformanceObserverEntryList: {
|
||||
new(): PerformanceObserverEntryList;
|
||||
};
|
||||
|
||||
/** The PerformanceResourceTiming interface enables retrieval and analysis of detailed network timing data regarding the loading of an application's resources. An application can use the timing metrics to determine, for example, the length of time it takes to fetch a specific resource, such as an XMLHttpRequest, <SVG>, image, or script. */
|
||||
interface PerformanceResourceTiming extends PerformanceEntry {
|
||||
readonly connectEnd: number;
|
||||
readonly connectStart: number;
|
||||
@ -2250,6 +2340,7 @@ declare var PerformanceResourceTiming: {
|
||||
new(): PerformanceResourceTiming;
|
||||
};
|
||||
|
||||
/** The ProgressEvent interface represents events measuring progress of an underlying process, like an HTTP request (for an XMLHttpRequest, or the loading of the underlying resource of an <img>, <audio>, <video>, <style> or <link>). */
|
||||
interface ProgressEvent extends Event {
|
||||
readonly lengthComputable: boolean;
|
||||
readonly loaded: number;
|
||||
@ -2271,6 +2362,7 @@ declare var PromiseRejectionEvent: {
|
||||
new(type: string, eventInitDict: PromiseRejectionEventInit): PromiseRejectionEvent;
|
||||
};
|
||||
|
||||
/** The PushEvent interface of the Push API represents a push message that has been received. This event is sent to the global scope of a ServiceWorker. It contains the information sent from an application server to a PushSubscription. */
|
||||
interface PushEvent extends ExtendableEvent {
|
||||
readonly data: PushMessageData | null;
|
||||
}
|
||||
@ -2280,6 +2372,7 @@ declare var PushEvent: {
|
||||
new(type: string, eventInitDict?: PushEventInit): PushEvent;
|
||||
};
|
||||
|
||||
/** The PushManager interface of the Push API provides a way to receive notifications from third-party servers as well as request URLs for push notifications. */
|
||||
interface PushManager {
|
||||
getSubscription(): Promise<PushSubscription | null>;
|
||||
permissionState(options?: PushSubscriptionOptionsInit): Promise<PushPermissionState>;
|
||||
@ -2292,6 +2385,7 @@ declare var PushManager: {
|
||||
readonly supportedContentEncodings: ReadonlyArray<string>;
|
||||
};
|
||||
|
||||
/** The PushMessageData interface of the Push API provides methods which let you retrieve the push data sent by a server in various formats. */
|
||||
interface PushMessageData {
|
||||
arrayBuffer(): ArrayBuffer;
|
||||
blob(): Blob;
|
||||
@ -2304,6 +2398,7 @@ declare var PushMessageData: {
|
||||
new(): PushMessageData;
|
||||
};
|
||||
|
||||
/** The PushSubscription interface of the Push API provides a subcription's URL endpoint and allows unsubscription from a push service. */
|
||||
interface PushSubscription {
|
||||
readonly endpoint: string;
|
||||
readonly expirationTime: number | null;
|
||||
@ -2346,6 +2441,7 @@ interface ReadableByteStreamController {
|
||||
error(error?: any): void;
|
||||
}
|
||||
|
||||
/** The ReadableStream interface of the Streams API represents a readable stream of byte data. The Fetch API offers a concrete instance of a ReadableStream through the body property of a Response object. */
|
||||
interface ReadableStream<R = any> {
|
||||
readonly locked: boolean;
|
||||
cancel(reason?: any): Promise<void>;
|
||||
@ -2410,6 +2506,7 @@ declare var ReadableStreamReader: {
|
||||
new(): ReadableStreamReader;
|
||||
};
|
||||
|
||||
/** The Request interface of the Fetch API represents a resource request. */
|
||||
interface Request extends Body {
|
||||
/**
|
||||
* Returns the cache mode associated with request, which is a string indicating
|
||||
@ -2495,6 +2592,7 @@ declare var Request: {
|
||||
new(input: RequestInfo, init?: RequestInit): Request;
|
||||
};
|
||||
|
||||
/** The Response interface of the Fetch API represents the response to a request. */
|
||||
interface Response extends Body {
|
||||
readonly headers: Headers;
|
||||
readonly ok: boolean;
|
||||
@ -2518,6 +2616,7 @@ interface ServiceWorkerEventMap extends AbstractWorkerEventMap {
|
||||
"statechange": Event;
|
||||
}
|
||||
|
||||
/** The ServiceWorker interface of the ServiceWorker API provides a reference to a service worker. Multiple browsing contexts (e.g. pages, workers, etc.) can be associated with the same service worker, each through a unique ServiceWorker object. */
|
||||
interface ServiceWorker extends EventTarget, AbstractWorker {
|
||||
onstatechange: ((this: ServiceWorker, ev: Event) => any) | null;
|
||||
readonly scriptURL: string;
|
||||
@ -2540,6 +2639,7 @@ interface ServiceWorkerContainerEventMap {
|
||||
"messageerror": MessageEvent;
|
||||
}
|
||||
|
||||
/** The ServiceWorkerContainer interface of the ServiceWorker API provides an object representing the service worker as an overall unit in the network ecosystem, including facilities to register, unregister and update service workers, and access the state of service workers and their registrations. */
|
||||
interface ServiceWorkerContainer extends EventTarget {
|
||||
readonly controller: ServiceWorker | null;
|
||||
oncontrollerchange: ((this: ServiceWorkerContainer, ev: Event) => any) | null;
|
||||
@ -2574,6 +2674,7 @@ interface ServiceWorkerGlobalScopeEventMap extends WorkerGlobalScopeEventMap {
|
||||
"sync": SyncEvent;
|
||||
}
|
||||
|
||||
/** The ServiceWorkerGlobalScope interface of the ServiceWorker API represents the global execution context of a service worker. */
|
||||
interface ServiceWorkerGlobalScope extends WorkerGlobalScope {
|
||||
readonly clients: Clients;
|
||||
onactivate: ((this: ServiceWorkerGlobalScope, ev: ExtendableEvent) => any) | null;
|
||||
@ -2603,6 +2704,7 @@ interface ServiceWorkerRegistrationEventMap {
|
||||
"updatefound": Event;
|
||||
}
|
||||
|
||||
/** The ServiceWorkerRegistration interface of the ServiceWorker API represents the service worker registration. You register a service worker to control one or more pages that share the same origin. */
|
||||
interface ServiceWorkerRegistration extends EventTarget {
|
||||
readonly active: ServiceWorker | null;
|
||||
readonly installing: ServiceWorker | null;
|
||||
@ -2638,6 +2740,7 @@ declare var StorageManager: {
|
||||
new(): StorageManager;
|
||||
};
|
||||
|
||||
/** The SubtleCrypto interface represents a set of cryptographic primitives. It is available via the Crypto.subtle properties available in a window context (via Window.crypto). */
|
||||
interface SubtleCrypto {
|
||||
decrypt(algorithm: string | RsaOaepParams | AesCtrParams | AesCbcParams | AesCmacParams | AesGcmParams | AesCfbParams, key: CryptoKey, data: Int8Array | Int16Array | Int32Array | Uint8Array | Uint16Array | Uint32Array | Uint8ClampedArray | Float32Array | Float64Array | DataView | ArrayBuffer): PromiseLike<ArrayBuffer>;
|
||||
deriveBits(algorithm: string | EcdhKeyDeriveParams | DhKeyDeriveParams | ConcatParams | HkdfCtrParams | Pbkdf2Params, baseKey: CryptoKey, length: number): PromiseLike<ArrayBuffer>;
|
||||
@ -2664,6 +2767,7 @@ declare var SubtleCrypto: {
|
||||
new(): SubtleCrypto;
|
||||
};
|
||||
|
||||
/** The SyncEvent interface represents a sync action that is dispatched on the ServiceWorkerGlobalScope of a ServiceWorker. */
|
||||
interface SyncEvent extends ExtendableEvent {
|
||||
readonly lastChance: boolean;
|
||||
readonly tag: string;
|
||||
@ -2674,6 +2778,7 @@ declare var SyncEvent: {
|
||||
new(type: string, init: SyncEventInit): SyncEvent;
|
||||
};
|
||||
|
||||
/** The SyncManager interface of the the ServiceWorker API provides an interface for registering and listing sync registrations. */
|
||||
interface SyncManager {
|
||||
getTags(): Promise<string[]>;
|
||||
register(tag: string): Promise<void>;
|
||||
@ -2684,6 +2789,7 @@ declare var SyncManager: {
|
||||
new(): SyncManager;
|
||||
};
|
||||
|
||||
/** The TextDecoder interface represents a decoder for a specific method, that is a specific character encoding, like utf-8, iso-8859-2, koi8, cp1261, gbk, etc. A decoder takes a stream of bytes as input and emits a stream of code points. For a more scalable, non-native library, see StringView – a C-like representation of strings based on typed arrays. */
|
||||
interface TextDecoder {
|
||||
/**
|
||||
* Returns encoding's name, lowercased.
|
||||
@ -2719,6 +2825,7 @@ declare var TextDecoder: {
|
||||
new(label?: string, options?: TextDecoderOptions): TextDecoder;
|
||||
};
|
||||
|
||||
/** TextEncoder takes a stream of code points as input and emits a stream of bytes. For a more scalable, non-native library, see StringView – a C-like representation of strings based on typed arrays. */
|
||||
interface TextEncoder {
|
||||
/**
|
||||
* Returns "utf-8".
|
||||
@ -2735,6 +2842,7 @@ declare var TextEncoder: {
|
||||
new(): TextEncoder;
|
||||
};
|
||||
|
||||
/** The TextMetrics interface represents the dimension of a text in the canvas, as created by the CanvasRenderingContext2D.measureText() method. */
|
||||
interface TextMetrics {
|
||||
readonly actualBoundingBoxAscent: number;
|
||||
readonly actualBoundingBoxDescent: number;
|
||||
@ -2775,6 +2883,7 @@ interface TransformStreamDefaultController<O = any> {
|
||||
terminate(): void;
|
||||
}
|
||||
|
||||
/** The URL interface represents an object providing static methods used for creating object URLs. */
|
||||
interface URL {
|
||||
hash: string;
|
||||
host: string;
|
||||
@ -2870,6 +2979,7 @@ interface WEBGL_compressed_texture_astc {
|
||||
readonly COMPRESSED_SRGB8_ALPHA8_ASTC_8x8_KHR: GLenum;
|
||||
}
|
||||
|
||||
/** The WEBGL_compressed_texture_s3tc extension is part of the WebGL API and exposes four S3TC compressed texture formats. */
|
||||
interface WEBGL_compressed_texture_s3tc {
|
||||
readonly COMPRESSED_RGBA_S3TC_DXT1_EXT: GLenum;
|
||||
readonly COMPRESSED_RGBA_S3TC_DXT3_EXT: GLenum;
|
||||
@ -2884,6 +2994,7 @@ interface WEBGL_compressed_texture_s3tc_srgb {
|
||||
readonly COMPRESSED_SRGB_S3TC_DXT1_EXT: GLenum;
|
||||
}
|
||||
|
||||
/** The WEBGL_debug_renderer_info extension is part of the WebGL API and exposes two constants with information about the graphics driver for debugging purposes. */
|
||||
interface WEBGL_debug_renderer_info {
|
||||
readonly UNMASKED_RENDERER_WEBGL: GLenum;
|
||||
readonly UNMASKED_VENDOR_WEBGL: GLenum;
|
||||
@ -2893,6 +3004,7 @@ interface WEBGL_debug_shaders {
|
||||
getTranslatedShaderSource(shader: WebGLShader): string;
|
||||
}
|
||||
|
||||
/** The WEBGL_depth_texture extension is part of the WebGL API and defines 2D depth and depth-stencil textures. */
|
||||
interface WEBGL_depth_texture {
|
||||
readonly UNSIGNED_INT_24_8_WEBGL: GLenum;
|
||||
}
|
||||
@ -2940,6 +3052,7 @@ interface WEBGL_lose_context {
|
||||
restoreContext(): void;
|
||||
}
|
||||
|
||||
/** The WebGLActiveInfo interface is part of the WebGL API and represents the information returned by calling the WebGLRenderingContext.getActiveAttrib() and WebGLRenderingContext.getActiveUniform() methods. */
|
||||
interface WebGLActiveInfo {
|
||||
readonly name: string;
|
||||
readonly size: GLint;
|
||||
@ -2951,6 +3064,7 @@ declare var WebGLActiveInfo: {
|
||||
new(): WebGLActiveInfo;
|
||||
};
|
||||
|
||||
/** The WebGLBuffer interface is part of the WebGL API and represents an opaque buffer object storing data such as vertices or colors. */
|
||||
interface WebGLBuffer extends WebGLObject {
|
||||
}
|
||||
|
||||
@ -2959,6 +3073,7 @@ declare var WebGLBuffer: {
|
||||
new(): WebGLBuffer;
|
||||
};
|
||||
|
||||
/** The WebContextEvent interface is part of the WebGL API and is an interface for an event that is generated in response to a status change to the WebGL rendering context. */
|
||||
interface WebGLContextEvent extends Event {
|
||||
readonly statusMessage: string;
|
||||
}
|
||||
@ -2968,6 +3083,7 @@ declare var WebGLContextEvent: {
|
||||
new(type: string, eventInit?: WebGLContextEventInit): WebGLContextEvent;
|
||||
};
|
||||
|
||||
/** The WebGLFramebuffer interface is part of the WebGL API and represents a collection of buffers that serve as a rendering destination. */
|
||||
interface WebGLFramebuffer extends WebGLObject {
|
||||
}
|
||||
|
||||
@ -2984,6 +3100,7 @@ declare var WebGLObject: {
|
||||
new(): WebGLObject;
|
||||
};
|
||||
|
||||
/** The WebGLProgram is part of the WebGL API and is a combination of two compiled WebGLShaders consisting of a vertex shader and a fragment shader (both written in GLSL). */
|
||||
interface WebGLProgram extends WebGLObject {
|
||||
}
|
||||
|
||||
@ -2992,6 +3109,7 @@ declare var WebGLProgram: {
|
||||
new(): WebGLProgram;
|
||||
};
|
||||
|
||||
/** The WebGLRenderbuffer interface is part of the WebGL API and represents a buffer that can contain an image, or can be source or target of an rendering operation. */
|
||||
interface WebGLRenderbuffer extends WebGLObject {
|
||||
}
|
||||
|
||||
@ -3000,6 +3118,7 @@ declare var WebGLRenderbuffer: {
|
||||
new(): WebGLRenderbuffer;
|
||||
};
|
||||
|
||||
/** The WebGLRenderingContext interface provides an interface to the OpenGL ES 2.0 graphics rendering context for the drawing surface of an HTML <canvas> element. */
|
||||
interface WebGLRenderingContext extends WebGLRenderingContextBase {
|
||||
}
|
||||
|
||||
@ -3418,7 +3537,7 @@ interface WebGLRenderingContextBase {
|
||||
isTexture(texture: WebGLTexture | null): GLboolean;
|
||||
lineWidth(width: GLfloat): void;
|
||||
linkProgram(program: WebGLProgram): void;
|
||||
pixelStorei(pname: GLenum, param: GLint): void;
|
||||
pixelStorei(pname: GLenum, param: GLint | GLboolean): void;
|
||||
polygonOffset(factor: GLfloat, units: GLfloat): void;
|
||||
readPixels(x: GLint, y: GLint, width: GLsizei, height: GLsizei, format: GLenum, type: GLenum, pixels: ArrayBufferView | null): void;
|
||||
renderbufferStorage(target: GLenum, internalformat: GLenum, width: GLsizei, height: GLsizei): void;
|
||||
@ -3766,6 +3885,7 @@ interface WebGLRenderingContextBase {
|
||||
readonly ZERO: GLenum;
|
||||
}
|
||||
|
||||
/** The WebGLShader is part of the WebGL API and can either be a vertex or a fragment shader. A WebGLProgram requires both types of shaders. */
|
||||
interface WebGLShader extends WebGLObject {
|
||||
}
|
||||
|
||||
@ -3774,6 +3894,7 @@ declare var WebGLShader: {
|
||||
new(): WebGLShader;
|
||||
};
|
||||
|
||||
/** The WebGLShaderPrecisionFormat interface is part of the WebGL API and represents the information returned by calling the WebGLRenderingContext.getShaderPrecisionFormat() method. */
|
||||
interface WebGLShaderPrecisionFormat {
|
||||
readonly precision: GLint;
|
||||
readonly rangeMax: GLint;
|
||||
@ -3785,6 +3906,7 @@ declare var WebGLShaderPrecisionFormat: {
|
||||
new(): WebGLShaderPrecisionFormat;
|
||||
};
|
||||
|
||||
/** The WebGLTexture interface is part of the WebGL API and represents an opaque texture object providing storage and state for texturing operations. */
|
||||
interface WebGLTexture extends WebGLObject {
|
||||
}
|
||||
|
||||
@ -3793,6 +3915,7 @@ declare var WebGLTexture: {
|
||||
new(): WebGLTexture;
|
||||
};
|
||||
|
||||
/** The WebGLUniformLocation interface is part of the WebGL API and represents the location of a uniform variable in a shader program. */
|
||||
interface WebGLUniformLocation {
|
||||
}
|
||||
|
||||
@ -3811,6 +3934,7 @@ interface WebSocketEventMap {
|
||||
"open": Event;
|
||||
}
|
||||
|
||||
/** The WebSocket object provides the API for creating and managing a WebSocket connection to a server, as well as for sending and receiving data on the connection. */
|
||||
interface WebSocket extends EventTarget {
|
||||
binaryType: BinaryType;
|
||||
readonly bufferedAmount: number;
|
||||
@ -3848,6 +3972,7 @@ interface WindowBase64 {
|
||||
btoa(rawString: string): string;
|
||||
}
|
||||
|
||||
/** The WindowClient interface of the ServiceWorker API represents the scope of a service worker client that is a document in a browser context, controlled by an active worker. The service worker client independently selects and uses a service worker for its own loading and sub-resources. */
|
||||
interface WindowClient extends Client {
|
||||
readonly ancestorOrigins: ReadonlyArray<string>;
|
||||
readonly focused: boolean;
|
||||
@ -3887,6 +4012,7 @@ interface WorkerEventMap extends AbstractWorkerEventMap {
|
||||
"message": MessageEvent;
|
||||
}
|
||||
|
||||
/** The Worker interface of the Web Workers API represents a background task that can be easily created and can send messages back to its creator. Creating a worker is as simple as calling the Worker() constructor and specifying a script to be run in the worker thread. */
|
||||
interface Worker extends EventTarget, AbstractWorker {
|
||||
onmessage: ((this: Worker, ev: MessageEvent) => any) | null;
|
||||
postMessage(message: any, transfer?: Transferable[]): void;
|
||||
@ -3899,13 +4025,14 @@ interface Worker extends EventTarget, AbstractWorker {
|
||||
|
||||
declare var Worker: {
|
||||
prototype: Worker;
|
||||
new(stringUrl: string, options?: WorkerOptions): Worker;
|
||||
new(stringUrl: string | URL, options?: WorkerOptions): Worker;
|
||||
};
|
||||
|
||||
interface WorkerGlobalScopeEventMap {
|
||||
"error": ErrorEvent;
|
||||
}
|
||||
|
||||
/** The WorkerGlobalScope interface of the Web Workers API is an interface representing the scope of any worker. Workers have no browsing context; this scope contains the information usually conveyed by Window objects — in this case event handlers, the console or the associated WorkerNavigator object. Each WorkerGlobalScope has its own event loop. */
|
||||
interface WorkerGlobalScope extends EventTarget, WorkerUtils, WindowConsole, GlobalFetch, WindowOrWorkerGlobalScope {
|
||||
readonly caches: CacheStorage;
|
||||
readonly isSecureContext: boolean;
|
||||
@ -3925,6 +4052,7 @@ declare var WorkerGlobalScope: {
|
||||
new(): WorkerGlobalScope;
|
||||
};
|
||||
|
||||
/** The WorkerLocation interface defines the absolute location of the script executed by the Worker. Such an object is initialized for each worker and is available via the WorkerGlobalScope.location property obtained by calling self.location. */
|
||||
interface WorkerLocation {
|
||||
readonly hash: string;
|
||||
readonly host: string;
|
||||
@ -3943,6 +4071,7 @@ declare var WorkerLocation: {
|
||||
new(): WorkerLocation;
|
||||
};
|
||||
|
||||
/** The WorkerNavigator interface represents a subset of the Navigator interface allowed to be accessed from a Worker. Such an object is initialized for each worker and is available via the WorkerGlobalScope.navigator property obtained by calling window.self.navigator. */
|
||||
interface WorkerNavigator extends NavigatorID, NavigatorOnLine, NavigatorBeacon, NavigatorConcurrentHardware, NavigatorStorage {
|
||||
readonly serviceWorker: ServiceWorkerContainer;
|
||||
}
|
||||
@ -3959,6 +4088,7 @@ interface WorkerUtils extends WindowBase64 {
|
||||
importScripts(...urls: string[]): void;
|
||||
}
|
||||
|
||||
/** The WritableStream interface of the the Streams API provides a standard abstraction for writing streaming data to a destination, known as a sink. This object comes with built-in backpressure and queuing. */
|
||||
interface WritableStream<W = any> {
|
||||
readonly locked: boolean;
|
||||
abort(reason?: any): Promise<void>;
|
||||
@ -3970,10 +4100,12 @@ declare var WritableStream: {
|
||||
new<W = any>(underlyingSink?: UnderlyingSink<W>, strategy?: QueuingStrategy<W>): WritableStream<W>;
|
||||
};
|
||||
|
||||
/** The WritableStreamDefaultController interface of the the Streams API represents a controller allowing control of a WritableStream's state. When constructing a WritableStream, the underlying sink is given a corresponding WritableStreamDefaultController instance to manipulate. */
|
||||
interface WritableStreamDefaultController {
|
||||
error(error?: any): void;
|
||||
}
|
||||
|
||||
/** The WritableStreamDefaultWriter interface of the the Streams API is the object returned by WritableStream.getWriter() and once created locks the < writer to the WritableStream ensuring that no other streams can write to the underlying sink. */
|
||||
interface WritableStreamDefaultWriter<W = any> {
|
||||
readonly closed: Promise<void>;
|
||||
readonly desiredSize: number | null;
|
||||
@ -3988,6 +4120,7 @@ interface XMLHttpRequestEventMap extends XMLHttpRequestEventTargetEventMap {
|
||||
"readystatechange": Event;
|
||||
}
|
||||
|
||||
/** Use XMLHttpRequest (XHR) objects to interact with servers. You can retrieve data from a URL without having to do a full page refresh. This enables a Web page to update just part of a page without disrupting what the user is doing. */
|
||||
interface XMLHttpRequest extends XMLHttpRequestEventTarget {
|
||||
onreadystatechange: ((this: XMLHttpRequest, ev: Event) => any) | null;
|
||||
/**
|
||||
@ -4062,7 +4195,8 @@ interface XMLHttpRequest extends XMLHttpRequestEventTarget {
|
||||
*/
|
||||
overrideMimeType(mime: string): void;
|
||||
/**
|
||||
* Initiates the request. The optional argument provides the request body. The argument is ignored if request method is GET or HEAD.
|
||||
* Initiates the request. The body argument provides the request body, if any,
|
||||
* and is ignored if the request method is GET or HEAD.
|
||||
* Throws an "InvalidStateError" DOMException if either state is not opened or the send() flag is set.
|
||||
*/
|
||||
send(body?: BodyInit | null): void;
|
||||
@ -4279,7 +4413,7 @@ type NotificationDirection = "auto" | "ltr" | "rtl";
|
||||
type NotificationPermission = "default" | "denied" | "granted";
|
||||
type PushEncryptionKeyName = "p256dh" | "auth";
|
||||
type PushPermissionState = "denied" | "granted" | "prompt";
|
||||
type ReferrerPolicy = "" | "no-referrer" | "no-referrer-when-downgrade" | "origin-only" | "origin-when-cross-origin" | "unsafe-url";
|
||||
type ReferrerPolicy = "" | "no-referrer" | "no-referrer-when-downgrade" | "same-origin" | "origin" | "strict-origin" | "origin-when-cross-origin" | "strict-origin-when-cross-origin" | "unsafe-url";
|
||||
type RequestCache = "default" | "no-store" | "reload" | "no-cache" | "force-cache" | "only-if-cached";
|
||||
type RequestCredentials = "omit" | "same-origin" | "include";
|
||||
type RequestDestination = "" | "audio" | "audioworklet" | "document" | "embed" | "font" | "image" | "manifest" | "object" | "paintworklet" | "report" | "script" | "sharedworker" | "style" | "track" | "video" | "worker" | "xslt";
|
||||
|
||||
29
lib/protocol.d.ts
vendored
29
lib/protocol.d.ts
vendored
@ -49,6 +49,7 @@ declare namespace ts.server.protocol {
|
||||
OpenExternalProject = "openExternalProject",
|
||||
OpenExternalProjects = "openExternalProjects",
|
||||
CloseExternalProject = "closeExternalProject",
|
||||
UpdateOpen = "updateOpen",
|
||||
GetOutliningSpans = "getOutliningSpans",
|
||||
TodoComments = "todoComments",
|
||||
Indentation = "indentation",
|
||||
@ -1117,6 +1118,30 @@ declare namespace ts.server.protocol {
|
||||
*/
|
||||
interface CloseExternalProjectResponse extends Response {
|
||||
}
|
||||
/**
|
||||
* Request to synchronize list of open files with the client
|
||||
*/
|
||||
interface UpdateOpenRequest extends Request {
|
||||
command: CommandTypes.UpdateOpen;
|
||||
arguments: UpdateOpenRequestArgs;
|
||||
}
|
||||
/**
|
||||
* Arguments to UpdateOpenRequest
|
||||
*/
|
||||
interface UpdateOpenRequestArgs {
|
||||
/**
|
||||
* List of newly open files
|
||||
*/
|
||||
openFiles?: OpenRequestArgs[];
|
||||
/**
|
||||
* List of open files files that were changes
|
||||
*/
|
||||
changedFiles?: FileCodeEdits[];
|
||||
/**
|
||||
* List of files that were closed
|
||||
*/
|
||||
closedFiles?: string[];
|
||||
}
|
||||
/**
|
||||
* Request to set compiler options for inferred projects.
|
||||
* External projects are opened / closed explicitly.
|
||||
@ -2265,7 +2290,7 @@ declare namespace ts.server.protocol {
|
||||
}
|
||||
interface UserPreferences {
|
||||
readonly disableSuggestions?: boolean;
|
||||
readonly quotePreference?: "double" | "single";
|
||||
readonly quotePreference?: "auto" | "double" | "single";
|
||||
/**
|
||||
* If enabled, TypeScript will search through all external modules' exports and add them to the completions list.
|
||||
* This affects lone identifier completions but not completions on the right hand side of `obj.`.
|
||||
@ -2279,6 +2304,8 @@ declare namespace ts.server.protocol {
|
||||
readonly importModuleSpecifierPreference?: "relative" | "non-relative";
|
||||
readonly allowTextChangesInNewFiles?: boolean;
|
||||
readonly lazyConfiguredProjectsFromExternalProject?: boolean;
|
||||
readonly providePrefixAndSuffixTextForRename?: boolean;
|
||||
readonly allowRenameOfImportPath?: boolean;
|
||||
}
|
||||
interface CompilerOptions {
|
||||
allowJs?: boolean;
|
||||
|
||||
@ -847,7 +847,7 @@
|
||||
"The_arguments_object_cannot_be_referenced_in_an_arrow_function_in_ES3_and_ES5_Consider_using_a_stand_2496": "ES3 ve ES5'te bulunan bir ok işlevinde 'arguments' nesnesine başvuru yapılamaz. Standart bir işlev ifadesi kullanmayı göz önünde bulundurun.",
|
||||
"The_arguments_object_cannot_be_referenced_in_an_async_function_or_method_in_ES3_and_ES5_Consider_usi_2522": "ES3 ve ES5'te 'arguments' nesnesine zaman uyumsuz bir işlev veya metot içinde başvurulamaz. Standart bir işlev veya metot kullanmayı düşünün.",
|
||||
"The_body_of_an_if_statement_cannot_be_the_empty_statement_1313": "'if' deyiminin gövdesi boş deyim olamaz.",
|
||||
"The_character_set_of_the_input_files_6163": "Girdi dosyalarının karakter kümesi.",
|
||||
"The_character_set_of_the_input_files_6163": "Giriş dosyalarının karakter kümesi.",
|
||||
"The_containing_function_or_module_body_is_too_large_for_control_flow_analysis_2563": "İçeren işlev veya modül gövdesi, denetim akışı analizi için çok büyük.",
|
||||
"The_current_host_does_not_support_the_0_option_5001": "Mevcut ana bilgisayar '{0}' seçeneğini desteklemiyor.",
|
||||
"The_expression_of_an_export_assignment_must_be_an_identifier_or_qualified_name_in_an_ambient_context_2714": "Bir dışarı aktarma ataması ifadesi, çevresel bağlamda bir tanımlayıcı veya tam ad olmalıdır.",
|
||||
|
||||
7457
lib/tsc.js
7457
lib/tsc.js
File diff suppressed because it is too large
Load Diff
12494
lib/tsserver.js
12494
lib/tsserver.js
File diff suppressed because it is too large
Load Diff
254
lib/tsserverlibrary.d.ts
vendored
254
lib/tsserverlibrary.d.ts
vendored
@ -14,7 +14,7 @@ and limitations under the License.
|
||||
***************************************************************************** */
|
||||
|
||||
declare namespace ts {
|
||||
const versionMajorMinor = "3.3";
|
||||
const versionMajorMinor = "3.4";
|
||||
/** The version of the TypeScript compiler release */
|
||||
const version: string;
|
||||
}
|
||||
@ -355,40 +355,45 @@ declare namespace ts {
|
||||
ShorthandPropertyAssignment = 276,
|
||||
SpreadAssignment = 277,
|
||||
EnumMember = 278,
|
||||
SourceFile = 279,
|
||||
Bundle = 280,
|
||||
UnparsedSource = 281,
|
||||
InputFiles = 282,
|
||||
JSDocTypeExpression = 283,
|
||||
JSDocAllType = 284,
|
||||
JSDocUnknownType = 285,
|
||||
JSDocNullableType = 286,
|
||||
JSDocNonNullableType = 287,
|
||||
JSDocOptionalType = 288,
|
||||
JSDocFunctionType = 289,
|
||||
JSDocVariadicType = 290,
|
||||
JSDocComment = 291,
|
||||
JSDocTypeLiteral = 292,
|
||||
JSDocSignature = 293,
|
||||
JSDocTag = 294,
|
||||
JSDocAugmentsTag = 295,
|
||||
JSDocClassTag = 296,
|
||||
JSDocCallbackTag = 297,
|
||||
JSDocEnumTag = 298,
|
||||
JSDocParameterTag = 299,
|
||||
JSDocReturnTag = 300,
|
||||
JSDocThisTag = 301,
|
||||
JSDocTypeTag = 302,
|
||||
JSDocTemplateTag = 303,
|
||||
JSDocTypedefTag = 304,
|
||||
JSDocPropertyTag = 305,
|
||||
SyntaxList = 306,
|
||||
NotEmittedStatement = 307,
|
||||
PartiallyEmittedExpression = 308,
|
||||
CommaListExpression = 309,
|
||||
MergeDeclarationMarker = 310,
|
||||
EndOfDeclarationMarker = 311,
|
||||
Count = 312,
|
||||
UnparsedPrologue = 279,
|
||||
UnparsedPrepend = 280,
|
||||
UnparsedText = 281,
|
||||
UnparsedInternalText = 282,
|
||||
UnparsedSyntheticReference = 283,
|
||||
SourceFile = 284,
|
||||
Bundle = 285,
|
||||
UnparsedSource = 286,
|
||||
InputFiles = 287,
|
||||
JSDocTypeExpression = 288,
|
||||
JSDocAllType = 289,
|
||||
JSDocUnknownType = 290,
|
||||
JSDocNullableType = 291,
|
||||
JSDocNonNullableType = 292,
|
||||
JSDocOptionalType = 293,
|
||||
JSDocFunctionType = 294,
|
||||
JSDocVariadicType = 295,
|
||||
JSDocComment = 296,
|
||||
JSDocTypeLiteral = 297,
|
||||
JSDocSignature = 298,
|
||||
JSDocTag = 299,
|
||||
JSDocAugmentsTag = 300,
|
||||
JSDocClassTag = 301,
|
||||
JSDocCallbackTag = 302,
|
||||
JSDocEnumTag = 303,
|
||||
JSDocParameterTag = 304,
|
||||
JSDocReturnTag = 305,
|
||||
JSDocThisTag = 306,
|
||||
JSDocTypeTag = 307,
|
||||
JSDocTemplateTag = 308,
|
||||
JSDocTypedefTag = 309,
|
||||
JSDocPropertyTag = 310,
|
||||
SyntaxList = 311,
|
||||
NotEmittedStatement = 312,
|
||||
PartiallyEmittedExpression = 313,
|
||||
CommaListExpression = 314,
|
||||
MergeDeclarationMarker = 315,
|
||||
EndOfDeclarationMarker = 316,
|
||||
Count = 317,
|
||||
FirstAssignment = 59,
|
||||
LastAssignment = 71,
|
||||
FirstCompoundAssignment = 60,
|
||||
@ -414,10 +419,10 @@ declare namespace ts {
|
||||
FirstBinaryOperator = 28,
|
||||
LastBinaryOperator = 71,
|
||||
FirstNode = 148,
|
||||
FirstJSDocNode = 283,
|
||||
LastJSDocNode = 305,
|
||||
FirstJSDocTagNode = 294,
|
||||
LastJSDocTagNode = 305
|
||||
FirstJSDocNode = 288,
|
||||
LastJSDocNode = 310,
|
||||
FirstJSDocTagNode = 299,
|
||||
LastJSDocTagNode = 310,
|
||||
}
|
||||
enum NodeFlags {
|
||||
None = 0,
|
||||
@ -446,7 +451,7 @@ declare namespace ts {
|
||||
ReachabilityCheckFlags = 384,
|
||||
ReachabilityAndEmitFlags = 1408,
|
||||
ContextFlags = 12679168,
|
||||
TypeExcludesFlags = 20480
|
||||
TypeExcludesFlags = 20480,
|
||||
}
|
||||
enum ModifierFlags {
|
||||
None = 0,
|
||||
@ -814,7 +819,7 @@ declare namespace ts {
|
||||
}
|
||||
interface TypeOperatorNode extends TypeNode {
|
||||
kind: SyntaxKind.TypeOperator;
|
||||
operator: SyntaxKind.KeyOfKeyword | SyntaxKind.UniqueKeyword;
|
||||
operator: SyntaxKind.KeyOfKeyword | SyntaxKind.UniqueKeyword | SyntaxKind.ReadonlyKeyword;
|
||||
type: TypeNode;
|
||||
}
|
||||
interface IndexedAccessTypeNode extends TypeNode {
|
||||
@ -997,6 +1002,14 @@ declare namespace ts {
|
||||
interface NoSubstitutionTemplateLiteral extends LiteralExpression {
|
||||
kind: SyntaxKind.NoSubstitutionTemplateLiteral;
|
||||
}
|
||||
enum TokenFlags {
|
||||
None = 0,
|
||||
Scientific = 16,
|
||||
Octal = 32,
|
||||
HexSpecifier = 64,
|
||||
BinarySpecifier = 128,
|
||||
OctalSpecifier = 256,
|
||||
}
|
||||
interface NumericLiteral extends LiteralExpression {
|
||||
kind: SyntaxKind.NumericLiteral;
|
||||
}
|
||||
@ -1726,18 +1739,55 @@ declare namespace ts {
|
||||
}
|
||||
interface InputFiles extends Node {
|
||||
kind: SyntaxKind.InputFiles;
|
||||
javascriptPath?: string;
|
||||
javascriptText: string;
|
||||
javascriptMapPath?: string;
|
||||
javascriptMapText?: string;
|
||||
declarationPath?: string;
|
||||
declarationText: string;
|
||||
declarationMapPath?: string;
|
||||
declarationMapText?: string;
|
||||
}
|
||||
interface UnparsedSource extends Node {
|
||||
kind: SyntaxKind.UnparsedSource;
|
||||
fileName: string;
|
||||
text: string;
|
||||
prologues: ReadonlyArray<UnparsedPrologue>;
|
||||
helpers: ReadonlyArray<UnscopedEmitHelper> | undefined;
|
||||
referencedFiles: ReadonlyArray<FileReference>;
|
||||
typeReferenceDirectives: ReadonlyArray<string> | undefined;
|
||||
libReferenceDirectives: ReadonlyArray<FileReference>;
|
||||
hasNoDefaultLib?: boolean;
|
||||
sourceMapPath?: string;
|
||||
sourceMapText?: string;
|
||||
syntheticReferences?: ReadonlyArray<UnparsedSyntheticReference>;
|
||||
texts: ReadonlyArray<UnparsedSourceText>;
|
||||
}
|
||||
type UnparsedSourceText = UnparsedPrepend | UnparsedTextLike;
|
||||
type UnparsedNode = UnparsedPrologue | UnparsedSourceText | UnparsedSyntheticReference;
|
||||
interface UnparsedSection extends Node {
|
||||
kind: SyntaxKind;
|
||||
data?: string;
|
||||
parent: UnparsedSource;
|
||||
}
|
||||
interface UnparsedPrologue extends UnparsedSection {
|
||||
kind: SyntaxKind.UnparsedPrologue;
|
||||
data: string;
|
||||
parent: UnparsedSource;
|
||||
}
|
||||
interface UnparsedPrepend extends UnparsedSection {
|
||||
kind: SyntaxKind.UnparsedPrepend;
|
||||
data: string;
|
||||
parent: UnparsedSource;
|
||||
texts: ReadonlyArray<UnparsedTextLike>;
|
||||
}
|
||||
interface UnparsedTextLike extends UnparsedSection {
|
||||
kind: SyntaxKind.UnparsedText | SyntaxKind.UnparsedInternalText;
|
||||
parent: UnparsedSource;
|
||||
}
|
||||
interface UnparsedSyntheticReference extends UnparsedSection {
|
||||
kind: SyntaxKind.UnparsedSyntheticReference;
|
||||
parent: UnparsedSource;
|
||||
}
|
||||
interface JsonSourceFile extends SourceFile {
|
||||
statements: NodeArray<JsonObjectExpressionStatement>;
|
||||
@ -1778,7 +1828,7 @@ declare namespace ts {
|
||||
type ResolvedConfigFileName = string & {
|
||||
_isResolvedConfigFileName: never;
|
||||
};
|
||||
type WriteFileCallback = (fileName: string, data: string, writeByteOrderMark: boolean, onError: ((message: string) => void) | undefined, sourceFiles?: ReadonlyArray<SourceFile>) => void;
|
||||
type WriteFileCallback = (fileName: string, data: string, writeByteOrderMark: boolean, onError?: (message: string) => void, sourceFiles?: ReadonlyArray<SourceFile>) => void;
|
||||
class OperationCanceledException {
|
||||
}
|
||||
interface CancellationToken {
|
||||
@ -2011,7 +2061,7 @@ declare namespace ts {
|
||||
WriteTypeParametersOrArguments = 1,
|
||||
UseOnlyExternalAliasing = 2,
|
||||
AllowAnyNodeKind = 4,
|
||||
UseAliasDefinedOutsideCurrentScope = 8
|
||||
UseAliasDefinedOutsideCurrentScope = 8,
|
||||
}
|
||||
enum TypePredicateKind {
|
||||
This = 0,
|
||||
@ -2089,7 +2139,7 @@ declare namespace ts {
|
||||
ExportHasLocal = 944,
|
||||
BlockScoped = 418,
|
||||
PropertyOrAccessor = 98308,
|
||||
ClassMember = 106500
|
||||
ClassMember = 106500,
|
||||
}
|
||||
interface Symbol {
|
||||
flags: SymbolFlags;
|
||||
@ -2197,7 +2247,7 @@ declare namespace ts {
|
||||
Instantiable = 63176704,
|
||||
StructuredOrInstantiable = 66846720,
|
||||
Narrowable = 133970943,
|
||||
NotUnionOrUnit = 67637251
|
||||
NotUnionOrUnit = 67637251,
|
||||
}
|
||||
type DestructuringPattern = BindingPattern | ObjectLiteralExpression | ArrayLiteralExpression;
|
||||
interface Type {
|
||||
@ -2214,6 +2264,7 @@ declare namespace ts {
|
||||
}
|
||||
interface UniqueESSymbolType extends Type {
|
||||
symbol: Symbol;
|
||||
escapedName: __String;
|
||||
}
|
||||
interface StringLiteralType extends LiteralType {
|
||||
value: string;
|
||||
@ -2243,7 +2294,7 @@ declare namespace ts {
|
||||
MarkerType = 8192,
|
||||
JSLiteral = 16384,
|
||||
FreshLiteral = 32768,
|
||||
ClassOrInterface = 3
|
||||
ClassOrInterface = 3,
|
||||
}
|
||||
interface ObjectType extends Type {
|
||||
objectFlags: ObjectFlags;
|
||||
@ -2282,6 +2333,7 @@ declare namespace ts {
|
||||
interface TupleType extends GenericType {
|
||||
minLength: number;
|
||||
hasRestElement: boolean;
|
||||
readonly: boolean;
|
||||
associatedNames?: __String[];
|
||||
}
|
||||
interface TupleTypeReference extends TypeReference {
|
||||
@ -2490,6 +2542,8 @@ declare namespace ts {
|
||||
reactNamespace?: string;
|
||||
jsxFactory?: string;
|
||||
composite?: boolean;
|
||||
incremental?: boolean;
|
||||
tsBuildInfoFile?: string;
|
||||
removeComments?: boolean;
|
||||
rootDir?: string;
|
||||
rootDirs?: string[];
|
||||
@ -2566,9 +2620,10 @@ declare namespace ts {
|
||||
ES2016 = 3,
|
||||
ES2017 = 4,
|
||||
ES2018 = 5,
|
||||
ESNext = 6,
|
||||
ES2019 = 6,
|
||||
ESNext = 7,
|
||||
JSON = 100,
|
||||
Latest = 6
|
||||
Latest = 7
|
||||
}
|
||||
enum LanguageVariant {
|
||||
Standard = 0,
|
||||
@ -2665,7 +2720,8 @@ declare namespace ts {
|
||||
Dts = ".d.ts",
|
||||
Js = ".js",
|
||||
Jsx = ".jsx",
|
||||
Json = ".json"
|
||||
Json = ".json",
|
||||
TsBuildInfo = ".tsbuildinfo"
|
||||
}
|
||||
interface ResolvedModuleWithFailedLookupLocations {
|
||||
readonly resolvedModule: ResolvedModuleFull | undefined;
|
||||
@ -2689,7 +2745,6 @@ declare namespace ts {
|
||||
getDefaultLibLocation?(): string;
|
||||
writeFile: WriteFileCallback;
|
||||
getCurrentDirectory(): string;
|
||||
getDirectories(path: string): string[];
|
||||
getCanonicalFileName(fileName: string): string;
|
||||
useCaseSensitiveFileNames(): boolean;
|
||||
getNewLine(): string;
|
||||
@ -2739,7 +2794,7 @@ declare namespace ts {
|
||||
NoHoisting = 2097152,
|
||||
HasEndOfDeclarationMarker = 4194304,
|
||||
Iterator = 8388608,
|
||||
NoAsciiEscaping = 16777216
|
||||
NoAsciiEscaping = 16777216,
|
||||
}
|
||||
interface EmitHelper {
|
||||
readonly name: string;
|
||||
@ -2747,6 +2802,10 @@ declare namespace ts {
|
||||
readonly text: string | ((node: EmitHelperUniqueNameCallback) => string);
|
||||
readonly priority?: number;
|
||||
}
|
||||
interface UnscopedEmitHelper extends EmitHelper {
|
||||
readonly scoped: false;
|
||||
readonly text: string;
|
||||
}
|
||||
type EmitHelperUniqueNameCallback = (name: string) => string;
|
||||
enum EmitHint {
|
||||
SourceFile = 0,
|
||||
@ -3006,13 +3065,14 @@ declare namespace ts {
|
||||
}
|
||||
interface UserPreferences {
|
||||
readonly disableSuggestions?: boolean;
|
||||
readonly quotePreference?: "double" | "single";
|
||||
readonly quotePreference?: "auto" | "double" | "single";
|
||||
readonly includeCompletionsForModuleExports?: boolean;
|
||||
readonly includeCompletionsWithInsertText?: boolean;
|
||||
readonly importModuleSpecifierPreference?: "relative" | "non-relative";
|
||||
/** Determines whether we import `foo/index.ts` as "foo", "foo/index", or "foo/index.js" */
|
||||
readonly importModuleSpecifierEnding?: "minimal" | "index" | "js";
|
||||
readonly allowTextChangesInNewFiles?: boolean;
|
||||
readonly providePrefixAndSuffixTextForRename?: boolean;
|
||||
}
|
||||
/** Represents a bigint literal value without requiring bigint support */
|
||||
interface PseudoBigInt {
|
||||
@ -3097,6 +3157,7 @@ declare namespace ts {
|
||||
scanJsxIdentifier(): SyntaxKind;
|
||||
scanJsxAttributeValue(): SyntaxKind;
|
||||
reScanJsxToken(): JsxTokenSyntaxKind;
|
||||
reScanLessThanToken(): SyntaxKind;
|
||||
scanJsxToken(): JsxTokenSyntaxKind;
|
||||
scanJSDocToken(): JsDocSyntaxKind;
|
||||
scan(): SyntaxKind;
|
||||
@ -3354,6 +3415,7 @@ declare namespace ts {
|
||||
function isNewExpression(node: Node): node is NewExpression;
|
||||
function isTaggedTemplateExpression(node: Node): node is TaggedTemplateExpression;
|
||||
function isTypeAssertion(node: Node): node is TypeAssertion;
|
||||
function isConstTypeReference(node: Node): boolean;
|
||||
function isParenthesizedExpression(node: Node): node is ParenthesizedExpression;
|
||||
function skipPartiallyEmittedExpressions(node: Expression): Expression;
|
||||
function skipPartiallyEmittedExpressions(node: Node): Node;
|
||||
@ -3443,6 +3505,9 @@ declare namespace ts {
|
||||
function isSourceFile(node: Node): node is SourceFile;
|
||||
function isBundle(node: Node): node is Bundle;
|
||||
function isUnparsedSource(node: Node): node is UnparsedSource;
|
||||
function isUnparsedPrepend(node: Node): node is UnparsedPrepend;
|
||||
function isUnparsedTextLike(node: Node): node is UnparsedTextLike;
|
||||
function isUnparsedNode(node: Node): node is UnparsedNode;
|
||||
function isJSDocTypeExpression(node: Node): node is JSDocTypeExpression;
|
||||
function isJSDocAllType(node: JSDocAllType): node is JSDocAllType;
|
||||
function isJSDocUnknownType(node: Node): node is JSDocUnknownType;
|
||||
@ -3662,7 +3727,7 @@ declare namespace ts {
|
||||
function createLiteral(value: number | PseudoBigInt): NumericLiteral;
|
||||
function createLiteral(value: boolean): BooleanLiteral;
|
||||
function createLiteral(value: string | number | PseudoBigInt | boolean): PrimaryExpression;
|
||||
function createNumericLiteral(value: string): NumericLiteral;
|
||||
function createNumericLiteral(value: string, numericLiteralFlags?: TokenFlags): NumericLiteral;
|
||||
function createBigIntLiteral(value: string): BigIntLiteral;
|
||||
function createStringLiteral(text: string): StringLiteral;
|
||||
function createRegularExpressionLiteral(text: string): RegularExpressionLiteral;
|
||||
@ -3754,7 +3819,7 @@ declare namespace ts {
|
||||
function updateParenthesizedType(node: ParenthesizedTypeNode, type: TypeNode): ParenthesizedTypeNode;
|
||||
function createThisTypeNode(): ThisTypeNode;
|
||||
function createTypeOperatorNode(type: TypeNode): TypeOperatorNode;
|
||||
function createTypeOperatorNode(operator: SyntaxKind.KeyOfKeyword | SyntaxKind.UniqueKeyword, type: TypeNode): TypeOperatorNode;
|
||||
function createTypeOperatorNode(operator: SyntaxKind.KeyOfKeyword | SyntaxKind.UniqueKeyword | SyntaxKind.ReadonlyKeyword, type: TypeNode): TypeOperatorNode;
|
||||
function updateTypeOperatorNode(node: TypeOperatorNode, type: TypeNode): TypeOperatorNode;
|
||||
function createIndexedAccessTypeNode(objectType: TypeNode, indexType: TypeNode): IndexedAccessTypeNode;
|
||||
function updateIndexedAccessTypeNode(node: IndexedAccessTypeNode, objectType: TypeNode, indexType: TypeNode): IndexedAccessTypeNode;
|
||||
@ -3978,9 +4043,11 @@ declare namespace ts {
|
||||
function updateCommaList(node: CommaListExpression, elements: ReadonlyArray<Expression>): CommaListExpression;
|
||||
function createBundle(sourceFiles: ReadonlyArray<SourceFile>, prepends?: ReadonlyArray<UnparsedSource | InputFiles>): Bundle;
|
||||
function createUnparsedSourceFile(text: string): UnparsedSource;
|
||||
function createUnparsedSourceFile(inputFile: InputFiles, type: "js" | "dts", stripInternal?: boolean): UnparsedSource;
|
||||
function createUnparsedSourceFile(text: string, mapPath: string | undefined, map: string | undefined): UnparsedSource;
|
||||
function createInputFiles(javascript: string, declaration: string): InputFiles;
|
||||
function createInputFiles(javascript: string, declaration: string, javascriptMapPath: string | undefined, javascriptMapText: string | undefined, declarationMapPath: string | undefined, declarationMapText: string | undefined): InputFiles;
|
||||
function createInputFiles(javascriptText: string, declarationText: string): InputFiles;
|
||||
function createInputFiles(readFileText: (path: string) => string | undefined, javascriptPath: string, javascriptMapPath: string | undefined, declarationPath: string, declarationMapPath: string | undefined, buildInfoPath: string | undefined): InputFiles;
|
||||
function createInputFiles(javascriptText: string, declarationText: string, javascriptMapPath: string | undefined, javascriptMapText: string | undefined, declarationMapPath: string | undefined, declarationMapText: string | undefined): InputFiles;
|
||||
function updateBundle(node: Bundle, sourceFiles: ReadonlyArray<SourceFile>, prepends?: ReadonlyArray<UnparsedSource>): Bundle;
|
||||
function createImmediatelyInvokedFunctionExpression(statements: ReadonlyArray<Statement>): CallExpression;
|
||||
function createImmediatelyInvokedFunctionExpression(statements: ReadonlyArray<Statement>, param: ParameterDeclaration, paramValue: Expression): CallExpression;
|
||||
@ -4278,6 +4345,10 @@ declare namespace ts {
|
||||
* Get the syntax diagnostics, for all source files if source file is not supplied
|
||||
*/
|
||||
getSyntacticDiagnostics(sourceFile?: SourceFile, cancellationToken?: CancellationToken): ReadonlyArray<Diagnostic>;
|
||||
/**
|
||||
* Get the declaration diagnostics, for all source files if source file is not supplied
|
||||
*/
|
||||
getDeclarationDiagnostics(sourceFile?: SourceFile, cancellationToken?: CancellationToken): ReadonlyArray<DiagnosticWithLocation>;
|
||||
/**
|
||||
* Get all the dependencies of the file
|
||||
*/
|
||||
@ -4364,13 +4435,11 @@ declare namespace ts {
|
||||
/** If provided, will be used to reset existing delayed compilation */
|
||||
clearTimeout?(timeoutId: any): void;
|
||||
}
|
||||
interface WatchCompilerHost<T extends BuilderProgram> extends WatchHost {
|
||||
interface ProgramHost<T extends BuilderProgram> {
|
||||
/**
|
||||
* Used to create the program when need for program creation or recreation detected
|
||||
*/
|
||||
createProgram: CreateProgram<T>;
|
||||
/** If provided, callback to invoke after every new program creation */
|
||||
afterProgramCreate?(program: T): void;
|
||||
useCaseSensitiveFileNames(): boolean;
|
||||
getNewLine(): string;
|
||||
getCurrentDirectory(): string;
|
||||
@ -4404,6 +4473,10 @@ declare namespace ts {
|
||||
/** If provided, used to resolve type reference directives, otherwise typescript's default resolution */
|
||||
resolveTypeReferenceDirectives?(typeReferenceDirectiveNames: string[], containingFile: string, redirectedReference?: ResolvedProjectReference): (ResolvedTypeReferenceDirective | undefined)[];
|
||||
}
|
||||
interface WatchCompilerHost<T extends BuilderProgram> extends ProgramHost<T>, WatchHost {
|
||||
/** If provided, callback to invoke after every new program creation */
|
||||
afterProgramCreate?(program: T): void;
|
||||
}
|
||||
/**
|
||||
* Host to create watch with root files and options
|
||||
*/
|
||||
@ -4706,8 +4779,8 @@ declare namespace ts {
|
||||
getNameOrDottedNameSpan(fileName: string, startPos: number, endPos: number): TextSpan | undefined;
|
||||
getBreakpointStatementAtPosition(fileName: string, position: number): TextSpan | undefined;
|
||||
getSignatureHelpItems(fileName: string, position: number, options: SignatureHelpItemsOptions | undefined): SignatureHelpItems | undefined;
|
||||
getRenameInfo(fileName: string, position: number): RenameInfo;
|
||||
findRenameLocations(fileName: string, position: number, findInStrings: boolean, findInComments: boolean): ReadonlyArray<RenameLocation> | undefined;
|
||||
getRenameInfo(fileName: string, position: number, options?: RenameInfoOptions): RenameInfo;
|
||||
findRenameLocations(fileName: string, position: number, findInStrings: boolean, findInComments: boolean, providePrefixAndSuffixTextForRename?: boolean): ReadonlyArray<RenameLocation> | undefined;
|
||||
getDefinitionAtPosition(fileName: string, position: number): ReadonlyArray<DefinitionInfo> | undefined;
|
||||
getDefinitionAndBoundSpan(fileName: string, position: number): DefinitionInfoAndBoundSpan | undefined;
|
||||
getTypeDefinitionAtPosition(fileName: string, position: number): ReadonlyArray<DefinitionInfo> | undefined;
|
||||
@ -5150,6 +5223,9 @@ declare namespace ts {
|
||||
canRename: false;
|
||||
localizedErrorMessage: string;
|
||||
}
|
||||
interface RenameInfoOptions {
|
||||
readonly allowRenameOfImportPath?: boolean;
|
||||
}
|
||||
interface SignatureHelpParameter {
|
||||
name: string;
|
||||
documentation: SymbolDisplayPart[];
|
||||
@ -5693,6 +5769,7 @@ declare namespace ts.server.protocol {
|
||||
OpenExternalProject = "openExternalProject",
|
||||
OpenExternalProjects = "openExternalProjects",
|
||||
CloseExternalProject = "closeExternalProject",
|
||||
UpdateOpen = "updateOpen",
|
||||
GetOutliningSpans = "getOutliningSpans",
|
||||
TodoComments = "todoComments",
|
||||
Indentation = "indentation",
|
||||
@ -6761,6 +6838,30 @@ declare namespace ts.server.protocol {
|
||||
*/
|
||||
interface CloseExternalProjectResponse extends Response {
|
||||
}
|
||||
/**
|
||||
* Request to synchronize list of open files with the client
|
||||
*/
|
||||
interface UpdateOpenRequest extends Request {
|
||||
command: CommandTypes.UpdateOpen;
|
||||
arguments: UpdateOpenRequestArgs;
|
||||
}
|
||||
/**
|
||||
* Arguments to UpdateOpenRequest
|
||||
*/
|
||||
interface UpdateOpenRequestArgs {
|
||||
/**
|
||||
* List of newly open files
|
||||
*/
|
||||
openFiles?: OpenRequestArgs[];
|
||||
/**
|
||||
* List of open files files that were changes
|
||||
*/
|
||||
changedFiles?: FileCodeEdits[];
|
||||
/**
|
||||
* List of files that were closed
|
||||
*/
|
||||
closedFiles?: string[];
|
||||
}
|
||||
/**
|
||||
* Request to set compiler options for inferred projects.
|
||||
* External projects are opened / closed explicitly.
|
||||
@ -7909,7 +8010,7 @@ declare namespace ts.server.protocol {
|
||||
}
|
||||
interface UserPreferences {
|
||||
readonly disableSuggestions?: boolean;
|
||||
readonly quotePreference?: "double" | "single";
|
||||
readonly quotePreference?: "auto" | "double" | "single";
|
||||
/**
|
||||
* If enabled, TypeScript will search through all external modules' exports and add them to the completions list.
|
||||
* This affects lone identifier completions but not completions on the right hand side of `obj.`.
|
||||
@ -7923,6 +8024,8 @@ declare namespace ts.server.protocol {
|
||||
readonly importModuleSpecifierPreference?: "relative" | "non-relative";
|
||||
readonly allowTextChangesInNewFiles?: boolean;
|
||||
readonly lazyConfiguredProjectsFromExternalProject?: boolean;
|
||||
readonly providePrefixAndSuffixTextForRename?: boolean;
|
||||
readonly allowRenameOfImportPath?: boolean;
|
||||
}
|
||||
interface CompilerOptions {
|
||||
allowJs?: boolean;
|
||||
@ -8326,7 +8429,7 @@ declare namespace ts.server {
|
||||
excludedFiles: ReadonlyArray<NormalizedPath>;
|
||||
private typeAcquisition;
|
||||
updateGraph(): boolean;
|
||||
getExcludedFiles(): ReadonlyArray<NormalizedPath>;
|
||||
getExcludedFiles(): readonly NormalizedPath[];
|
||||
getTypeAcquisition(): TypeAcquisition;
|
||||
setTypeAcquisition(newTypeAcquisition: TypeAcquisition): void;
|
||||
}
|
||||
@ -8336,7 +8439,6 @@ declare namespace ts.server {
|
||||
const ProjectsUpdatedInBackgroundEvent = "projectsUpdatedInBackground";
|
||||
const ProjectLoadingStartEvent = "projectLoadingStart";
|
||||
const ProjectLoadingFinishEvent = "projectLoadingFinish";
|
||||
const SurveyReady = "surveyReady";
|
||||
const LargeFileReferencedEvent = "largeFileReferenced";
|
||||
const ConfigFileDiagEvent = "configFileDiag";
|
||||
const ProjectLanguageServiceStateEvent = "projectLanguageServiceState";
|
||||
@ -8361,12 +8463,6 @@ declare namespace ts.server {
|
||||
project: Project;
|
||||
};
|
||||
}
|
||||
interface SurveyReady {
|
||||
eventName: typeof SurveyReady;
|
||||
data: {
|
||||
surveyId: string;
|
||||
};
|
||||
}
|
||||
interface LargeFileReferencedEvent {
|
||||
eventName: typeof LargeFileReferencedEvent;
|
||||
data: {
|
||||
@ -8451,7 +8547,7 @@ declare namespace ts.server {
|
||||
interface OpenFileInfo {
|
||||
readonly checkJs: boolean;
|
||||
}
|
||||
type ProjectServiceEvent = LargeFileReferencedEvent | SurveyReady | ProjectsUpdatedInBackgroundEvent | ProjectLoadingStartEvent | ProjectLoadingFinishEvent | ConfigFileDiagEvent | ProjectLanguageServiceStateEvent | ProjectInfoTelemetryEvent | OpenFileInfoTelemetryEvent;
|
||||
type ProjectServiceEvent = LargeFileReferencedEvent | ProjectsUpdatedInBackgroundEvent | ProjectLoadingStartEvent | ProjectLoadingFinishEvent | ConfigFileDiagEvent | ProjectLanguageServiceStateEvent | ProjectInfoTelemetryEvent | OpenFileInfoTelemetryEvent;
|
||||
type ProjectServiceEventHandler = (event: ProjectServiceEvent) => void;
|
||||
interface SafeList {
|
||||
[name: string]: {
|
||||
@ -8497,10 +8593,6 @@ declare namespace ts.server {
|
||||
syntaxOnly?: boolean;
|
||||
}
|
||||
class ProjectService {
|
||||
/**
|
||||
* Container of all known scripts
|
||||
*/
|
||||
private readonly filenameToScriptInfo;
|
||||
private readonly scriptInfoInNodeModulesWatchers;
|
||||
/**
|
||||
* Contains all the deleted script info's version information so that
|
||||
@ -8572,8 +8664,6 @@ declare namespace ts.server {
|
||||
readonly syntaxOnly?: boolean;
|
||||
/** Tracks projects that we have already sent telemetry for. */
|
||||
private readonly seenProjects;
|
||||
/** Tracks projects that we have already sent survey events for. */
|
||||
private readonly seenSurveyProjects;
|
||||
constructor(opts: ProjectServiceOptions);
|
||||
toPath(fileName: string): Path;
|
||||
private loadTypesMap;
|
||||
@ -8599,6 +8689,9 @@ declare namespace ts.server {
|
||||
getHostFormatCodeOptions(): FormatCodeSettings;
|
||||
getHostPreferences(): protocol.UserPreferences;
|
||||
private onSourceFileChanged;
|
||||
private handleSourceMapProjects;
|
||||
private delayUpdateSourceInfoProjects;
|
||||
private delayUpdateProjectsOfScriptInfoPath;
|
||||
private handleDeletedFile;
|
||||
private onConfigChangedForConfiguredProject;
|
||||
/**
|
||||
@ -8608,6 +8701,7 @@ declare namespace ts.server {
|
||||
*/
|
||||
private onConfigFileChangeForOpenScriptInfo;
|
||||
private removeProject;
|
||||
private assignOrphanScriptInfosToInferredProject;
|
||||
/**
|
||||
* Remove this file from the set of open, non-configured files.
|
||||
* @param info The file that has been closed or newly configured
|
||||
@ -8689,6 +8783,8 @@ declare namespace ts.server {
|
||||
*/
|
||||
getScriptInfoForNormalizedPath(fileName: NormalizedPath): ScriptInfo | undefined;
|
||||
getScriptInfoForPath(fileName: Path): ScriptInfo | undefined;
|
||||
private addSourceInfoToSourceMap;
|
||||
private addMissingSourceMapFile;
|
||||
setHostConfiguration(args: protocol.ConfigureRequestArguments): void;
|
||||
closeLog(): void;
|
||||
/**
|
||||
@ -8724,8 +8820,12 @@ declare namespace ts.server {
|
||||
*/
|
||||
openClientFile(fileName: string, fileContent?: string, scriptKind?: ScriptKind, projectRootPath?: string): OpenConfiguredProjectResult;
|
||||
private findExternalProjectContainingOpenScriptInfo;
|
||||
private getOrCreateOpenScriptInfo;
|
||||
private assignProjectToOpenedScriptInfo;
|
||||
private cleanupAfterOpeningFile;
|
||||
openClientFileWithNormalizedPath(fileName: NormalizedPath, fileContent?: string, scriptKind?: ScriptKind, hasMixedContent?: boolean, projectRootPath?: NormalizedPath): OpenConfiguredProjectResult;
|
||||
private removeOrphanConfiguredProjects;
|
||||
private removeOrphanScriptInfos;
|
||||
private telemetryOnOpenFile;
|
||||
/**
|
||||
* Close file whose contents is managed by the client
|
||||
|
||||
11245
lib/tsserverlibrary.js
11245
lib/tsserverlibrary.js
File diff suppressed because it is too large
Load Diff
198
lib/typescript.d.ts
vendored
198
lib/typescript.d.ts
vendored
@ -14,7 +14,7 @@ and limitations under the License.
|
||||
***************************************************************************** */
|
||||
|
||||
declare namespace ts {
|
||||
const versionMajorMinor = "3.3";
|
||||
const versionMajorMinor = "3.4";
|
||||
/** The version of the TypeScript compiler release */
|
||||
const version: string;
|
||||
}
|
||||
@ -355,40 +355,45 @@ declare namespace ts {
|
||||
ShorthandPropertyAssignment = 276,
|
||||
SpreadAssignment = 277,
|
||||
EnumMember = 278,
|
||||
SourceFile = 279,
|
||||
Bundle = 280,
|
||||
UnparsedSource = 281,
|
||||
InputFiles = 282,
|
||||
JSDocTypeExpression = 283,
|
||||
JSDocAllType = 284,
|
||||
JSDocUnknownType = 285,
|
||||
JSDocNullableType = 286,
|
||||
JSDocNonNullableType = 287,
|
||||
JSDocOptionalType = 288,
|
||||
JSDocFunctionType = 289,
|
||||
JSDocVariadicType = 290,
|
||||
JSDocComment = 291,
|
||||
JSDocTypeLiteral = 292,
|
||||
JSDocSignature = 293,
|
||||
JSDocTag = 294,
|
||||
JSDocAugmentsTag = 295,
|
||||
JSDocClassTag = 296,
|
||||
JSDocCallbackTag = 297,
|
||||
JSDocEnumTag = 298,
|
||||
JSDocParameterTag = 299,
|
||||
JSDocReturnTag = 300,
|
||||
JSDocThisTag = 301,
|
||||
JSDocTypeTag = 302,
|
||||
JSDocTemplateTag = 303,
|
||||
JSDocTypedefTag = 304,
|
||||
JSDocPropertyTag = 305,
|
||||
SyntaxList = 306,
|
||||
NotEmittedStatement = 307,
|
||||
PartiallyEmittedExpression = 308,
|
||||
CommaListExpression = 309,
|
||||
MergeDeclarationMarker = 310,
|
||||
EndOfDeclarationMarker = 311,
|
||||
Count = 312,
|
||||
UnparsedPrologue = 279,
|
||||
UnparsedPrepend = 280,
|
||||
UnparsedText = 281,
|
||||
UnparsedInternalText = 282,
|
||||
UnparsedSyntheticReference = 283,
|
||||
SourceFile = 284,
|
||||
Bundle = 285,
|
||||
UnparsedSource = 286,
|
||||
InputFiles = 287,
|
||||
JSDocTypeExpression = 288,
|
||||
JSDocAllType = 289,
|
||||
JSDocUnknownType = 290,
|
||||
JSDocNullableType = 291,
|
||||
JSDocNonNullableType = 292,
|
||||
JSDocOptionalType = 293,
|
||||
JSDocFunctionType = 294,
|
||||
JSDocVariadicType = 295,
|
||||
JSDocComment = 296,
|
||||
JSDocTypeLiteral = 297,
|
||||
JSDocSignature = 298,
|
||||
JSDocTag = 299,
|
||||
JSDocAugmentsTag = 300,
|
||||
JSDocClassTag = 301,
|
||||
JSDocCallbackTag = 302,
|
||||
JSDocEnumTag = 303,
|
||||
JSDocParameterTag = 304,
|
||||
JSDocReturnTag = 305,
|
||||
JSDocThisTag = 306,
|
||||
JSDocTypeTag = 307,
|
||||
JSDocTemplateTag = 308,
|
||||
JSDocTypedefTag = 309,
|
||||
JSDocPropertyTag = 310,
|
||||
SyntaxList = 311,
|
||||
NotEmittedStatement = 312,
|
||||
PartiallyEmittedExpression = 313,
|
||||
CommaListExpression = 314,
|
||||
MergeDeclarationMarker = 315,
|
||||
EndOfDeclarationMarker = 316,
|
||||
Count = 317,
|
||||
FirstAssignment = 59,
|
||||
LastAssignment = 71,
|
||||
FirstCompoundAssignment = 60,
|
||||
@ -414,10 +419,10 @@ declare namespace ts {
|
||||
FirstBinaryOperator = 28,
|
||||
LastBinaryOperator = 71,
|
||||
FirstNode = 148,
|
||||
FirstJSDocNode = 283,
|
||||
LastJSDocNode = 305,
|
||||
FirstJSDocTagNode = 294,
|
||||
LastJSDocTagNode = 305
|
||||
FirstJSDocNode = 288,
|
||||
LastJSDocNode = 310,
|
||||
FirstJSDocTagNode = 299,
|
||||
LastJSDocTagNode = 310,
|
||||
}
|
||||
enum NodeFlags {
|
||||
None = 0,
|
||||
@ -446,7 +451,7 @@ declare namespace ts {
|
||||
ReachabilityCheckFlags = 384,
|
||||
ReachabilityAndEmitFlags = 1408,
|
||||
ContextFlags = 12679168,
|
||||
TypeExcludesFlags = 20480
|
||||
TypeExcludesFlags = 20480,
|
||||
}
|
||||
enum ModifierFlags {
|
||||
None = 0,
|
||||
@ -814,7 +819,7 @@ declare namespace ts {
|
||||
}
|
||||
interface TypeOperatorNode extends TypeNode {
|
||||
kind: SyntaxKind.TypeOperator;
|
||||
operator: SyntaxKind.KeyOfKeyword | SyntaxKind.UniqueKeyword;
|
||||
operator: SyntaxKind.KeyOfKeyword | SyntaxKind.UniqueKeyword | SyntaxKind.ReadonlyKeyword;
|
||||
type: TypeNode;
|
||||
}
|
||||
interface IndexedAccessTypeNode extends TypeNode {
|
||||
@ -997,6 +1002,14 @@ declare namespace ts {
|
||||
interface NoSubstitutionTemplateLiteral extends LiteralExpression {
|
||||
kind: SyntaxKind.NoSubstitutionTemplateLiteral;
|
||||
}
|
||||
enum TokenFlags {
|
||||
None = 0,
|
||||
Scientific = 16,
|
||||
Octal = 32,
|
||||
HexSpecifier = 64,
|
||||
BinarySpecifier = 128,
|
||||
OctalSpecifier = 256,
|
||||
}
|
||||
interface NumericLiteral extends LiteralExpression {
|
||||
kind: SyntaxKind.NumericLiteral;
|
||||
}
|
||||
@ -1726,18 +1739,55 @@ declare namespace ts {
|
||||
}
|
||||
interface InputFiles extends Node {
|
||||
kind: SyntaxKind.InputFiles;
|
||||
javascriptPath?: string;
|
||||
javascriptText: string;
|
||||
javascriptMapPath?: string;
|
||||
javascriptMapText?: string;
|
||||
declarationPath?: string;
|
||||
declarationText: string;
|
||||
declarationMapPath?: string;
|
||||
declarationMapText?: string;
|
||||
}
|
||||
interface UnparsedSource extends Node {
|
||||
kind: SyntaxKind.UnparsedSource;
|
||||
fileName: string;
|
||||
text: string;
|
||||
prologues: ReadonlyArray<UnparsedPrologue>;
|
||||
helpers: ReadonlyArray<UnscopedEmitHelper> | undefined;
|
||||
referencedFiles: ReadonlyArray<FileReference>;
|
||||
typeReferenceDirectives: ReadonlyArray<string> | undefined;
|
||||
libReferenceDirectives: ReadonlyArray<FileReference>;
|
||||
hasNoDefaultLib?: boolean;
|
||||
sourceMapPath?: string;
|
||||
sourceMapText?: string;
|
||||
syntheticReferences?: ReadonlyArray<UnparsedSyntheticReference>;
|
||||
texts: ReadonlyArray<UnparsedSourceText>;
|
||||
}
|
||||
type UnparsedSourceText = UnparsedPrepend | UnparsedTextLike;
|
||||
type UnparsedNode = UnparsedPrologue | UnparsedSourceText | UnparsedSyntheticReference;
|
||||
interface UnparsedSection extends Node {
|
||||
kind: SyntaxKind;
|
||||
data?: string;
|
||||
parent: UnparsedSource;
|
||||
}
|
||||
interface UnparsedPrologue extends UnparsedSection {
|
||||
kind: SyntaxKind.UnparsedPrologue;
|
||||
data: string;
|
||||
parent: UnparsedSource;
|
||||
}
|
||||
interface UnparsedPrepend extends UnparsedSection {
|
||||
kind: SyntaxKind.UnparsedPrepend;
|
||||
data: string;
|
||||
parent: UnparsedSource;
|
||||
texts: ReadonlyArray<UnparsedTextLike>;
|
||||
}
|
||||
interface UnparsedTextLike extends UnparsedSection {
|
||||
kind: SyntaxKind.UnparsedText | SyntaxKind.UnparsedInternalText;
|
||||
parent: UnparsedSource;
|
||||
}
|
||||
interface UnparsedSyntheticReference extends UnparsedSection {
|
||||
kind: SyntaxKind.UnparsedSyntheticReference;
|
||||
parent: UnparsedSource;
|
||||
}
|
||||
interface JsonSourceFile extends SourceFile {
|
||||
statements: NodeArray<JsonObjectExpressionStatement>;
|
||||
@ -1778,7 +1828,7 @@ declare namespace ts {
|
||||
type ResolvedConfigFileName = string & {
|
||||
_isResolvedConfigFileName: never;
|
||||
};
|
||||
type WriteFileCallback = (fileName: string, data: string, writeByteOrderMark: boolean, onError: ((message: string) => void) | undefined, sourceFiles?: ReadonlyArray<SourceFile>) => void;
|
||||
type WriteFileCallback = (fileName: string, data: string, writeByteOrderMark: boolean, onError?: (message: string) => void, sourceFiles?: ReadonlyArray<SourceFile>) => void;
|
||||
class OperationCanceledException {
|
||||
}
|
||||
interface CancellationToken {
|
||||
@ -2011,7 +2061,7 @@ declare namespace ts {
|
||||
WriteTypeParametersOrArguments = 1,
|
||||
UseOnlyExternalAliasing = 2,
|
||||
AllowAnyNodeKind = 4,
|
||||
UseAliasDefinedOutsideCurrentScope = 8
|
||||
UseAliasDefinedOutsideCurrentScope = 8,
|
||||
}
|
||||
enum TypePredicateKind {
|
||||
This = 0,
|
||||
@ -2089,7 +2139,7 @@ declare namespace ts {
|
||||
ExportHasLocal = 944,
|
||||
BlockScoped = 418,
|
||||
PropertyOrAccessor = 98308,
|
||||
ClassMember = 106500
|
||||
ClassMember = 106500,
|
||||
}
|
||||
interface Symbol {
|
||||
flags: SymbolFlags;
|
||||
@ -2197,7 +2247,7 @@ declare namespace ts {
|
||||
Instantiable = 63176704,
|
||||
StructuredOrInstantiable = 66846720,
|
||||
Narrowable = 133970943,
|
||||
NotUnionOrUnit = 67637251
|
||||
NotUnionOrUnit = 67637251,
|
||||
}
|
||||
type DestructuringPattern = BindingPattern | ObjectLiteralExpression | ArrayLiteralExpression;
|
||||
interface Type {
|
||||
@ -2214,6 +2264,7 @@ declare namespace ts {
|
||||
}
|
||||
interface UniqueESSymbolType extends Type {
|
||||
symbol: Symbol;
|
||||
escapedName: __String;
|
||||
}
|
||||
interface StringLiteralType extends LiteralType {
|
||||
value: string;
|
||||
@ -2243,7 +2294,7 @@ declare namespace ts {
|
||||
MarkerType = 8192,
|
||||
JSLiteral = 16384,
|
||||
FreshLiteral = 32768,
|
||||
ClassOrInterface = 3
|
||||
ClassOrInterface = 3,
|
||||
}
|
||||
interface ObjectType extends Type {
|
||||
objectFlags: ObjectFlags;
|
||||
@ -2282,6 +2333,7 @@ declare namespace ts {
|
||||
interface TupleType extends GenericType {
|
||||
minLength: number;
|
||||
hasRestElement: boolean;
|
||||
readonly: boolean;
|
||||
associatedNames?: __String[];
|
||||
}
|
||||
interface TupleTypeReference extends TypeReference {
|
||||
@ -2490,6 +2542,8 @@ declare namespace ts {
|
||||
reactNamespace?: string;
|
||||
jsxFactory?: string;
|
||||
composite?: boolean;
|
||||
incremental?: boolean;
|
||||
tsBuildInfoFile?: string;
|
||||
removeComments?: boolean;
|
||||
rootDir?: string;
|
||||
rootDirs?: string[];
|
||||
@ -2566,9 +2620,10 @@ declare namespace ts {
|
||||
ES2016 = 3,
|
||||
ES2017 = 4,
|
||||
ES2018 = 5,
|
||||
ESNext = 6,
|
||||
ES2019 = 6,
|
||||
ESNext = 7,
|
||||
JSON = 100,
|
||||
Latest = 6
|
||||
Latest = 7
|
||||
}
|
||||
enum LanguageVariant {
|
||||
Standard = 0,
|
||||
@ -2665,7 +2720,8 @@ declare namespace ts {
|
||||
Dts = ".d.ts",
|
||||
Js = ".js",
|
||||
Jsx = ".jsx",
|
||||
Json = ".json"
|
||||
Json = ".json",
|
||||
TsBuildInfo = ".tsbuildinfo"
|
||||
}
|
||||
interface ResolvedModuleWithFailedLookupLocations {
|
||||
readonly resolvedModule: ResolvedModuleFull | undefined;
|
||||
@ -2689,7 +2745,6 @@ declare namespace ts {
|
||||
getDefaultLibLocation?(): string;
|
||||
writeFile: WriteFileCallback;
|
||||
getCurrentDirectory(): string;
|
||||
getDirectories(path: string): string[];
|
||||
getCanonicalFileName(fileName: string): string;
|
||||
useCaseSensitiveFileNames(): boolean;
|
||||
getNewLine(): string;
|
||||
@ -2739,7 +2794,7 @@ declare namespace ts {
|
||||
NoHoisting = 2097152,
|
||||
HasEndOfDeclarationMarker = 4194304,
|
||||
Iterator = 8388608,
|
||||
NoAsciiEscaping = 16777216
|
||||
NoAsciiEscaping = 16777216,
|
||||
}
|
||||
interface EmitHelper {
|
||||
readonly name: string;
|
||||
@ -2747,6 +2802,10 @@ declare namespace ts {
|
||||
readonly text: string | ((node: EmitHelperUniqueNameCallback) => string);
|
||||
readonly priority?: number;
|
||||
}
|
||||
interface UnscopedEmitHelper extends EmitHelper {
|
||||
readonly scoped: false;
|
||||
readonly text: string;
|
||||
}
|
||||
type EmitHelperUniqueNameCallback = (name: string) => string;
|
||||
enum EmitHint {
|
||||
SourceFile = 0,
|
||||
@ -3006,13 +3065,14 @@ declare namespace ts {
|
||||
}
|
||||
interface UserPreferences {
|
||||
readonly disableSuggestions?: boolean;
|
||||
readonly quotePreference?: "double" | "single";
|
||||
readonly quotePreference?: "auto" | "double" | "single";
|
||||
readonly includeCompletionsForModuleExports?: boolean;
|
||||
readonly includeCompletionsWithInsertText?: boolean;
|
||||
readonly importModuleSpecifierPreference?: "relative" | "non-relative";
|
||||
/** Determines whether we import `foo/index.ts` as "foo", "foo/index", or "foo/index.js" */
|
||||
readonly importModuleSpecifierEnding?: "minimal" | "index" | "js";
|
||||
readonly allowTextChangesInNewFiles?: boolean;
|
||||
readonly providePrefixAndSuffixTextForRename?: boolean;
|
||||
}
|
||||
/** Represents a bigint literal value without requiring bigint support */
|
||||
interface PseudoBigInt {
|
||||
@ -3097,6 +3157,7 @@ declare namespace ts {
|
||||
scanJsxIdentifier(): SyntaxKind;
|
||||
scanJsxAttributeValue(): SyntaxKind;
|
||||
reScanJsxToken(): JsxTokenSyntaxKind;
|
||||
reScanLessThanToken(): SyntaxKind;
|
||||
scanJsxToken(): JsxTokenSyntaxKind;
|
||||
scanJSDocToken(): JsDocSyntaxKind;
|
||||
scan(): SyntaxKind;
|
||||
@ -3354,6 +3415,7 @@ declare namespace ts {
|
||||
function isNewExpression(node: Node): node is NewExpression;
|
||||
function isTaggedTemplateExpression(node: Node): node is TaggedTemplateExpression;
|
||||
function isTypeAssertion(node: Node): node is TypeAssertion;
|
||||
function isConstTypeReference(node: Node): boolean;
|
||||
function isParenthesizedExpression(node: Node): node is ParenthesizedExpression;
|
||||
function skipPartiallyEmittedExpressions(node: Expression): Expression;
|
||||
function skipPartiallyEmittedExpressions(node: Node): Node;
|
||||
@ -3443,6 +3505,9 @@ declare namespace ts {
|
||||
function isSourceFile(node: Node): node is SourceFile;
|
||||
function isBundle(node: Node): node is Bundle;
|
||||
function isUnparsedSource(node: Node): node is UnparsedSource;
|
||||
function isUnparsedPrepend(node: Node): node is UnparsedPrepend;
|
||||
function isUnparsedTextLike(node: Node): node is UnparsedTextLike;
|
||||
function isUnparsedNode(node: Node): node is UnparsedNode;
|
||||
function isJSDocTypeExpression(node: Node): node is JSDocTypeExpression;
|
||||
function isJSDocAllType(node: JSDocAllType): node is JSDocAllType;
|
||||
function isJSDocUnknownType(node: Node): node is JSDocUnknownType;
|
||||
@ -3662,7 +3727,7 @@ declare namespace ts {
|
||||
function createLiteral(value: number | PseudoBigInt): NumericLiteral;
|
||||
function createLiteral(value: boolean): BooleanLiteral;
|
||||
function createLiteral(value: string | number | PseudoBigInt | boolean): PrimaryExpression;
|
||||
function createNumericLiteral(value: string): NumericLiteral;
|
||||
function createNumericLiteral(value: string, numericLiteralFlags?: TokenFlags): NumericLiteral;
|
||||
function createBigIntLiteral(value: string): BigIntLiteral;
|
||||
function createStringLiteral(text: string): StringLiteral;
|
||||
function createRegularExpressionLiteral(text: string): RegularExpressionLiteral;
|
||||
@ -3754,7 +3819,7 @@ declare namespace ts {
|
||||
function updateParenthesizedType(node: ParenthesizedTypeNode, type: TypeNode): ParenthesizedTypeNode;
|
||||
function createThisTypeNode(): ThisTypeNode;
|
||||
function createTypeOperatorNode(type: TypeNode): TypeOperatorNode;
|
||||
function createTypeOperatorNode(operator: SyntaxKind.KeyOfKeyword | SyntaxKind.UniqueKeyword, type: TypeNode): TypeOperatorNode;
|
||||
function createTypeOperatorNode(operator: SyntaxKind.KeyOfKeyword | SyntaxKind.UniqueKeyword | SyntaxKind.ReadonlyKeyword, type: TypeNode): TypeOperatorNode;
|
||||
function updateTypeOperatorNode(node: TypeOperatorNode, type: TypeNode): TypeOperatorNode;
|
||||
function createIndexedAccessTypeNode(objectType: TypeNode, indexType: TypeNode): IndexedAccessTypeNode;
|
||||
function updateIndexedAccessTypeNode(node: IndexedAccessTypeNode, objectType: TypeNode, indexType: TypeNode): IndexedAccessTypeNode;
|
||||
@ -3978,9 +4043,11 @@ declare namespace ts {
|
||||
function updateCommaList(node: CommaListExpression, elements: ReadonlyArray<Expression>): CommaListExpression;
|
||||
function createBundle(sourceFiles: ReadonlyArray<SourceFile>, prepends?: ReadonlyArray<UnparsedSource | InputFiles>): Bundle;
|
||||
function createUnparsedSourceFile(text: string): UnparsedSource;
|
||||
function createUnparsedSourceFile(inputFile: InputFiles, type: "js" | "dts", stripInternal?: boolean): UnparsedSource;
|
||||
function createUnparsedSourceFile(text: string, mapPath: string | undefined, map: string | undefined): UnparsedSource;
|
||||
function createInputFiles(javascript: string, declaration: string): InputFiles;
|
||||
function createInputFiles(javascript: string, declaration: string, javascriptMapPath: string | undefined, javascriptMapText: string | undefined, declarationMapPath: string | undefined, declarationMapText: string | undefined): InputFiles;
|
||||
function createInputFiles(javascriptText: string, declarationText: string): InputFiles;
|
||||
function createInputFiles(readFileText: (path: string) => string | undefined, javascriptPath: string, javascriptMapPath: string | undefined, declarationPath: string, declarationMapPath: string | undefined, buildInfoPath: string | undefined): InputFiles;
|
||||
function createInputFiles(javascriptText: string, declarationText: string, javascriptMapPath: string | undefined, javascriptMapText: string | undefined, declarationMapPath: string | undefined, declarationMapText: string | undefined): InputFiles;
|
||||
function updateBundle(node: Bundle, sourceFiles: ReadonlyArray<SourceFile>, prepends?: ReadonlyArray<UnparsedSource>): Bundle;
|
||||
function createImmediatelyInvokedFunctionExpression(statements: ReadonlyArray<Statement>): CallExpression;
|
||||
function createImmediatelyInvokedFunctionExpression(statements: ReadonlyArray<Statement>, param: ParameterDeclaration, paramValue: Expression): CallExpression;
|
||||
@ -4278,6 +4345,10 @@ declare namespace ts {
|
||||
* Get the syntax diagnostics, for all source files if source file is not supplied
|
||||
*/
|
||||
getSyntacticDiagnostics(sourceFile?: SourceFile, cancellationToken?: CancellationToken): ReadonlyArray<Diagnostic>;
|
||||
/**
|
||||
* Get the declaration diagnostics, for all source files if source file is not supplied
|
||||
*/
|
||||
getDeclarationDiagnostics(sourceFile?: SourceFile, cancellationToken?: CancellationToken): ReadonlyArray<DiagnosticWithLocation>;
|
||||
/**
|
||||
* Get all the dependencies of the file
|
||||
*/
|
||||
@ -4364,13 +4435,11 @@ declare namespace ts {
|
||||
/** If provided, will be used to reset existing delayed compilation */
|
||||
clearTimeout?(timeoutId: any): void;
|
||||
}
|
||||
interface WatchCompilerHost<T extends BuilderProgram> extends WatchHost {
|
||||
interface ProgramHost<T extends BuilderProgram> {
|
||||
/**
|
||||
* Used to create the program when need for program creation or recreation detected
|
||||
*/
|
||||
createProgram: CreateProgram<T>;
|
||||
/** If provided, callback to invoke after every new program creation */
|
||||
afterProgramCreate?(program: T): void;
|
||||
useCaseSensitiveFileNames(): boolean;
|
||||
getNewLine(): string;
|
||||
getCurrentDirectory(): string;
|
||||
@ -4404,6 +4473,10 @@ declare namespace ts {
|
||||
/** If provided, used to resolve type reference directives, otherwise typescript's default resolution */
|
||||
resolveTypeReferenceDirectives?(typeReferenceDirectiveNames: string[], containingFile: string, redirectedReference?: ResolvedProjectReference): (ResolvedTypeReferenceDirective | undefined)[];
|
||||
}
|
||||
interface WatchCompilerHost<T extends BuilderProgram> extends ProgramHost<T>, WatchHost {
|
||||
/** If provided, callback to invoke after every new program creation */
|
||||
afterProgramCreate?(program: T): void;
|
||||
}
|
||||
/**
|
||||
* Host to create watch with root files and options
|
||||
*/
|
||||
@ -4706,8 +4779,8 @@ declare namespace ts {
|
||||
getNameOrDottedNameSpan(fileName: string, startPos: number, endPos: number): TextSpan | undefined;
|
||||
getBreakpointStatementAtPosition(fileName: string, position: number): TextSpan | undefined;
|
||||
getSignatureHelpItems(fileName: string, position: number, options: SignatureHelpItemsOptions | undefined): SignatureHelpItems | undefined;
|
||||
getRenameInfo(fileName: string, position: number): RenameInfo;
|
||||
findRenameLocations(fileName: string, position: number, findInStrings: boolean, findInComments: boolean): ReadonlyArray<RenameLocation> | undefined;
|
||||
getRenameInfo(fileName: string, position: number, options?: RenameInfoOptions): RenameInfo;
|
||||
findRenameLocations(fileName: string, position: number, findInStrings: boolean, findInComments: boolean, providePrefixAndSuffixTextForRename?: boolean): ReadonlyArray<RenameLocation> | undefined;
|
||||
getDefinitionAtPosition(fileName: string, position: number): ReadonlyArray<DefinitionInfo> | undefined;
|
||||
getDefinitionAndBoundSpan(fileName: string, position: number): DefinitionInfoAndBoundSpan | undefined;
|
||||
getTypeDefinitionAtPosition(fileName: string, position: number): ReadonlyArray<DefinitionInfo> | undefined;
|
||||
@ -5150,6 +5223,9 @@ declare namespace ts {
|
||||
canRename: false;
|
||||
localizedErrorMessage: string;
|
||||
}
|
||||
interface RenameInfoOptions {
|
||||
readonly allowRenameOfImportPath?: boolean;
|
||||
}
|
||||
interface SignatureHelpParameter {
|
||||
name: string;
|
||||
documentation: SymbolDisplayPart[];
|
||||
|
||||
10609
lib/typescript.js
10609
lib/typescript.js
File diff suppressed because it is too large
Load Diff
198
lib/typescriptServices.d.ts
vendored
198
lib/typescriptServices.d.ts
vendored
@ -14,7 +14,7 @@ and limitations under the License.
|
||||
***************************************************************************** */
|
||||
|
||||
declare namespace ts {
|
||||
const versionMajorMinor = "3.3";
|
||||
const versionMajorMinor = "3.4";
|
||||
/** The version of the TypeScript compiler release */
|
||||
const version: string;
|
||||
}
|
||||
@ -355,40 +355,45 @@ declare namespace ts {
|
||||
ShorthandPropertyAssignment = 276,
|
||||
SpreadAssignment = 277,
|
||||
EnumMember = 278,
|
||||
SourceFile = 279,
|
||||
Bundle = 280,
|
||||
UnparsedSource = 281,
|
||||
InputFiles = 282,
|
||||
JSDocTypeExpression = 283,
|
||||
JSDocAllType = 284,
|
||||
JSDocUnknownType = 285,
|
||||
JSDocNullableType = 286,
|
||||
JSDocNonNullableType = 287,
|
||||
JSDocOptionalType = 288,
|
||||
JSDocFunctionType = 289,
|
||||
JSDocVariadicType = 290,
|
||||
JSDocComment = 291,
|
||||
JSDocTypeLiteral = 292,
|
||||
JSDocSignature = 293,
|
||||
JSDocTag = 294,
|
||||
JSDocAugmentsTag = 295,
|
||||
JSDocClassTag = 296,
|
||||
JSDocCallbackTag = 297,
|
||||
JSDocEnumTag = 298,
|
||||
JSDocParameterTag = 299,
|
||||
JSDocReturnTag = 300,
|
||||
JSDocThisTag = 301,
|
||||
JSDocTypeTag = 302,
|
||||
JSDocTemplateTag = 303,
|
||||
JSDocTypedefTag = 304,
|
||||
JSDocPropertyTag = 305,
|
||||
SyntaxList = 306,
|
||||
NotEmittedStatement = 307,
|
||||
PartiallyEmittedExpression = 308,
|
||||
CommaListExpression = 309,
|
||||
MergeDeclarationMarker = 310,
|
||||
EndOfDeclarationMarker = 311,
|
||||
Count = 312,
|
||||
UnparsedPrologue = 279,
|
||||
UnparsedPrepend = 280,
|
||||
UnparsedText = 281,
|
||||
UnparsedInternalText = 282,
|
||||
UnparsedSyntheticReference = 283,
|
||||
SourceFile = 284,
|
||||
Bundle = 285,
|
||||
UnparsedSource = 286,
|
||||
InputFiles = 287,
|
||||
JSDocTypeExpression = 288,
|
||||
JSDocAllType = 289,
|
||||
JSDocUnknownType = 290,
|
||||
JSDocNullableType = 291,
|
||||
JSDocNonNullableType = 292,
|
||||
JSDocOptionalType = 293,
|
||||
JSDocFunctionType = 294,
|
||||
JSDocVariadicType = 295,
|
||||
JSDocComment = 296,
|
||||
JSDocTypeLiteral = 297,
|
||||
JSDocSignature = 298,
|
||||
JSDocTag = 299,
|
||||
JSDocAugmentsTag = 300,
|
||||
JSDocClassTag = 301,
|
||||
JSDocCallbackTag = 302,
|
||||
JSDocEnumTag = 303,
|
||||
JSDocParameterTag = 304,
|
||||
JSDocReturnTag = 305,
|
||||
JSDocThisTag = 306,
|
||||
JSDocTypeTag = 307,
|
||||
JSDocTemplateTag = 308,
|
||||
JSDocTypedefTag = 309,
|
||||
JSDocPropertyTag = 310,
|
||||
SyntaxList = 311,
|
||||
NotEmittedStatement = 312,
|
||||
PartiallyEmittedExpression = 313,
|
||||
CommaListExpression = 314,
|
||||
MergeDeclarationMarker = 315,
|
||||
EndOfDeclarationMarker = 316,
|
||||
Count = 317,
|
||||
FirstAssignment = 59,
|
||||
LastAssignment = 71,
|
||||
FirstCompoundAssignment = 60,
|
||||
@ -414,10 +419,10 @@ declare namespace ts {
|
||||
FirstBinaryOperator = 28,
|
||||
LastBinaryOperator = 71,
|
||||
FirstNode = 148,
|
||||
FirstJSDocNode = 283,
|
||||
LastJSDocNode = 305,
|
||||
FirstJSDocTagNode = 294,
|
||||
LastJSDocTagNode = 305
|
||||
FirstJSDocNode = 288,
|
||||
LastJSDocNode = 310,
|
||||
FirstJSDocTagNode = 299,
|
||||
LastJSDocTagNode = 310,
|
||||
}
|
||||
enum NodeFlags {
|
||||
None = 0,
|
||||
@ -446,7 +451,7 @@ declare namespace ts {
|
||||
ReachabilityCheckFlags = 384,
|
||||
ReachabilityAndEmitFlags = 1408,
|
||||
ContextFlags = 12679168,
|
||||
TypeExcludesFlags = 20480
|
||||
TypeExcludesFlags = 20480,
|
||||
}
|
||||
enum ModifierFlags {
|
||||
None = 0,
|
||||
@ -814,7 +819,7 @@ declare namespace ts {
|
||||
}
|
||||
interface TypeOperatorNode extends TypeNode {
|
||||
kind: SyntaxKind.TypeOperator;
|
||||
operator: SyntaxKind.KeyOfKeyword | SyntaxKind.UniqueKeyword;
|
||||
operator: SyntaxKind.KeyOfKeyword | SyntaxKind.UniqueKeyword | SyntaxKind.ReadonlyKeyword;
|
||||
type: TypeNode;
|
||||
}
|
||||
interface IndexedAccessTypeNode extends TypeNode {
|
||||
@ -997,6 +1002,14 @@ declare namespace ts {
|
||||
interface NoSubstitutionTemplateLiteral extends LiteralExpression {
|
||||
kind: SyntaxKind.NoSubstitutionTemplateLiteral;
|
||||
}
|
||||
enum TokenFlags {
|
||||
None = 0,
|
||||
Scientific = 16,
|
||||
Octal = 32,
|
||||
HexSpecifier = 64,
|
||||
BinarySpecifier = 128,
|
||||
OctalSpecifier = 256,
|
||||
}
|
||||
interface NumericLiteral extends LiteralExpression {
|
||||
kind: SyntaxKind.NumericLiteral;
|
||||
}
|
||||
@ -1726,18 +1739,55 @@ declare namespace ts {
|
||||
}
|
||||
interface InputFiles extends Node {
|
||||
kind: SyntaxKind.InputFiles;
|
||||
javascriptPath?: string;
|
||||
javascriptText: string;
|
||||
javascriptMapPath?: string;
|
||||
javascriptMapText?: string;
|
||||
declarationPath?: string;
|
||||
declarationText: string;
|
||||
declarationMapPath?: string;
|
||||
declarationMapText?: string;
|
||||
}
|
||||
interface UnparsedSource extends Node {
|
||||
kind: SyntaxKind.UnparsedSource;
|
||||
fileName: string;
|
||||
text: string;
|
||||
prologues: ReadonlyArray<UnparsedPrologue>;
|
||||
helpers: ReadonlyArray<UnscopedEmitHelper> | undefined;
|
||||
referencedFiles: ReadonlyArray<FileReference>;
|
||||
typeReferenceDirectives: ReadonlyArray<string> | undefined;
|
||||
libReferenceDirectives: ReadonlyArray<FileReference>;
|
||||
hasNoDefaultLib?: boolean;
|
||||
sourceMapPath?: string;
|
||||
sourceMapText?: string;
|
||||
syntheticReferences?: ReadonlyArray<UnparsedSyntheticReference>;
|
||||
texts: ReadonlyArray<UnparsedSourceText>;
|
||||
}
|
||||
type UnparsedSourceText = UnparsedPrepend | UnparsedTextLike;
|
||||
type UnparsedNode = UnparsedPrologue | UnparsedSourceText | UnparsedSyntheticReference;
|
||||
interface UnparsedSection extends Node {
|
||||
kind: SyntaxKind;
|
||||
data?: string;
|
||||
parent: UnparsedSource;
|
||||
}
|
||||
interface UnparsedPrologue extends UnparsedSection {
|
||||
kind: SyntaxKind.UnparsedPrologue;
|
||||
data: string;
|
||||
parent: UnparsedSource;
|
||||
}
|
||||
interface UnparsedPrepend extends UnparsedSection {
|
||||
kind: SyntaxKind.UnparsedPrepend;
|
||||
data: string;
|
||||
parent: UnparsedSource;
|
||||
texts: ReadonlyArray<UnparsedTextLike>;
|
||||
}
|
||||
interface UnparsedTextLike extends UnparsedSection {
|
||||
kind: SyntaxKind.UnparsedText | SyntaxKind.UnparsedInternalText;
|
||||
parent: UnparsedSource;
|
||||
}
|
||||
interface UnparsedSyntheticReference extends UnparsedSection {
|
||||
kind: SyntaxKind.UnparsedSyntheticReference;
|
||||
parent: UnparsedSource;
|
||||
}
|
||||
interface JsonSourceFile extends SourceFile {
|
||||
statements: NodeArray<JsonObjectExpressionStatement>;
|
||||
@ -1778,7 +1828,7 @@ declare namespace ts {
|
||||
type ResolvedConfigFileName = string & {
|
||||
_isResolvedConfigFileName: never;
|
||||
};
|
||||
type WriteFileCallback = (fileName: string, data: string, writeByteOrderMark: boolean, onError: ((message: string) => void) | undefined, sourceFiles?: ReadonlyArray<SourceFile>) => void;
|
||||
type WriteFileCallback = (fileName: string, data: string, writeByteOrderMark: boolean, onError?: (message: string) => void, sourceFiles?: ReadonlyArray<SourceFile>) => void;
|
||||
class OperationCanceledException {
|
||||
}
|
||||
interface CancellationToken {
|
||||
@ -2011,7 +2061,7 @@ declare namespace ts {
|
||||
WriteTypeParametersOrArguments = 1,
|
||||
UseOnlyExternalAliasing = 2,
|
||||
AllowAnyNodeKind = 4,
|
||||
UseAliasDefinedOutsideCurrentScope = 8
|
||||
UseAliasDefinedOutsideCurrentScope = 8,
|
||||
}
|
||||
enum TypePredicateKind {
|
||||
This = 0,
|
||||
@ -2089,7 +2139,7 @@ declare namespace ts {
|
||||
ExportHasLocal = 944,
|
||||
BlockScoped = 418,
|
||||
PropertyOrAccessor = 98308,
|
||||
ClassMember = 106500
|
||||
ClassMember = 106500,
|
||||
}
|
||||
interface Symbol {
|
||||
flags: SymbolFlags;
|
||||
@ -2197,7 +2247,7 @@ declare namespace ts {
|
||||
Instantiable = 63176704,
|
||||
StructuredOrInstantiable = 66846720,
|
||||
Narrowable = 133970943,
|
||||
NotUnionOrUnit = 67637251
|
||||
NotUnionOrUnit = 67637251,
|
||||
}
|
||||
type DestructuringPattern = BindingPattern | ObjectLiteralExpression | ArrayLiteralExpression;
|
||||
interface Type {
|
||||
@ -2214,6 +2264,7 @@ declare namespace ts {
|
||||
}
|
||||
interface UniqueESSymbolType extends Type {
|
||||
symbol: Symbol;
|
||||
escapedName: __String;
|
||||
}
|
||||
interface StringLiteralType extends LiteralType {
|
||||
value: string;
|
||||
@ -2243,7 +2294,7 @@ declare namespace ts {
|
||||
MarkerType = 8192,
|
||||
JSLiteral = 16384,
|
||||
FreshLiteral = 32768,
|
||||
ClassOrInterface = 3
|
||||
ClassOrInterface = 3,
|
||||
}
|
||||
interface ObjectType extends Type {
|
||||
objectFlags: ObjectFlags;
|
||||
@ -2282,6 +2333,7 @@ declare namespace ts {
|
||||
interface TupleType extends GenericType {
|
||||
minLength: number;
|
||||
hasRestElement: boolean;
|
||||
readonly: boolean;
|
||||
associatedNames?: __String[];
|
||||
}
|
||||
interface TupleTypeReference extends TypeReference {
|
||||
@ -2490,6 +2542,8 @@ declare namespace ts {
|
||||
reactNamespace?: string;
|
||||
jsxFactory?: string;
|
||||
composite?: boolean;
|
||||
incremental?: boolean;
|
||||
tsBuildInfoFile?: string;
|
||||
removeComments?: boolean;
|
||||
rootDir?: string;
|
||||
rootDirs?: string[];
|
||||
@ -2566,9 +2620,10 @@ declare namespace ts {
|
||||
ES2016 = 3,
|
||||
ES2017 = 4,
|
||||
ES2018 = 5,
|
||||
ESNext = 6,
|
||||
ES2019 = 6,
|
||||
ESNext = 7,
|
||||
JSON = 100,
|
||||
Latest = 6
|
||||
Latest = 7
|
||||
}
|
||||
enum LanguageVariant {
|
||||
Standard = 0,
|
||||
@ -2665,7 +2720,8 @@ declare namespace ts {
|
||||
Dts = ".d.ts",
|
||||
Js = ".js",
|
||||
Jsx = ".jsx",
|
||||
Json = ".json"
|
||||
Json = ".json",
|
||||
TsBuildInfo = ".tsbuildinfo"
|
||||
}
|
||||
interface ResolvedModuleWithFailedLookupLocations {
|
||||
readonly resolvedModule: ResolvedModuleFull | undefined;
|
||||
@ -2689,7 +2745,6 @@ declare namespace ts {
|
||||
getDefaultLibLocation?(): string;
|
||||
writeFile: WriteFileCallback;
|
||||
getCurrentDirectory(): string;
|
||||
getDirectories(path: string): string[];
|
||||
getCanonicalFileName(fileName: string): string;
|
||||
useCaseSensitiveFileNames(): boolean;
|
||||
getNewLine(): string;
|
||||
@ -2739,7 +2794,7 @@ declare namespace ts {
|
||||
NoHoisting = 2097152,
|
||||
HasEndOfDeclarationMarker = 4194304,
|
||||
Iterator = 8388608,
|
||||
NoAsciiEscaping = 16777216
|
||||
NoAsciiEscaping = 16777216,
|
||||
}
|
||||
interface EmitHelper {
|
||||
readonly name: string;
|
||||
@ -2747,6 +2802,10 @@ declare namespace ts {
|
||||
readonly text: string | ((node: EmitHelperUniqueNameCallback) => string);
|
||||
readonly priority?: number;
|
||||
}
|
||||
interface UnscopedEmitHelper extends EmitHelper {
|
||||
readonly scoped: false;
|
||||
readonly text: string;
|
||||
}
|
||||
type EmitHelperUniqueNameCallback = (name: string) => string;
|
||||
enum EmitHint {
|
||||
SourceFile = 0,
|
||||
@ -3006,13 +3065,14 @@ declare namespace ts {
|
||||
}
|
||||
interface UserPreferences {
|
||||
readonly disableSuggestions?: boolean;
|
||||
readonly quotePreference?: "double" | "single";
|
||||
readonly quotePreference?: "auto" | "double" | "single";
|
||||
readonly includeCompletionsForModuleExports?: boolean;
|
||||
readonly includeCompletionsWithInsertText?: boolean;
|
||||
readonly importModuleSpecifierPreference?: "relative" | "non-relative";
|
||||
/** Determines whether we import `foo/index.ts` as "foo", "foo/index", or "foo/index.js" */
|
||||
readonly importModuleSpecifierEnding?: "minimal" | "index" | "js";
|
||||
readonly allowTextChangesInNewFiles?: boolean;
|
||||
readonly providePrefixAndSuffixTextForRename?: boolean;
|
||||
}
|
||||
/** Represents a bigint literal value without requiring bigint support */
|
||||
interface PseudoBigInt {
|
||||
@ -3097,6 +3157,7 @@ declare namespace ts {
|
||||
scanJsxIdentifier(): SyntaxKind;
|
||||
scanJsxAttributeValue(): SyntaxKind;
|
||||
reScanJsxToken(): JsxTokenSyntaxKind;
|
||||
reScanLessThanToken(): SyntaxKind;
|
||||
scanJsxToken(): JsxTokenSyntaxKind;
|
||||
scanJSDocToken(): JsDocSyntaxKind;
|
||||
scan(): SyntaxKind;
|
||||
@ -3354,6 +3415,7 @@ declare namespace ts {
|
||||
function isNewExpression(node: Node): node is NewExpression;
|
||||
function isTaggedTemplateExpression(node: Node): node is TaggedTemplateExpression;
|
||||
function isTypeAssertion(node: Node): node is TypeAssertion;
|
||||
function isConstTypeReference(node: Node): boolean;
|
||||
function isParenthesizedExpression(node: Node): node is ParenthesizedExpression;
|
||||
function skipPartiallyEmittedExpressions(node: Expression): Expression;
|
||||
function skipPartiallyEmittedExpressions(node: Node): Node;
|
||||
@ -3443,6 +3505,9 @@ declare namespace ts {
|
||||
function isSourceFile(node: Node): node is SourceFile;
|
||||
function isBundle(node: Node): node is Bundle;
|
||||
function isUnparsedSource(node: Node): node is UnparsedSource;
|
||||
function isUnparsedPrepend(node: Node): node is UnparsedPrepend;
|
||||
function isUnparsedTextLike(node: Node): node is UnparsedTextLike;
|
||||
function isUnparsedNode(node: Node): node is UnparsedNode;
|
||||
function isJSDocTypeExpression(node: Node): node is JSDocTypeExpression;
|
||||
function isJSDocAllType(node: JSDocAllType): node is JSDocAllType;
|
||||
function isJSDocUnknownType(node: Node): node is JSDocUnknownType;
|
||||
@ -3662,7 +3727,7 @@ declare namespace ts {
|
||||
function createLiteral(value: number | PseudoBigInt): NumericLiteral;
|
||||
function createLiteral(value: boolean): BooleanLiteral;
|
||||
function createLiteral(value: string | number | PseudoBigInt | boolean): PrimaryExpression;
|
||||
function createNumericLiteral(value: string): NumericLiteral;
|
||||
function createNumericLiteral(value: string, numericLiteralFlags?: TokenFlags): NumericLiteral;
|
||||
function createBigIntLiteral(value: string): BigIntLiteral;
|
||||
function createStringLiteral(text: string): StringLiteral;
|
||||
function createRegularExpressionLiteral(text: string): RegularExpressionLiteral;
|
||||
@ -3754,7 +3819,7 @@ declare namespace ts {
|
||||
function updateParenthesizedType(node: ParenthesizedTypeNode, type: TypeNode): ParenthesizedTypeNode;
|
||||
function createThisTypeNode(): ThisTypeNode;
|
||||
function createTypeOperatorNode(type: TypeNode): TypeOperatorNode;
|
||||
function createTypeOperatorNode(operator: SyntaxKind.KeyOfKeyword | SyntaxKind.UniqueKeyword, type: TypeNode): TypeOperatorNode;
|
||||
function createTypeOperatorNode(operator: SyntaxKind.KeyOfKeyword | SyntaxKind.UniqueKeyword | SyntaxKind.ReadonlyKeyword, type: TypeNode): TypeOperatorNode;
|
||||
function updateTypeOperatorNode(node: TypeOperatorNode, type: TypeNode): TypeOperatorNode;
|
||||
function createIndexedAccessTypeNode(objectType: TypeNode, indexType: TypeNode): IndexedAccessTypeNode;
|
||||
function updateIndexedAccessTypeNode(node: IndexedAccessTypeNode, objectType: TypeNode, indexType: TypeNode): IndexedAccessTypeNode;
|
||||
@ -3978,9 +4043,11 @@ declare namespace ts {
|
||||
function updateCommaList(node: CommaListExpression, elements: ReadonlyArray<Expression>): CommaListExpression;
|
||||
function createBundle(sourceFiles: ReadonlyArray<SourceFile>, prepends?: ReadonlyArray<UnparsedSource | InputFiles>): Bundle;
|
||||
function createUnparsedSourceFile(text: string): UnparsedSource;
|
||||
function createUnparsedSourceFile(inputFile: InputFiles, type: "js" | "dts", stripInternal?: boolean): UnparsedSource;
|
||||
function createUnparsedSourceFile(text: string, mapPath: string | undefined, map: string | undefined): UnparsedSource;
|
||||
function createInputFiles(javascript: string, declaration: string): InputFiles;
|
||||
function createInputFiles(javascript: string, declaration: string, javascriptMapPath: string | undefined, javascriptMapText: string | undefined, declarationMapPath: string | undefined, declarationMapText: string | undefined): InputFiles;
|
||||
function createInputFiles(javascriptText: string, declarationText: string): InputFiles;
|
||||
function createInputFiles(readFileText: (path: string) => string | undefined, javascriptPath: string, javascriptMapPath: string | undefined, declarationPath: string, declarationMapPath: string | undefined, buildInfoPath: string | undefined): InputFiles;
|
||||
function createInputFiles(javascriptText: string, declarationText: string, javascriptMapPath: string | undefined, javascriptMapText: string | undefined, declarationMapPath: string | undefined, declarationMapText: string | undefined): InputFiles;
|
||||
function updateBundle(node: Bundle, sourceFiles: ReadonlyArray<SourceFile>, prepends?: ReadonlyArray<UnparsedSource>): Bundle;
|
||||
function createImmediatelyInvokedFunctionExpression(statements: ReadonlyArray<Statement>): CallExpression;
|
||||
function createImmediatelyInvokedFunctionExpression(statements: ReadonlyArray<Statement>, param: ParameterDeclaration, paramValue: Expression): CallExpression;
|
||||
@ -4278,6 +4345,10 @@ declare namespace ts {
|
||||
* Get the syntax diagnostics, for all source files if source file is not supplied
|
||||
*/
|
||||
getSyntacticDiagnostics(sourceFile?: SourceFile, cancellationToken?: CancellationToken): ReadonlyArray<Diagnostic>;
|
||||
/**
|
||||
* Get the declaration diagnostics, for all source files if source file is not supplied
|
||||
*/
|
||||
getDeclarationDiagnostics(sourceFile?: SourceFile, cancellationToken?: CancellationToken): ReadonlyArray<DiagnosticWithLocation>;
|
||||
/**
|
||||
* Get all the dependencies of the file
|
||||
*/
|
||||
@ -4364,13 +4435,11 @@ declare namespace ts {
|
||||
/** If provided, will be used to reset existing delayed compilation */
|
||||
clearTimeout?(timeoutId: any): void;
|
||||
}
|
||||
interface WatchCompilerHost<T extends BuilderProgram> extends WatchHost {
|
||||
interface ProgramHost<T extends BuilderProgram> {
|
||||
/**
|
||||
* Used to create the program when need for program creation or recreation detected
|
||||
*/
|
||||
createProgram: CreateProgram<T>;
|
||||
/** If provided, callback to invoke after every new program creation */
|
||||
afterProgramCreate?(program: T): void;
|
||||
useCaseSensitiveFileNames(): boolean;
|
||||
getNewLine(): string;
|
||||
getCurrentDirectory(): string;
|
||||
@ -4404,6 +4473,10 @@ declare namespace ts {
|
||||
/** If provided, used to resolve type reference directives, otherwise typescript's default resolution */
|
||||
resolveTypeReferenceDirectives?(typeReferenceDirectiveNames: string[], containingFile: string, redirectedReference?: ResolvedProjectReference): (ResolvedTypeReferenceDirective | undefined)[];
|
||||
}
|
||||
interface WatchCompilerHost<T extends BuilderProgram> extends ProgramHost<T>, WatchHost {
|
||||
/** If provided, callback to invoke after every new program creation */
|
||||
afterProgramCreate?(program: T): void;
|
||||
}
|
||||
/**
|
||||
* Host to create watch with root files and options
|
||||
*/
|
||||
@ -4706,8 +4779,8 @@ declare namespace ts {
|
||||
getNameOrDottedNameSpan(fileName: string, startPos: number, endPos: number): TextSpan | undefined;
|
||||
getBreakpointStatementAtPosition(fileName: string, position: number): TextSpan | undefined;
|
||||
getSignatureHelpItems(fileName: string, position: number, options: SignatureHelpItemsOptions | undefined): SignatureHelpItems | undefined;
|
||||
getRenameInfo(fileName: string, position: number): RenameInfo;
|
||||
findRenameLocations(fileName: string, position: number, findInStrings: boolean, findInComments: boolean): ReadonlyArray<RenameLocation> | undefined;
|
||||
getRenameInfo(fileName: string, position: number, options?: RenameInfoOptions): RenameInfo;
|
||||
findRenameLocations(fileName: string, position: number, findInStrings: boolean, findInComments: boolean, providePrefixAndSuffixTextForRename?: boolean): ReadonlyArray<RenameLocation> | undefined;
|
||||
getDefinitionAtPosition(fileName: string, position: number): ReadonlyArray<DefinitionInfo> | undefined;
|
||||
getDefinitionAndBoundSpan(fileName: string, position: number): DefinitionInfoAndBoundSpan | undefined;
|
||||
getTypeDefinitionAtPosition(fileName: string, position: number): ReadonlyArray<DefinitionInfo> | undefined;
|
||||
@ -5150,6 +5223,9 @@ declare namespace ts {
|
||||
canRename: false;
|
||||
localizedErrorMessage: string;
|
||||
}
|
||||
interface RenameInfoOptions {
|
||||
readonly allowRenameOfImportPath?: boolean;
|
||||
}
|
||||
interface SignatureHelpParameter {
|
||||
name: string;
|
||||
documentation: SymbolDisplayPart[];
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@ -224,7 +224,7 @@
|
||||
"Cannot_find_name_0_Did_you_mean_the_static_member_1_0_2662": "找不到名稱 '{0}'。您要找的是此靜態成員 '{1}.{0}' 嗎?",
|
||||
"Cannot_find_namespace_0_2503": "找不到命名空間 '{0}'。",
|
||||
"Cannot_find_parameter_0_1225": "找不到參數 '{0}'。",
|
||||
"Cannot_find_the_common_subdirectory_path_for_the_input_files_5009": "找不到輸入檔案的一般子目錄路徑。",
|
||||
"Cannot_find_the_common_subdirectory_path_for_the_input_files_5009": "找不到輸入檔的一般子目錄路徑。",
|
||||
"Cannot_find_type_definition_file_for_0_2688": "找不到 '{0}' 的類型定義檔案。",
|
||||
"Cannot_import_type_declaration_files_Consider_importing_0_instead_of_1_6137": "無法匯入型別宣告檔案。請考慮匯入 '{0}' 而不是 '{1}'。",
|
||||
"Cannot_initialize_outer_scoped_variable_0_in_the_same_scope_as_block_scoped_declaration_1_2481": "無法初始化區塊範圍宣告 '{1}' 之同一範圍中的外部範圍變數 '{0}'。",
|
||||
@ -817,7 +817,7 @@
|
||||
"Specify_the_end_of_line_sequence_to_be_used_when_emitting_files_Colon_CRLF_dos_or_LF_unix_6060": "指定發出檔案時要用的行尾順序: 'CRLF' (DOS) 或 'LF' (UNIX)。",
|
||||
"Specify_the_location_where_debugger_should_locate_TypeScript_files_instead_of_source_locations_6004": "指定偵錯工具尋找 TypeScript 檔案的位置,而非原始檔位置。",
|
||||
"Specify_the_location_where_debugger_should_locate_map_files_instead_of_generated_locations_6003": "指定偵錯工具尋找對應檔的位置,而非產生的位置。",
|
||||
"Specify_the_root_directory_of_input_files_Use_to_control_the_output_directory_structure_with_outDir_6058": "指定輸入檔案的根目錄。用以控制具有 --outDir 的輸出目錄結構。",
|
||||
"Specify_the_root_directory_of_input_files_Use_to_control_the_output_directory_structure_with_outDir_6058": "指定輸入檔的根目錄。用以控制具有 --outDir 的輸出目錄結構。",
|
||||
"Spread_operator_in_new_expressions_is_only_available_when_targeting_ECMAScript_5_and_higher_2472": "只有當目標為 ECMAScript 5 及更高版本時,才可使用 'new' 運算式中的擴張運算子。",
|
||||
"Spread_types_may_only_be_created_from_object_types_2698": "Spread 類型只能從物件類型建立。",
|
||||
"Starting_compilation_in_watch_mode_6031": "在監看模式中開始編譯...",
|
||||
@ -992,7 +992,7 @@
|
||||
"Variable_declaration_expected_1134": "必須是變數宣告。",
|
||||
"Variable_declaration_list_cannot_be_empty_1123": "變數宣告清單不得為空白。",
|
||||
"Version_0_6029": "版本 {0}",
|
||||
"Watch_input_files_6005": "監看輸入檔案。",
|
||||
"Watch_input_files_6005": "監看輸入檔。",
|
||||
"Whether_to_keep_outdated_console_output_in_watch_mode_instead_of_clearing_the_screen_6191": "是否要將已過期的主控台輸出,維持在監看模式下,而非清除螢幕。",
|
||||
"You_cannot_rename_elements_that_are_defined_in_the_standard_TypeScript_library_8001": "您無法重新命名標準 TypeScript 程式庫中所定義的項目。",
|
||||
"You_cannot_rename_this_element_8000": "您無法重新命名這個項目。",
|
||||
|
||||
33
package.json
33
package.json
@ -2,7 +2,7 @@
|
||||
"name": "typescript",
|
||||
"author": "Microsoft Corp.",
|
||||
"homepage": "https://www.typescriptlang.org/",
|
||||
"version": "3.3.0",
|
||||
"version": "3.4.0",
|
||||
"license": "Apache-2.0",
|
||||
"description": "TypeScript is a language for application scale JavaScript development",
|
||||
"keywords": [
|
||||
@ -35,10 +35,8 @@
|
||||
"@types/convert-source-map": "latest",
|
||||
"@types/del": "latest",
|
||||
"@types/glob": "latest",
|
||||
"@types/gulp": "3.X",
|
||||
"@types/gulp": "^4.0.5",
|
||||
"@types/gulp-concat": "latest",
|
||||
"@types/gulp-help": "latest",
|
||||
"@types/gulp-if": "0.0.33",
|
||||
"@types/gulp-newer": "latest",
|
||||
"@types/gulp-rename": "0.0.33",
|
||||
"@types/gulp-sourcemaps": "0.0.32",
|
||||
@ -48,9 +46,9 @@
|
||||
"@types/minimist": "latest",
|
||||
"@types/mkdirp": "latest",
|
||||
"@types/mocha": "latest",
|
||||
"@types/ms": "latest",
|
||||
"@types/node": "8.5.5",
|
||||
"@types/q": "latest",
|
||||
"@types/run-sequence": "latest",
|
||||
"@types/source-map-support": "latest",
|
||||
"@types/through2": "latest",
|
||||
"@types/travis-fold": "latest",
|
||||
@ -63,30 +61,25 @@
|
||||
"del": "latest",
|
||||
"fancy-log": "latest",
|
||||
"fs-extra": "^6.0.1",
|
||||
"gulp": "3.X",
|
||||
"gulp-clone": "latest",
|
||||
"gulp": "^4.0.0",
|
||||
"gulp-concat": "latest",
|
||||
"gulp-help": "latest",
|
||||
"gulp-if": "latest",
|
||||
"gulp-insert": "latest",
|
||||
"gulp-newer": "latest",
|
||||
"gulp-rename": "latest",
|
||||
"gulp-sourcemaps": "latest",
|
||||
"gulp-typescript": "latest",
|
||||
"istanbul": "latest",
|
||||
"jake": "latest",
|
||||
"lodash": "4.17.10",
|
||||
"lodash": "^4.17.11",
|
||||
"merge2": "latest",
|
||||
"minimist": "latest",
|
||||
"mkdirp": "latest",
|
||||
"mocha": "latest",
|
||||
"mocha-fivemat-progress-reporter": "latest",
|
||||
"ms": "latest",
|
||||
"plugin-error": "latest",
|
||||
"pretty-hrtime": "^1.0.3",
|
||||
"prex": "^0.4.3",
|
||||
"q": "latest",
|
||||
"remove-internal": "^2.9.2",
|
||||
"run-sequence": "latest",
|
||||
"source-map-support": "latest",
|
||||
"through2": "latest",
|
||||
"travis-fold": "latest",
|
||||
@ -97,16 +90,16 @@
|
||||
"xml2js": "^0.4.19"
|
||||
},
|
||||
"scripts": {
|
||||
"pretest": "jake tests",
|
||||
"test": "jake runtests-parallel light=false",
|
||||
"pretest": "gulp tests",
|
||||
"test": "gulp runtests-parallel --light=false",
|
||||
"build": "npm run build:compiler && npm run build:tests",
|
||||
"build:compiler": "jake local",
|
||||
"build:tests": "jake tests",
|
||||
"build:compiler": "gulp local",
|
||||
"build:tests": "gulp tests",
|
||||
"start": "node lib/tsc",
|
||||
"clean": "jake clean",
|
||||
"clean": "gulp clean",
|
||||
"gulp": "gulp",
|
||||
"jake": "jake",
|
||||
"lint": "jake lint",
|
||||
"jake": "gulp",
|
||||
"lint": "gulp lint",
|
||||
"setup-hooks": "node scripts/link-hooks.js"
|
||||
},
|
||||
"browser": {
|
||||
|
||||
@ -4,7 +4,7 @@ import child_process = require("child_process");
|
||||
|
||||
type Author = {
|
||||
displayNames: string[];
|
||||
preferedName?: string;
|
||||
preferredName?: string;
|
||||
emails: string[];
|
||||
};
|
||||
|
||||
@ -15,12 +15,12 @@ type Command = {
|
||||
description?: string;
|
||||
};
|
||||
|
||||
const mailMapPath = path.resolve("../.mailmap");
|
||||
const authorsPath = path.resolve("../AUTHORS.md");
|
||||
const mailMapPath = path.resolve(__dirname, "../.mailmap");
|
||||
const authorsPath = path.resolve(__dirname, "../AUTHORS.md");
|
||||
|
||||
function getKnownAuthors(): Author[] {
|
||||
const segmentRegExp = /\s?([^<]+)\s+<([^>]+)>/g;
|
||||
const preferedNameRegeExp = /\s?#\s?([^#]+)$/;
|
||||
const preferredNameRegeExp = /\s?#\s?([^#]+)$/;
|
||||
const knownAuthors: Author[] = [];
|
||||
|
||||
if (!fs.existsSync(mailMapPath)) {
|
||||
@ -37,13 +37,13 @@ function getKnownAuthors(): Author[] {
|
||||
author.displayNames.push(match[1]);
|
||||
author.emails.push(match[2]);
|
||||
}
|
||||
if (match = preferedNameRegeExp.exec(line)) {
|
||||
author.preferedName = match[1];
|
||||
if (match = preferredNameRegeExp.exec(line)) {
|
||||
author.preferredName = match[1];
|
||||
}
|
||||
if (!author.emails) continue;
|
||||
knownAuthors.push(author);
|
||||
if (line.indexOf("#") > 0 && !author.preferedName) {
|
||||
throw new Error("Could not match prefered name for: " + line);
|
||||
if (line.indexOf("#") > 0 && !author.preferredName) {
|
||||
throw new Error("Could not match preferred name for: " + line);
|
||||
}
|
||||
// console.log("===> line: " + line);
|
||||
// console.log(JSON.stringify(author, undefined, 2));
|
||||
@ -52,7 +52,7 @@ function getKnownAuthors(): Author[] {
|
||||
}
|
||||
|
||||
function getAuthorName(author: Author) {
|
||||
return author.preferedName || author.displayNames[0];
|
||||
return author.preferredName || author.displayNames[0];
|
||||
}
|
||||
|
||||
function getKnownAuthorMaps() {
|
||||
@ -113,56 +113,54 @@ namespace Commands {
|
||||
const cmd = "git shortlog -se " + specs.join(" ");
|
||||
console.log(cmd);
|
||||
const outputRegExp = /\d+\s+([^<]+)<([^>]+)>/;
|
||||
const tty = process.platform === 'win32' ? 'CON' : '/dev/tty';
|
||||
const authors: { name: string, email: string, knownAuthor?: Author }[] = [];
|
||||
child_process.exec(`${cmd} < ${tty}`, { cwd: path.resolve("../") }, function (error, stdout, stderr) {
|
||||
if (error) {
|
||||
console.log(stderr.toString());
|
||||
}
|
||||
else {
|
||||
const output = stdout.toString();
|
||||
const lines = output.split("\n");
|
||||
lines.forEach(line => {
|
||||
if (line) {
|
||||
let match: RegExpExecArray | null;
|
||||
if (match = outputRegExp.exec(line)) {
|
||||
authors.push({ name: match[1], email: match[2] });
|
||||
}
|
||||
else {
|
||||
throw new Error("Could not parse output: " + line);
|
||||
}
|
||||
const {output: [error, stdout, stderr]} = child_process.spawnSync(`git`, ["shortlog", "-se", ...specs], { cwd: path.resolve(__dirname, "../") });
|
||||
if (error) {
|
||||
console.log(stderr.toString());
|
||||
}
|
||||
else {
|
||||
const output = stdout.toString();
|
||||
const lines = output.split("\n");
|
||||
lines.forEach(line => {
|
||||
if (line) {
|
||||
let match: RegExpExecArray | null;
|
||||
if (match = outputRegExp.exec(line)) {
|
||||
authors.push({ name: match[1], email: match[2] });
|
||||
}
|
||||
else {
|
||||
throw new Error("Could not parse output: " + line);
|
||||
}
|
||||
});
|
||||
|
||||
const maps = getKnownAuthorMaps();
|
||||
|
||||
const lookupAuthor = function ({name, email}: { name: string, email: string }) {
|
||||
return maps.authorsByEmail[email.toLocaleLowerCase()] || maps.authorsByName[name];
|
||||
};
|
||||
|
||||
const knownAuthors = authors
|
||||
.map(lookupAuthor)
|
||||
.filter(a => !!a)
|
||||
.map(getAuthorName);
|
||||
const unknownAuthors = authors
|
||||
.filter(a => !lookupAuthor(a))
|
||||
.map(a => `${a.name} <${a.email}>`);
|
||||
|
||||
if (knownAuthors.length) {
|
||||
console.log("\r\n");
|
||||
console.log("Found known authors: ");
|
||||
console.log("=====================");
|
||||
deduplicate(knownAuthors).sort(sortAuthors).forEach(log);
|
||||
}
|
||||
});
|
||||
|
||||
if (unknownAuthors.length) {
|
||||
console.log("\r\n");
|
||||
console.log("Found unknown authors: ");
|
||||
console.log("=====================");
|
||||
deduplicate(unknownAuthors).sort(sortAuthors).forEach(log);
|
||||
}
|
||||
const maps = getKnownAuthorMaps();
|
||||
|
||||
const lookupAuthor = function ({name, email}: { name: string, email: string }) {
|
||||
return maps.authorsByEmail[email.toLocaleLowerCase()] || maps.authorsByName[name];
|
||||
};
|
||||
|
||||
const knownAuthors = authors
|
||||
.map(lookupAuthor)
|
||||
.filter(a => !!a)
|
||||
.map(getAuthorName);
|
||||
const unknownAuthors = authors
|
||||
.filter(a => !lookupAuthor(a))
|
||||
.map(a => `${a.name} <${a.email}>`);
|
||||
|
||||
if (knownAuthors.length) {
|
||||
console.log("\r\n");
|
||||
console.log("Found known authors: ");
|
||||
console.log("=====================");
|
||||
deduplicate(knownAuthors).sort(sortAuthors).forEach(log);
|
||||
}
|
||||
});
|
||||
|
||||
if (unknownAuthors.length) {
|
||||
console.log("\r\n");
|
||||
console.log("Found unknown authors: ");
|
||||
console.log("=====================");
|
||||
deduplicate(unknownAuthors).sort(sortAuthors).forEach(log);
|
||||
}
|
||||
}
|
||||
};
|
||||
listAuthors.description = "List known and unknown authors for a given spec, e.g. 'node authors.js listAuthors origin/release-2.6..origin/release-2.7'";
|
||||
}
|
||||
|
||||
@ -5,7 +5,7 @@
|
||||
import cp = require('child_process');
|
||||
import fs = require('fs');
|
||||
|
||||
// Slice off 'node bisect-test.js' from the commandline args
|
||||
// Slice off 'node bisect-test.js' from the command line args
|
||||
var args = process.argv.slice(2);
|
||||
|
||||
function tsc(tscArgs: string, onExit: (exitCode: number) => void) {
|
||||
@ -15,6 +15,7 @@ function tsc(tscArgs: string, onExit: (exitCode: number) => void) {
|
||||
});
|
||||
}
|
||||
|
||||
// TODO: Rewrite bisect script to handle the post-jake/gulp swap period
|
||||
var jake = cp.exec('jake clean local', () => void 0);
|
||||
jake.on('close', jakeExitCode => {
|
||||
if (jakeExitCode === 0) {
|
||||
|
||||
@ -1,24 +0,0 @@
|
||||
// simple script to optionally elide source-map-support (or other optional modules) when running browserify.
|
||||
|
||||
var stream = require("stream"),
|
||||
Transform = stream.Transform,
|
||||
resolve = require("browser-resolve");
|
||||
|
||||
var requirePattern = /require\s*\(\s*['"](source-map-support)['"]\s*\)/;
|
||||
module.exports = function (file) {
|
||||
return new Transform({
|
||||
transform: function (data, encoding, cb) {
|
||||
var text = encoding === "buffer" ? data.toString("utf8") : data;
|
||||
this.push(new Buffer(text.replace(requirePattern, function (originalText, moduleName) {
|
||||
try {
|
||||
resolve.sync(moduleName, { filename: file });
|
||||
return originalText;
|
||||
}
|
||||
catch (e) {
|
||||
return "(function () { throw new Error(\"module '" + moduleName + "' not found.\"); })()";
|
||||
}
|
||||
}), "utf8"));
|
||||
cb();
|
||||
}
|
||||
});
|
||||
};
|
||||
@ -1,24 +0,0 @@
|
||||
// @ts-check
|
||||
const merge2 = require("merge2");
|
||||
const gulp = require("./gulp");
|
||||
const rename = require("gulp-rename");
|
||||
const rm = require("./rm");
|
||||
const { localBaseline, refBaseline } = require("./tests");
|
||||
|
||||
module.exports = baselineAccept;
|
||||
|
||||
function baselineAccept(subfolder = "") {
|
||||
return merge2(baselineCopy(subfolder), baselineDelete(subfolder));
|
||||
}
|
||||
|
||||
function baselineCopy(subfolder = "") {
|
||||
return gulp.src([`${localBaseline}${subfolder ? `${subfolder}/` : ``}**`, `!${localBaseline}${subfolder}/**/*.delete`], { base: localBaseline, read: false })
|
||||
.pipe(gulp.dest(refBaseline));
|
||||
}
|
||||
|
||||
function baselineDelete(subfolder = "") {
|
||||
return gulp.src([`${localBaseline}${subfolder ? `${subfolder}/` : ``}**/*.delete`], { base: localBaseline, read: false })
|
||||
.pipe(rm())
|
||||
.pipe(rename({ extname: "" }))
|
||||
.pipe(rm(refBaseline));
|
||||
}
|
||||
@ -1,34 +0,0 @@
|
||||
// @ts-check
|
||||
const browserify = require("browserify");
|
||||
const Vinyl = require("./vinyl");
|
||||
const { Transform } = require("stream");
|
||||
const { streamFromFile } = require("./utils");
|
||||
const { replaceContents } = require("./sourcemaps");
|
||||
|
||||
module.exports = browserifyFile;
|
||||
|
||||
/**
|
||||
* @param {import("browserify").Options} [opts]
|
||||
*/
|
||||
function browserifyFile(opts) {
|
||||
return new Transform({
|
||||
objectMode: true,
|
||||
/**
|
||||
* @param {string | Buffer | Vinyl} input
|
||||
*/
|
||||
transform(input, _, cb) {
|
||||
if (typeof input === "string" || Buffer.isBuffer(input)) return cb(new Error("Only Vinyl files are supported."));
|
||||
try {
|
||||
browserify(Object.assign({}, opts, { debug: !!input.sourceMap, basedir: input.base }))
|
||||
.add(streamFromFile(input), { file: input.path, basedir: input.base })
|
||||
.bundle((err, contents) => {
|
||||
if (err) return cb(err);
|
||||
cb(null, replaceContents(input, contents));
|
||||
});
|
||||
}
|
||||
catch (e) {
|
||||
cb(e);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
@ -1,5 +0,0 @@
|
||||
// @ts-check
|
||||
|
||||
// this just fixes the incorrect types for chalk :/
|
||||
const chalk = /**@type {import("chalk").Chalk}*/(require("chalk").default || require("chalk"));
|
||||
module.exports = chalk;
|
||||
@ -1,19 +0,0 @@
|
||||
// @ts-check
|
||||
const replace = require("./replace");
|
||||
|
||||
module.exports = exports = convertConstEnum;
|
||||
|
||||
/**
|
||||
* This regexp exists to capture our const enums and replace them with normal enums in our public API
|
||||
* - this is fine since we compile with preserveConstEnums, and ensures our consumers are not locked
|
||||
* to the TS version they compile with.
|
||||
*/
|
||||
const constEnumCaptureRegexp = /^(\s*)(export )?const enum (\S+) {(\s*)$/gm;
|
||||
const constEnumReplacement = "$1$2enum $3 {$4";
|
||||
|
||||
/**
|
||||
* Converts `const enum` declarations in a .d.ts file into non-const `enum` declarations.
|
||||
*/
|
||||
function convertConstEnum() {
|
||||
return replace(constEnumCaptureRegexp, constEnumReplacement);
|
||||
}
|
||||
@ -1,31 +0,0 @@
|
||||
// @ts-check
|
||||
module.exports = debounce;
|
||||
|
||||
/**
|
||||
* @param {() => void} cb
|
||||
* @param {number} timeout
|
||||
* @param {DebounceOptions} [opts]
|
||||
*
|
||||
* @typedef DebounceOptions
|
||||
* @property {number} [max]
|
||||
*/
|
||||
function debounce(cb, timeout, opts = {}) {
|
||||
if (timeout < 10) timeout = 10;
|
||||
let max = opts.max || 10;
|
||||
if (max < timeout) max = timeout;
|
||||
let minTimer;
|
||||
let maxTimer;
|
||||
return trigger;
|
||||
|
||||
function trigger() {
|
||||
if (max > timeout && !maxTimer) maxTimer = setTimeout(done, max);
|
||||
if (minTimer) clearTimeout(minTimer);
|
||||
minTimer = setTimeout(done, timeout);
|
||||
}
|
||||
|
||||
function done() {
|
||||
if (maxTimer) maxTimer = void clearTimeout(maxTimer);
|
||||
if (minTimer) minTimer = void clearTimeout(minTimer);
|
||||
cb();
|
||||
}
|
||||
}
|
||||
@ -1,49 +0,0 @@
|
||||
// @ts-check
|
||||
const ts = require("../../lib/typescript");
|
||||
const log = require("fancy-log"); // was `require("gulp-util").log (see https://github.com/gulpjs/gulp-util)
|
||||
|
||||
/** @type {FormatDiagnosticsHost} */
|
||||
const formatDiagnosticsHost = exports.formatDiagnosticsHost = {
|
||||
getCanonicalFileName: fileName => fileName,
|
||||
getCurrentDirectory: () => process.cwd(),
|
||||
getNewLine: () => ts.sys.newLine
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {Diagnostic[]} diagnostics
|
||||
* @param {{ cwd?: string, pretty?: boolean }} [options]
|
||||
*/
|
||||
function formatDiagnostics(diagnostics, options) {
|
||||
return options && options.pretty
|
||||
? ts.formatDiagnosticsWithColorAndContext(diagnostics, getFormatDiagnosticsHost(options && options.cwd))
|
||||
: ts.formatDiagnostics(diagnostics, getFormatDiagnosticsHost(options && options.cwd));
|
||||
}
|
||||
exports.formatDiagnostics = formatDiagnostics;
|
||||
|
||||
/**
|
||||
* @param {Diagnostic[]} diagnostics
|
||||
* @param {{ cwd?: string }} [options]
|
||||
*/
|
||||
function reportDiagnostics(diagnostics, options) {
|
||||
log(formatDiagnostics(diagnostics, { cwd: options && options.cwd, pretty: process.stdout.isTTY }));
|
||||
}
|
||||
exports.reportDiagnostics = reportDiagnostics;
|
||||
|
||||
/**
|
||||
* @param {string | undefined} cwd
|
||||
* @returns {FormatDiagnosticsHost}
|
||||
*/
|
||||
function getFormatDiagnosticsHost(cwd) {
|
||||
if (!cwd || cwd === process.cwd()) return formatDiagnosticsHost;
|
||||
return {
|
||||
getCanonicalFileName: formatDiagnosticsHost.getCanonicalFileName,
|
||||
getCurrentDirectory: () => cwd,
|
||||
getNewLine: formatDiagnosticsHost.getNewLine
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* @typedef {import("../../lib/typescript").FormatDiagnosticsHost} FormatDiagnosticsHost
|
||||
* @typedef {import("../../lib/typescript").Diagnostic} Diagnostic
|
||||
*/
|
||||
void 0;
|
||||
@ -1,58 +0,0 @@
|
||||
// @ts-check
|
||||
const cp = require("child_process");
|
||||
const log = require("fancy-log"); // was `require("gulp-util").log (see https://github.com/gulpjs/gulp-util)
|
||||
const isWin = /^win/.test(process.platform);
|
||||
const chalk = require("./chalk");
|
||||
const { CancellationToken, CancelError } = require("prex");
|
||||
|
||||
module.exports = exec;
|
||||
|
||||
/**
|
||||
* Executes the provided command once with the supplied arguments.
|
||||
* @param {string} cmd
|
||||
* @param {string[]} args
|
||||
* @param {ExecOptions} [options]
|
||||
*
|
||||
* @typedef ExecOptions
|
||||
* @property {boolean} [ignoreExitCode]
|
||||
* @property {import("prex").CancellationToken} [cancelToken]
|
||||
*/
|
||||
function exec(cmd, args, options = {}) {
|
||||
return /**@type {Promise<{exitCode: number}>}*/(new Promise((resolve, reject) => {
|
||||
const { ignoreExitCode, cancelToken = CancellationToken.none } = options;
|
||||
cancelToken.throwIfCancellationRequested();
|
||||
|
||||
// TODO (weswig): Update child_process types to add windowsVerbatimArguments to the type definition
|
||||
const subshellFlag = isWin ? "/c" : "-c";
|
||||
const command = isWin ? [possiblyQuote(cmd), ...args] : [`${cmd} ${args.join(" ")}`];
|
||||
|
||||
log(`> ${chalk.green(cmd)} ${args.join(" ")}`);
|
||||
const proc = cp.spawn(isWin ? "cmd" : "/bin/sh", [subshellFlag, ...command], { stdio: "inherit", windowsVerbatimArguments: true });
|
||||
const registration = cancelToken.register(() => {
|
||||
log(`${chalk.red("killing")} '${chalk.green(cmd)} ${args.join(" ")}'...`);
|
||||
proc.kill("SIGINT");
|
||||
proc.kill("SIGTERM");
|
||||
reject(new CancelError());
|
||||
});
|
||||
proc.on("exit", exitCode => {
|
||||
registration.unregister();
|
||||
if (exitCode === 0 || ignoreExitCode) {
|
||||
resolve({ exitCode });
|
||||
}
|
||||
else {
|
||||
reject(new Error(`Process exited with code: ${exitCode}`));
|
||||
}
|
||||
});
|
||||
proc.on("error", error => {
|
||||
registration.unregister();
|
||||
reject(error);
|
||||
});
|
||||
}));
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} cmd
|
||||
*/
|
||||
function possiblyQuote(cmd) {
|
||||
return cmd.indexOf(" ") >= 0 ? `"${cmd}"` : cmd;
|
||||
}
|
||||
@ -1,46 +0,0 @@
|
||||
// @ts-check
|
||||
module.exports = finished;
|
||||
|
||||
/**
|
||||
* @param {NodeJS.ReadableStream | NodeJS.WritableStream} stream
|
||||
* @returns {Promise<void>}
|
||||
*/
|
||||
function finished(stream) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const readable = "readable" in stream && stream.readable;
|
||||
const writable = "writable" in stream && stream.writable;
|
||||
|
||||
let countdown = 0;
|
||||
const cleanup = () => {
|
||||
if (readable) stream.removeListener("end", signal);
|
||||
if (writable) stream.removeListener("finish", signal);
|
||||
stream.removeListener("error", onerror);
|
||||
};
|
||||
const signal = () => {
|
||||
if (countdown > 0) {
|
||||
countdown--;
|
||||
if (countdown === 0) {
|
||||
cleanup();
|
||||
resolve();
|
||||
}
|
||||
}
|
||||
};
|
||||
const onerror = (error) => {
|
||||
if (countdown > 0) {
|
||||
countdown = 0;
|
||||
cleanup();
|
||||
reject(error);
|
||||
}
|
||||
};
|
||||
stream.once("error", onerror);
|
||||
if (readable) {
|
||||
countdown++;
|
||||
stream.once("end", signal);
|
||||
}
|
||||
if (writable) {
|
||||
countdown++;
|
||||
stream.once("finish", signal);
|
||||
}
|
||||
if (countdown === 0) signal();
|
||||
});
|
||||
}
|
||||
@ -1,12 +0,0 @@
|
||||
// @ts-check
|
||||
const log = require("fancy-log"); // was `require("gulp-util").log (see https://github.com/gulpjs/gulp-util)
|
||||
module.exports = getDiffTool;
|
||||
|
||||
function getDiffTool() {
|
||||
const program = process.env.DIFF;
|
||||
if (!program) {
|
||||
log.warn("Add the 'DIFF' environment variable to the path of the program you want to use.");
|
||||
process.exit(1);
|
||||
}
|
||||
return program;
|
||||
}
|
||||
@ -1,23 +0,0 @@
|
||||
// @ts-check
|
||||
const { lstatSync, readdirSync } = require("fs");
|
||||
const { join } = require("path");
|
||||
|
||||
/**
|
||||
* Find the size of a directory recursively.
|
||||
* Symbolic links can cause a loop.
|
||||
* @param {string} root
|
||||
* @returns {number} bytes
|
||||
*/
|
||||
function getDirSize(root) {
|
||||
const stats = lstatSync(root);
|
||||
|
||||
if (!stats.isDirectory()) {
|
||||
return stats.size;
|
||||
}
|
||||
|
||||
return readdirSync(root)
|
||||
.map(file => getDirSize(join(root, file)))
|
||||
.reduce((acc, num) => acc + num, 0);
|
||||
}
|
||||
|
||||
module.exports = getDirSize;
|
||||
@ -1,149 +0,0 @@
|
||||
// @ts-check
|
||||
const path = require("path");
|
||||
const child_process = require("child_process");
|
||||
const fs = require("fs");
|
||||
const tsc = require("gulp-typescript");
|
||||
const Vinyl = require("vinyl");
|
||||
const { Duplex, Readable } = require("stream");
|
||||
const protocol = require("./protocol");
|
||||
|
||||
/**
|
||||
* @param {string | undefined} tsConfigFileName
|
||||
* @param {tsc.Settings} settings
|
||||
* @param {CreateProjectOptions} options
|
||||
*
|
||||
* @typedef CreateProjectOptions
|
||||
* @property {string} [typescript]
|
||||
* @property {boolean} [parse]
|
||||
*/
|
||||
function createProject(tsConfigFileName, settings, options) {
|
||||
settings = Object.assign({}, settings);
|
||||
options = Object.assign({}, options);
|
||||
if (settings.typescript) throw new Error();
|
||||
|
||||
const localSettings = Object.assign({}, settings);
|
||||
if (options.typescript) {
|
||||
options.typescript = path.resolve(options.typescript);
|
||||
localSettings.typescript = require(options.typescript);
|
||||
}
|
||||
|
||||
const project = tsConfigFileName === undefined ? tsc.createProject(localSettings) : tsc.createProject(tsConfigFileName, localSettings);
|
||||
const wrappedProject = /** @type {tsc.Project} */((reporter = tsc.reporter.defaultReporter()) => {
|
||||
const ts = project.typescript;
|
||||
const proc = child_process.fork(require.resolve("./worker.js"), [], {
|
||||
// Prevent errors when debugging gulpfile due to the same debug port being passed to forked children.
|
||||
execArgv: []
|
||||
});
|
||||
/** @type {Map<string, import("vinyl")>} */
|
||||
const inputs = new Map();
|
||||
/** @type {Map<string, *>} */
|
||||
const sourceFiles = new Map();
|
||||
/** @type {protocol.SourceFileHost & protocol.VinylHost} */
|
||||
const host = {
|
||||
getVinyl(path) { return inputs.get(path); },
|
||||
getSourceFile(fileName) { return sourceFiles.get(fileName); },
|
||||
createSourceFile(fileName, text, languageVersion) {
|
||||
if (text === undefined) {
|
||||
text = fs.readFileSync(fileName, "utf8");
|
||||
}
|
||||
|
||||
/** @type {protocol.SourceFile} */
|
||||
let file;
|
||||
if (options.parse) {
|
||||
file = ts.createSourceFile(fileName, text, languageVersion, /*setParentNodes*/ true);
|
||||
}
|
||||
else {
|
||||
// NOTE: the built-in reporters in gulp-typescript don't actually need a full
|
||||
// source file, so save time by faking one unless requested.
|
||||
file = /**@type {protocol.SourceFile}*/({
|
||||
pos: 0,
|
||||
end: text.length,
|
||||
kind: ts.SyntaxKind.SourceFile,
|
||||
fileName,
|
||||
text,
|
||||
languageVersion,
|
||||
statements: /**@type {*} */([]),
|
||||
endOfFileToken: { pos: text.length, end: text.length, kind: ts.SyntaxKind.EndOfFileToken },
|
||||
amdDependencies: /**@type {*} */([]),
|
||||
referencedFiles: /**@type {*} */([]),
|
||||
typeReferenceDirectives: /**@type {*} */([]),
|
||||
libReferenceDirectives: /**@type {*} */([]),
|
||||
languageVariant: ts.LanguageVariant.Standard,
|
||||
isDeclarationFile: /\.d\.ts$/.test(fileName),
|
||||
hasNoDefaultLib: /[\\/]lib\.[^\\/]+\.d\.ts$/.test(fileName)
|
||||
});
|
||||
}
|
||||
sourceFiles.set(fileName, file);
|
||||
return file;
|
||||
}
|
||||
};
|
||||
/** @type {Duplex & { js?: Readable, dts?: Readable }} */
|
||||
const compileStream = new Duplex({
|
||||
objectMode: true,
|
||||
read() {},
|
||||
/** @param {*} file */
|
||||
write(file, _encoding, callback) {
|
||||
inputs.set(file.path, file);
|
||||
proc.send(protocol.message.write(file));
|
||||
callback();
|
||||
},
|
||||
final(callback) {
|
||||
proc.send(protocol.message.final());
|
||||
callback();
|
||||
}
|
||||
});
|
||||
const jsStream = compileStream.js = new Readable({
|
||||
objectMode: true,
|
||||
read() {}
|
||||
});
|
||||
const dtsStream = compileStream.dts = new Readable({
|
||||
objectMode: true,
|
||||
read() {}
|
||||
});
|
||||
proc.send(protocol.message.createProject(tsConfigFileName, settings, options));
|
||||
proc.on("message", (/**@type {protocol.WorkerMessage}*/ message) => {
|
||||
switch (message.method) {
|
||||
case "write": {
|
||||
const file = protocol.vinylFromJson(message.params);
|
||||
compileStream.push(file);
|
||||
if (file.path.endsWith(".d.ts")) {
|
||||
dtsStream.push(file);
|
||||
}
|
||||
else {
|
||||
jsStream.push(file);
|
||||
}
|
||||
break;
|
||||
}
|
||||
case "final": {
|
||||
compileStream.push(null);
|
||||
jsStream.push(null);
|
||||
dtsStream.push(null);
|
||||
proc.kill(); // TODO(rbuckton): pool workers? may not be feasible due to gulp-typescript holding onto memory
|
||||
break;
|
||||
}
|
||||
case "error": {
|
||||
const error = protocol.errorFromJson(message.params);
|
||||
compileStream.emit("error", error);
|
||||
proc.kill(); // TODO(rbuckton): pool workers? may not be feasible due to gulp-typescript holding onto memory
|
||||
break;
|
||||
}
|
||||
case "reporter.error": {
|
||||
if (reporter.error) {
|
||||
const error = protocol.typeScriptErrorFromJson(message.params, host);
|
||||
reporter.error(error, project.typescript);
|
||||
}
|
||||
break;
|
||||
}
|
||||
case "reporter.finish": {
|
||||
if (reporter.finish) {
|
||||
reporter.finish(message.params);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
return /** @type {*} */(compileStream);
|
||||
});
|
||||
return Object.assign(wrappedProject, project);
|
||||
}
|
||||
|
||||
exports.createProject = createProject;
|
||||
@ -1,281 +0,0 @@
|
||||
// @ts-check
|
||||
const Vinyl = require("vinyl");
|
||||
|
||||
/**
|
||||
* @param {File} file
|
||||
* @returns {*}
|
||||
*/
|
||||
function vinylToJson(file) {
|
||||
if (file.isStream()) throw new TypeError("Streams not supported.");
|
||||
return {
|
||||
path: file.path,
|
||||
cwd: file.cwd,
|
||||
base: file.base,
|
||||
contents: file.isBuffer() ? file.contents.toString("utf8") : undefined,
|
||||
sourceMap: file.sourceMap
|
||||
};
|
||||
}
|
||||
exports.vinylToJson = vinylToJson;
|
||||
|
||||
/**
|
||||
* @param {*} json
|
||||
* @returns {File}
|
||||
*/
|
||||
function vinylFromJson(json) {
|
||||
return new Vinyl({
|
||||
path: json.path,
|
||||
cwd: json.cwd,
|
||||
base: json.base,
|
||||
contents: typeof json.contents === "string" ? Buffer.from(json.contents, "utf8") : undefined,
|
||||
sourceMap: json.sourceMap
|
||||
});
|
||||
}
|
||||
exports.vinylFromJson = vinylFromJson;
|
||||
|
||||
/**
|
||||
* @param {Error} error
|
||||
* @returns {*}
|
||||
*/
|
||||
function errorToJson(error) {
|
||||
return {
|
||||
name: error.name,
|
||||
message: error.message,
|
||||
stack: error.stack
|
||||
};
|
||||
}
|
||||
exports.errorToJson = errorToJson;
|
||||
|
||||
/**
|
||||
* @param {*} json
|
||||
* @returns {Error}
|
||||
*/
|
||||
function errorFromJson(json) {
|
||||
const error = new Error();
|
||||
error.name = json.name;
|
||||
error.message = json.message;
|
||||
error.stack = json.stack;
|
||||
return error;
|
||||
}
|
||||
exports.errorFromJson = errorFromJson;
|
||||
|
||||
/**
|
||||
* @param {TypeScriptError} error
|
||||
* @returns {*}
|
||||
*/
|
||||
function typeScriptErrorToJson(error) {
|
||||
return Object.assign({}, errorToJson(error), {
|
||||
fullFilename: error.fullFilename,
|
||||
relativeFilename: error.relativeFilename,
|
||||
file: error.file && { path: error.file.path },
|
||||
tsFile: error.tsFile && sourceFileToJson(error.tsFile),
|
||||
diagnostic: diagnosticToJson(error.diagnostic),
|
||||
startPosition: error.startPosition,
|
||||
endPosition: error.endPosition
|
||||
});
|
||||
}
|
||||
exports.typeScriptErrorToJson = typeScriptErrorToJson;
|
||||
|
||||
/**
|
||||
* @param {*} json
|
||||
* @param {SourceFileHost & VinylHost} host
|
||||
* @returns {TypeScriptError}
|
||||
*/
|
||||
function typeScriptErrorFromJson(json, host) {
|
||||
const error = /**@type {TypeScriptError}*/(errorFromJson(json));
|
||||
error.fullFilename = json.fullFilename;
|
||||
error.relativeFilename = json.relativeFilename;
|
||||
error.file = json.file && host.getVinyl(json.file.path);
|
||||
error.tsFile = json.tsFile && sourceFileFromJson(json.tsFile, host);
|
||||
error.diagnostic = diagnosticFromJson(json.diagnostic, host);
|
||||
error.startPosition = json.startPosition;
|
||||
error.endPosition = json.endPosition;
|
||||
return error;
|
||||
}
|
||||
exports.typeScriptErrorFromJson = typeScriptErrorFromJson;
|
||||
|
||||
/**
|
||||
* @param {SourceFile} file
|
||||
* @returns {*}
|
||||
*/
|
||||
function sourceFileToJson(file) {
|
||||
return {
|
||||
fileName: file.fileName,
|
||||
text: file.text,
|
||||
languageVersion: file.languageVersion
|
||||
};
|
||||
}
|
||||
exports.sourceFileToJson = sourceFileToJson;
|
||||
|
||||
/**
|
||||
* @param {*} json
|
||||
* @param {SourceFileHost} host
|
||||
*/
|
||||
function sourceFileFromJson(json, host) {
|
||||
return host.getSourceFile(json.fileName)
|
||||
|| host.createSourceFile(json.fileName, json.text, json.languageVersion);
|
||||
}
|
||||
exports.sourceFileFromJson = sourceFileFromJson;
|
||||
|
||||
/**
|
||||
* @param {Diagnostic} diagnostic
|
||||
* @returns {*}
|
||||
*/
|
||||
function diagnosticToJson(diagnostic) {
|
||||
return Object.assign({}, diagnosticRelatedInformationToJson(diagnostic), {
|
||||
category: diagnostic.category,
|
||||
code: diagnostic.code,
|
||||
source: diagnostic.source,
|
||||
relatedInformation: diagnostic.relatedInformation && diagnostic.relatedInformation.map(diagnosticRelatedInformationToJson)
|
||||
});
|
||||
}
|
||||
exports.diagnosticToJson = diagnosticToJson;
|
||||
|
||||
/**
|
||||
* @param {*} json
|
||||
* @param {SourceFileHost} host
|
||||
* @returns {Diagnostic}
|
||||
*/
|
||||
function diagnosticFromJson(json, host) {
|
||||
return Object.assign({}, diagnosticRelatedInformationFromJson(json, host), {
|
||||
category: json.category,
|
||||
code: json.code,
|
||||
source: json.source,
|
||||
relatedInformation: json.relatedInformation && json.relatedInformation.map(json => diagnosticRelatedInformationFromJson(json, host))
|
||||
});
|
||||
}
|
||||
exports.diagnosticFromJson = diagnosticFromJson;
|
||||
|
||||
/**
|
||||
* @param {DiagnosticRelatedInformation} diagnostic
|
||||
* @returns {*}
|
||||
*/
|
||||
function diagnosticRelatedInformationToJson(diagnostic) {
|
||||
return {
|
||||
file: diagnostic.file && { fileName: diagnostic.file.fileName },
|
||||
start: diagnostic.start,
|
||||
length: diagnostic.length,
|
||||
messageText: diagnostic.messageText
|
||||
};
|
||||
}
|
||||
exports.diagnosticRelatedInformationToJson = diagnosticRelatedInformationToJson;
|
||||
|
||||
/**
|
||||
* @param {*} json
|
||||
* @param {SourceFileHost} host
|
||||
* @returns {DiagnosticRelatedInformation}
|
||||
*/
|
||||
function diagnosticRelatedInformationFromJson(json, host) {
|
||||
return {
|
||||
file: json.file && sourceFileFromJson(json.file, host),
|
||||
start: json.start,
|
||||
length: json.length,
|
||||
messageText: json.messageText,
|
||||
category: json.category,
|
||||
code: json.code
|
||||
};
|
||||
}
|
||||
exports.diagnosticRelatedInformationFromJson = diagnosticRelatedInformationFromJson;
|
||||
|
||||
exports.message = {};
|
||||
|
||||
/**
|
||||
* @param {string | undefined} tsConfigFileName
|
||||
* @param {import("gulp-typescript").Settings} settings
|
||||
* @param {Object} options
|
||||
* @param {string} [options.typescript]
|
||||
* @returns {CreateProjectMessage}
|
||||
*
|
||||
* @typedef CreateProjectMessage
|
||||
* @property {"createProject"} method
|
||||
* @property {CreateProjectParams} params
|
||||
*
|
||||
* @typedef CreateProjectParams
|
||||
* @property {string | undefined} tsConfigFileName
|
||||
* @property {import("gulp-typescript").Settings} settings
|
||||
* @property {CreateProjectOptions} options
|
||||
*
|
||||
* @typedef CreateProjectOptions
|
||||
* @property {string} [typescript]
|
||||
*/
|
||||
exports.message.createProject = function(tsConfigFileName, settings, options) {
|
||||
return { method: "createProject", params: { tsConfigFileName, settings, options } };
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {File} file
|
||||
* @returns {WriteMessage}
|
||||
*
|
||||
* @typedef WriteMessage
|
||||
* @property {"write"} method
|
||||
* @property {*} params
|
||||
*/
|
||||
exports.message.write = function(file) {
|
||||
return { method: "write", params: vinylToJson(file) };
|
||||
};
|
||||
|
||||
/**
|
||||
* @returns {FinalMessage}
|
||||
*
|
||||
* @typedef FinalMessage
|
||||
* @property {"final"} method
|
||||
*/
|
||||
exports.message.final = function() {
|
||||
return { method: "final" };
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {Error} error
|
||||
* @returns {ErrorMessage}
|
||||
*
|
||||
* @typedef ErrorMessage
|
||||
* @property {"error"} method
|
||||
* @property {*} params
|
||||
*/
|
||||
exports.message.error = function(error) {
|
||||
return { method: "error", params: errorToJson(error) };
|
||||
};
|
||||
|
||||
exports.message.reporter = {};
|
||||
|
||||
/**
|
||||
* @param {TypeScriptError} error
|
||||
* @returns {reporter.ErrorMessage}
|
||||
*
|
||||
* @typedef reporter.ErrorMessage
|
||||
* @property {"reporter.error"} method
|
||||
* @property {*} params
|
||||
*/
|
||||
exports.message.reporter.error = function(error) {
|
||||
return { method: "reporter.error", params: typeScriptErrorToJson(error) };
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {*} results
|
||||
* @returns {reporter.FinishMessage}
|
||||
*
|
||||
* @typedef reporter.FinishMessage
|
||||
* @property {"reporter.finish"} method
|
||||
* @property {*} params
|
||||
*/
|
||||
exports.message.reporter.finish = function(results) {
|
||||
return { method: "reporter.finish", params: results };
|
||||
};
|
||||
|
||||
/**
|
||||
* @typedef {import("vinyl")} File
|
||||
* @typedef {typeof import("typescript")} TypeScriptModule
|
||||
* @typedef {import("typescript").SourceFile} SourceFile
|
||||
* @typedef {import("typescript").Diagnostic} Diagnostic
|
||||
* @typedef {import("typescript").DiagnosticRelatedInformation} DiagnosticRelatedInformation
|
||||
* @typedef {import("gulp-typescript").reporter.TypeScriptError} TypeScriptError
|
||||
* @typedef {WriteMessage | FinalMessage | CreateProjectMessage} HostMessage
|
||||
* @typedef {WriteMessage | FinalMessage | ErrorMessage | reporter.ErrorMessage | reporter.FinishMessage} WorkerMessage
|
||||
*
|
||||
* @typedef SourceFileHost
|
||||
* @property {(fileName: string) => SourceFile | undefined} getSourceFile
|
||||
* @property {(fileName: string, text: string, languageVersion: number) => SourceFile} createSourceFile
|
||||
*
|
||||
* @typedef VinylHost
|
||||
* @property {(path: string) => File | undefined} getVinyl
|
||||
*/
|
||||
void 0;
|
||||
@ -1,79 +0,0 @@
|
||||
// @ts-check
|
||||
const fs = require("fs");
|
||||
const tsc = require("gulp-typescript");
|
||||
const { Readable, Writable } = require("stream");
|
||||
const protocol = require("./protocol");
|
||||
|
||||
/** @type {tsc.Project} */
|
||||
let project;
|
||||
|
||||
/** @type {Readable} */
|
||||
let inputStream;
|
||||
|
||||
/** @type {Writable} */
|
||||
let outputStream;
|
||||
|
||||
/** @type {tsc.CompileStream} */
|
||||
let compileStream;
|
||||
|
||||
process.on("message", (/**@type {protocol.HostMessage}*/ message) => {
|
||||
try {
|
||||
switch (message.method) {
|
||||
case "createProject": {
|
||||
const { tsConfigFileName, settings, options } = message.params;
|
||||
if (options.typescript) {
|
||||
settings.typescript = require(options.typescript);
|
||||
}
|
||||
|
||||
project = tsConfigFileName === undefined
|
||||
? tsc.createProject(settings)
|
||||
: tsc.createProject(tsConfigFileName, settings);
|
||||
|
||||
inputStream = new Readable({
|
||||
objectMode: true,
|
||||
read() {}
|
||||
});
|
||||
|
||||
outputStream = new Writable({
|
||||
objectMode: true,
|
||||
/**
|
||||
* @param {*} file
|
||||
*/
|
||||
write(file, _, callback) {
|
||||
process.send(protocol.message.write(file));
|
||||
callback();
|
||||
},
|
||||
final(callback) {
|
||||
process.send(protocol.message.final());
|
||||
callback();
|
||||
}
|
||||
});
|
||||
compileStream = project({
|
||||
error(error) { process.send(protocol.message.reporter.error(error)); },
|
||||
finish(results) { process.send(protocol.message.reporter.finish(results)); }
|
||||
});
|
||||
compileStream.on("error", error => {
|
||||
process.send(protocol.message.error(error));
|
||||
});
|
||||
outputStream.on("error", () => {
|
||||
/* do nothing */
|
||||
});
|
||||
inputStream.pipe(compileStream).pipe(outputStream);
|
||||
break;
|
||||
}
|
||||
case "write": {
|
||||
const file = protocol.vinylFromJson(message.params);
|
||||
if (!file.isBuffer()) file.contents = fs.readFileSync(file.path);
|
||||
inputStream.push(file);
|
||||
break;
|
||||
}
|
||||
case "final": {
|
||||
inputStream.push(null);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (e) {
|
||||
process.send(protocol.message.error(e));
|
||||
}
|
||||
});
|
||||
@ -1,8 +0,0 @@
|
||||
// @ts-check
|
||||
/**
|
||||
* @typedef {import("gulp").Gulp} Gulp
|
||||
* @typedef {import("gulp-help").GulpHelp} GulpHelp
|
||||
* @typedef {GulpHelp & { Gulp: new () => Gulp }} DotGulpModule
|
||||
* @type {DotGulpModule}
|
||||
*/
|
||||
module.exports = require("gulp-help")(require("gulp"));
|
||||
@ -1,30 +0,0 @@
|
||||
// @ts-check
|
||||
const readJson = require("./readJson");
|
||||
const path = require("path");
|
||||
const gulp = require("./gulp");
|
||||
const newer = require("gulp-newer");
|
||||
const concat = require("gulp-concat");
|
||||
const merge2 = require("merge2");
|
||||
|
||||
/** @type {{ libs: string[], paths?: Record<string, string>, sources?: Record<string, string[]> }} */
|
||||
const libraries = readJson("./src/lib/libs.json");
|
||||
const libs = libraries.libs.map(lib => {
|
||||
const relativeSources = ["header.d.ts"].concat(libraries.sources && libraries.sources[lib] || [lib + ".d.ts"]);
|
||||
const relativeTarget = libraries.paths && libraries.paths[lib] || ("lib." + lib + ".d.ts");
|
||||
const sources = relativeSources.map(s => path.posix.join("src/lib", s));
|
||||
const target = `built/local/${relativeTarget}`;
|
||||
return { target, relativeTarget, sources };
|
||||
});
|
||||
exports.libraryTargets = libs.map(lib => lib.target);
|
||||
|
||||
/**
|
||||
* @param {string[]} prepends
|
||||
*/
|
||||
function generateLibs(prepends) {
|
||||
return merge2(libs.map(({ sources, target, relativeTarget }) =>
|
||||
gulp.src(prepends.concat(sources))
|
||||
.pipe(newer(target))
|
||||
.pipe(concat(relativeTarget, { newLine: "\n\n" }))
|
||||
.pipe(gulp.dest("built/local"))));
|
||||
}
|
||||
exports.generateLibs = generateLibs;
|
||||
@ -1,14 +0,0 @@
|
||||
// @ts-check
|
||||
const mkdirp = require("mkdirp");
|
||||
|
||||
module.exports = exports = mkdirpAsync;
|
||||
|
||||
/**
|
||||
* @param {string} dir
|
||||
* @param {mkdirp.Mode | mkdirp.Options} [opts]
|
||||
*/
|
||||
function mkdirpAsync(dir, opts) {
|
||||
return new Promise((resolve, reject) => mkdirp(dir, opts, (err, made) => err ? reject(err) : resolve(made)));
|
||||
}
|
||||
|
||||
exports.sync = mkdirp.sync;
|
||||
@ -1,72 +0,0 @@
|
||||
// @ts-check
|
||||
const fs = require("fs");
|
||||
|
||||
module.exports = needsUpdate;
|
||||
|
||||
/**
|
||||
* @param {string | string[]} source
|
||||
* @param {string | string[]} dest
|
||||
* @returns {boolean}
|
||||
*/
|
||||
function needsUpdate(source, dest) {
|
||||
if (typeof source === "string" && typeof dest === "string") {
|
||||
if (fs.existsSync(dest)) {
|
||||
const {mtime: outTime} = fs.statSync(dest);
|
||||
const {mtime: inTime} = fs.statSync(source);
|
||||
if (+inTime <= +outTime) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
else if (typeof source === "string" && typeof dest !== "string") {
|
||||
const {mtime: inTime} = fs.statSync(source);
|
||||
for (const filepath of dest) {
|
||||
if (fs.existsSync(filepath)) {
|
||||
const {mtime: outTime} = fs.statSync(filepath);
|
||||
if (+inTime > +outTime) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
else {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
else if (typeof source !== "string" && typeof dest === "string") {
|
||||
if (fs.existsSync(dest)) {
|
||||
const {mtime: outTime} = fs.statSync(dest);
|
||||
for (const filepath of source) {
|
||||
if (fs.existsSync(filepath)) {
|
||||
const {mtime: inTime} = fs.statSync(filepath);
|
||||
if (+inTime > +outTime) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
else {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
else if (typeof source !== "string" && typeof dest !== "string") {
|
||||
for (let i = 0; i < source.length; i++) {
|
||||
if (!dest[i]) {
|
||||
continue;
|
||||
}
|
||||
if (fs.existsSync(dest[i])) {
|
||||
const {mtime: outTime} = fs.statSync(dest[i]);
|
||||
const {mtime: inTime} = fs.statSync(source[i]);
|
||||
if (+inTime > +outTime) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
else {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
@ -4,7 +4,7 @@ const os = require("os");
|
||||
|
||||
/** @type {CommandLineOptions} */
|
||||
module.exports = minimist(process.argv.slice(2), {
|
||||
boolean: ["debug", "dirty", "inspect", "light", "colors", "lint", "lkg", "soft", "fix", "failed", "keepFailed"],
|
||||
boolean: ["debug", "dirty", "inspect", "light", "colors", "lint", "lkg", "soft", "fix", "failed", "keepFailed", "force", "built"],
|
||||
string: ["browser", "tests", "host", "reporter", "stackTraceLimit", "timeout"],
|
||||
alias: {
|
||||
"b": "browser",
|
||||
@ -15,7 +15,7 @@ module.exports = minimist(process.argv.slice(2), {
|
||||
"r": "reporter",
|
||||
"c": "colors", "color": "colors",
|
||||
"w": "workers",
|
||||
"f": "fix",
|
||||
"f": "fix"
|
||||
},
|
||||
default: {
|
||||
soft: false,
|
||||
@ -34,11 +34,16 @@ module.exports = minimist(process.argv.slice(2), {
|
||||
workers: process.env.workerCount || os.cpus().length,
|
||||
failed: false,
|
||||
keepFailed: false,
|
||||
lkg: false,
|
||||
dirty: false
|
||||
lkg: true,
|
||||
dirty: false,
|
||||
built: false
|
||||
}
|
||||
});
|
||||
|
||||
if (module.exports.built) {
|
||||
module.exports.lkg = false;
|
||||
}
|
||||
|
||||
/**
|
||||
* @typedef TypedOptions
|
||||
* @property {boolean} debug
|
||||
@ -48,6 +53,7 @@ module.exports = minimist(process.argv.slice(2), {
|
||||
* @property {boolean} colors
|
||||
* @property {boolean} lint
|
||||
* @property {boolean} lkg
|
||||
* @property {boolean} built
|
||||
* @property {boolean} soft
|
||||
* @property {boolean} fix
|
||||
* @property {string} browser
|
||||
|
||||
@ -1,20 +1,18 @@
|
||||
// @ts-check
|
||||
const stream = require("stream");
|
||||
const Vinyl = require("./vinyl");
|
||||
const Vinyl = require("vinyl");
|
||||
const ts = require("../../lib/typescript");
|
||||
const fs = require("fs");
|
||||
const { base64VLQFormatEncode } = require("./sourcemaps");
|
||||
|
||||
module.exports = exports = prepend;
|
||||
|
||||
/**
|
||||
* @param {string | ((file: Vinyl) => string)} data
|
||||
* @param {string | ((file: import("vinyl")) => string)} data
|
||||
*/
|
||||
function prepend(data) {
|
||||
return new stream.Transform({
|
||||
objectMode: true,
|
||||
/**
|
||||
* @param {string | Buffer | Vinyl} input
|
||||
* @param {string | Buffer | import("vinyl")} input
|
||||
* @param {(error: Error, data?: any) => void} cb
|
||||
*/
|
||||
transform(input, _, cb) {
|
||||
@ -56,11 +54,11 @@ function prepend(data) {
|
||||
exports.prepend = prepend;
|
||||
|
||||
/**
|
||||
* @param {string | ((file: Vinyl) => string)} file
|
||||
* @param {string | ((file: import("vinyl")) => string)} file
|
||||
*/
|
||||
function prependFile(file) {
|
||||
const data = typeof file === "string" ? fs.readFileSync(file, "utf8") :
|
||||
vinyl => fs.readFileSync(file(vinyl), "utf8");
|
||||
return prepend(data)
|
||||
}
|
||||
exports.file = prependFile;
|
||||
exports.prependFile = prependFile;
|
||||
File diff suppressed because it is too large
Load Diff
60
scripts/build/projects.js
Normal file
60
scripts/build/projects.js
Normal file
@ -0,0 +1,60 @@
|
||||
// @ts-check
|
||||
const { exec, Debouncer } = require("./utils");
|
||||
|
||||
class ProjectQueue {
|
||||
/**
|
||||
* @param {(projects: string[], lkg: boolean, force: boolean) => Promise<any>} action
|
||||
*/
|
||||
constructor(action) {
|
||||
/** @type {{ lkg: boolean, force: boolean, projects?: string[], debouncer: Debouncer }[]} */
|
||||
this._debouncers = [];
|
||||
this._action = action;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} project
|
||||
* @param {object} options
|
||||
*/
|
||||
enqueue(project, { lkg = true, force = false } = {}) {
|
||||
let entry = this._debouncers.find(entry => entry.lkg === lkg && entry.force === force);
|
||||
if (!entry) {
|
||||
const debouncer = new Debouncer(100, async () => {
|
||||
const projects = entry.projects;
|
||||
if (projects) {
|
||||
entry.projects = undefined;
|
||||
await this._action(projects, lkg, force);
|
||||
}
|
||||
});
|
||||
this._debouncers.push(entry = { lkg, force, debouncer });
|
||||
}
|
||||
if (!entry.projects) entry.projects = [];
|
||||
entry.projects.push(project);
|
||||
return entry.debouncer.enqueue();
|
||||
}
|
||||
}
|
||||
|
||||
const projectBuilder = new ProjectQueue((projects, lkg, force) => exec(process.execPath, [lkg ? "./lib/tsc" : "./built/local/tsc", "-b", ...(force ? ["--force"] : []), ...projects], { hidePrompt: true }));
|
||||
|
||||
/**
|
||||
* @param {string} project
|
||||
* @param {object} [options]
|
||||
* @param {boolean} [options.lkg=true]
|
||||
* @param {boolean} [options.force=false]
|
||||
*/
|
||||
exports.buildProject = (project, { lkg, force } = {}) => projectBuilder.enqueue(project, { lkg, force });
|
||||
|
||||
const projectCleaner = new ProjectQueue((projects, lkg) => exec(process.execPath, [lkg ? "./lib/tsc" : "./built/local/tsc", "-b", "--clean", ...projects], { hidePrompt: true }));
|
||||
|
||||
/**
|
||||
* @param {string} project
|
||||
*/
|
||||
exports.cleanProject = (project) => projectCleaner.enqueue(project);
|
||||
|
||||
const projectWatcher = new ProjectQueue((projects) => exec(process.execPath, ["./lib/tsc", "-b", "--watch", ...projects], { hidePrompt: true }));
|
||||
|
||||
/**
|
||||
* @param {string} project
|
||||
* @param {object} [options]
|
||||
* @param {boolean} [options.lkg=true]
|
||||
*/
|
||||
exports.watchProject = (project, { lkg } = {}) => projectWatcher.enqueue(project, { lkg });
|
||||
@ -1,17 +0,0 @@
|
||||
// @ts-check
|
||||
const ts = require("../../lib/typescript");
|
||||
const fs = require("fs");
|
||||
const { reportDiagnostics } = require("./diagnostics");
|
||||
|
||||
module.exports = exports = readJson;
|
||||
|
||||
/** @param {string} jsonPath */
|
||||
function readJson(jsonPath) {
|
||||
const jsonText = fs.readFileSync(jsonPath, "utf8");
|
||||
const result = ts.parseConfigFileTextToJson(jsonPath, jsonText);
|
||||
if (result.error) {
|
||||
reportDiagnostics([result.error]);
|
||||
throw new Error("An error occurred during parse.");
|
||||
}
|
||||
return result.config;
|
||||
}
|
||||
@ -1,12 +0,0 @@
|
||||
// @ts-check
|
||||
const insert = require("gulp-insert");
|
||||
|
||||
/**
|
||||
* @param {string | RegExp} searchValue
|
||||
* @param {string | ((...args: string[]) => string)} replacer
|
||||
*/
|
||||
function replace(searchValue, replacer) {
|
||||
return insert.transform(content => content.replace(searchValue, /**@type {string}*/(replacer)));
|
||||
}
|
||||
|
||||
module.exports = replace;
|
||||
@ -1,84 +0,0 @@
|
||||
// @ts-check
|
||||
const { Duplex } = require("stream");
|
||||
const path = require("path");
|
||||
const Vinyl = require("vinyl");
|
||||
const del = require("del");
|
||||
|
||||
module.exports = rm;
|
||||
|
||||
/**
|
||||
* @param {string | ((file: File) => string) | Options} [dest]
|
||||
* @param {Options} [opts]
|
||||
*/
|
||||
function rm(dest, opts) {
|
||||
if (dest && typeof dest === "object") opts = dest, dest = undefined;
|
||||
let failed = false;
|
||||
|
||||
const cwd = path.resolve(opts && opts.cwd || process.cwd());
|
||||
|
||||
/** @type {{ file: File, deleted: boolean, promise: Promise<any>, cb: Function }[]} */
|
||||
const pending = [];
|
||||
|
||||
const processDeleted = () => {
|
||||
if (failed) return;
|
||||
while (pending.length && pending[0].deleted) {
|
||||
const { file, cb } = pending.shift();
|
||||
duplex.push(file);
|
||||
cb();
|
||||
}
|
||||
};
|
||||
|
||||
const duplex = new Duplex({
|
||||
objectMode: true,
|
||||
/**
|
||||
* @param {string|Buffer|File} file
|
||||
*/
|
||||
write(file, _, cb) {
|
||||
if (failed) return;
|
||||
if (typeof file === "string" || Buffer.isBuffer(file)) return cb(new Error("Only Vinyl files are supported."));
|
||||
const basePath = typeof dest === "string" ? path.resolve(cwd, dest) :
|
||||
typeof dest === "function" ? path.resolve(cwd, dest(file)) :
|
||||
file.base;
|
||||
const filePath = path.resolve(basePath, file.relative);
|
||||
file.cwd = cwd;
|
||||
file.base = basePath;
|
||||
file.path = filePath;
|
||||
const entry = {
|
||||
file,
|
||||
deleted: false,
|
||||
cb,
|
||||
promise: del(file.path).then(() => {
|
||||
entry.deleted = true;
|
||||
processDeleted();
|
||||
}, err => {
|
||||
failed = true;
|
||||
pending.length = 0;
|
||||
cb(err);
|
||||
})
|
||||
};
|
||||
pending.push(entry);
|
||||
},
|
||||
final(cb) {
|
||||
processDeleted();
|
||||
if (pending.length) {
|
||||
Promise
|
||||
.all(pending.map(entry => entry.promise))
|
||||
.then(() => processDeleted())
|
||||
.then(() => cb(), cb);
|
||||
return;
|
||||
}
|
||||
cb();
|
||||
},
|
||||
read() {
|
||||
}
|
||||
});
|
||||
return duplex;
|
||||
}
|
||||
|
||||
/**
|
||||
* @typedef {import("vinyl")} File
|
||||
*
|
||||
* @typedef Options
|
||||
* @property {string} [cwd]
|
||||
*/
|
||||
void 0;
|
||||
@ -1,99 +1,4 @@
|
||||
// @ts-check
|
||||
const path = require("path");
|
||||
const Vinyl = require("./vinyl");
|
||||
const convertMap = require("convert-source-map");
|
||||
const applySourceMap = require("vinyl-sourcemaps-apply");
|
||||
const through2 = require("through2");
|
||||
|
||||
/**
|
||||
* @param {Vinyl} input
|
||||
* @param {string | Buffer} contents
|
||||
* @param {string | RawSourceMap} [sourceMap]
|
||||
*/
|
||||
function replaceContents(input, contents, sourceMap) {
|
||||
const output = input.clone();
|
||||
output.contents = typeof contents === "string" ? Buffer.from(contents, "utf8") : contents;
|
||||
if (input.sourceMap) {
|
||||
output.sourceMap = typeof input.sourceMap === "string" ? /**@type {RawSourceMap}*/(JSON.parse(input.sourceMap)) : input.sourceMap;
|
||||
if (typeof sourceMap === "string") {
|
||||
sourceMap = /**@type {RawSourceMap}*/(JSON.parse(sourceMap));
|
||||
}
|
||||
else if (sourceMap === undefined) {
|
||||
const stringContents = typeof contents === "string" ? contents : contents.toString("utf8");
|
||||
const newSourceMapConverter = convertMap.fromSource(stringContents);
|
||||
if (newSourceMapConverter) {
|
||||
sourceMap = /**@type {RawSourceMap}*/(newSourceMapConverter.toObject());
|
||||
output.contents = new Buffer(convertMap.removeMapFileComments(stringContents), "utf8");
|
||||
}
|
||||
}
|
||||
if (sourceMap) {
|
||||
const cwd = input.cwd || process.cwd();
|
||||
const base = input.base || cwd;
|
||||
const sourceRoot = output.sourceMap.sourceRoot;
|
||||
makeAbsoluteSourceMap(cwd, base, output.sourceMap);
|
||||
makeAbsoluteSourceMap(cwd, base, sourceMap);
|
||||
applySourceMap(output, sourceMap);
|
||||
makeRelativeSourceMap(cwd, base, sourceRoot, output.sourceMap);
|
||||
}
|
||||
else {
|
||||
output.sourceMap = undefined;
|
||||
}
|
||||
}
|
||||
return output;
|
||||
}
|
||||
exports.replaceContents = replaceContents;
|
||||
|
||||
function removeSourceMaps() {
|
||||
return through2.obj((/**@type {Vinyl}*/file, _, cb) => {
|
||||
if (file.sourceMap && file.isBuffer()) {
|
||||
file.contents = Buffer.from(convertMap.removeMapFileComments(file.contents.toString("utf8")), "utf8");
|
||||
file.sourceMap = undefined;
|
||||
}
|
||||
cb(null, file);
|
||||
});
|
||||
}
|
||||
exports.removeSourceMaps = removeSourceMaps;
|
||||
|
||||
/**
|
||||
* @param {string | undefined} cwd
|
||||
* @param {string | undefined} base
|
||||
* @param {RawSourceMap} sourceMap
|
||||
*
|
||||
* @typedef RawSourceMap
|
||||
* @property {string} version
|
||||
* @property {string} file
|
||||
* @property {string} [sourceRoot]
|
||||
* @property {string[]} sources
|
||||
* @property {string[]} [sourcesContent]
|
||||
* @property {string} mappings
|
||||
* @property {string[]} [names]
|
||||
*/
|
||||
function makeAbsoluteSourceMap(cwd = process.cwd(), base = "", sourceMap) {
|
||||
const sourceRoot = sourceMap.sourceRoot || "";
|
||||
const resolvedBase = path.resolve(cwd, base);
|
||||
const resolvedSourceRoot = path.resolve(resolvedBase, sourceRoot);
|
||||
sourceMap.file = path.resolve(resolvedBase, sourceMap.file).replace(/\\/g, "/");
|
||||
sourceMap.sources = sourceMap.sources.map(source => path.resolve(resolvedSourceRoot, source).replace(/\\/g, "/"));
|
||||
sourceMap.sourceRoot = "";
|
||||
}
|
||||
exports.makeAbsoluteSourceMap = makeAbsoluteSourceMap;
|
||||
|
||||
/**
|
||||
* @param {string | undefined} cwd
|
||||
* @param {string | undefined} base
|
||||
* @param {string} sourceRoot
|
||||
* @param {RawSourceMap} sourceMap
|
||||
*/
|
||||
function makeRelativeSourceMap(cwd = process.cwd(), base = "", sourceRoot, sourceMap) {
|
||||
makeAbsoluteSourceMap(cwd, base, sourceMap);
|
||||
const resolvedBase = path.resolve(cwd, base);
|
||||
const resolvedSourceRoot = path.resolve(resolvedBase, sourceRoot);
|
||||
sourceMap.file = path.relative(resolvedBase, sourceMap.file).replace(/\\/g, "/");
|
||||
sourceMap.sources = sourceMap.sources.map(source => path.relative(resolvedSourceRoot, source).replace(/\\/g, "/"));
|
||||
sourceMap.sourceRoot = sourceRoot;
|
||||
}
|
||||
exports.makeRelativeSourceMap = makeRelativeSourceMap;
|
||||
|
||||
/**
|
||||
* @param {string} message
|
||||
* @returns {never}
|
||||
|
||||
@ -1,16 +1,16 @@
|
||||
// @ts-check
|
||||
const gulp = require("./gulp");
|
||||
const gulp = require("gulp");
|
||||
const del = require("del");
|
||||
const fs = require("fs");
|
||||
const os = require("os");
|
||||
const path = require("path");
|
||||
const mkdirP = require("./mkdirp");
|
||||
const mkdirP = require("mkdirp");
|
||||
const log = require("fancy-log");
|
||||
const cmdLineOptions = require("./options");
|
||||
const exec = require("./exec");
|
||||
const log = require("fancy-log"); // was `require("gulp-util").log (see https://github.com/gulpjs/gulp-util)
|
||||
const { CancellationToken } = require("prex");
|
||||
const mochaJs = require.resolve("mocha/bin/_mocha");
|
||||
const { exec } = require("./utils");
|
||||
|
||||
const mochaJs = require.resolve("mocha/bin/_mocha");
|
||||
exports.localBaseline = "tests/baselines/local/";
|
||||
exports.refBaseline = "tests/baselines/reference/";
|
||||
exports.localRwcBaseline = "internal/baselines/rwc/local";
|
||||
@ -27,7 +27,6 @@ exports.localTest262Baseline = "internal/baselines/test262/local";
|
||||
async function runConsoleTests(runJs, defaultReporter, runInParallel, watchMode, cancelToken = CancellationToken.none) {
|
||||
let testTimeout = cmdLineOptions.timeout;
|
||||
let tests = cmdLineOptions.tests;
|
||||
const lintFlag = cmdLineOptions.lint;
|
||||
const debug = cmdLineOptions.debug;
|
||||
const inspect = cmdLineOptions.inspect;
|
||||
const runners = cmdLineOptions.runners;
|
||||
@ -117,8 +116,16 @@ async function runConsoleTests(runJs, defaultReporter, runInParallel, watchMode,
|
||||
errorStatus = exitCode;
|
||||
error = new Error(`Process exited with status code ${errorStatus}.`);
|
||||
}
|
||||
else if (lintFlag) {
|
||||
await new Promise((resolve, reject) => gulp.start(["lint"], error => error ? reject(error) : resolve()));
|
||||
else if (process.env.CI === "true") {
|
||||
// finally, do a sanity check and build the compiler with the built version of itself
|
||||
log.info("Starting sanity check build...");
|
||||
// Cleanup everything except lint rules (we'll need those later and would rather not waste time rebuilding them)
|
||||
await exec("gulp", ["clean-tsc", "clean-services", "clean-tsserver", "clean-lssl", "clean-tests"], { cancelToken });
|
||||
const { exitCode } = await exec("gulp", ["local", "--lkg=false"], { cancelToken });
|
||||
if (exitCode !== 0) {
|
||||
errorStatus = exitCode;
|
||||
error = new Error(`Sanity check build process exited with status code ${errorStatus}.`);
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (e) {
|
||||
@ -133,26 +140,21 @@ async function runConsoleTests(runJs, defaultReporter, runInParallel, watchMode,
|
||||
await deleteTemporaryProjectOutput();
|
||||
|
||||
if (error !== undefined) {
|
||||
if (watchMode) {
|
||||
throw error;
|
||||
}
|
||||
else {
|
||||
log.error(error);
|
||||
process.exit(typeof errorStatus === "number" ? errorStatus : 2);
|
||||
}
|
||||
process.exitCode = typeof errorStatus === "number" ? errorStatus : 2;
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
exports.runConsoleTests = runConsoleTests;
|
||||
|
||||
function cleanTestDirs() {
|
||||
return del([exports.localBaseline, exports.localRwcBaseline])
|
||||
.then(() => mkdirP(exports.localRwcBaseline))
|
||||
.then(() => mkdirP(exports.localBaseline));
|
||||
async function cleanTestDirs() {
|
||||
await del([exports.localBaseline, exports.localRwcBaseline])
|
||||
mkdirP.sync(exports.localRwcBaseline);
|
||||
mkdirP.sync(exports.localBaseline);
|
||||
}
|
||||
exports.cleanTestDirs = cleanTestDirs;
|
||||
|
||||
/**
|
||||
* used to pass data from jake command line directly to run.js
|
||||
* used to pass data from gulp command line directly to run.js
|
||||
* @param {string} tests
|
||||
* @param {string} runners
|
||||
* @param {boolean} light
|
||||
@ -165,7 +167,7 @@ exports.cleanTestDirs = cleanTestDirs;
|
||||
function writeTestConfigFile(tests, runners, light, taskConfigsFolder, workerCount, stackTraceLimit, timeout, keepFailed) {
|
||||
const testConfigContents = JSON.stringify({
|
||||
test: tests ? [tests] : undefined,
|
||||
runner: runners ? runners.split(",") : undefined,
|
||||
runners: runners ? runners.split(",") : undefined,
|
||||
light,
|
||||
workerCount,
|
||||
stackTraceLimit,
|
||||
@ -192,4 +194,4 @@ function restoreSavedNodeEnv() {
|
||||
|
||||
function deleteTemporaryProjectOutput() {
|
||||
return del(path.join(exports.localBaseline, "projectOutput/"));
|
||||
}
|
||||
}
|
||||
|
||||
@ -1,435 +0,0 @@
|
||||
// @ts-check
|
||||
const path = require("path");
|
||||
const fs = require("fs");
|
||||
const log = require("fancy-log"); // was `require("gulp-util").log (see https://github.com/gulpjs/gulp-util)
|
||||
const ts = require("../../lib/typescript");
|
||||
const { Duplex } = require("stream");
|
||||
const chalk = /**@type {*} */(require("chalk"));
|
||||
const Vinyl = require("vinyl");
|
||||
|
||||
/**
|
||||
* Creates a stream that passes through its inputs only if the project outputs are not up to date
|
||||
* with respect to the inputs.
|
||||
* @param {ParsedCommandLine} parsedProject
|
||||
* @param {UpToDateOptions} [options]
|
||||
*
|
||||
* @typedef UpToDateOptions
|
||||
* @property {boolean | "minimal"} [verbose]
|
||||
* @property {(configFilePath: string) => ParsedCommandLine | undefined} [parseProject]
|
||||
*/
|
||||
function upToDate(parsedProject, options) {
|
||||
/** @type {File[]} */
|
||||
const inputs = [];
|
||||
/** @type {Map<string, File>} */
|
||||
const inputMap = new Map();
|
||||
/** @type {Map<string, fs.Stats>} */
|
||||
const statCache = new Map();
|
||||
/** @type {UpToDateHost} */
|
||||
const upToDateHost = {
|
||||
fileExists(fileName) {
|
||||
const stats = getStat(fileName);
|
||||
return stats ? stats.isFile() : false;
|
||||
},
|
||||
getModifiedTime(fileName) {
|
||||
return getStat(fileName).mtime;
|
||||
},
|
||||
parseConfigFile: options && options.parseProject
|
||||
};
|
||||
const duplex = new Duplex({
|
||||
objectMode: true,
|
||||
/**
|
||||
* @param {string|Buffer|File} file
|
||||
*/
|
||||
write(file, _, cb) {
|
||||
if (typeof file === "string" || Buffer.isBuffer(file)) return cb(new Error("Only Vinyl files are supported."));
|
||||
inputs.push(file);
|
||||
inputMap.set(path.resolve(file.path), file);
|
||||
cb();
|
||||
},
|
||||
final(cb) {
|
||||
const status = getUpToDateStatus(upToDateHost, parsedProject);
|
||||
reportStatus(parsedProject, status, options);
|
||||
if (status.type !== UpToDateStatusType.UpToDate) {
|
||||
for (const input of inputs) duplex.push(input);
|
||||
}
|
||||
duplex.push(null);
|
||||
inputMap.clear();
|
||||
statCache.clear();
|
||||
cb();
|
||||
},
|
||||
read() {
|
||||
}
|
||||
});
|
||||
return duplex;
|
||||
|
||||
function getStat(fileName) {
|
||||
fileName = path.resolve(fileName);
|
||||
const inputFile = inputMap.get(fileName);
|
||||
if (inputFile && inputFile.stat) return inputFile.stat;
|
||||
|
||||
let stats = statCache.get(fileName);
|
||||
if (!stats && fs.existsSync(fileName)) {
|
||||
stats = fs.statSync(fileName);
|
||||
statCache.set(fileName, stats);
|
||||
}
|
||||
return stats;
|
||||
}
|
||||
}
|
||||
module.exports = exports = upToDate;
|
||||
|
||||
/**
|
||||
* @param {DiagnosticMessage} message
|
||||
* @param {...string} args
|
||||
*/
|
||||
function formatMessage(message, ...args) {
|
||||
log.info(formatStringFromArgs(message.message, args));
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {ParsedCommandLine} project
|
||||
* @param {UpToDateStatus} status
|
||||
* @param {{verbose?: boolean | "minimal"}} options
|
||||
*/
|
||||
function reportStatus(project, status, options) {
|
||||
switch (options.verbose) {
|
||||
case "minimal":
|
||||
switch (status.type) {
|
||||
case UpToDateStatusType.UpToDate:
|
||||
log.info(`Project '${fileName(project.options.configFilePath)}' is up to date.`);
|
||||
break;
|
||||
default:
|
||||
log.info(`Project '${fileName(project.options.configFilePath)}' is out of date, rebuilding...`);
|
||||
break;
|
||||
}
|
||||
break;
|
||||
case true:
|
||||
/**@type {*}*/(ts).formatUpToDateStatus(project.options.configFilePath, status, fileName, formatMessage);
|
||||
break;
|
||||
}
|
||||
if (!options.verbose) return;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} file
|
||||
* @private
|
||||
*/
|
||||
function normalizeSlashes(file) {
|
||||
return file.replace(/\\/g, "/");
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} file
|
||||
* @private
|
||||
*/
|
||||
function fileName(file) {
|
||||
return chalk.cyan(normalizeSlashes(path.relative(process.cwd(), path.resolve(file))));
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} text
|
||||
* @param {string[]} args
|
||||
* @param {number} [baseIndex]
|
||||
*/
|
||||
function formatStringFromArgs(text, args, baseIndex = 0) {
|
||||
return text.replace(/{(\d+)}/g, (_match, index) => args[+index + baseIndex]);
|
||||
}
|
||||
|
||||
const minimumDate = new Date(-8640000000000000);
|
||||
const maximumDate = new Date(8640000000000000);
|
||||
const missingFileModifiedTime = new Date(0);
|
||||
|
||||
/**
|
||||
* @typedef {0} UpToDateStatusType.Unbuildable
|
||||
* @typedef {1} UpToDateStatusType.UpToDate
|
||||
* @typedef {2} UpToDateStatusType.UpToDateWithUpstreamTypes
|
||||
* @typedef {3} UpToDateStatusType.OutputMissing
|
||||
* @typedef {4} UpToDateStatusType.OutOfDateWithSelf
|
||||
* @typedef {5} UpToDateStatusType.OutOfDateWithUpstream
|
||||
* @typedef {6} UpToDateStatusType.UpstreamOutOfDate
|
||||
* @typedef {7} UpToDateStatusType.UpstreamBlocked
|
||||
* @typedef {8} UpToDateStatusType.ComputingUpstream
|
||||
* @typedef {9} UpToDateStatusType.ContainerOnly
|
||||
* @enum {UpToDateStatusType.Unbuildable | UpToDateStatusType.UpToDate | UpToDateStatusType.UpToDateWithUpstreamTypes | UpToDateStatusType.OutputMissing | UpToDateStatusType.OutOfDateWithSelf | UpToDateStatusType.OutOfDateWithUpstream | UpToDateStatusType.UpstreamOutOfDate | UpToDateStatusType.UpstreamBlocked | UpToDateStatusType.ComputingUpstream | UpToDateStatusType.ContainerOnly}
|
||||
*/
|
||||
const UpToDateStatusType = {
|
||||
Unbuildable: /** @type {0} */(0),
|
||||
UpToDate: /** @type {1} */(1),
|
||||
UpToDateWithUpstreamTypes: /** @type {2} */(2),
|
||||
OutputMissing: /** @type {3} */(3),
|
||||
OutOfDateWithSelf: /** @type {4} */(4),
|
||||
OutOfDateWithUpstream: /** @type {5} */(5),
|
||||
UpstreamOutOfDate: /** @type {6} */(6),
|
||||
UpstreamBlocked: /** @type {7} */(7),
|
||||
ComputingUpstream: /** @type {8} */(8),
|
||||
ContainerOnly: /** @type {9} */(9),
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {Date} date1
|
||||
* @param {Date} date2
|
||||
* @returns {Date}
|
||||
*/
|
||||
function newer(date1, date2) {
|
||||
return date2 > date1 ? date2 : date1;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {UpToDateHost} host
|
||||
* @param {ParsedCommandLine | undefined} project
|
||||
* @returns {UpToDateStatus}
|
||||
*/
|
||||
function getUpToDateStatus(host, project) {
|
||||
if (project === undefined) return { type: UpToDateStatusType.Unbuildable, reason: "File deleted mid-build" };
|
||||
const prior = host.getLastStatus ? host.getLastStatus(project.options.configFilePath) : undefined;
|
||||
if (prior !== undefined) {
|
||||
return prior;
|
||||
}
|
||||
const actual = getUpToDateStatusWorker(host, project);
|
||||
if (host.setLastStatus) {
|
||||
host.setLastStatus(project.options.configFilePath, actual);
|
||||
}
|
||||
return actual;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {UpToDateHost} host
|
||||
* @param {ParsedCommandLine | undefined} project
|
||||
* @returns {UpToDateStatus}
|
||||
*/
|
||||
function getUpToDateStatusWorker(host, project) {
|
||||
/** @type {string} */
|
||||
let newestInputFileName = undefined;
|
||||
let newestInputFileTime = minimumDate;
|
||||
// Get timestamps of input files
|
||||
for (const inputFile of project.fileNames) {
|
||||
if (!host.fileExists(inputFile)) {
|
||||
return {
|
||||
type: UpToDateStatusType.Unbuildable,
|
||||
reason: `${inputFile} does not exist`
|
||||
};
|
||||
}
|
||||
|
||||
const inputTime = host.getModifiedTime(inputFile) || missingFileModifiedTime;
|
||||
if (inputTime > newestInputFileTime) {
|
||||
newestInputFileName = inputFile;
|
||||
newestInputFileTime = inputTime;
|
||||
}
|
||||
}
|
||||
|
||||
// Collect the expected outputs of this project
|
||||
const outputs = /**@type {string[]}*/(/**@type {*}*/(ts).getAllProjectOutputs(project));
|
||||
|
||||
if (outputs.length === 0) {
|
||||
return {
|
||||
type: UpToDateStatusType.ContainerOnly
|
||||
};
|
||||
}
|
||||
|
||||
// Now see if all outputs are newer than the newest input
|
||||
let oldestOutputFileName = "(none)";
|
||||
let oldestOutputFileTime = maximumDate;
|
||||
let newestOutputFileName = "(none)";
|
||||
let newestOutputFileTime = minimumDate;
|
||||
/** @type {string | undefined} */
|
||||
let missingOutputFileName;
|
||||
let newestDeclarationFileContentChangedTime = minimumDate;
|
||||
let isOutOfDateWithInputs = false;
|
||||
for (const output of outputs) {
|
||||
// Output is missing; can stop checking
|
||||
// Don't immediately return because we can still be upstream-blocked, which is a higher-priority status
|
||||
if (!host.fileExists(output)) {
|
||||
missingOutputFileName = output;
|
||||
break;
|
||||
}
|
||||
|
||||
const outputTime = host.getModifiedTime(output) || missingFileModifiedTime;
|
||||
if (outputTime < oldestOutputFileTime) {
|
||||
oldestOutputFileTime = outputTime;
|
||||
oldestOutputFileName = output;
|
||||
}
|
||||
|
||||
// If an output is older than the newest input, we can stop checking
|
||||
// Don't immediately return because we can still be upstream-blocked, which is a higher-priority status
|
||||
if (outputTime < newestInputFileTime) {
|
||||
isOutOfDateWithInputs = true;
|
||||
break;
|
||||
}
|
||||
|
||||
if (outputTime > newestOutputFileTime) {
|
||||
newestOutputFileTime = outputTime;
|
||||
newestOutputFileName = output;
|
||||
}
|
||||
|
||||
// Keep track of when the most recent time a .d.ts file was changed.
|
||||
// In addition to file timestamps, we also keep track of when a .d.ts file
|
||||
// had its file touched but not had its contents changed - this allows us
|
||||
// to skip a downstream typecheck
|
||||
if (path.extname(output) === ".d.ts") {
|
||||
const unchangedTime = host.getUnchangedTime ? host.getUnchangedTime(output) : undefined;
|
||||
if (unchangedTime !== undefined) {
|
||||
newestDeclarationFileContentChangedTime = newer(unchangedTime, newestDeclarationFileContentChangedTime);
|
||||
}
|
||||
else {
|
||||
const outputModifiedTime = host.getModifiedTime(output) || missingFileModifiedTime;
|
||||
newestDeclarationFileContentChangedTime = newer(newestDeclarationFileContentChangedTime, outputModifiedTime);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let pseudoUpToDate = false;
|
||||
let usesPrepend = false;
|
||||
/** @type {string | undefined} */
|
||||
let upstreamChangedProject;
|
||||
if (project.projectReferences) {
|
||||
if (host.setLastStatus) host.setLastStatus(project.options.configFilePath, { type: UpToDateStatusType.ComputingUpstream });
|
||||
for (const ref of project.projectReferences) {
|
||||
usesPrepend = usesPrepend || !!(ref.prepend);
|
||||
const resolvedRef = ts.resolveProjectReferencePath(host, ref);
|
||||
const parsedRef = host.parseConfigFile ? host.parseConfigFile(resolvedRef) : ts.getParsedCommandLineOfConfigFile(resolvedRef, {}, parseConfigHost);
|
||||
const refStatus = getUpToDateStatus(host, parsedRef);
|
||||
|
||||
// Its a circular reference ignore the status of this project
|
||||
if (refStatus.type === UpToDateStatusType.ComputingUpstream) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// An upstream project is blocked
|
||||
if (refStatus.type === UpToDateStatusType.Unbuildable) {
|
||||
return {
|
||||
type: UpToDateStatusType.UpstreamBlocked,
|
||||
upstreamProjectName: ref.path
|
||||
};
|
||||
}
|
||||
|
||||
// If the upstream project is out of date, then so are we (someone shouldn't have asked, though?)
|
||||
if (refStatus.type !== UpToDateStatusType.UpToDate) {
|
||||
return {
|
||||
type: UpToDateStatusType.UpstreamOutOfDate,
|
||||
upstreamProjectName: ref.path
|
||||
};
|
||||
}
|
||||
|
||||
// If the upstream project's newest file is older than our oldest output, we
|
||||
// can't be out of date because of it
|
||||
if (refStatus.newestInputFileTime && refStatus.newestInputFileTime <= oldestOutputFileTime) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// If the upstream project has only change .d.ts files, and we've built
|
||||
// *after* those files, then we're "psuedo up to date" and eligible for a fast rebuild
|
||||
if (refStatus.newestDeclarationFileContentChangedTime && refStatus.newestDeclarationFileContentChangedTime <= oldestOutputFileTime) {
|
||||
pseudoUpToDate = true;
|
||||
upstreamChangedProject = ref.path;
|
||||
continue;
|
||||
}
|
||||
|
||||
// We have an output older than an upstream output - we are out of date
|
||||
return {
|
||||
type: UpToDateStatusType.OutOfDateWithUpstream,
|
||||
outOfDateOutputFileName: oldestOutputFileName,
|
||||
newerProjectName: ref.path
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
if (missingOutputFileName !== undefined) {
|
||||
return {
|
||||
type: UpToDateStatusType.OutputMissing,
|
||||
missingOutputFileName
|
||||
};
|
||||
}
|
||||
|
||||
if (isOutOfDateWithInputs) {
|
||||
return {
|
||||
type: UpToDateStatusType.OutOfDateWithSelf,
|
||||
outOfDateOutputFileName: oldestOutputFileName,
|
||||
newerInputFileName: newestInputFileName
|
||||
};
|
||||
}
|
||||
|
||||
if (usesPrepend && pseudoUpToDate) {
|
||||
return {
|
||||
type: UpToDateStatusType.OutOfDateWithUpstream,
|
||||
outOfDateOutputFileName: oldestOutputFileName,
|
||||
newerProjectName: upstreamChangedProject
|
||||
};
|
||||
}
|
||||
|
||||
// Up to date
|
||||
return {
|
||||
type: pseudoUpToDate ? UpToDateStatusType.UpToDateWithUpstreamTypes : UpToDateStatusType.UpToDate,
|
||||
newestDeclarationFileContentChangedTime,
|
||||
newestInputFileTime,
|
||||
newestOutputFileTime,
|
||||
newestInputFileName,
|
||||
newestOutputFileName,
|
||||
oldestOutputFileName
|
||||
};
|
||||
}
|
||||
|
||||
const parseConfigHost = {
|
||||
useCaseSensitiveFileNames: true,
|
||||
getCurrentDirectory: () => process.cwd(),
|
||||
readDirectory: (file) => fs.readdirSync(file),
|
||||
fileExists: file => fs.existsSync(file) && fs.statSync(file).isFile(),
|
||||
readFile: file => fs.readFileSync(file, "utf8"),
|
||||
onUnRecoverableConfigFileDiagnostic: () => undefined
|
||||
};
|
||||
|
||||
/**
|
||||
* @typedef {import("vinyl")} File
|
||||
* @typedef {import("../../lib/typescript").ParsedCommandLine & { options: CompilerOptions }} ParsedCommandLine
|
||||
* @typedef {import("../../lib/typescript").CompilerOptions & { configFilePath?: string }} CompilerOptions
|
||||
* @typedef {import("../../lib/typescript").DiagnosticMessage} DiagnosticMessage
|
||||
* @typedef UpToDateHost
|
||||
* @property {(fileName: string) => boolean} fileExists
|
||||
* @property {(fileName: string) => Date} getModifiedTime
|
||||
* @property {(fileName: string) => Date} [getUnchangedTime]
|
||||
* @property {(configFilePath: string) => ParsedCommandLine | undefined} parseConfigFile
|
||||
* @property {(configFilePath: string) => UpToDateStatus} [getLastStatus]
|
||||
* @property {(configFilePath: string, status: UpToDateStatus) => void} [setLastStatus]
|
||||
*
|
||||
* @typedef Status.Unbuildable
|
||||
* @property {UpToDateStatusType.Unbuildable} type
|
||||
* @property {string} reason
|
||||
*
|
||||
* @typedef Status.ContainerOnly
|
||||
* @property {UpToDateStatusType.ContainerOnly} type
|
||||
*
|
||||
* @typedef Status.UpToDate
|
||||
* @property {UpToDateStatusType.UpToDate | UpToDateStatusType.UpToDateWithUpstreamTypes} type
|
||||
* @property {Date} [newestInputFileTime]
|
||||
* @property {string} [newestInputFileName]
|
||||
* @property {Date} [newestDeclarationFileContentChangedTime]
|
||||
* @property {Date} [newestOutputFileTime]
|
||||
* @property {string} [newestOutputFileName]
|
||||
* @property {string} [oldestOutputFileName]
|
||||
*
|
||||
* @typedef Status.OutputMissing
|
||||
* @property {UpToDateStatusType.OutputMissing} type
|
||||
* @property {string} missingOutputFileName
|
||||
*
|
||||
* @typedef Status.OutOfDateWithSelf
|
||||
* @property {UpToDateStatusType.OutOfDateWithSelf} type
|
||||
* @property {string} outOfDateOutputFileName
|
||||
* @property {string} newerInputFileName
|
||||
*
|
||||
* @typedef Status.UpstreamOutOfDate
|
||||
* @property {UpToDateStatusType.UpstreamOutOfDate} type
|
||||
* @property {string} upstreamProjectName
|
||||
*
|
||||
* @typedef Status.UpstreamBlocked
|
||||
* @property {UpToDateStatusType.UpstreamBlocked} type
|
||||
* @property {string} upstreamProjectName
|
||||
*
|
||||
* @typedef Status.ComputingUpstream
|
||||
* @property {UpToDateStatusType.ComputingUpstream} type
|
||||
*
|
||||
* @typedef Status.OutOfDateWithUpstream
|
||||
* @property {UpToDateStatusType.OutOfDateWithUpstream} type
|
||||
* @property {string} outOfDateOutputFileName
|
||||
* @property {string} newerProjectName
|
||||
|
||||
* @typedef {Status.Unbuildable | Status.ContainerOnly | Status.UpToDate | Status.OutputMissing | Status.OutOfDateWithSelf | Status.UpstreamOutOfDate | Status.UpstreamBlocked | Status.ComputingUpstream | Status.OutOfDateWithUpstream} UpToDateStatus
|
||||
*/
|
||||
void 0;
|
||||
@ -1,7 +1,119 @@
|
||||
// @ts-check
|
||||
/// <reference path="../types/ambient.d.ts" />
|
||||
|
||||
const fs = require("fs");
|
||||
const File = require("./vinyl");
|
||||
const { Readable } = require("stream");
|
||||
const path = require("path");
|
||||
const log = require("fancy-log");
|
||||
const mkdirp = require("mkdirp");
|
||||
const del = require("del");
|
||||
const File = require("vinyl");
|
||||
const ts = require("../../lib/typescript");
|
||||
const { default: chalk } = require("chalk");
|
||||
const { spawn } = require("child_process");
|
||||
const { CancellationToken, CancelError, Deferred } = require("prex");
|
||||
const { Readable, Duplex } = require("stream");
|
||||
|
||||
const isWindows = /^win/.test(process.platform);
|
||||
|
||||
/**
|
||||
* Executes the provided command once with the supplied arguments.
|
||||
* @param {string} cmd
|
||||
* @param {string[]} args
|
||||
* @param {ExecOptions} [options]
|
||||
*
|
||||
* @typedef ExecOptions
|
||||
* @property {boolean} [ignoreExitCode]
|
||||
* @property {import("prex").CancellationToken} [cancelToken]
|
||||
* @property {boolean} [hidePrompt]
|
||||
*/
|
||||
function exec(cmd, args, options = {}) {
|
||||
return /**@type {Promise<{exitCode: number}>}*/(new Promise((resolve, reject) => {
|
||||
const { ignoreExitCode, cancelToken = CancellationToken.none } = options;
|
||||
cancelToken.throwIfCancellationRequested();
|
||||
|
||||
// TODO (weswig): Update child_process types to add windowsVerbatimArguments to the type definition
|
||||
const subshellFlag = isWindows ? "/c" : "-c";
|
||||
const command = isWindows ? [possiblyQuote(cmd), ...args] : [`${cmd} ${args.join(" ")}`];
|
||||
|
||||
if (!options.hidePrompt) log(`> ${chalk.green(cmd)} ${args.join(" ")}`);
|
||||
const proc = spawn(isWindows ? "cmd" : "/bin/sh", [subshellFlag, ...command], { stdio: "inherit", windowsVerbatimArguments: true });
|
||||
const registration = cancelToken.register(() => {
|
||||
log(`${chalk.red("killing")} '${chalk.green(cmd)} ${args.join(" ")}'...`);
|
||||
proc.kill("SIGINT");
|
||||
proc.kill("SIGTERM");
|
||||
reject(new CancelError());
|
||||
});
|
||||
proc.on("exit", exitCode => {
|
||||
registration.unregister();
|
||||
if (exitCode === 0 || ignoreExitCode) {
|
||||
resolve({ exitCode });
|
||||
}
|
||||
else {
|
||||
reject(new Error(`Process exited with code: ${exitCode}`));
|
||||
}
|
||||
});
|
||||
proc.on("error", error => {
|
||||
registration.unregister();
|
||||
reject(error);
|
||||
});
|
||||
}));
|
||||
}
|
||||
exports.exec = exec;
|
||||
|
||||
/**
|
||||
* @param {string} cmd
|
||||
*/
|
||||
function possiblyQuote(cmd) {
|
||||
return cmd.indexOf(" ") >= 0 ? `"${cmd}"` : cmd;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {ts.Diagnostic[]} diagnostics
|
||||
* @param {{ cwd?: string, pretty?: boolean }} [options]
|
||||
*/
|
||||
function formatDiagnostics(diagnostics, options) {
|
||||
return options && options.pretty
|
||||
? ts.formatDiagnosticsWithColorAndContext(diagnostics, getFormatDiagnosticsHost(options && options.cwd))
|
||||
: ts.formatDiagnostics(diagnostics, getFormatDiagnosticsHost(options && options.cwd));
|
||||
}
|
||||
exports.formatDiagnostics = formatDiagnostics;
|
||||
|
||||
/**
|
||||
* @param {ts.Diagnostic[]} diagnostics
|
||||
* @param {{ cwd?: string }} [options]
|
||||
*/
|
||||
function reportDiagnostics(diagnostics, options) {
|
||||
log(formatDiagnostics(diagnostics, { cwd: options && options.cwd, pretty: process.stdout.isTTY }));
|
||||
}
|
||||
exports.reportDiagnostics = reportDiagnostics;
|
||||
|
||||
/**
|
||||
* @param {string | undefined} cwd
|
||||
* @returns {ts.FormatDiagnosticsHost}
|
||||
*/
|
||||
function getFormatDiagnosticsHost(cwd) {
|
||||
return {
|
||||
getCanonicalFileName: fileName => fileName,
|
||||
getCurrentDirectory: () => cwd,
|
||||
getNewLine: () => ts.sys.newLine,
|
||||
};
|
||||
}
|
||||
exports.getFormatDiagnosticsHost = getFormatDiagnosticsHost;
|
||||
|
||||
/**
|
||||
* Reads JSON data with optional comments using the LKG TypeScript compiler
|
||||
* @param {string} jsonPath
|
||||
*/
|
||||
function readJson(jsonPath) {
|
||||
const jsonText = fs.readFileSync(jsonPath, "utf8");
|
||||
const result = ts.parseConfigFileTextToJson(jsonPath, jsonText);
|
||||
if (result.error) {
|
||||
reportDiagnostics([result.error]);
|
||||
throw new Error("An error occurred during parse.");
|
||||
}
|
||||
return result.config;
|
||||
}
|
||||
exports.readJson = readJson;
|
||||
|
||||
/**
|
||||
* @param {File} file
|
||||
@ -24,4 +136,300 @@ function streamFromBuffer(buffer) {
|
||||
}
|
||||
});
|
||||
}
|
||||
exports.streamFromBuffer = streamFromBuffer;
|
||||
exports.streamFromBuffer = streamFromBuffer;
|
||||
|
||||
/**
|
||||
* @param {string | string[]} source
|
||||
* @param {string | string[]} dest
|
||||
* @returns {boolean}
|
||||
*/
|
||||
function needsUpdate(source, dest) {
|
||||
if (typeof source === "string" && typeof dest === "string") {
|
||||
if (fs.existsSync(dest)) {
|
||||
const {mtime: outTime} = fs.statSync(dest);
|
||||
const {mtime: inTime} = fs.statSync(source);
|
||||
if (+inTime <= +outTime) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
else if (typeof source === "string" && typeof dest !== "string") {
|
||||
const {mtime: inTime} = fs.statSync(source);
|
||||
for (const filepath of dest) {
|
||||
if (fs.existsSync(filepath)) {
|
||||
const {mtime: outTime} = fs.statSync(filepath);
|
||||
if (+inTime > +outTime) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
else {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
else if (typeof source !== "string" && typeof dest === "string") {
|
||||
if (fs.existsSync(dest)) {
|
||||
const {mtime: outTime} = fs.statSync(dest);
|
||||
for (const filepath of source) {
|
||||
if (fs.existsSync(filepath)) {
|
||||
const {mtime: inTime} = fs.statSync(filepath);
|
||||
if (+inTime > +outTime) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
else {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
else if (typeof source !== "string" && typeof dest !== "string") {
|
||||
for (let i = 0; i < source.length; i++) {
|
||||
if (!dest[i]) {
|
||||
continue;
|
||||
}
|
||||
if (fs.existsSync(dest[i])) {
|
||||
const {mtime: outTime} = fs.statSync(dest[i]);
|
||||
const {mtime: inTime} = fs.statSync(source[i]);
|
||||
if (+inTime > +outTime) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
else {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
exports.needsUpdate = needsUpdate;
|
||||
|
||||
function getDiffTool() {
|
||||
const program = process.env.DIFF;
|
||||
if (!program) {
|
||||
log.warn("Add the 'DIFF' environment variable to the path of the program you want to use.");
|
||||
process.exit(1);
|
||||
}
|
||||
return program;
|
||||
}
|
||||
exports.getDiffTool = getDiffTool;
|
||||
|
||||
/**
|
||||
* Find the size of a directory recursively.
|
||||
* Symbolic links can cause a loop.
|
||||
* @param {string} root
|
||||
* @returns {number} bytes
|
||||
*/
|
||||
function getDirSize(root) {
|
||||
const stats = fs.lstatSync(root);
|
||||
|
||||
if (!stats.isDirectory()) {
|
||||
return stats.size;
|
||||
}
|
||||
|
||||
return fs.readdirSync(root)
|
||||
.map(file => getDirSize(path.join(root, file)))
|
||||
.reduce((acc, num) => acc + num, 0);
|
||||
}
|
||||
exports.getDirSize = getDirSize;
|
||||
|
||||
/**
|
||||
* Flattens a project with project references into a single project.
|
||||
* @param {string} projectSpec The path to a tsconfig.json file or its containing directory.
|
||||
* @param {string} flattenedProjectSpec The output path for the flattened tsconfig.json file.
|
||||
* @param {FlattenOptions} [options] Options used to flatten a project hierarchy.
|
||||
*
|
||||
* @typedef FlattenOptions
|
||||
* @property {string} [cwd] The path to use for the current working directory. Defaults to `process.cwd()`.
|
||||
* @property {import("../../lib/typescript").CompilerOptions} [compilerOptions] Compiler option overrides.
|
||||
* @property {boolean} [force] Forces creation of the output project.
|
||||
* @property {string[]} [exclude] Files to exclude (relative to `cwd`)
|
||||
*/
|
||||
function flatten(projectSpec, flattenedProjectSpec, options = {}) {
|
||||
const cwd = normalizeSlashes(options.cwd ? path.resolve(options.cwd) : process.cwd());
|
||||
const files = [];
|
||||
const resolvedOutputSpec = path.resolve(cwd, flattenedProjectSpec);
|
||||
const resolvedOutputDirectory = path.dirname(resolvedOutputSpec);
|
||||
const resolvedProjectSpec = resolveProjectSpec(projectSpec, cwd, undefined);
|
||||
const project = readJson(resolvedProjectSpec);
|
||||
const skipProjects = /**@type {Set<string>}*/(new Set());
|
||||
const skipFiles = new Set(options && options.exclude && options.exclude.map(file => normalizeSlashes(path.resolve(cwd, file))));
|
||||
recur(resolvedProjectSpec, project);
|
||||
|
||||
if (options.force || needsUpdate(files, resolvedOutputSpec)) {
|
||||
const config = {
|
||||
extends: normalizeSlashes(path.relative(resolvedOutputDirectory, resolvedProjectSpec)),
|
||||
compilerOptions: options.compilerOptions || {},
|
||||
files: files.map(file => normalizeSlashes(path.relative(resolvedOutputDirectory, file)))
|
||||
};
|
||||
mkdirp.sync(resolvedOutputDirectory);
|
||||
fs.writeFileSync(resolvedOutputSpec, JSON.stringify(config, undefined, 2), "utf8");
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} projectSpec
|
||||
* @param {object} project
|
||||
*/
|
||||
function recur(projectSpec, project) {
|
||||
if (skipProjects.has(projectSpec)) return;
|
||||
skipProjects.add(project);
|
||||
if (project.references) {
|
||||
for (const ref of project.references) {
|
||||
const referencedSpec = resolveProjectSpec(ref.path, cwd, projectSpec);
|
||||
const referencedProject = readJson(referencedSpec);
|
||||
recur(referencedSpec, referencedProject);
|
||||
}
|
||||
}
|
||||
if (project.include) {
|
||||
throw new Error("Flattened project may not have an 'include' list.");
|
||||
}
|
||||
if (!project.files) {
|
||||
throw new Error("Flattened project must have an explicit 'files' list.");
|
||||
}
|
||||
const projectDirectory = path.dirname(projectSpec);
|
||||
for (let file of project.files) {
|
||||
file = normalizeSlashes(path.resolve(projectDirectory, file));
|
||||
if (skipFiles.has(file)) continue;
|
||||
skipFiles.add(file);
|
||||
files.push(file);
|
||||
}
|
||||
}
|
||||
}
|
||||
exports.flatten = flatten;
|
||||
|
||||
/**
|
||||
* @param {string} file
|
||||
*/
|
||||
function normalizeSlashes(file) {
|
||||
return file.replace(/\\/g, "/");
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} projectSpec
|
||||
* @param {string} cwd
|
||||
* @param {string | undefined} referrer
|
||||
* @returns {string}
|
||||
*/
|
||||
function resolveProjectSpec(projectSpec, cwd, referrer) {
|
||||
let projectPath = normalizeSlashes(path.resolve(cwd, referrer ? path.dirname(referrer) : "", projectSpec));
|
||||
const stats = fs.statSync(projectPath);
|
||||
if (stats.isFile()) return normalizeSlashes(projectPath);
|
||||
return normalizeSlashes(path.resolve(cwd, projectPath, "tsconfig.json"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string | ((file: File) => string) | { cwd?: string }} [dest]
|
||||
* @param {{ cwd?: string }} [opts]
|
||||
*/
|
||||
function rm(dest, opts) {
|
||||
if (dest && typeof dest === "object") opts = dest, dest = undefined;
|
||||
let failed = false;
|
||||
|
||||
const cwd = path.resolve(opts && opts.cwd || process.cwd());
|
||||
|
||||
/** @type {{ file: File, deleted: boolean, promise: Promise<any>, cb: Function }[]} */
|
||||
const pending = [];
|
||||
|
||||
const processDeleted = () => {
|
||||
if (failed) return;
|
||||
while (pending.length && pending[0].deleted) {
|
||||
const { file, cb } = pending.shift();
|
||||
duplex.push(file);
|
||||
cb();
|
||||
}
|
||||
};
|
||||
|
||||
const duplex = new Duplex({
|
||||
objectMode: true,
|
||||
/**
|
||||
* @param {string|Buffer|File} file
|
||||
*/
|
||||
write(file, _, cb) {
|
||||
if (failed) return;
|
||||
if (typeof file === "string" || Buffer.isBuffer(file)) return cb(new Error("Only Vinyl files are supported."));
|
||||
const basePath = typeof dest === "string" ? path.resolve(cwd, dest) :
|
||||
typeof dest === "function" ? path.resolve(cwd, dest(file)) :
|
||||
file.base;
|
||||
const filePath = path.resolve(basePath, file.relative);
|
||||
file.cwd = cwd;
|
||||
file.base = basePath;
|
||||
file.path = filePath;
|
||||
const entry = {
|
||||
file,
|
||||
deleted: false,
|
||||
cb,
|
||||
promise: del(file.path).then(() => {
|
||||
entry.deleted = true;
|
||||
processDeleted();
|
||||
}, err => {
|
||||
failed = true;
|
||||
pending.length = 0;
|
||||
cb(err);
|
||||
})
|
||||
};
|
||||
pending.push(entry);
|
||||
},
|
||||
final(cb) {
|
||||
const endThenCb = () => (duplex.push(null), cb()); // signal end of read queue
|
||||
processDeleted();
|
||||
if (pending.length) {
|
||||
Promise
|
||||
.all(pending.map(entry => entry.promise))
|
||||
.then(() => processDeleted())
|
||||
.then(() => endThenCb(), endThenCb);
|
||||
return;
|
||||
}
|
||||
endThenCb();
|
||||
},
|
||||
read() {
|
||||
}
|
||||
});
|
||||
return duplex;
|
||||
}
|
||||
exports.rm = rm;
|
||||
|
||||
class Debouncer {
|
||||
/**
|
||||
* @param {number} timeout
|
||||
* @param {() => Promise<any>} action
|
||||
*/
|
||||
constructor(timeout, action) {
|
||||
this._timeout = timeout;
|
||||
this._action = action;
|
||||
}
|
||||
|
||||
enqueue() {
|
||||
if (this._timer) {
|
||||
clearTimeout(this._timer);
|
||||
this._timer = undefined;
|
||||
}
|
||||
|
||||
if (!this._deferred) {
|
||||
this._deferred = new Deferred();
|
||||
}
|
||||
|
||||
this._timer = setTimeout(() => this.run(), 100);
|
||||
return this._deferred.promise;
|
||||
}
|
||||
|
||||
run() {
|
||||
if (this._timer) {
|
||||
clearTimeout(this._timer);
|
||||
this._timer = undefined;
|
||||
}
|
||||
|
||||
const deferred = this._deferred;
|
||||
this._deferred = undefined;
|
||||
this._projects = undefined;
|
||||
try {
|
||||
deferred.resolve(this._action());
|
||||
}
|
||||
catch (e) {
|
||||
deferred.reject(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
exports.Debouncer = Debouncer;
|
||||
60
scripts/build/vinyl.d.ts
vendored
60
scripts/build/vinyl.d.ts
vendored
@ -1,60 +0,0 @@
|
||||
// NOTE: This makes it possible to correctly type vinyl Files under @ts-check.
|
||||
export = File;
|
||||
|
||||
declare class File<T extends File.Contents = File.Contents> {
|
||||
constructor(options?: File.VinylOptions<T>);
|
||||
|
||||
cwd: string;
|
||||
base: string;
|
||||
path: string;
|
||||
readonly history: ReadonlyArray<string>;
|
||||
contents: T;
|
||||
relative: string;
|
||||
dirname: string;
|
||||
basename: string;
|
||||
stem: string;
|
||||
extname: string;
|
||||
symlink: string | null;
|
||||
stat: import("fs").Stats | null;
|
||||
sourceMap?: import("./sourcemaps").RawSourceMap | string;
|
||||
|
||||
[custom: string]: any;
|
||||
|
||||
isBuffer(): this is T extends Buffer ? File<Buffer> : never;
|
||||
isStream(): this is T extends NodeJS.ReadableStream ? File<NodeJS.ReadableStream> : never;
|
||||
isNull(): this is T extends null ? File<null> : never;
|
||||
isDirectory(): this is T extends null ? File.Directory : never;
|
||||
isSymbolic(): this is T extends null ? File.Symbolic : never;
|
||||
clone(opts?: { contents?: boolean, deep?: boolean }): this;
|
||||
}
|
||||
|
||||
namespace File {
|
||||
export interface VinylOptions<T extends Contents = Contents> {
|
||||
cwd?: string;
|
||||
base?: string;
|
||||
path?: string;
|
||||
history?: ReadonlyArray<string>;
|
||||
stat?: import("fs").Stats;
|
||||
contents?: T;
|
||||
sourceMap?: import("./sourcemaps").RawSourceMap | string;
|
||||
[custom: string]: any;
|
||||
}
|
||||
|
||||
export type Contents = Buffer | NodeJS.ReadableStream | null;
|
||||
export type File = import("./vinyl");
|
||||
export type NullFile = File<null>;
|
||||
export type BufferFile = File<Buffer>;
|
||||
export type StreamFile = File<NodeJS.ReadableStream>;
|
||||
|
||||
export interface Directory extends NullFile {
|
||||
isNull(): true;
|
||||
isDirectory(): true;
|
||||
isSymbolic(): this is never;
|
||||
}
|
||||
|
||||
export interface Symbolic extends NullFile {
|
||||
isNull(): true;
|
||||
isDirectory(): this is never;
|
||||
isSymbolic(): true;
|
||||
}
|
||||
}
|
||||
@ -1 +0,0 @@
|
||||
module.exports = require("vinyl");
|
||||
@ -1,124 +0,0 @@
|
||||
/// <reference path="../src/harness/external/node.d.ts" />
|
||||
/// <reference path="../built/local/typescript.d.ts" />
|
||||
|
||||
import * as fs from "fs";
|
||||
import * as path from "path";
|
||||
import * as typescript from "typescript";
|
||||
declare var ts: typeof typescript;
|
||||
|
||||
var tsSourceDir = "../src";
|
||||
var tsBuildDir = "../built/local";
|
||||
var testOutputDir = "../built/benchmark";
|
||||
var sourceFiles = [
|
||||
"compiler/types.ts",
|
||||
"compiler/core.ts",
|
||||
"compiler/sys.ts",
|
||||
"compiler/diagnosticInformationMap.generated.ts",
|
||||
"compiler/scanner.ts",
|
||||
"compiler/binder.ts",
|
||||
"compiler/utilities.ts",
|
||||
"compiler/parser.ts",
|
||||
"compiler/checker.ts",
|
||||
"compiler/declarationEmitter.ts",
|
||||
"compiler/emitter.ts",
|
||||
"compiler/program.ts",
|
||||
"compiler/commandLineParser.ts",
|
||||
"compiler/tsc.ts"];
|
||||
|
||||
// .ts sources for the compiler, used as a test input
|
||||
var rawCompilerSources = "";
|
||||
sourceFiles.forEach(f=> {
|
||||
rawCompilerSources += "\r\n" + fs.readFileSync(path.join(tsSourceDir, f)).toString();
|
||||
});
|
||||
var compilerSoruces = `var compilerSources = ${JSON.stringify(rawCompilerSources) };`;
|
||||
|
||||
// .js code for the compiler, what we are actually testing
|
||||
var rawCompilerJavaScript = fs.readFileSync(path.join(tsBuildDir, "tsc.js")).toString();
|
||||
rawCompilerJavaScript = rawCompilerJavaScript.replace("ts.executeCommandLine(ts.sys.args);", "");
|
||||
|
||||
// lib.d.ts sources
|
||||
var rawLibSources = fs.readFileSync(path.join(tsBuildDir, "lib.d.ts")).toString();
|
||||
var libSoruces = `var libSources = ${JSON.stringify(rawLibSources) };`;
|
||||
|
||||
// write test output
|
||||
if (!fs.existsSync(testOutputDir)) {
|
||||
fs.mkdirSync(testOutputDir);
|
||||
}
|
||||
|
||||
// 1. compiler ts sources, used to test
|
||||
fs.writeFileSync(
|
||||
path.join(testOutputDir, "compilerSources.js"),
|
||||
`${ compilerSoruces } \r\n ${ libSoruces }`);
|
||||
|
||||
// 2. the compiler js sources + a call the compiler
|
||||
fs.writeFileSync(
|
||||
path.join(testOutputDir, "benchmarktsc.js"),
|
||||
`${ rawCompilerJavaScript }\r\n${ compile.toString() }\r\ncompile(compilerSources, libSources);`);
|
||||
|
||||
// 3. test html file to drive the test
|
||||
fs.writeFileSync(
|
||||
path.join(testOutputDir, "benchmarktsc.html"),
|
||||
`<!DOCTYPE HTML>
|
||||
<html>
|
||||
<head>
|
||||
<meta content="IE=Edge" http-equiv="X-UA-Compatible">
|
||||
<title>Typescript 1.1 Compiler</title>
|
||||
<meta content="text/html; charset=ISO-8859-1" http-equiv="content-type">
|
||||
</head>
|
||||
<body>
|
||||
<div><span>Status: </span><span id="status">Running</span></div>
|
||||
<div id="main"><span>End-to-End Time: </span><span id="totalTime">N/A</span></div>
|
||||
<script>
|
||||
var startTime = performance.now();
|
||||
</script>
|
||||
<script src="compilerSources.js"></script>
|
||||
<script src="benchmarktsc.js"></script>
|
||||
<script>
|
||||
var endTime = performance.now();
|
||||
document.getElementById("totalTime").innerHTML = parseInt(endTime - startTime, 10);
|
||||
document.getElementById("status").innerHTML = "DONE";
|
||||
</script>
|
||||
</body>
|
||||
</html>`);
|
||||
|
||||
function compile(compilerSources, librarySources) {
|
||||
var program = ts.createProgram(
|
||||
["lib.d.ts", "compiler.ts"],
|
||||
{
|
||||
noResolve: true,
|
||||
out: "compiler.js",
|
||||
removeComments: true,
|
||||
target: ts.ScriptTarget.ES3
|
||||
}, {
|
||||
getDefaultLibFileName: () => "lib.d.ts",
|
||||
getSourceFile: (filename, languageVersion) => {
|
||||
var source: string;
|
||||
if (filename === "lib.d.ts") source = librarySources;
|
||||
else if (filename === "compiler.ts") source = compilerSources;
|
||||
else console.error("Unexpected read file request: " + filename);
|
||||
|
||||
return ts.createSourceFile(filename, source, languageVersion);
|
||||
},
|
||||
writeFile: (filename, data, writeByteOrderMark) => {
|
||||
if (filename !== "compiler.js")
|
||||
console.error("Unexpected write file request: " + filename);
|
||||
// console.log(data);
|
||||
},
|
||||
getCurrentDirectory: () => "",
|
||||
getCanonicalFileName: (filename) => filename,
|
||||
useCaseSensitiveFileNames: () => false,
|
||||
getNewLine: () => "\r\n"
|
||||
});
|
||||
|
||||
var emitOutput = program.emit();
|
||||
|
||||
var errors = program.getSyntacticDiagnostics()
|
||||
.concat(program.getSemanticDiagnostics())
|
||||
.concat(program.getGlobalDiagnostics())
|
||||
.concat(emitOutput.diagnostics);
|
||||
|
||||
if (errors.length) {
|
||||
console.error("Unexpected errors.");
|
||||
errors.forEach(e=> console.log(`${e.code}: ${e.messageText}`))
|
||||
}
|
||||
}
|
||||
@ -1,177 +1,177 @@
|
||||
import * as fs from "fs";
|
||||
import * as path from "path";
|
||||
import * as xml2js from "xml2js";
|
||||
|
||||
function main(): void {
|
||||
const args = process.argv.slice(2);
|
||||
if (args.length !== 3) {
|
||||
console.log("Usage:");
|
||||
console.log("\tnode generateLocalizedDiagnosticMessages.js <lcl source directory> <output directory> <generated diagnostics map file>");
|
||||
return;
|
||||
}
|
||||
|
||||
const inputPath = args[0];
|
||||
const outputPath = args[1];
|
||||
const diagnosticsMapFilePath = args[2];
|
||||
|
||||
// generate the lcg file for enu
|
||||
generateLCGFile();
|
||||
|
||||
// generate other langs
|
||||
fs.readdir(inputPath, (err, files) => {
|
||||
handleError(err);
|
||||
files.forEach(visitDirectory);
|
||||
});
|
||||
|
||||
return;
|
||||
|
||||
function visitDirectory(name: string) {
|
||||
const inputFilePath = path.join(inputPath, name, "diagnosticMessages", "diagnosticMessages.generated.json.lcl");
|
||||
|
||||
fs.readFile(inputFilePath, (err, data) => {
|
||||
handleError(err);
|
||||
xml2js.parseString(data.toString(), (err, result) => {
|
||||
handleError(err);
|
||||
if (!result || !result.LCX || !result.LCX.$ || !result.LCX.$.TgtCul) {
|
||||
console.error("Unexpected XML file structure. Expected to find result.LCX.$.TgtCul.");
|
||||
process.exit(1);
|
||||
}
|
||||
const outputDirectoryName = getPreferedLocaleName(result.LCX.$.TgtCul).toLowerCase();
|
||||
if (!outputDirectoryName) {
|
||||
console.error(`Invalid output locale name for '${result.LCX.$.TgtCul}'.`);
|
||||
process.exit(1);
|
||||
}
|
||||
writeFile(path.join(outputPath, outputDirectoryName, "diagnosticMessages.generated.json"), xmlObjectToString(result));
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* A locale name is based on the language tagging conventions of RFC 4646 (Windows Vista
|
||||
* and later), and is represented by LOCALE_SNAME.
|
||||
* Generally, the pattern <language>-<REGION> is used. Here, language is a lowercase ISO 639
|
||||
* language code. The codes from ISO 639-1 are used when available. Otherwise, codes from
|
||||
* ISO 639-2/T are used. REGION specifies an uppercase ISO 3166-1 country/region identifier.
|
||||
* For example, the locale name for English (United States) is "en-US" and the locale name
|
||||
* for Divehi (Maldives) is "dv-MV".
|
||||
*
|
||||
* If the locale is a neutral locale (no region), the LOCALE_SNAME value follows the
|
||||
* pattern <language>. If it is a neutral locale for which the script is significant, the
|
||||
* pattern is <language>-<Script>.
|
||||
*
|
||||
* More at https://msdn.microsoft.com/en-us/library/windows/desktop/dd373814(v=vs.85).aspx
|
||||
*
|
||||
* Most of the languages we support are neutral locales, so we want to use the language name.
|
||||
* There are three exceptions, zh-CN, zh-TW and pt-BR.
|
||||
*/
|
||||
function getPreferedLocaleName(localeName: string) {
|
||||
switch (localeName) {
|
||||
case "zh-CN":
|
||||
case "zh-TW":
|
||||
case "pt-BR":
|
||||
return localeName;
|
||||
default:
|
||||
return localeName.split("-")[0];
|
||||
}
|
||||
}
|
||||
|
||||
function handleError(err: null | object) {
|
||||
if (err) {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
function xmlObjectToString(o: any) {
|
||||
const out: any = {};
|
||||
for (const item of o.LCX.Item[0].Item[0].Item) {
|
||||
let ItemId = item.$.ItemId;
|
||||
let val = item.Str[0].Tgt ? item.Str[0].Tgt[0].Val[0] : item.Str[0].Val[0];
|
||||
|
||||
if (typeof ItemId !== "string" || typeof val !== "string") {
|
||||
console.error("Unexpected XML file structure");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
if (ItemId.charAt(0) === ";") {
|
||||
ItemId = ItemId.slice(1); // remove leading semicolon
|
||||
}
|
||||
|
||||
val = val.replace(/]5D;/, "]"); // unescape `]`
|
||||
out[ItemId] = val;
|
||||
}
|
||||
return JSON.stringify(out, undefined, 2);
|
||||
}
|
||||
|
||||
|
||||
function ensureDirectoryExists(directoryPath: string, action: () => void) {
|
||||
fs.exists(directoryPath, exists => {
|
||||
if (!exists) {
|
||||
const basePath = path.dirname(directoryPath);
|
||||
if (basePath !== directoryPath) {
|
||||
return ensureDirectoryExists(basePath, () => fs.mkdir(directoryPath, action));
|
||||
}
|
||||
}
|
||||
action();
|
||||
});
|
||||
}
|
||||
|
||||
function writeFile(fileName: string, contents: string) {
|
||||
ensureDirectoryExists(path.dirname(fileName), () => {
|
||||
fs.writeFile(fileName, contents, handleError);
|
||||
});
|
||||
}
|
||||
|
||||
function objectToList(o: Record<string, string>) {
|
||||
const list: { key: string, value: string }[] = [];
|
||||
for (const key in o) {
|
||||
list.push({ key, value: o[key] });
|
||||
}
|
||||
return list;
|
||||
}
|
||||
|
||||
function generateLCGFile() {
|
||||
return fs.readFile(diagnosticsMapFilePath, (err, data) => {
|
||||
handleError(err);
|
||||
writeFile(
|
||||
path.join(outputPath, "enu", "diagnosticMessages.generated.json.lcg"),
|
||||
getLCGFileXML(
|
||||
objectToList(JSON.parse(data.toString()))
|
||||
.sort((a, b) => a.key > b.key ? 1 : -1) // lcg sorted by property keys
|
||||
.reduce((s, { key, value }) => s + getItemXML(key, value), "")
|
||||
));
|
||||
});
|
||||
|
||||
function getItemXML(key: string, value: string) {
|
||||
// escape entrt value
|
||||
value = value.replace(/]/, "]5D;");
|
||||
|
||||
return `
|
||||
<Item ItemId=";${key}" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[${value}]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>`;
|
||||
}
|
||||
|
||||
function getLCGFileXML(items: string) {
|
||||
return `<?xml version="1.0" encoding="utf-8"?>
|
||||
<LCX SchemaVersion="6.0" Name="diagnosticMessages.generated.json" PsrId="306" FileType="1" SrcCul="en-US" xmlns="http://schemas.microsoft.com/locstudio/2006/6/lcx">
|
||||
<OwnedComments>
|
||||
<Cmt Name="Dev" />
|
||||
<Cmt Name="LcxAdmin" />
|
||||
<Cmt Name="Rccx" />
|
||||
</OwnedComments>
|
||||
<Item ItemId=";String Table" ItemType="0" PsrId="306" Leaf="false">
|
||||
<Disp Icon="Expand" Expand="true" Disp="true" LocTbl="false" />
|
||||
<Item ItemId=";Strings" ItemType="0" PsrId="306" Leaf="false">
|
||||
<Disp Icon="Str" Disp="true" LocTbl="false" />${items}
|
||||
</Item>
|
||||
</Item>
|
||||
</LCX>`;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
main();
|
||||
import * as fs from "fs";
|
||||
import * as path from "path";
|
||||
import * as xml2js from "xml2js";
|
||||
|
||||
function main(): void {
|
||||
const args = process.argv.slice(2);
|
||||
if (args.length !== 3) {
|
||||
console.log("Usage:");
|
||||
console.log("\tnode generateLocalizedDiagnosticMessages.js <lcl source directory> <output directory> <generated diagnostics map file>");
|
||||
return;
|
||||
}
|
||||
|
||||
const inputPath = args[0];
|
||||
const outputPath = args[1];
|
||||
const diagnosticsMapFilePath = args[2];
|
||||
|
||||
// generate the lcg file for enu
|
||||
generateLCGFile();
|
||||
|
||||
// generate other langs
|
||||
fs.readdir(inputPath, (err, files) => {
|
||||
handleError(err);
|
||||
files.forEach(visitDirectory);
|
||||
});
|
||||
|
||||
return;
|
||||
|
||||
function visitDirectory(name: string) {
|
||||
const inputFilePath = path.join(inputPath, name, "diagnosticMessages", "diagnosticMessages.generated.json.lcl");
|
||||
|
||||
fs.readFile(inputFilePath, (err, data) => {
|
||||
handleError(err);
|
||||
xml2js.parseString(data.toString(), (err, result) => {
|
||||
handleError(err);
|
||||
if (!result || !result.LCX || !result.LCX.$ || !result.LCX.$.TgtCul) {
|
||||
console.error("Unexpected XML file structure. Expected to find result.LCX.$.TgtCul.");
|
||||
process.exit(1);
|
||||
}
|
||||
const outputDirectoryName = getPreferredLocaleName(result.LCX.$.TgtCul).toLowerCase();
|
||||
if (!outputDirectoryName) {
|
||||
console.error(`Invalid output locale name for '${result.LCX.$.TgtCul}'.`);
|
||||
process.exit(1);
|
||||
}
|
||||
writeFile(path.join(outputPath, outputDirectoryName, "diagnosticMessages.generated.json"), xmlObjectToString(result));
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* A locale name is based on the language tagging conventions of RFC 4646 (Windows Vista
|
||||
* and later), and is represented by LOCALE_SNAME.
|
||||
* Generally, the pattern <language>-<REGION> is used. Here, language is a lowercase ISO 639
|
||||
* language code. The codes from ISO 639-1 are used when available. Otherwise, codes from
|
||||
* ISO 639-2/T are used. REGION specifies an uppercase ISO 3166-1 country/region identifier.
|
||||
* For example, the locale name for English (United States) is "en-US" and the locale name
|
||||
* for Divehi (Maldives) is "dv-MV".
|
||||
*
|
||||
* If the locale is a neutral locale (no region), the LOCALE_SNAME value follows the
|
||||
* pattern <language>. If it is a neutral locale for which the script is significant, the
|
||||
* pattern is <language>-<Script>.
|
||||
*
|
||||
* More at https://msdn.microsoft.com/en-us/library/windows/desktop/dd373814(v=vs.85).aspx
|
||||
*
|
||||
* Most of the languages we support are neutral locales, so we want to use the language name.
|
||||
* There are three exceptions, zh-CN, zh-TW and pt-BR.
|
||||
*/
|
||||
function getPreferredLocaleName(localeName: string) {
|
||||
switch (localeName) {
|
||||
case "zh-CN":
|
||||
case "zh-TW":
|
||||
case "pt-BR":
|
||||
return localeName;
|
||||
default:
|
||||
return localeName.split("-")[0];
|
||||
}
|
||||
}
|
||||
|
||||
function handleError(err: null | object) {
|
||||
if (err) {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
function xmlObjectToString(o: any) {
|
||||
const out: any = {};
|
||||
for (const item of o.LCX.Item[0].Item[0].Item) {
|
||||
let ItemId = item.$.ItemId;
|
||||
let val = item.Str[0].Tgt ? item.Str[0].Tgt[0].Val[0] : item.Str[0].Val[0];
|
||||
|
||||
if (typeof ItemId !== "string" || typeof val !== "string") {
|
||||
console.error("Unexpected XML file structure");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
if (ItemId.charAt(0) === ";") {
|
||||
ItemId = ItemId.slice(1); // remove leading semicolon
|
||||
}
|
||||
|
||||
val = val.replace(/]5D;/, "]"); // unescape `]`
|
||||
out[ItemId] = val;
|
||||
}
|
||||
return JSON.stringify(out, undefined, 2);
|
||||
}
|
||||
|
||||
|
||||
function ensureDirectoryExists(directoryPath: string, action: () => void) {
|
||||
fs.exists(directoryPath, exists => {
|
||||
if (!exists) {
|
||||
const basePath = path.dirname(directoryPath);
|
||||
if (basePath !== directoryPath) {
|
||||
return ensureDirectoryExists(basePath, () => fs.mkdir(directoryPath, action));
|
||||
}
|
||||
}
|
||||
action();
|
||||
});
|
||||
}
|
||||
|
||||
function writeFile(fileName: string, contents: string) {
|
||||
ensureDirectoryExists(path.dirname(fileName), () => {
|
||||
fs.writeFile(fileName, contents, handleError);
|
||||
});
|
||||
}
|
||||
|
||||
function objectToList(o: Record<string, string>) {
|
||||
const list: { key: string, value: string }[] = [];
|
||||
for (const key in o) {
|
||||
list.push({ key, value: o[key] });
|
||||
}
|
||||
return list;
|
||||
}
|
||||
|
||||
function generateLCGFile() {
|
||||
return fs.readFile(diagnosticsMapFilePath, (err, data) => {
|
||||
handleError(err);
|
||||
writeFile(
|
||||
path.join(outputPath, "enu", "diagnosticMessages.generated.json.lcg"),
|
||||
getLCGFileXML(
|
||||
objectToList(JSON.parse(data.toString()))
|
||||
.sort((a, b) => a.key > b.key ? 1 : -1) // lcg sorted by property keys
|
||||
.reduce((s, { key, value }) => s + getItemXML(key, value), "")
|
||||
));
|
||||
});
|
||||
|
||||
function getItemXML(key: string, value: string) {
|
||||
// escape entrt value
|
||||
value = value.replace(/]/, "]5D;");
|
||||
|
||||
return `
|
||||
<Item ItemId=";${key}" ItemType="0" PsrId="306" Leaf="true">
|
||||
<Str Cat="Text">
|
||||
<Val><![CDATA[${value}]]></Val>
|
||||
</Str>
|
||||
<Disp Icon="Str" />
|
||||
</Item>`;
|
||||
}
|
||||
|
||||
function getLCGFileXML(items: string) {
|
||||
return `<?xml version="1.0" encoding="utf-8"?>
|
||||
<LCX SchemaVersion="6.0" Name="diagnosticMessages.generated.json" PsrId="306" FileType="1" SrcCul="en-US" xmlns="http://schemas.microsoft.com/locstudio/2006/6/lcx">
|
||||
<OwnedComments>
|
||||
<Cmt Name="Dev" />
|
||||
<Cmt Name="LcxAdmin" />
|
||||
<Cmt Name="Rccx" />
|
||||
</OwnedComments>
|
||||
<Item ItemId=";String Table" ItemType="0" PsrId="306" Leaf="false">
|
||||
<Disp Icon="Expand" Expand="true" Disp="true" LocTbl="false" />
|
||||
<Item ItemId=";Strings" ItemType="0" PsrId="306" Leaf="false">
|
||||
<Disp Icon="Str" Disp="true" LocTbl="false" />${items}
|
||||
</Item>
|
||||
</Item>
|
||||
</LCX>`;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
main();
|
||||
|
||||
@ -1,2 +1,2 @@
|
||||
#!/bin/sh
|
||||
npm run jake -- generate-diagnostics
|
||||
npm run gulp -- generate-diagnostics
|
||||
125
scripts/ior.ts
125
scripts/ior.ts
@ -1,125 +0,0 @@
|
||||
/// <reference types="node"/>
|
||||
|
||||
import fs = require('fs');
|
||||
import path = require('path');
|
||||
|
||||
interface IOLog {
|
||||
filesRead: {
|
||||
path: string;
|
||||
result: { contents: string; };
|
||||
}[];
|
||||
arguments: string[];
|
||||
}
|
||||
|
||||
module Commands {
|
||||
export function dir(obj: IOLog) {
|
||||
obj.filesRead.filter(f => f.result !== undefined).forEach(f => {
|
||||
console.log(f.path);
|
||||
});
|
||||
}
|
||||
dir['description'] = ': displays a list of files';
|
||||
|
||||
export function find(obj: IOLog, str: string) {
|
||||
obj.filesRead.filter(f => f.result !== undefined).forEach(f => {
|
||||
var lines = f.result.contents.split('\n');
|
||||
var printedHeader = false;
|
||||
lines.forEach(line => {
|
||||
if (line.indexOf(str) >= 0) {
|
||||
if (!printedHeader) {
|
||||
console.log(' === ' + f.path + ' ===');
|
||||
printedHeader = true;
|
||||
}
|
||||
console.log(line);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
find['description'] = ' string: finds text in files';
|
||||
|
||||
export function grab(obj: IOLog, filename: string) {
|
||||
obj.filesRead.filter(f => f.result !== undefined).forEach(f => {
|
||||
if (path.basename(f.path) === filename) {
|
||||
fs.writeFile(filename, f.result.contents);
|
||||
}
|
||||
});
|
||||
}
|
||||
grab['description'] = ' filename.ts: writes out the specified file to disk';
|
||||
|
||||
export function extract(obj: IOLog, outputFolder: string) {
|
||||
var directorySeparator = "/";
|
||||
function directoryExists(path: string): boolean {
|
||||
return fs.existsSync(path) && fs.statSync(path).isDirectory();
|
||||
}
|
||||
function getDirectoryPath(path: string) {
|
||||
return path.substr(0, Math.max(getRootLength(path), path.lastIndexOf(directorySeparator)));
|
||||
}
|
||||
function getRootLength(path: string): number {
|
||||
if (path.charAt(0) === directorySeparator) {
|
||||
if (path.charAt(1) !== directorySeparator) return 1;
|
||||
var p1 = path.indexOf(directorySeparator, 2);
|
||||
if (p1 < 0) return 2;
|
||||
var p2 = path.indexOf(directorySeparator, p1 + 1);
|
||||
if (p2 < 0) return p1 + 1;
|
||||
return p2 + 1;
|
||||
}
|
||||
if (path.charAt(1) === ":") {
|
||||
if (path.charAt(2) === directorySeparator) return 3;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
function ensureDirectoriesExist(directoryPath: string) {
|
||||
if (directoryPath.length > getRootLength(directoryPath) && !directoryExists(directoryPath)) {
|
||||
var parentDirectory = getDirectoryPath(directoryPath);
|
||||
ensureDirectoriesExist(parentDirectory);
|
||||
console.log("creating directory: " + directoryPath);
|
||||
fs.mkdirSync(directoryPath);
|
||||
}
|
||||
}
|
||||
function normalizeSlashes(path: string): string {
|
||||
return path.replace(/\\/g, "/");
|
||||
}
|
||||
function transalatePath(outputFolder:string, path: string): string {
|
||||
return normalizeSlashes(outputFolder + directorySeparator + path.replace(":", ""));
|
||||
}
|
||||
function fileExists(path: string): boolean {
|
||||
return fs.existsSync(path);
|
||||
}
|
||||
obj.filesRead.forEach(f => {
|
||||
var filename = transalatePath(outputFolder, f.path);
|
||||
ensureDirectoriesExist(getDirectoryPath(filename));
|
||||
console.log("writing filename: " + filename);
|
||||
fs.writeFileSync(filename, f.result.contents);
|
||||
});
|
||||
|
||||
console.log("Command: tsc ");
|
||||
obj.arguments.forEach(a => {
|
||||
if (getRootLength(a) > 0) {
|
||||
console.log(transalatePath(outputFolder, a));
|
||||
}
|
||||
else {
|
||||
console.log(a);
|
||||
}
|
||||
console.log(" ");
|
||||
});
|
||||
}
|
||||
extract['description'] = ' outputFolder: extract all input files to <outputFolder>';
|
||||
}
|
||||
|
||||
var args = process.argv.slice(2);
|
||||
if (args.length < 2) {
|
||||
console.log('Usage: node ior.js path_to_file.json [command]');
|
||||
console.log('List of commands: ');
|
||||
Object.keys(Commands).forEach(k => console.log(' ' + k + Commands[k]['description']));
|
||||
} else {
|
||||
var cmd: Function = Commands[args[1]];
|
||||
if (cmd === undefined) {
|
||||
console.log('Unknown command ' + args[1]);
|
||||
} else {
|
||||
fs.readFile(args[0], 'utf-8', (err, data) => {
|
||||
if (err) throw err;
|
||||
var json = JSON.parse(data);
|
||||
cmd.apply(undefined, [json].concat(args.slice(2)));
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@ -1,3 +1,5 @@
|
||||
/// <reference lib="esnext.asynciterable" />
|
||||
// Must reference esnext.asynciterable lib, since octokit uses AsyncIterable internally
|
||||
import cp = require("child_process");
|
||||
import Octokit = require("@octokit/rest");
|
||||
|
||||
@ -22,7 +24,7 @@ const branchName = `user-update-${now.getFullYear()}${padNum(now.getMonth())}${p
|
||||
const remoteUrl = `https://${process.argv[2]}@github.com/${userName}/TypeScript.git`;
|
||||
runSequence([
|
||||
["git", ["checkout", "."]], // reset any changes
|
||||
["node", ["./node_modules/jake/bin/cli.js", "baseline-accept"]], // accept baselines
|
||||
["node", ["./node_modules/gulp/bin/gulp.js", "baseline-accept"]], // accept baselines
|
||||
["git", ["checkout", "-b", branchName]], // create a branch
|
||||
["git", ["add", "."]], // Add all changes
|
||||
["git", ["commit", "-m", `"Update user baselines"`]], // Commit all changes
|
||||
@ -35,7 +37,7 @@ gh.authenticate({
|
||||
type: "token",
|
||||
token: process.argv[2]
|
||||
});
|
||||
gh.pullRequests.create({
|
||||
gh.pulls.create({
|
||||
owner: process.env.TARGET_FORK,
|
||||
repo: "TypeScript",
|
||||
maintainer_can_modify: true,
|
||||
@ -50,7 +52,7 @@ cc ${reviewers.map(r => "@" + r).join(" ")}`,
|
||||
}).then(r => {
|
||||
const num = r.data.number;
|
||||
console.log(`Pull request ${num} created.`);
|
||||
return gh.pullRequests.createReviewRequest({
|
||||
return gh.pulls.createReviewRequest({
|
||||
owner: process.env.TARGET_FORK,
|
||||
repo: "TypeScript",
|
||||
number: num,
|
||||
|
||||
@ -1,50 +0,0 @@
|
||||
var tslint = require("tslint");
|
||||
var fs = require("fs");
|
||||
var path = require("path");
|
||||
|
||||
/**
 * Options passed to the tslint Linter: prose-formatted output and the custom
 * rules compiled into built/local/tslint.
 */
function getLinterOptions() {
    var options = {
        formatter: "prose",
        formattersDirectory: undefined,
        rulesDirectory: "built/local/tslint"
    };
    return options;
}
|
||||
/** Loads the shared tslint.json configuration from the repository root. */
function getLinterConfiguration() {
    var configurationPath = path.join(__dirname, "../tslint.json");
    return tslint.Configuration.loadConfigurationFromPath(configurationPath);
}
|
||||
|
||||
/** Runs tslint over already-loaded file contents and returns the lint result. */
function lintFileContents(options, configuration, path, contents) {
    var linter = new tslint.Linter(options);
    linter.lint(path, contents, configuration);
    return linter.getResult();
}
|
||||
|
||||
/**
 * Reads `path` from disk and lints it, reporting through the node-style
 * callback `cb(err, result)`.
 */
function lintFileAsync(options, configuration, path, cb) {
    fs.readFile(path, "utf8", function (err, contents) {
        if (err) {
            cb(err);
            return;
        }
        cb(undefined, lintFileContents(options, configuration, path, contents));
    });
}
|
||||
|
||||
// Worker protocol: the parent process sends { kind: "file", name } to request
// a lint of one file (answered with { kind: "result", ... } on success or
// { kind: "error", ... } on failure), and { kind: "close" } to shut down.
process.on("message", function (data) {
    if (data.kind === "file") {
        var lintOptions = getLinterOptions();
        var lintConfiguration = getLinterConfiguration();
        lintFileAsync(lintOptions, lintConfiguration, data.name, function (err, result) {
            if (err) {
                process.send({ kind: "error", error: err.toString() });
                return;
            }
            process.send({ kind: "result", failures: result.failureCount, output: result.output });
        });
    }
    else if (data.kind === "close") {
        process.exit(0);
    }
});
|
||||
@ -12,6 +12,7 @@
|
||||
"module": "commonjs",
|
||||
"outDir": "../../built/local/tslint",
|
||||
"baseUrl": "../..",
|
||||
"types": ["node"],
|
||||
"paths": {
|
||||
"typescript": ["lib/typescript.d.ts"]
|
||||
}
|
||||
|
||||
83
scripts/types/ambient.d.ts
vendored
83
scripts/types/ambient.d.ts
vendored
@ -1,10 +1,4 @@
|
||||
declare module "gulp-clone" {
|
||||
function Clone(): NodeJS.ReadWriteStream;
|
||||
namespace Clone {
|
||||
export function sink() : NodeJS.ReadWriteStream & {tap: () => NodeJS.ReadWriteStream};
|
||||
}
|
||||
export = Clone;
|
||||
}
|
||||
import { TaskFunction } from "gulp";
|
||||
|
||||
declare module "gulp-insert" {
|
||||
export function append(text: string | Buffer): NodeJS.ReadWriteStream;
|
||||
@ -14,3 +8,78 @@ declare module "gulp-insert" {
|
||||
}
|
||||
|
||||
declare module "sorcery";
|
||||
|
||||
declare module "vinyl" {
|
||||
// NOTE: This makes it possible to correctly type vinyl Files under @ts-check.
|
||||
export = File;
|
||||
|
||||
declare class File<T extends File.Contents = File.Contents> {
|
||||
constructor(options?: File.VinylOptions<T>);
|
||||
|
||||
cwd: string;
|
||||
base: string;
|
||||
path: string;
|
||||
readonly history: ReadonlyArray<string>;
|
||||
contents: T;
|
||||
relative: string;
|
||||
dirname: string;
|
||||
basename: string;
|
||||
stem: string;
|
||||
extname: string;
|
||||
symlink: string | null;
|
||||
stat: import("fs").Stats | null;
|
||||
sourceMap?: import("./sourcemaps").RawSourceMap | string;
|
||||
|
||||
[custom: string]: any;
|
||||
|
||||
isBuffer(): this is T extends Buffer ? File<Buffer> : never;
|
||||
isStream(): this is T extends NodeJS.ReadableStream ? File<NodeJS.ReadableStream> : never;
|
||||
isNull(): this is T extends null ? File<null> : never;
|
||||
isDirectory(): this is T extends null ? File.Directory : never;
|
||||
isSymbolic(): this is T extends null ? File.Symbolic : never;
|
||||
clone(opts?: { contents?: boolean, deep?: boolean }): this;
|
||||
}
|
||||
|
||||
namespace File {
|
||||
export interface VinylOptions<T extends Contents = Contents> {
|
||||
cwd?: string;
|
||||
base?: string;
|
||||
path?: string;
|
||||
history?: ReadonlyArray<string>;
|
||||
stat?: import("fs").Stats;
|
||||
contents?: T;
|
||||
sourceMap?: import("./sourcemaps").RawSourceMap | string;
|
||||
[custom: string]: any;
|
||||
}
|
||||
|
||||
export type Contents = Buffer | NodeJS.ReadableStream | null;
|
||||
export type File = import("./vinyl");
|
||||
export type NullFile = File<null>;
|
||||
export type BufferFile = File<Buffer>;
|
||||
export type StreamFile = File<NodeJS.ReadableStream>;
|
||||
|
||||
export interface Directory extends NullFile {
|
||||
isNull(): true;
|
||||
isDirectory(): true;
|
||||
isSymbolic(): this is never;
|
||||
}
|
||||
|
||||
export interface Symbolic extends NullFile {
|
||||
isNull(): true;
|
||||
isDirectory(): this is never;
|
||||
isSymbolic(): true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
declare module "undertaker" {
|
||||
interface TaskFunctionParams {
|
||||
flags?: Record<string, string>;
|
||||
}
|
||||
}
|
||||
|
||||
declare module "gulp-sourcemaps" {
|
||||
interface WriteOptions {
|
||||
destPath?: string;
|
||||
}
|
||||
}
|
||||
@ -1,21 +0,0 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE
|
||||
2856
scripts/types/mocha/index.d.ts
vendored
2856
scripts/types/mocha/index.d.ts
vendored
File diff suppressed because it is too large
Load Diff
109
scripts/types/mocha/lib/interfaces/common.d.ts
vendored
109
scripts/types/mocha/lib/interfaces/common.d.ts
vendored
@ -1,109 +0,0 @@
|
||||
import Mocha = require("../../");
|
||||
|
||||
export = common;
|
||||
|
||||
declare function common(suites: Mocha.Suite[], context: Mocha.MochaGlobals, mocha: Mocha): common.CommonFunctions;
|
||||
|
||||
declare namespace common {
|
||||
export interface CommonFunctions {
|
||||
/**
|
||||
* This is only present if flag --delay is passed into Mocha. It triggers
|
||||
* root suite execution.
|
||||
*/
|
||||
runWithSuite(suite: Mocha.Suite): () => void;
|
||||
|
||||
/**
|
||||
* Execute before running tests.
|
||||
*/
|
||||
before(fn?: Mocha.Func | Mocha.AsyncFunc): void;
|
||||
|
||||
/**
|
||||
* Execute before running tests.
|
||||
*/
|
||||
before(name: string, fn?: Mocha.Func | Mocha.AsyncFunc): void;
|
||||
|
||||
/**
|
||||
* Execute after running tests.
|
||||
*/
|
||||
after(fn?: Mocha.Func | Mocha.AsyncFunc): void;
|
||||
|
||||
/**
|
||||
* Execute after running tests.
|
||||
*/
|
||||
after(name: string, fn?: Mocha.Func | Mocha.AsyncFunc): void;
|
||||
|
||||
/**
|
||||
* Execute before each test case.
|
||||
*/
|
||||
beforeEach(fn?: Mocha.Func | Mocha.AsyncFunc): void;
|
||||
|
||||
/**
|
||||
* Execute before each test case.
|
||||
*/
|
||||
beforeEach(name: string, fn?: Mocha.Func | Mocha.AsyncFunc): void;
|
||||
|
||||
/**
|
||||
* Execute after each test case.
|
||||
*/
|
||||
afterEach(fn?: Mocha.Func | Mocha.AsyncFunc): void;
|
||||
|
||||
/**
|
||||
* Execute after each test case.
|
||||
*/
|
||||
afterEach(name: string, fn?: Mocha.Func | Mocha.AsyncFunc): void;
|
||||
|
||||
suite: SuiteFunctions;
|
||||
test: TestFunctions;
|
||||
}
|
||||
|
||||
export interface CreateOptions {
|
||||
/** Title of suite */
|
||||
title: string;
|
||||
|
||||
/** Suite function */
|
||||
fn?: (this: Mocha.Suite) => void;
|
||||
|
||||
/** Is suite pending? */
|
||||
pending?: boolean;
|
||||
|
||||
/** Filepath where this Suite resides */
|
||||
file?: string;
|
||||
|
||||
/** Is suite exclusive? */
|
||||
isOnly?: boolean;
|
||||
}
|
||||
|
||||
export interface SuiteFunctions {
|
||||
/**
|
||||
* Create an exclusive Suite; convenience function
|
||||
*/
|
||||
only(opts: CreateOptions): Mocha.Suite;
|
||||
|
||||
/**
|
||||
* Create a Suite, but skip it; convenience function
|
||||
*/
|
||||
skip(opts: CreateOptions): Mocha.Suite;
|
||||
|
||||
/**
|
||||
* Creates a suite.
|
||||
*/
|
||||
create(opts: CreateOptions): Mocha.Suite;
|
||||
}
|
||||
|
||||
export interface TestFunctions {
|
||||
/**
|
||||
* Exclusive test-case.
|
||||
*/
|
||||
only(mocha: Mocha, test: Mocha.Test): Mocha.Test;
|
||||
|
||||
/**
|
||||
* Pending test case.
|
||||
*/
|
||||
skip(title: string): void;
|
||||
|
||||
/**
|
||||
* Number of retry attempts
|
||||
*/
|
||||
retries(n: number): void;
|
||||
}
|
||||
}
|
||||
17
scripts/types/mocha/lib/ms.d.ts
vendored
17
scripts/types/mocha/lib/ms.d.ts
vendored
@ -1,17 +0,0 @@
|
||||
export = milliseconds;
|
||||
|
||||
/**
|
||||
* Parse the given `str` and return milliseconds.
|
||||
*
|
||||
* @see {@link https://mochajs.org/api/module-milliseconds.html}
|
||||
* @see {@link https://mochajs.org/api/module-milliseconds.html#~parse}
|
||||
*/
|
||||
declare function milliseconds(val: string): number;
|
||||
|
||||
/**
|
||||
* Format for `ms`.
|
||||
*
|
||||
* @see {@link https://mochajs.org/api/module-milliseconds.html}
|
||||
* @see {@link https://mochajs.org/api/module-milliseconds.html#~format}
|
||||
*/
|
||||
declare function milliseconds(val: number): string;
|
||||
@ -1,5 +0,0 @@
|
||||
{
|
||||
"name": "@types/mocha",
|
||||
"private": true,
|
||||
"version": "5.2.1"
|
||||
}
|
||||
@ -35,8 +35,8 @@ function createCancellationToken(args: string[]): ServerCancellationToken {
|
||||
}
|
||||
// cancellationPipeName is a string without '*' inside that can optionally end with '*'
|
||||
// when client wants to signal cancellation it should create a named pipe with name=<cancellationPipeName>
|
||||
// server will synchronously check the presence of the pipe and treat its existance as indicator that current request should be canceled.
|
||||
// in case if client prefers to use more fine-grained schema than one name for all request it can add '*' to the end of cancelellationPipeName.
|
||||
// server will synchronously check the presence of the pipe and treat its existence as indicator that current request should be canceled.
|
||||
// in case if client prefers to use more fine-grained schema than one name for all request it can add '*' to the end of cancellationPipeName.
|
||||
// in this case pipe name will be build dynamically as <cancellationPipeName><request_seq>.
|
||||
if (cancellationPipeName.charAt(cancellationPipeName.length - 1) === "*") {
|
||||
const namePrefix = cancellationPipeName.slice(0, -1);
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
{
|
||||
"extends": "../tsconfig-base",
|
||||
"extends": "../tsconfig-noncomposite-base",
|
||||
"compilerOptions": {
|
||||
"outDir": "../../built/local/",
|
||||
"rootDir": ".",
|
||||
@ -7,6 +7,7 @@
|
||||
"declaration": false,
|
||||
"declarationMap": false,
|
||||
"removeComments": true,
|
||||
"incremental": false,
|
||||
"module": "commonjs",
|
||||
"types": [
|
||||
"node"
|
||||
|
||||
@ -100,6 +100,8 @@ namespace ts {
|
||||
IsObjectLiteralOrClassExpressionMethod = 1 << 7,
|
||||
}
|
||||
|
||||
let flowNodeCreated: <T extends FlowNode>(node: T) => T = identity;
|
||||
|
||||
const binder = createBinder();
|
||||
|
||||
export function bindSourceFile(file: SourceFile, options: CompilerOptions) {
|
||||
@ -401,13 +403,15 @@ namespace ts {
|
||||
messageNeedsName = false;
|
||||
}
|
||||
|
||||
if (symbol.declarations && symbol.declarations.length) {
|
||||
let multipleDefaultExports = false;
|
||||
if (length(symbol.declarations)) {
|
||||
// If the current node is a default export of some sort, then check if
|
||||
// there are any other default exports that we need to error on.
|
||||
// We'll know whether we have other default exports depending on if `symbol` already has a declaration list set.
|
||||
if (isDefaultExport) {
|
||||
message = Diagnostics.A_module_cannot_have_multiple_default_exports;
|
||||
messageNeedsName = false;
|
||||
multipleDefaultExports = true;
|
||||
}
|
||||
else {
|
||||
// This is to properly report an error in the case "export default { }" is after export default of class declaration or function declaration.
|
||||
@ -418,15 +422,26 @@ namespace ts {
|
||||
(node.kind === SyntaxKind.ExportAssignment && !(<ExportAssignment>node).isExportEquals)) {
|
||||
message = Diagnostics.A_module_cannot_have_multiple_default_exports;
|
||||
messageNeedsName = false;
|
||||
multipleDefaultExports = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const addError = (decl: Declaration): void => {
|
||||
file.bindDiagnostics.push(createDiagnosticForNode(getNameOfDeclaration(decl) || decl, message, messageNeedsName ? getDisplayName(decl) : undefined));
|
||||
};
|
||||
forEach(symbol.declarations, addError);
|
||||
addError(node);
|
||||
const declarationName = getNameOfDeclaration(node) || node;
|
||||
const relatedInformation: DiagnosticRelatedInformation[] = [];
|
||||
forEach(symbol.declarations, (declaration, index) => {
|
||||
const decl = getNameOfDeclaration(declaration) || declaration;
|
||||
const diag = createDiagnosticForNode(decl, message, messageNeedsName ? getDisplayName(declaration) : undefined);
|
||||
file.bindDiagnostics.push(
|
||||
multipleDefaultExports ? addRelatedInfo(diag, createDiagnosticForNode(declarationName, index === 0 ? Diagnostics.Another_export_default_is_here : Diagnostics.and_here)) : diag
|
||||
);
|
||||
if (multipleDefaultExports) {
|
||||
relatedInformation.push(createDiagnosticForNode(decl, Diagnostics.The_first_export_default_is_here));
|
||||
}
|
||||
});
|
||||
|
||||
const diag = createDiagnosticForNode(declarationName, message, messageNeedsName ? getDisplayName(node) : undefined);
|
||||
file.bindDiagnostics.push(multipleDefaultExports ? addRelatedInfo(diag, ...relatedInformation) : diag);
|
||||
|
||||
symbol = createSymbol(SymbolFlags.None, name);
|
||||
}
|
||||
@ -530,6 +545,7 @@ namespace ts {
|
||||
blockScopeContainer.locals = undefined;
|
||||
}
|
||||
if (containerFlags & ContainerFlags.IsControlFlowContainer) {
|
||||
const saveFlowNodeCreated = flowNodeCreated;
|
||||
const saveCurrentFlow = currentFlow;
|
||||
const saveBreakTarget = currentBreakTarget;
|
||||
const saveContinueTarget = currentContinueTarget;
|
||||
@ -547,12 +563,13 @@ namespace ts {
|
||||
}
|
||||
}
|
||||
// We create a return control flow graph for IIFEs and constructors. For constructors
|
||||
// we use the return control flow graph in strict property intialization checks.
|
||||
// we use the return control flow graph in strict property initialization checks.
|
||||
currentReturnTarget = isIIFE || node.kind === SyntaxKind.Constructor ? createBranchLabel() : undefined;
|
||||
currentBreakTarget = undefined;
|
||||
currentContinueTarget = undefined;
|
||||
activeLabels = undefined;
|
||||
hasExplicitReturn = false;
|
||||
flowNodeCreated = identity;
|
||||
bindChildren(node);
|
||||
// Reset all reachability check related flags on node (for incremental scenarios)
|
||||
node.flags &= ~NodeFlags.ReachabilityAndEmitFlags;
|
||||
@ -579,6 +596,7 @@ namespace ts {
|
||||
currentReturnTarget = saveReturnTarget;
|
||||
activeLabels = saveActiveLabels;
|
||||
hasExplicitReturn = saveHasExplicitReturn;
|
||||
flowNodeCreated = saveFlowNodeCreated;
|
||||
}
|
||||
else if (containerFlags & ContainerFlags.IsInterface) {
|
||||
seenThisKeyword = false;
|
||||
@ -753,7 +771,7 @@ namespace ts {
|
||||
|
||||
function isNarrowableReference(expr: Expression): boolean {
|
||||
return expr.kind === SyntaxKind.Identifier || expr.kind === SyntaxKind.ThisKeyword || expr.kind === SyntaxKind.SuperKeyword ||
|
||||
isPropertyAccessExpression(expr) && isNarrowableReference(expr.expression) ||
|
||||
(isPropertyAccessExpression(expr) || isNonNullExpression(expr) || isParenthesizedExpression(expr)) && isNarrowableReference(expr.expression) ||
|
||||
isElementAccessExpression(expr) && expr.argumentExpression &&
|
||||
(isStringLiteral(expr.argumentExpression) || isNumericLiteral(expr.argumentExpression)) &&
|
||||
isNarrowableReference(expr.expression);
|
||||
@ -858,7 +876,7 @@ namespace ts {
|
||||
return antecedent;
|
||||
}
|
||||
setFlowNodeReferenced(antecedent);
|
||||
return { flags, expression, antecedent };
|
||||
return flowNodeCreated({ flags, expression, antecedent });
|
||||
}
|
||||
|
||||
function createFlowSwitchClause(antecedent: FlowNode, switchStatement: SwitchStatement, clauseStart: number, clauseEnd: number): FlowNode {
|
||||
@ -866,17 +884,17 @@ namespace ts {
|
||||
return antecedent;
|
||||
}
|
||||
setFlowNodeReferenced(antecedent);
|
||||
return { flags: FlowFlags.SwitchClause, switchStatement, clauseStart, clauseEnd, antecedent };
|
||||
return flowNodeCreated({ flags: FlowFlags.SwitchClause, switchStatement, clauseStart, clauseEnd, antecedent });
|
||||
}
|
||||
|
||||
function createFlowAssignment(antecedent: FlowNode, node: Expression | VariableDeclaration | BindingElement): FlowNode {
|
||||
setFlowNodeReferenced(antecedent);
|
||||
return { flags: FlowFlags.Assignment, antecedent, node };
|
||||
return flowNodeCreated({ flags: FlowFlags.Assignment, antecedent, node });
|
||||
}
|
||||
|
||||
function createFlowArrayMutation(antecedent: FlowNode, node: CallExpression | BinaryExpression): FlowNode {
|
||||
setFlowNodeReferenced(antecedent);
|
||||
const res: FlowArrayMutation = { flags: FlowFlags.ArrayMutation, antecedent, node };
|
||||
const res: FlowArrayMutation = flowNodeCreated({ flags: FlowFlags.ArrayMutation, antecedent, node });
|
||||
return res;
|
||||
}
|
||||
|
||||
@ -1080,8 +1098,16 @@ namespace ts {
|
||||
function bindTryStatement(node: TryStatement): void {
|
||||
const preFinallyLabel = createBranchLabel();
|
||||
const preTryFlow = currentFlow;
|
||||
// TODO: Every statement in try block is potentially an exit point!
|
||||
const tryPriors: FlowNode[] = [];
|
||||
const oldFlowNodeCreated = flowNodeCreated;
|
||||
// We hook the creation of all flow nodes within the `try` scope and store them so we can add _all_ of them
|
||||
// as possible antecedents of the start of the `catch` or `finally` blocks.
|
||||
// Don't bother intercepting the call if there's no finally or catch block that needs the information
|
||||
if (node.catchClause || node.finallyBlock) {
|
||||
flowNodeCreated = node => (tryPriors.push(node), node);
|
||||
}
|
||||
bind(node.tryBlock);
|
||||
flowNodeCreated = oldFlowNodeCreated;
|
||||
addAntecedent(preFinallyLabel, currentFlow);
|
||||
|
||||
const flowAfterTry = currentFlow;
|
||||
@ -1089,12 +1115,36 @@ namespace ts {
|
||||
|
||||
if (node.catchClause) {
|
||||
currentFlow = preTryFlow;
|
||||
if (tryPriors.length) {
|
||||
const preCatchFlow = createBranchLabel();
|
||||
addAntecedent(preCatchFlow, currentFlow);
|
||||
for (const p of tryPriors) {
|
||||
addAntecedent(preCatchFlow, p);
|
||||
}
|
||||
currentFlow = finishFlowLabel(preCatchFlow);
|
||||
}
|
||||
|
||||
bind(node.catchClause);
|
||||
addAntecedent(preFinallyLabel, currentFlow);
|
||||
|
||||
flowAfterCatch = currentFlow;
|
||||
}
|
||||
if (node.finallyBlock) {
|
||||
// We add the nodes within the `try` block to the `finally`'s antecedents if there's no catch block
|
||||
// (If there is a `catch` block, it will have all these antecedents instead, and the `finally` will
|
||||
// have the end of the `try` block and the end of the `catch` block)
|
||||
let preFinallyPrior = preTryFlow;
|
||||
if (!node.catchClause) {
|
||||
if (tryPriors.length) {
|
||||
const preFinallyFlow = createBranchLabel();
|
||||
addAntecedent(preFinallyFlow, preTryFlow);
|
||||
for (const p of tryPriors) {
|
||||
addAntecedent(preFinallyFlow, p);
|
||||
}
|
||||
preFinallyPrior = finishFlowLabel(preFinallyFlow);
|
||||
}
|
||||
}
|
||||
|
||||
// in finally flow is combined from pre-try/flow from try/flow from catch
|
||||
// pre-flow is necessary to make sure that finally is reachable even if finally flows in both try and finally blocks are unreachable
|
||||
|
||||
@ -1123,7 +1173,7 @@ namespace ts {
|
||||
//
|
||||
// extra edges that we inject allows to control this behavior
|
||||
// if when walking the flow we step on post-finally edge - we can mark matching pre-finally edge as locked so it will be skipped.
|
||||
const preFinallyFlow: PreFinallyFlow = { flags: FlowFlags.PreFinally, antecedent: preTryFlow, lock: {} };
|
||||
const preFinallyFlow: PreFinallyFlow = { flags: FlowFlags.PreFinally, antecedent: preFinallyPrior, lock: {} };
|
||||
addAntecedent(preFinallyLabel, preFinallyFlow);
|
||||
|
||||
currentFlow = finishFlowLabel(preFinallyLabel);
|
||||
@ -1142,7 +1192,7 @@ namespace ts {
|
||||
}
|
||||
}
|
||||
if (!(currentFlow.flags & FlowFlags.Unreachable)) {
|
||||
const afterFinallyFlow: AfterFinallyFlow = { flags: FlowFlags.AfterFinally, antecedent: currentFlow };
|
||||
const afterFinallyFlow: AfterFinallyFlow = flowNodeCreated({ flags: FlowFlags.AfterFinally, antecedent: currentFlow });
|
||||
preFinallyFlow.lock = afterFinallyFlow;
|
||||
currentFlow = afterFinallyFlow;
|
||||
}
|
||||
@ -2462,8 +2512,13 @@ namespace ts {
|
||||
declareSymbol(symbolTable, containingClass.symbol, node, SymbolFlags.Property, SymbolFlags.None, /*isReplaceableByMethod*/ true);
|
||||
break;
|
||||
case SyntaxKind.SourceFile:
|
||||
// this.foo assignment in a source file
|
||||
// Do not bind. It would be nice to support this someday though.
|
||||
// this.property = assignment in a source file -- declare symbol in exports for a module, in locals for a script
|
||||
if ((thisContainer as SourceFile).commonJsModuleIndicator) {
|
||||
declareSymbol(thisContainer.symbol.exports!, thisContainer.symbol, node, SymbolFlags.Property | SymbolFlags.ExportValue, SymbolFlags.None);
|
||||
}
|
||||
else {
|
||||
declareSymbolAndAddToSymbolTable(node, SymbolFlags.FunctionScopedVariable, SymbolFlags.FunctionScopedVariableExcludes);
|
||||
}
|
||||
break;
|
||||
|
||||
default:
|
||||
@ -3051,32 +3106,24 @@ namespace ts {
|
||||
|
||||
function computeCallExpression(node: CallExpression, subtreeFlags: TransformFlags) {
|
||||
let transformFlags = subtreeFlags;
|
||||
const callee = skipOuterExpressions(node.expression);
|
||||
const expression = node.expression;
|
||||
|
||||
if (node.typeArguments) {
|
||||
transformFlags |= TransformFlags.AssertTypeScript;
|
||||
}
|
||||
|
||||
if (subtreeFlags & TransformFlags.ContainsRestOrSpread
|
||||
|| (expression.transformFlags & (TransformFlags.Super | TransformFlags.ContainsSuper))) {
|
||||
if (subtreeFlags & TransformFlags.ContainsRestOrSpread || isSuperOrSuperProperty(callee)) {
|
||||
// If the this node contains a SpreadExpression, or is a super call, then it is an ES6
|
||||
// node.
|
||||
transformFlags |= TransformFlags.AssertES2015;
|
||||
// super property or element accesses could be inside lambdas, etc, and need a captured `this`,
|
||||
// while super keyword for super calls (indicated by TransformFlags.Super) does not (since it can only be top-level in a constructor)
|
||||
if (expression.transformFlags & TransformFlags.ContainsSuper) {
|
||||
if (isSuperProperty(callee)) {
|
||||
transformFlags |= TransformFlags.ContainsLexicalThis;
|
||||
}
|
||||
}
|
||||
|
||||
if (expression.kind === SyntaxKind.ImportKeyword) {
|
||||
transformFlags |= TransformFlags.ContainsDynamicImport;
|
||||
|
||||
// A dynamic 'import()' call that contains a lexical 'this' will
|
||||
// require a captured 'this' when emitting down-level.
|
||||
if (subtreeFlags & TransformFlags.ContainsLexicalThis) {
|
||||
transformFlags |= TransformFlags.ContainsCapturedLexicalThis;
|
||||
}
|
||||
}
|
||||
|
||||
node.transformFlags = transformFlags | TransformFlags.HasComputedFlags;
|
||||
@ -3104,8 +3151,8 @@ namespace ts {
|
||||
|
||||
if (operatorTokenKind === SyntaxKind.EqualsToken && leftKind === SyntaxKind.ObjectLiteralExpression) {
|
||||
// Destructuring object assignments with are ES2015 syntax
|
||||
// and possibly ESNext if they contain rest
|
||||
transformFlags |= TransformFlags.AssertESNext | TransformFlags.AssertES2015 | TransformFlags.AssertDestructuringAssignment;
|
||||
// and possibly ES2018 if they contain rest
|
||||
transformFlags |= TransformFlags.AssertES2018 | TransformFlags.AssertES2015 | TransformFlags.AssertDestructuringAssignment;
|
||||
}
|
||||
else if (operatorTokenKind === SyntaxKind.EqualsToken && leftKind === SyntaxKind.ArrayLiteralExpression) {
|
||||
// Destructuring assignments are ES2015 syntax.
|
||||
@ -3141,15 +3188,15 @@ namespace ts {
|
||||
transformFlags |= TransformFlags.AssertTypeScript | TransformFlags.ContainsTypeScriptClassSyntax;
|
||||
}
|
||||
|
||||
// parameters with object rest destructuring are ES Next syntax
|
||||
// parameters with object rest destructuring are ES2018 syntax
|
||||
if (subtreeFlags & TransformFlags.ContainsObjectRestOrSpread) {
|
||||
transformFlags |= TransformFlags.AssertESNext;
|
||||
transformFlags |= TransformFlags.AssertES2018;
|
||||
}
|
||||
|
||||
// If a parameter has an initializer, a binding pattern or a dotDotDot token, then
|
||||
// it is ES6 syntax and its container must emit default value assignments or parameter destructuring downlevel.
|
||||
if (subtreeFlags & TransformFlags.ContainsBindingPattern || initializer || dotDotDotToken) {
|
||||
transformFlags |= TransformFlags.AssertES2015 | TransformFlags.ContainsDefaultValueAssignments;
|
||||
transformFlags |= TransformFlags.AssertES2015;
|
||||
}
|
||||
|
||||
node.transformFlags = transformFlags | TransformFlags.HasComputedFlags;
|
||||
@ -3160,7 +3207,6 @@ namespace ts {
|
||||
let transformFlags = subtreeFlags;
|
||||
const expression = node.expression;
|
||||
const expressionKind = expression.kind;
|
||||
const expressionTransformFlags = expression.transformFlags;
|
||||
|
||||
// If the node is synthesized, it means the emitter put the parentheses there,
|
||||
// not the user. If we didn't want them, the emitter would not have put them
|
||||
@ -3170,12 +3216,6 @@ namespace ts {
|
||||
transformFlags |= TransformFlags.AssertTypeScript;
|
||||
}
|
||||
|
||||
// If the expression of a ParenthesizedExpression is a destructuring assignment,
|
||||
// then the ParenthesizedExpression is a destructuring assignment.
|
||||
if (expressionTransformFlags & TransformFlags.DestructuringAssignment) {
|
||||
transformFlags |= TransformFlags.DestructuringAssignment;
|
||||
}
|
||||
|
||||
node.transformFlags = transformFlags | TransformFlags.HasComputedFlags;
|
||||
return transformFlags & ~TransformFlags.OuterExpressionExcludes;
|
||||
}
|
||||
@ -3199,12 +3239,6 @@ namespace ts {
|
||||
|| node.typeParameters) {
|
||||
transformFlags |= TransformFlags.AssertTypeScript;
|
||||
}
|
||||
|
||||
if (subtreeFlags & TransformFlags.ContainsLexicalThisInComputedPropertyName) {
|
||||
// A computed property name containing `this` might need to be rewritten,
|
||||
// so propagate the ContainsLexicalThis flag upward.
|
||||
transformFlags |= TransformFlags.ContainsLexicalThis;
|
||||
}
|
||||
}
|
||||
|
||||
node.transformFlags = transformFlags | TransformFlags.HasComputedFlags;
|
||||
@ -3222,12 +3256,6 @@ namespace ts {
|
||||
transformFlags |= TransformFlags.AssertTypeScript;
|
||||
}
|
||||
|
||||
if (subtreeFlags & TransformFlags.ContainsLexicalThisInComputedPropertyName) {
|
||||
// A computed property name containing `this` might need to be rewritten,
|
||||
// so propagate the ContainsLexicalThis flag upward.
|
||||
transformFlags |= TransformFlags.ContainsLexicalThis;
|
||||
}
|
||||
|
||||
node.transformFlags = transformFlags | TransformFlags.HasComputedFlags;
|
||||
return transformFlags & ~TransformFlags.ClassExcludes;
|
||||
}
|
||||
@ -3259,7 +3287,7 @@ namespace ts {
|
||||
let transformFlags = subtreeFlags;
|
||||
|
||||
if (!node.variableDeclaration) {
|
||||
transformFlags |= TransformFlags.AssertESNext;
|
||||
transformFlags |= TransformFlags.AssertES2019;
|
||||
}
|
||||
else if (isBindingPattern(node.variableDeclaration.name)) {
|
||||
transformFlags |= TransformFlags.AssertES2015;
|
||||
@ -3293,9 +3321,9 @@ namespace ts {
|
||||
transformFlags |= TransformFlags.AssertTypeScript;
|
||||
}
|
||||
|
||||
// function declarations with object rest destructuring are ES Next syntax
|
||||
// function declarations with object rest destructuring are ES2018 syntax
|
||||
if (subtreeFlags & TransformFlags.ContainsObjectRestOrSpread) {
|
||||
transformFlags |= TransformFlags.AssertESNext;
|
||||
transformFlags |= TransformFlags.AssertES2018;
|
||||
}
|
||||
|
||||
node.transformFlags = transformFlags | TransformFlags.HasComputedFlags;
|
||||
@ -3317,14 +3345,14 @@ namespace ts {
|
||||
transformFlags |= TransformFlags.AssertTypeScript;
|
||||
}
|
||||
|
||||
// function declarations with object rest destructuring are ES Next syntax
|
||||
// function declarations with object rest destructuring are ES2018 syntax
|
||||
if (subtreeFlags & TransformFlags.ContainsObjectRestOrSpread) {
|
||||
transformFlags |= TransformFlags.AssertESNext;
|
||||
transformFlags |= TransformFlags.AssertES2018;
|
||||
}
|
||||
|
||||
// An async method declaration is ES2017 syntax.
|
||||
if (hasModifier(node, ModifierFlags.Async)) {
|
||||
transformFlags |= node.asteriskToken ? TransformFlags.AssertESNext : TransformFlags.AssertES2017;
|
||||
transformFlags |= node.asteriskToken ? TransformFlags.AssertES2018 : TransformFlags.AssertES2017;
|
||||
}
|
||||
|
||||
if (node.asteriskToken) {
|
||||
@ -3332,7 +3360,7 @@ namespace ts {
|
||||
}
|
||||
|
||||
node.transformFlags = transformFlags | TransformFlags.HasComputedFlags;
|
||||
return transformFlags & ~TransformFlags.MethodOrAccessorExcludes;
|
||||
return propagatePropertyNameFlags(node.name, transformFlags & ~TransformFlags.MethodOrAccessorExcludes);
|
||||
}
|
||||
|
||||
function computeAccessor(node: AccessorDeclaration, subtreeFlags: TransformFlags) {
|
||||
@ -3348,13 +3376,13 @@ namespace ts {
|
||||
transformFlags |= TransformFlags.AssertTypeScript;
|
||||
}
|
||||
|
||||
// function declarations with object rest destructuring are ES Next syntax
|
||||
// function declarations with object rest destructuring are ES2018 syntax
|
||||
if (subtreeFlags & TransformFlags.ContainsObjectRestOrSpread) {
|
||||
transformFlags |= TransformFlags.AssertESNext;
|
||||
transformFlags |= TransformFlags.AssertES2018;
|
||||
}
|
||||
|
||||
node.transformFlags = transformFlags | TransformFlags.HasComputedFlags;
|
||||
return transformFlags & ~TransformFlags.MethodOrAccessorExcludes;
|
||||
return propagatePropertyNameFlags(node.name, transformFlags & ~TransformFlags.MethodOrAccessorExcludes);
|
||||
}
|
||||
|
||||
function computePropertyDeclaration(node: PropertyDeclaration, subtreeFlags: TransformFlags) {
|
||||
@ -3368,7 +3396,7 @@ namespace ts {
|
||||
}
|
||||
|
||||
node.transformFlags = transformFlags | TransformFlags.HasComputedFlags;
|
||||
return transformFlags & ~TransformFlags.NodeExcludes;
|
||||
return propagatePropertyNameFlags(node.name, transformFlags & ~TransformFlags.PropertyExcludes);
|
||||
}
|
||||
|
||||
function computeFunctionDeclaration(node: FunctionDeclaration, subtreeFlags: TransformFlags) {
|
||||
@ -3394,25 +3422,18 @@ namespace ts {
|
||||
|
||||
// An async function declaration is ES2017 syntax.
|
||||
if (modifierFlags & ModifierFlags.Async) {
|
||||
transformFlags |= node.asteriskToken ? TransformFlags.AssertESNext : TransformFlags.AssertES2017;
|
||||
transformFlags |= node.asteriskToken ? TransformFlags.AssertES2018 : TransformFlags.AssertES2017;
|
||||
}
|
||||
|
||||
// function declarations with object rest destructuring are ES Next syntax
|
||||
// function declarations with object rest destructuring are ES2018 syntax
|
||||
if (subtreeFlags & TransformFlags.ContainsObjectRestOrSpread) {
|
||||
transformFlags |= TransformFlags.AssertESNext;
|
||||
}
|
||||
|
||||
// If a FunctionDeclaration's subtree has marked the container as needing to capture the
|
||||
// lexical this, or the function contains parameters with initializers, then this node is
|
||||
// ES6 syntax.
|
||||
if (subtreeFlags & TransformFlags.ES2015FunctionSyntaxMask) {
|
||||
transformFlags |= TransformFlags.AssertES2015;
|
||||
transformFlags |= TransformFlags.AssertES2018;
|
||||
}
|
||||
|
||||
// If a FunctionDeclaration is generator function and is the body of a
|
||||
// transformed async function, then this node can be transformed to a
|
||||
// down-level generator.
|
||||
// Currently we do not support transforming any other generator fucntions
|
||||
// Currently we do not support transforming any other generator functions
|
||||
// down level.
|
||||
if (node.asteriskToken) {
|
||||
transformFlags |= TransformFlags.AssertGenerator;
|
||||
@ -3436,20 +3457,12 @@ namespace ts {
|
||||
|
||||
// An async function expression is ES2017 syntax.
|
||||
if (hasModifier(node, ModifierFlags.Async)) {
|
||||
transformFlags |= node.asteriskToken ? TransformFlags.AssertESNext : TransformFlags.AssertES2017;
|
||||
transformFlags |= node.asteriskToken ? TransformFlags.AssertES2018 : TransformFlags.AssertES2017;
|
||||
}
|
||||
|
||||
// function expressions with object rest destructuring are ES Next syntax
|
||||
// function expressions with object rest destructuring are ES2018 syntax
|
||||
if (subtreeFlags & TransformFlags.ContainsObjectRestOrSpread) {
|
||||
transformFlags |= TransformFlags.AssertESNext;
|
||||
}
|
||||
|
||||
|
||||
// If a FunctionExpression's subtree has marked the container as needing to capture the
|
||||
// lexical this, or the function contains parameters with initializers, then this node is
|
||||
// ES6 syntax.
|
||||
if (subtreeFlags & TransformFlags.ES2015FunctionSyntaxMask) {
|
||||
transformFlags |= TransformFlags.AssertES2015;
|
||||
transformFlags |= TransformFlags.AssertES2018;
|
||||
}
|
||||
|
||||
// If a FunctionExpression is generator function and is the body of a
|
||||
@ -3480,14 +3493,9 @@ namespace ts {
|
||||
transformFlags |= TransformFlags.AssertES2017;
|
||||
}
|
||||
|
||||
// arrow functions with object rest destructuring are ES Next syntax
|
||||
// arrow functions with object rest destructuring are ES2018 syntax
|
||||
if (subtreeFlags & TransformFlags.ContainsObjectRestOrSpread) {
|
||||
transformFlags |= TransformFlags.AssertESNext;
|
||||
}
|
||||
|
||||
// If an ArrowFunction contains a lexical this, its container must capture the lexical this.
|
||||
if (subtreeFlags & TransformFlags.ContainsLexicalThis) {
|
||||
transformFlags |= TransformFlags.ContainsCapturedLexicalThis;
|
||||
transformFlags |= TransformFlags.AssertES2018;
|
||||
}
|
||||
|
||||
node.transformFlags = transformFlags | TransformFlags.HasComputedFlags;
|
||||
@ -3499,11 +3507,10 @@ namespace ts {
|
||||
|
||||
// If a PropertyAccessExpression starts with a super keyword, then it is
|
||||
// ES6 syntax, and requires a lexical `this` binding.
|
||||
if (transformFlags & TransformFlags.Super) {
|
||||
transformFlags ^= TransformFlags.Super;
|
||||
if (node.expression.kind === SyntaxKind.SuperKeyword) {
|
||||
// super inside of an async function requires hoisting the super access (ES2017).
|
||||
// same for super inside of an async generator, which is ESNext.
|
||||
transformFlags |= TransformFlags.ContainsSuper | TransformFlags.ContainsES2017 | TransformFlags.ContainsESNext;
|
||||
// same for super inside of an async generator, which is ES2018.
|
||||
transformFlags |= TransformFlags.ContainsES2017 | TransformFlags.ContainsES2018;
|
||||
}
|
||||
|
||||
node.transformFlags = transformFlags | TransformFlags.HasComputedFlags;
|
||||
@ -3512,16 +3519,13 @@ namespace ts {
|
||||
|
||||
function computeElementAccess(node: ElementAccessExpression, subtreeFlags: TransformFlags) {
|
||||
let transformFlags = subtreeFlags;
|
||||
const expression = node.expression;
|
||||
const expressionFlags = expression.transformFlags; // We do not want to aggregate flags from the argument expression for super/this capturing
|
||||
|
||||
// If an ElementAccessExpression starts with a super keyword, then it is
|
||||
// ES6 syntax, and requires a lexical `this` binding.
|
||||
if (expressionFlags & TransformFlags.Super) {
|
||||
transformFlags &= ~TransformFlags.Super;
|
||||
if (node.expression.kind === SyntaxKind.SuperKeyword) {
|
||||
// super inside of an async function requires hoisting the super access (ES2017).
|
||||
// same for super inside of an async generator, which is ESNext.
|
||||
transformFlags |= TransformFlags.ContainsSuper | TransformFlags.ContainsES2017 | TransformFlags.ContainsESNext;
|
||||
// same for super inside of an async generator, which is ES2018.
|
||||
transformFlags |= TransformFlags.ContainsES2017 | TransformFlags.ContainsES2018;
|
||||
}
|
||||
|
||||
node.transformFlags = transformFlags | TransformFlags.HasComputedFlags;
|
||||
@ -3530,11 +3534,11 @@ namespace ts {
|
||||
|
||||
function computeVariableDeclaration(node: VariableDeclaration, subtreeFlags: TransformFlags) {
|
||||
let transformFlags = subtreeFlags;
|
||||
transformFlags |= TransformFlags.AssertES2015 | TransformFlags.ContainsBindingPattern;
|
||||
transformFlags |= TransformFlags.AssertES2015 | TransformFlags.ContainsBindingPattern; // TODO(rbuckton): Why are these set unconditionally?
|
||||
|
||||
// A VariableDeclaration containing ObjectRest is ESNext syntax
|
||||
// A VariableDeclaration containing ObjectRest is ES2018 syntax
|
||||
if (subtreeFlags & TransformFlags.ContainsObjectRestOrSpread) {
|
||||
transformFlags |= TransformFlags.AssertESNext;
|
||||
transformFlags |= TransformFlags.AssertES2018;
|
||||
}
|
||||
|
||||
// Type annotations are TypeScript syntax.
|
||||
@ -3592,15 +3596,7 @@ namespace ts {
|
||||
}
|
||||
|
||||
function computeExpressionStatement(node: ExpressionStatement, subtreeFlags: TransformFlags) {
|
||||
let transformFlags = subtreeFlags;
|
||||
|
||||
// If the expression of an expression statement is a destructuring assignment,
|
||||
// then we treat the statement as ES6 so that we can indicate that we do not
|
||||
// need to hold on to the right-hand side.
|
||||
if (node.expression.transformFlags & TransformFlags.DestructuringAssignment) {
|
||||
transformFlags |= TransformFlags.AssertES2015;
|
||||
}
|
||||
|
||||
const transformFlags = subtreeFlags;
|
||||
node.transformFlags = transformFlags | TransformFlags.HasComputedFlags;
|
||||
return transformFlags & ~TransformFlags.NodeExcludes;
|
||||
}
|
||||
@ -3641,8 +3637,8 @@ namespace ts {
|
||||
switch (kind) {
|
||||
case SyntaxKind.AsyncKeyword:
|
||||
case SyntaxKind.AwaitExpression:
|
||||
// async/await is ES2017 syntax, but may be ESNext syntax (for async generators)
|
||||
transformFlags |= TransformFlags.AssertESNext | TransformFlags.AssertES2017;
|
||||
// async/await is ES2017 syntax, but may be ES2018 syntax (for async generators)
|
||||
transformFlags |= TransformFlags.AssertES2018 | TransformFlags.AssertES2017;
|
||||
break;
|
||||
|
||||
case SyntaxKind.TypeAssertionExpression:
|
||||
@ -3714,7 +3710,7 @@ namespace ts {
|
||||
case SyntaxKind.ForOfStatement:
|
||||
// This node is either ES2015 syntax or ES2017 syntax (if it is a for-await-of).
|
||||
if ((<ForOfStatement>node).awaitModifier) {
|
||||
transformFlags |= TransformFlags.AssertESNext;
|
||||
transformFlags |= TransformFlags.AssertES2018;
|
||||
}
|
||||
transformFlags |= TransformFlags.AssertES2015;
|
||||
break;
|
||||
@ -3722,7 +3718,7 @@ namespace ts {
|
||||
case SyntaxKind.YieldExpression:
|
||||
// This node is either ES2015 syntax (in a generator) or ES2017 syntax (in an async
|
||||
// generator).
|
||||
transformFlags |= TransformFlags.AssertESNext | TransformFlags.AssertES2015 | TransformFlags.ContainsYield;
|
||||
transformFlags |= TransformFlags.AssertES2018 | TransformFlags.AssertES2015 | TransformFlags.ContainsYield;
|
||||
break;
|
||||
|
||||
case SyntaxKind.AnyKeyword:
|
||||
@ -3773,17 +3769,6 @@ namespace ts {
|
||||
// This is so that they can flow through PropertyName transforms unaffected.
|
||||
// Instead, we mark the container as ES6, so that it can properly handle the transform.
|
||||
transformFlags |= TransformFlags.ContainsComputedPropertyName;
|
||||
if (subtreeFlags & TransformFlags.ContainsLexicalThis) {
|
||||
// A computed method name like `[this.getName()](x: string) { ... }` needs to
|
||||
// distinguish itself from the normal case of a method body containing `this`:
|
||||
// `this` inside a method doesn't need to be rewritten (the method provides `this`),
|
||||
// whereas `this` inside a computed name *might* need to be rewritten if the class/object
|
||||
// is inside an arrow function:
|
||||
// `_this = this; () => class K { [_this.getName()]() { ... } }`
|
||||
// To make this distinction, use ContainsLexicalThisInComputedPropertyName
|
||||
// instead of ContainsLexicalThis for computed property names
|
||||
transformFlags |= TransformFlags.ContainsLexicalThisInComputedPropertyName;
|
||||
}
|
||||
break;
|
||||
|
||||
case SyntaxKind.SpreadElement:
|
||||
@ -3791,12 +3776,12 @@ namespace ts {
|
||||
break;
|
||||
|
||||
case SyntaxKind.SpreadAssignment:
|
||||
transformFlags |= TransformFlags.AssertESNext | TransformFlags.ContainsObjectRestOrSpread;
|
||||
transformFlags |= TransformFlags.AssertES2018 | TransformFlags.ContainsObjectRestOrSpread;
|
||||
break;
|
||||
|
||||
case SyntaxKind.SuperKeyword:
|
||||
// This node is ES6 syntax.
|
||||
transformFlags |= TransformFlags.AssertES2015 | TransformFlags.Super;
|
||||
transformFlags |= TransformFlags.AssertES2015;
|
||||
excludeFlags = TransformFlags.OuterExpressionExcludes; // must be set to persist `Super`
|
||||
break;
|
||||
|
||||
@ -3808,7 +3793,7 @@ namespace ts {
|
||||
case SyntaxKind.ObjectBindingPattern:
|
||||
transformFlags |= TransformFlags.AssertES2015 | TransformFlags.ContainsBindingPattern;
|
||||
if (subtreeFlags & TransformFlags.ContainsRestOrSpread) {
|
||||
transformFlags |= TransformFlags.AssertESNext | TransformFlags.ContainsObjectRestOrSpread;
|
||||
transformFlags |= TransformFlags.AssertES2018 | TransformFlags.ContainsObjectRestOrSpread;
|
||||
}
|
||||
excludeFlags = TransformFlags.BindingPatternExcludes;
|
||||
break;
|
||||
@ -3838,29 +3823,16 @@ namespace ts {
|
||||
transformFlags |= TransformFlags.AssertES2015;
|
||||
}
|
||||
|
||||
if (subtreeFlags & TransformFlags.ContainsLexicalThisInComputedPropertyName) {
|
||||
// A computed property name containing `this` might need to be rewritten,
|
||||
// so propagate the ContainsLexicalThis flag upward.
|
||||
transformFlags |= TransformFlags.ContainsLexicalThis;
|
||||
}
|
||||
|
||||
if (subtreeFlags & TransformFlags.ContainsObjectRestOrSpread) {
|
||||
// If an ObjectLiteralExpression contains a spread element, then it
|
||||
// is an ES next node.
|
||||
transformFlags |= TransformFlags.AssertESNext;
|
||||
// is an ES2018 node.
|
||||
transformFlags |= TransformFlags.AssertES2018;
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
case SyntaxKind.ArrayLiteralExpression:
|
||||
case SyntaxKind.NewExpression:
|
||||
excludeFlags = TransformFlags.ArrayLiteralOrCallOrNewExcludes;
|
||||
if (subtreeFlags & TransformFlags.ContainsRestOrSpread) {
|
||||
// If the this node contains a SpreadExpression, then it is an ES6
|
||||
// node.
|
||||
transformFlags |= TransformFlags.AssertES2015;
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
case SyntaxKind.DoStatement:
|
||||
@ -3875,15 +3847,11 @@ namespace ts {
|
||||
break;
|
||||
|
||||
case SyntaxKind.SourceFile:
|
||||
if (subtreeFlags & TransformFlags.ContainsCapturedLexicalThis) {
|
||||
transformFlags |= TransformFlags.AssertES2015;
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
case SyntaxKind.ReturnStatement:
|
||||
// Return statements may require an `await` in ESNext.
|
||||
transformFlags |= TransformFlags.ContainsHoistedDeclarationOrCompletion | TransformFlags.AssertESNext;
|
||||
// Return statements may require an `await` in ES2018.
|
||||
transformFlags |= TransformFlags.ContainsHoistedDeclarationOrCompletion | TransformFlags.AssertES2018;
|
||||
break;
|
||||
|
||||
case SyntaxKind.ContinueStatement:
|
||||
@ -3896,6 +3864,10 @@ namespace ts {
|
||||
return transformFlags & ~excludeFlags;
|
||||
}
|
||||
|
||||
function propagatePropertyNameFlags(node: PropertyName, transformFlags: TransformFlags) {
|
||||
return transformFlags | (node.transformFlags & TransformFlags.PropertyNamePropagatingFlags);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the transform flags to exclude when unioning the transform flags of a subtree.
|
||||
*
|
||||
|
||||
@ -1,5 +1,80 @@
|
||||
/*@internal*/
|
||||
namespace ts {
|
||||
export interface ReusableDiagnostic extends ReusableDiagnosticRelatedInformation {
|
||||
/** May store more in future. For now, this will simply be `true` to indicate when a diagnostic is an unused-identifier diagnostic. */
|
||||
reportsUnnecessary?: {};
|
||||
source?: string;
|
||||
relatedInformation?: ReusableDiagnosticRelatedInformation[];
|
||||
}
|
||||
|
||||
export interface ReusableDiagnosticRelatedInformation {
|
||||
category: DiagnosticCategory;
|
||||
code: number;
|
||||
file: Path | undefined;
|
||||
start: number | undefined;
|
||||
length: number | undefined;
|
||||
messageText: string | ReusableDiagnosticMessageChain;
|
||||
}
|
||||
|
||||
export interface ReusableDiagnosticMessageChain {
|
||||
messageText: string;
|
||||
category: DiagnosticCategory;
|
||||
code: number;
|
||||
next?: ReusableDiagnosticMessageChain;
|
||||
}
|
||||
|
||||
export interface ReusableBuilderProgramState extends ReusableBuilderState {
|
||||
/**
|
||||
* Cache of semantic diagnostics for files with their Path being the key
|
||||
*/
|
||||
semanticDiagnosticsPerFile?: ReadonlyMap<ReadonlyArray<ReusableDiagnostic> | ReadonlyArray<Diagnostic>> | undefined;
|
||||
/**
|
||||
* The map has key by source file's path that has been changed
|
||||
*/
|
||||
changedFilesSet?: ReadonlyMap<true>;
|
||||
/**
|
||||
* Set of affected files being iterated
|
||||
*/
|
||||
affectedFiles?: ReadonlyArray<SourceFile> | undefined;
|
||||
/**
|
||||
* Current changed file for iterating over affected files
|
||||
*/
|
||||
currentChangedFilePath?: Path | undefined;
|
||||
/**
|
||||
* Map of file signatures, with key being file path, calculated while getting current changed file's affected files
|
||||
* These will be committed whenever the iteration through affected files of current changed file is complete
|
||||
*/
|
||||
currentAffectedFilesSignatures?: ReadonlyMap<string> | undefined;
|
||||
/**
|
||||
* Newly computed visible to outside referencedSet
|
||||
*/
|
||||
currentAffectedFilesExportedModulesMap?: Readonly<BuilderState.ComputingExportedModulesMap> | undefined;
|
||||
/**
|
||||
* True if the semantic diagnostics were copied from the old state
|
||||
*/
|
||||
semanticDiagnosticsFromOldState?: Map<true>;
|
||||
/**
|
||||
* program corresponding to this state
|
||||
*/
|
||||
program?: Program | undefined;
|
||||
/**
|
||||
* compilerOptions for the program
|
||||
*/
|
||||
compilerOptions: CompilerOptions;
|
||||
/**
|
||||
* Files pending to be emitted
|
||||
*/
|
||||
affectedFilesPendingEmit?: ReadonlyArray<Path> | undefined;
|
||||
/**
|
||||
* Current index to retrieve pending affected file
|
||||
*/
|
||||
affectedFilesPendingEmitIndex?: number | undefined;
|
||||
/*
|
||||
* true if semantic diagnostics are ReusableDiagnostic instead of Diagnostic
|
||||
*/
|
||||
hasReusableDiagnostic?: true;
|
||||
}
|
||||
|
||||
/**
|
||||
* State to store the changed files, affected files and cache semantic diagnostics
|
||||
*/
|
||||
@ -27,7 +102,7 @@ namespace ts {
|
||||
currentChangedFilePath: Path | undefined;
|
||||
/**
|
||||
* Map of file signatures, with key being file path, calculated while getting current changed file's affected files
|
||||
* These will be commited whenever the iteration through affected files of current changed file is complete
|
||||
* These will be committed whenever the iteration through affected files of current changed file is complete
|
||||
*/
|
||||
currentAffectedFilesSignatures: Map<string> | undefined;
|
||||
/**
|
||||
@ -49,7 +124,31 @@ namespace ts {
|
||||
/**
|
||||
* program corresponding to this state
|
||||
*/
|
||||
program: Program;
|
||||
program: Program | undefined;
|
||||
/**
|
||||
* compilerOptions for the program
|
||||
*/
|
||||
compilerOptions: CompilerOptions;
|
||||
/**
|
||||
* Files pending to be emitted
|
||||
*/
|
||||
affectedFilesPendingEmit: ReadonlyArray<Path> | undefined;
|
||||
/**
|
||||
* Current index to retrieve pending affected file
|
||||
*/
|
||||
affectedFilesPendingEmitIndex: number | undefined;
|
||||
/**
|
||||
* true if build info is emitted
|
||||
*/
|
||||
emittedBuildInfo?: boolean;
|
||||
/**
|
||||
* Already seen affected files
|
||||
*/
|
||||
seenEmittedFiles: Map<true> | undefined;
|
||||
/**
|
||||
* true if program has been emitted
|
||||
*/
|
||||
programEmitComplete?: true;
|
||||
}
|
||||
|
||||
function hasSameKeys<T, U>(map1: ReadonlyMap<T> | undefined, map2: ReadonlyMap<U> | undefined): boolean {
|
||||
@ -60,30 +159,41 @@ namespace ts {
|
||||
/**
|
||||
* Create the state so that we can iterate on changedFiles/affected files
|
||||
*/
|
||||
function createBuilderProgramState(newProgram: Program, getCanonicalFileName: GetCanonicalFileName, oldState?: Readonly<BuilderProgramState>): BuilderProgramState {
|
||||
function createBuilderProgramState(newProgram: Program, getCanonicalFileName: GetCanonicalFileName, oldState?: Readonly<ReusableBuilderProgramState>): BuilderProgramState {
|
||||
const state = BuilderState.create(newProgram, getCanonicalFileName, oldState) as BuilderProgramState;
|
||||
state.program = newProgram;
|
||||
const compilerOptions = newProgram.getCompilerOptions();
|
||||
if (!compilerOptions.outFile && !compilerOptions.out) {
|
||||
state.compilerOptions = compilerOptions;
|
||||
// With --out or --outFile, any change affects all semantic diagnostics so no need to cache them
|
||||
// With --isolatedModules, emitting changed file doesnt emit dependent files so we cant know of dependent files to retrieve errors so dont cache the errors
|
||||
if (!compilerOptions.outFile && !compilerOptions.out && !compilerOptions.isolatedModules) {
|
||||
state.semanticDiagnosticsPerFile = createMap<ReadonlyArray<Diagnostic>>();
|
||||
}
|
||||
state.changedFilesSet = createMap<true>();
|
||||
|
||||
const useOldState = BuilderState.canReuseOldState(state.referencedMap, oldState);
|
||||
const oldCompilerOptions = useOldState ? oldState!.program.getCompilerOptions() : undefined;
|
||||
const oldCompilerOptions = useOldState ? oldState!.compilerOptions : undefined;
|
||||
const canCopySemanticDiagnostics = useOldState && oldState!.semanticDiagnosticsPerFile && !!state.semanticDiagnosticsPerFile &&
|
||||
!compilerOptionsAffectSemanticDiagnostics(compilerOptions, oldCompilerOptions!);
|
||||
if (useOldState) {
|
||||
// Verify the sanity of old state
|
||||
if (!oldState!.currentChangedFilePath) {
|
||||
Debug.assert(!oldState!.affectedFiles && (!oldState!.currentAffectedFilesSignatures || !oldState!.currentAffectedFilesSignatures!.size), "Cannot reuse if only few affected files of currentChangedFile were iterated");
|
||||
const affectedSignatures = oldState!.currentAffectedFilesSignatures;
|
||||
Debug.assert(!oldState!.affectedFiles && (!affectedSignatures || !affectedSignatures.size), "Cannot reuse if only few affected files of currentChangedFile were iterated");
|
||||
}
|
||||
const changedFilesSet = oldState!.changedFilesSet;
|
||||
if (canCopySemanticDiagnostics) {
|
||||
Debug.assert(!forEachKey(oldState!.changedFilesSet, path => oldState!.semanticDiagnosticsPerFile!.has(path)), "Semantic diagnostics shouldnt be available for changed files");
|
||||
Debug.assert(!changedFilesSet || !forEachKey(changedFilesSet, path => oldState!.semanticDiagnosticsPerFile!.has(path)), "Semantic diagnostics shouldnt be available for changed files");
|
||||
}
|
||||
|
||||
// Copy old state's changed files set
|
||||
copyEntries(oldState!.changedFilesSet, state.changedFilesSet);
|
||||
if (changedFilesSet) {
|
||||
copyEntries(changedFilesSet, state.changedFilesSet);
|
||||
}
|
||||
if (!compilerOptions.outFile && !compilerOptions.out && oldState!.affectedFilesPendingEmit) {
|
||||
state.affectedFilesPendingEmit = oldState!.affectedFilesPendingEmit;
|
||||
state.affectedFilesPendingEmitIndex = oldState!.affectedFilesPendingEmitIndex;
|
||||
}
|
||||
}
|
||||
|
||||
// Update changed files and copy semantic diagnostics if we can
|
||||
@ -109,7 +219,7 @@ namespace ts {
|
||||
state.changedFilesSet.set(sourceFilePath, true);
|
||||
}
|
||||
else if (canCopySemanticDiagnostics) {
|
||||
const sourceFile = state.program.getSourceFileByPath(sourceFilePath as Path)!;
|
||||
const sourceFile = newProgram.getSourceFileByPath(sourceFilePath as Path)!;
|
||||
|
||||
if (sourceFile.isDeclarationFile && !copyDeclarationFileDiagnostics) { return; }
|
||||
if (sourceFile.hasNoDefaultLib && !copyLibFileDiagnostics) { return; }
|
||||
@ -117,7 +227,7 @@ namespace ts {
|
||||
// Unchanged file copy diagnostics
|
||||
const diagnostics = oldState!.semanticDiagnosticsPerFile!.get(sourceFilePath);
|
||||
if (diagnostics) {
|
||||
state.semanticDiagnosticsPerFile!.set(sourceFilePath, diagnostics);
|
||||
state.semanticDiagnosticsPerFile!.set(sourceFilePath, oldState!.hasReusableDiagnostic ? convertToDiagnostics(diagnostics as ReadonlyArray<ReusableDiagnostic>, newProgram) : diagnostics as ReadonlyArray<Diagnostic>);
|
||||
if (!state.semanticDiagnosticsFromOldState) {
|
||||
state.semanticDiagnosticsFromOldState = createMap<true>();
|
||||
}
|
||||
@ -126,9 +236,88 @@ namespace ts {
|
||||
}
|
||||
});
|
||||
|
||||
if (oldCompilerOptions &&
|
||||
(oldCompilerOptions.outDir !== compilerOptions.outDir ||
|
||||
oldCompilerOptions.declarationDir !== compilerOptions.declarationDir ||
|
||||
(oldCompilerOptions.outFile || oldCompilerOptions.out) !== (compilerOptions.outFile || compilerOptions.out))) {
|
||||
// Add all files to affectedFilesPendingEmit since emit changed
|
||||
state.affectedFilesPendingEmit = concatenate(state.affectedFilesPendingEmit, newProgram.getSourceFiles().map(f => f.path));
|
||||
if (state.affectedFilesPendingEmitIndex === undefined) {
|
||||
state.affectedFilesPendingEmitIndex = 0;
|
||||
}
|
||||
Debug.assert(state.seenAffectedFiles === undefined);
|
||||
state.seenAffectedFiles = createMap<true>();
|
||||
}
|
||||
|
||||
return state;
|
||||
}
|
||||
|
||||
function convertToDiagnostics(diagnostics: ReadonlyArray<ReusableDiagnostic>, newProgram: Program): ReadonlyArray<Diagnostic> {
|
||||
if (!diagnostics.length) return emptyArray;
|
||||
return diagnostics.map(diagnostic => {
|
||||
const result: Diagnostic = convertToDiagnosticRelatedInformation(diagnostic, newProgram);
|
||||
result.reportsUnnecessary = diagnostic.reportsUnnecessary;
|
||||
result.source = diagnostic.source;
|
||||
const { relatedInformation } = diagnostic;
|
||||
result.relatedInformation = relatedInformation ?
|
||||
relatedInformation.length ?
|
||||
relatedInformation.map(r => convertToDiagnosticRelatedInformation(r, newProgram)) :
|
||||
emptyArray :
|
||||
undefined;
|
||||
return result;
|
||||
});
|
||||
}
|
||||
|
||||
function convertToDiagnosticRelatedInformation(diagnostic: ReusableDiagnosticRelatedInformation, newProgram: Program): DiagnosticRelatedInformation {
|
||||
const { file, messageText } = diagnostic;
|
||||
return {
|
||||
...diagnostic,
|
||||
file: file && newProgram.getSourceFileByPath(file),
|
||||
messageText: messageText === undefined || isString(messageText) ?
|
||||
messageText :
|
||||
convertToDiagnosticMessageChain(messageText, newProgram)
|
||||
};
|
||||
}
|
||||
|
||||
function convertToDiagnosticMessageChain(diagnostic: ReusableDiagnosticMessageChain, newProgram: Program): DiagnosticMessageChain {
|
||||
return {
|
||||
...diagnostic,
|
||||
next: diagnostic.next && convertToDiagnosticMessageChain(diagnostic.next, newProgram)
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Releases program and other related not needed properties
|
||||
*/
|
||||
function releaseCache(state: BuilderProgramState) {
|
||||
BuilderState.releaseCache(state);
|
||||
state.program = undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a clone of the state
|
||||
*/
|
||||
function cloneBuilderProgramState(state: Readonly<BuilderProgramState>): BuilderProgramState {
|
||||
const newState = BuilderState.clone(state) as BuilderProgramState;
|
||||
newState.semanticDiagnosticsPerFile = cloneMapOrUndefined(state.semanticDiagnosticsPerFile);
|
||||
newState.changedFilesSet = cloneMap(state.changedFilesSet);
|
||||
newState.affectedFiles = state.affectedFiles;
|
||||
newState.affectedFilesIndex = state.affectedFilesIndex;
|
||||
newState.currentChangedFilePath = state.currentChangedFilePath;
|
||||
newState.currentAffectedFilesSignatures = cloneMapOrUndefined(state.currentAffectedFilesSignatures);
|
||||
newState.currentAffectedFilesExportedModulesMap = cloneMapOrUndefined(state.currentAffectedFilesExportedModulesMap);
|
||||
newState.seenAffectedFiles = cloneMapOrUndefined(state.seenAffectedFiles);
|
||||
newState.cleanedDiagnosticsOfLibFiles = state.cleanedDiagnosticsOfLibFiles;
|
||||
newState.semanticDiagnosticsFromOldState = cloneMapOrUndefined(state.semanticDiagnosticsFromOldState);
|
||||
newState.program = state.program;
|
||||
newState.compilerOptions = state.compilerOptions;
|
||||
newState.affectedFilesPendingEmit = state.affectedFilesPendingEmit;
|
||||
newState.affectedFilesPendingEmitIndex = state.affectedFilesPendingEmitIndex;
|
||||
newState.seenEmittedFiles = cloneMapOrUndefined(state.seenEmittedFiles);
|
||||
newState.programEmitComplete = state.programEmitComplete;
|
||||
return newState;
|
||||
}
|
||||
|
||||
/**
|
||||
* Verifies that source file is ok to be used in calls that arent handled by next
|
||||
*/
|
||||
@ -179,10 +368,11 @@ namespace ts {
|
||||
|
||||
// With --out or --outFile all outputs go into single file
|
||||
// so operations are performed directly on program, return program
|
||||
const compilerOptions = state.program.getCompilerOptions();
|
||||
const program = Debug.assertDefined(state.program);
|
||||
const compilerOptions = program.getCompilerOptions();
|
||||
if (compilerOptions.outFile || compilerOptions.out) {
|
||||
Debug.assert(!state.semanticDiagnosticsPerFile);
|
||||
return state.program;
|
||||
return program;
|
||||
}
|
||||
|
||||
// Get next batch of affected files
|
||||
@ -190,13 +380,34 @@ namespace ts {
|
||||
if (state.exportedModulesMap) {
|
||||
state.currentAffectedFilesExportedModulesMap = state.currentAffectedFilesExportedModulesMap || createMap<BuilderState.ReferencedSet | false>();
|
||||
}
|
||||
state.affectedFiles = BuilderState.getFilesAffectedBy(state, state.program, nextKey.value as Path, cancellationToken, computeHash, state.currentAffectedFilesSignatures, state.currentAffectedFilesExportedModulesMap);
|
||||
state.affectedFiles = BuilderState.getFilesAffectedBy(state, program, nextKey.value as Path, cancellationToken, computeHash, state.currentAffectedFilesSignatures, state.currentAffectedFilesExportedModulesMap);
|
||||
state.currentChangedFilePath = nextKey.value as Path;
|
||||
state.affectedFilesIndex = 0;
|
||||
state.seenAffectedFiles = state.seenAffectedFiles || createMap<true>();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns next file to be emitted from files that retrieved semantic diagnostics but did not emit yet
|
||||
*/
|
||||
function getNextAffectedFilePendingEmit(state: BuilderProgramState): SourceFile | undefined {
|
||||
const { affectedFilesPendingEmit } = state;
|
||||
if (affectedFilesPendingEmit) {
|
||||
const seenEmittedFiles = state.seenEmittedFiles || (state.seenEmittedFiles = createMap());
|
||||
for (let i = state.affectedFilesPendingEmitIndex!; i < affectedFilesPendingEmit.length; i++) {
|
||||
const affectedFile = Debug.assertDefined(state.program).getSourceFileByPath(affectedFilesPendingEmit[i]);
|
||||
if (affectedFile && !seenEmittedFiles.has(affectedFile.path)) {
|
||||
// emit this file
|
||||
state.affectedFilesPendingEmitIndex = i;
|
||||
return affectedFile;
|
||||
}
|
||||
}
|
||||
state.affectedFilesPendingEmit = undefined;
|
||||
state.affectedFilesPendingEmitIndex = undefined;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove the semantic diagnostics cached from old state for affected File and the files that are referencing modules that export entities from affected file
|
||||
*/
|
||||
@ -209,9 +420,10 @@ namespace ts {
|
||||
// Clean lib file diagnostics if its all files excluding default files to emit
|
||||
if (state.allFilesExcludingDefaultLibraryFile === state.affectedFiles && !state.cleanedDiagnosticsOfLibFiles) {
|
||||
state.cleanedDiagnosticsOfLibFiles = true;
|
||||
const options = state.program.getCompilerOptions();
|
||||
if (forEach(state.program.getSourceFiles(), f =>
|
||||
state.program.isSourceFileDefaultLibrary(f) &&
|
||||
const program = Debug.assertDefined(state.program);
|
||||
const options = program.getCompilerOptions();
|
||||
if (forEach(program.getSourceFiles(), f =>
|
||||
program.isSourceFileDefaultLibrary(f) &&
|
||||
!skipTypeChecking(f, options) &&
|
||||
removeSemanticDiagnosticsOf(state, f.path)
|
||||
)) {
|
||||
@ -281,10 +493,19 @@ namespace ts {
|
||||
}
|
||||
|
||||
// If exported from path is not from cache and exported modules has path, all files referencing file exported from are affected
|
||||
return !!forEachEntry(state.exportedModulesMap!, (exportedModules, exportedFromPath) =>
|
||||
if (forEachEntry(state.exportedModulesMap!, (exportedModules, exportedFromPath) =>
|
||||
!state.currentAffectedFilesExportedModulesMap!.has(exportedFromPath) && // If we already iterated this through cache, ignore it
|
||||
exportedModules.has(filePath) &&
|
||||
removeSemanticDiagnosticsOfFileAndExportsOfFile(state, exportedFromPath as Path, seenFileAndExportsOfFile)
|
||||
)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Remove diagnostics of files that import this file (without going to exports of referencing files)
|
||||
return !!forEachEntry(state.referencedMap!, (referencesInFile, referencingFilePath) =>
|
||||
referencesInFile.has(filePath) &&
|
||||
!seenFileAndExportsOfFile.has(referencingFilePath) && // Not already removed diagnostic file
|
||||
removeSemanticDiagnosticsOf(state, referencingFilePath as Path) // Dont add to seen since this is not yet done with the export removal
|
||||
);
|
||||
}
|
||||
|
||||
@ -305,21 +526,30 @@ namespace ts {
|
||||
* This is called after completing operation on the next affected file.
|
||||
* The operations here are postponed to ensure that cancellation during the iteration is handled correctly
|
||||
*/
|
||||
function doneWithAffectedFile(state: BuilderProgramState, affected: SourceFile | Program) {
|
||||
if (affected === state.program) {
|
||||
function doneWithAffectedFile(state: BuilderProgramState, affected: SourceFile | Program, isPendingEmit?: boolean, isBuildInfoEmit?: boolean) {
|
||||
if (isBuildInfoEmit) {
|
||||
state.emittedBuildInfo = true;
|
||||
}
|
||||
else if (affected === state.program) {
|
||||
state.changedFilesSet.clear();
|
||||
state.programEmitComplete = true;
|
||||
}
|
||||
else {
|
||||
state.seenAffectedFiles!.set((affected as SourceFile).path, true);
|
||||
state.affectedFilesIndex!++;
|
||||
if (isPendingEmit) {
|
||||
state.affectedFilesPendingEmitIndex!++;
|
||||
}
|
||||
else {
|
||||
state.affectedFilesIndex!++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the result with affected file
|
||||
*/
|
||||
function toAffectedFileResult<T>(state: BuilderProgramState, result: T, affected: SourceFile | Program): AffectedFileResult<T> {
|
||||
doneWithAffectedFile(state, affected);
|
||||
function toAffectedFileResult<T>(state: BuilderProgramState, result: T, affected: SourceFile | Program, isPendingEmit?: boolean, isBuildInfoEmit?: boolean): AffectedFileResult<T> {
|
||||
doneWithAffectedFile(state, affected, isPendingEmit, isBuildInfoEmit);
|
||||
return { result, affected };
|
||||
}
|
||||
|
||||
@ -329,18 +559,116 @@ namespace ts {
|
||||
*/
|
||||
function getSemanticDiagnosticsOfFile(state: BuilderProgramState, sourceFile: SourceFile, cancellationToken?: CancellationToken): ReadonlyArray<Diagnostic> {
|
||||
const path = sourceFile.path;
|
||||
const cachedDiagnostics = state.semanticDiagnosticsPerFile!.get(path);
|
||||
// Report the semantic diagnostics from the cache if we already have those diagnostics present
|
||||
if (cachedDiagnostics) {
|
||||
return cachedDiagnostics;
|
||||
if (state.semanticDiagnosticsPerFile) {
|
||||
const cachedDiagnostics = state.semanticDiagnosticsPerFile.get(path);
|
||||
// Report the semantic diagnostics from the cache if we already have those diagnostics present
|
||||
if (cachedDiagnostics) {
|
||||
return cachedDiagnostics;
|
||||
}
|
||||
}
|
||||
|
||||
// Diagnostics werent cached, get them from program, and cache the result
|
||||
const diagnostics = state.program.getSemanticDiagnostics(sourceFile, cancellationToken);
|
||||
state.semanticDiagnosticsPerFile!.set(path, diagnostics);
|
||||
const diagnostics = Debug.assertDefined(state.program).getSemanticDiagnostics(sourceFile, cancellationToken);
|
||||
if (state.semanticDiagnosticsPerFile) {
|
||||
state.semanticDiagnosticsPerFile.set(path, diagnostics);
|
||||
}
|
||||
return diagnostics;
|
||||
}
|
||||
|
||||
export type ProgramBuildInfoDiagnostic = string | [string, ReadonlyArray<ReusableDiagnostic>];
|
||||
export interface ProgramBuildInfo {
|
||||
fileInfos: MapLike<BuilderState.FileInfo>;
|
||||
options: CompilerOptions;
|
||||
referencedMap?: MapLike<string[]>;
|
||||
exportedModulesMap?: MapLike<string[]>;
|
||||
semanticDiagnosticsPerFile?: ProgramBuildInfoDiagnostic[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the program information to be emitted in buildInfo so that we can use it to create new program
|
||||
*/
|
||||
function getProgramBuildInfo(state: Readonly<ReusableBuilderProgramState>): ProgramBuildInfo | undefined {
|
||||
if (state.compilerOptions.outFile || state.compilerOptions.out) return undefined;
|
||||
const fileInfos: MapLike<BuilderState.FileInfo> = {};
|
||||
state.fileInfos.forEach((value, key) => {
|
||||
const signature = state.currentAffectedFilesSignatures && state.currentAffectedFilesSignatures.get(key);
|
||||
fileInfos[key] = signature === undefined ? value : { version: value.version, signature };
|
||||
});
|
||||
|
||||
const result: ProgramBuildInfo = { fileInfos, options: state.compilerOptions };
|
||||
if (state.referencedMap) {
|
||||
const referencedMap: MapLike<string[]> = {};
|
||||
state.referencedMap.forEach((value, key) => {
|
||||
referencedMap[key] = arrayFrom(value.keys());
|
||||
});
|
||||
result.referencedMap = referencedMap;
|
||||
}
|
||||
|
||||
if (state.exportedModulesMap) {
|
||||
const exportedModulesMap: MapLike<string[]> = {};
|
||||
state.exportedModulesMap.forEach((value, key) => {
|
||||
const newValue = state.currentAffectedFilesExportedModulesMap && state.currentAffectedFilesExportedModulesMap.get(key);
|
||||
// Not in temporary cache, use existing value
|
||||
if (newValue === undefined) exportedModulesMap[key] = arrayFrom(value.keys());
|
||||
// Value in cache and has updated value map, use that
|
||||
else if (newValue) exportedModulesMap[key] = arrayFrom(newValue.keys());
|
||||
});
|
||||
result.exportedModulesMap = exportedModulesMap;
|
||||
}
|
||||
|
||||
if (state.semanticDiagnosticsPerFile) {
|
||||
const semanticDiagnosticsPerFile: ProgramBuildInfoDiagnostic[] = [];
|
||||
// Currently not recording actual errors since those mean no emit for tsc --build
|
||||
state.semanticDiagnosticsPerFile.forEach((value, key) => semanticDiagnosticsPerFile.push(
|
||||
value.length ?
|
||||
[
|
||||
key,
|
||||
state.hasReusableDiagnostic ?
|
||||
value as ReadonlyArray<ReusableDiagnostic> :
|
||||
convertToReusableDiagnostics(value as ReadonlyArray<Diagnostic>)
|
||||
] :
|
||||
key
|
||||
));
|
||||
result.semanticDiagnosticsPerFile = semanticDiagnosticsPerFile;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
function convertToReusableDiagnostics(diagnostics: ReadonlyArray<Diagnostic>): ReadonlyArray<ReusableDiagnostic> {
|
||||
Debug.assert(!!diagnostics.length);
|
||||
return diagnostics.map(diagnostic => {
|
||||
const result: ReusableDiagnostic = convertToReusableDiagnosticRelatedInformation(diagnostic);
|
||||
result.reportsUnnecessary = diagnostic.reportsUnnecessary;
|
||||
result.source = diagnostic.source;
|
||||
const { relatedInformation } = diagnostic;
|
||||
result.relatedInformation = relatedInformation ?
|
||||
relatedInformation.length ?
|
||||
relatedInformation.map(r => convertToReusableDiagnosticRelatedInformation(r)) :
|
||||
emptyArray :
|
||||
undefined;
|
||||
return result;
|
||||
});
|
||||
}
|
||||
|
||||
function convertToReusableDiagnosticRelatedInformation(diagnostic: DiagnosticRelatedInformation): ReusableDiagnosticRelatedInformation {
|
||||
const { file, messageText } = diagnostic;
|
||||
return {
|
||||
...diagnostic,
|
||||
file: file && file.path,
|
||||
messageText: messageText === undefined || isString(messageText) ?
|
||||
messageText :
|
||||
convertToReusableDiagnosticMessageChain(messageText)
|
||||
};
|
||||
}
|
||||
|
||||
function convertToReusableDiagnosticMessageChain(diagnostic: DiagnosticMessageChain): ReusableDiagnosticMessageChain {
|
||||
return {
|
||||
...diagnostic,
|
||||
next: diagnostic.next && convertToReusableDiagnosticMessageChain(diagnostic.next)
|
||||
};
|
||||
}
|
||||
|
||||
export enum BuilderProgramKind {
|
||||
SemanticDiagnosticsBuilderProgram,
|
||||
EmitAndSemanticDiagnosticsBuilderProgram
|
||||
@ -370,7 +698,7 @@ namespace ts {
|
||||
rootNames: newProgramOrRootNames,
|
||||
options: hostOrOptions as CompilerOptions,
|
||||
host: oldProgramOrHost as CompilerHost,
|
||||
oldProgram: oldProgram && oldProgram.getProgram(),
|
||||
oldProgram: oldProgram && oldProgram.getProgramOrUndefined(),
|
||||
configFileParsingDiagnostics,
|
||||
projectReferences
|
||||
});
|
||||
@ -403,28 +731,32 @@ namespace ts {
|
||||
/**
|
||||
* Computing hash to for signature verification
|
||||
*/
|
||||
const computeHash = host.createHash || identity;
|
||||
const state = createBuilderProgramState(newProgram, getCanonicalFileName, oldState);
|
||||
const computeHash = host.createHash || generateDjb2Hash;
|
||||
let state = createBuilderProgramState(newProgram, getCanonicalFileName, oldState);
|
||||
let backupState: BuilderProgramState | undefined;
|
||||
newProgram.getProgramBuildInfo = () => getProgramBuildInfo(state);
|
||||
|
||||
// To ensure that we arent storing any references to old program or new program without state
|
||||
newProgram = undefined!; // TODO: GH#18217
|
||||
oldProgram = undefined;
|
||||
oldState = undefined;
|
||||
|
||||
const result: BuilderProgram = {
|
||||
getState: () => state,
|
||||
getProgram: () => state.program,
|
||||
getCompilerOptions: () => state.program.getCompilerOptions(),
|
||||
getSourceFile: fileName => state.program.getSourceFile(fileName),
|
||||
getSourceFiles: () => state.program.getSourceFiles(),
|
||||
getOptionsDiagnostics: cancellationToken => state.program.getOptionsDiagnostics(cancellationToken),
|
||||
getGlobalDiagnostics: cancellationToken => state.program.getGlobalDiagnostics(cancellationToken),
|
||||
getConfigFileParsingDiagnostics: () => configFileParsingDiagnostics || state.program.getConfigFileParsingDiagnostics(),
|
||||
getSyntacticDiagnostics: (sourceFile, cancellationToken) => state.program.getSyntacticDiagnostics(sourceFile, cancellationToken),
|
||||
getSemanticDiagnostics,
|
||||
emit,
|
||||
getAllDependencies: sourceFile => BuilderState.getAllDependencies(state, state.program, sourceFile),
|
||||
getCurrentDirectory: () => state.program.getCurrentDirectory()
|
||||
const result = createRedirectedBuilderProgram(state, configFileParsingDiagnostics);
|
||||
result.getState = () => state;
|
||||
result.backupState = () => {
|
||||
Debug.assert(backupState === undefined);
|
||||
backupState = cloneBuilderProgramState(state);
|
||||
};
|
||||
result.restoreState = () => {
|
||||
state = Debug.assertDefined(backupState);
|
||||
backupState = undefined;
|
||||
};
|
||||
result.getAllDependencies = sourceFile => BuilderState.getAllDependencies(state, Debug.assertDefined(state.program), sourceFile);
|
||||
result.getSemanticDiagnostics = getSemanticDiagnostics;
|
||||
result.emit = emit;
|
||||
result.releaseProgram = () => {
|
||||
releaseCache(state);
|
||||
backupState = undefined;
|
||||
};
|
||||
|
||||
if (kind === BuilderProgramKind.SemanticDiagnosticsBuilderProgram) {
|
||||
@ -445,18 +777,52 @@ namespace ts {
|
||||
* in that order would be used to write the files
|
||||
*/
|
||||
function emitNextAffectedFile(writeFile?: WriteFileCallback, cancellationToken?: CancellationToken, emitOnlyDtsFiles?: boolean, customTransformers?: CustomTransformers): AffectedFileResult<EmitResult> {
|
||||
const affected = getNextAffectedFile(state, cancellationToken, computeHash);
|
||||
let affected = getNextAffectedFile(state, cancellationToken, computeHash);
|
||||
let isPendingEmitFile = false;
|
||||
if (!affected) {
|
||||
// Done
|
||||
return undefined;
|
||||
if (!state.compilerOptions.out && !state.compilerOptions.outFile) {
|
||||
affected = getNextAffectedFilePendingEmit(state);
|
||||
if (!affected) {
|
||||
if (state.emittedBuildInfo) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const affected = Debug.assertDefined(state.program);
|
||||
return toAffectedFileResult(
|
||||
state,
|
||||
// When whole program is affected, do emit only once (eg when --out or --outFile is specified)
|
||||
// Otherwise just affected file
|
||||
affected.emitBuildInfo(writeFile || host.writeFile, cancellationToken),
|
||||
affected,
|
||||
/*isPendingEmitFile*/ false,
|
||||
/*isBuildInfoEmit*/ true
|
||||
);
|
||||
}
|
||||
isPendingEmitFile = true;
|
||||
}
|
||||
else {
|
||||
const program = Debug.assertDefined(state.program);
|
||||
// Check if program uses any prepend project references, if thats the case we cant track of the js files of those, so emit even though there are no changes
|
||||
if (state.programEmitComplete || !some(program.getProjectReferences(), ref => !!ref.prepend)) {
|
||||
state.programEmitComplete = true;
|
||||
return undefined;
|
||||
}
|
||||
affected = program;
|
||||
}
|
||||
}
|
||||
|
||||
// Mark seen emitted files if there are pending files to be emitted
|
||||
if (state.affectedFilesPendingEmit && state.program !== affected) {
|
||||
(state.seenEmittedFiles || (state.seenEmittedFiles = createMap())).set((affected as SourceFile).path, true);
|
||||
}
|
||||
|
||||
return toAffectedFileResult(
|
||||
state,
|
||||
// When whole program is affected, do emit only once (eg when --out or --outFile is specified)
|
||||
// Otherwise just affected file
|
||||
state.program.emit(affected === state.program ? undefined : affected as SourceFile, writeFile || host.writeFile, cancellationToken, emitOnlyDtsFiles, customTransformers),
|
||||
affected
|
||||
Debug.assertDefined(state.program).emit(affected === state.program ? undefined : affected as SourceFile, writeFile || host.writeFile, cancellationToken, emitOnlyDtsFiles, customTransformers),
|
||||
affected,
|
||||
isPendingEmitFile
|
||||
);
|
||||
}
|
||||
|
||||
@ -496,7 +862,7 @@ namespace ts {
|
||||
};
|
||||
}
|
||||
}
|
||||
return state.program.emit(targetSourceFile, writeFile || host.writeFile, cancellationToken, emitOnlyDtsFiles, customTransformers);
|
||||
return Debug.assertDefined(state.program).emit(targetSourceFile, writeFile || host.writeFile, cancellationToken, emitOnlyDtsFiles, customTransformers);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -544,33 +910,121 @@ namespace ts {
|
||||
*/
|
||||
function getSemanticDiagnostics(sourceFile?: SourceFile, cancellationToken?: CancellationToken): ReadonlyArray<Diagnostic> {
|
||||
assertSourceFileOkWithoutNextAffectedCall(state, sourceFile);
|
||||
const compilerOptions = state.program.getCompilerOptions();
|
||||
const compilerOptions = Debug.assertDefined(state.program).getCompilerOptions();
|
||||
if (compilerOptions.outFile || compilerOptions.out) {
|
||||
Debug.assert(!state.semanticDiagnosticsPerFile);
|
||||
// We dont need to cache the diagnostics just return them from program
|
||||
return state.program.getSemanticDiagnostics(sourceFile, cancellationToken);
|
||||
return Debug.assertDefined(state.program).getSemanticDiagnostics(sourceFile, cancellationToken);
|
||||
}
|
||||
|
||||
if (sourceFile) {
|
||||
return getSemanticDiagnosticsOfFile(state, sourceFile, cancellationToken);
|
||||
}
|
||||
|
||||
if (kind === BuilderProgramKind.SemanticDiagnosticsBuilderProgram) {
|
||||
// When semantic builder asks for diagnostics of the whole program,
|
||||
// ensure that all the affected files are handled
|
||||
let affected: SourceFile | Program | undefined;
|
||||
while (affected = getNextAffectedFile(state, cancellationToken, computeHash)) {
|
||||
doneWithAffectedFile(state, affected);
|
||||
// When semantic builder asks for diagnostics of the whole program,
|
||||
// ensure that all the affected files are handled
|
||||
let affected: SourceFile | Program | undefined;
|
||||
let affectedFilesPendingEmit: Path[] | undefined;
|
||||
while (affected = getNextAffectedFile(state, cancellationToken, computeHash)) {
|
||||
if (affected !== state.program && kind === BuilderProgramKind.EmitAndSemanticDiagnosticsBuilderProgram) {
|
||||
(affectedFilesPendingEmit || (affectedFilesPendingEmit = [])).push((affected as SourceFile).path);
|
||||
}
|
||||
doneWithAffectedFile(state, affected);
|
||||
}
|
||||
|
||||
// In case of emit builder, cache the files to be emitted
|
||||
if (affectedFilesPendingEmit) {
|
||||
state.affectedFilesPendingEmit = concatenate(state.affectedFilesPendingEmit, affectedFilesPendingEmit);
|
||||
// affectedFilesPendingEmitIndex === undefined
|
||||
// - means the emit state.affectedFilesPendingEmit was undefined before adding current affected files
|
||||
// so start from 0 as array would be affectedFilesPendingEmit
|
||||
// else, continue to iterate from existing index, the current set is appended to existing files
|
||||
if (state.affectedFilesPendingEmitIndex === undefined) {
|
||||
state.affectedFilesPendingEmitIndex = 0;
|
||||
}
|
||||
}
|
||||
|
||||
let diagnostics: Diagnostic[] | undefined;
|
||||
for (const sourceFile of state.program.getSourceFiles()) {
|
||||
for (const sourceFile of Debug.assertDefined(state.program).getSourceFiles()) {
|
||||
diagnostics = addRange(diagnostics, getSemanticDiagnosticsOfFile(state, sourceFile, cancellationToken));
|
||||
}
|
||||
return diagnostics || emptyArray;
|
||||
}
|
||||
}
|
||||
|
||||
function getMapOfReferencedSet(mapLike: MapLike<ReadonlyArray<string>> | undefined): ReadonlyMap<BuilderState.ReferencedSet> | undefined {
|
||||
if (!mapLike) return undefined;
|
||||
const map = createMap<BuilderState.ReferencedSet>();
|
||||
// Copies keys/values from template. Note that for..in will not throw if
|
||||
// template is undefined, and instead will just exit the loop.
|
||||
for (const key in mapLike) {
|
||||
if (hasProperty(mapLike, key)) {
|
||||
map.set(key, arrayToSet(mapLike[key]));
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
export function createBuildProgramUsingProgramBuildInfo(program: ProgramBuildInfo): EmitAndSemanticDiagnosticsBuilderProgram & SemanticDiagnosticsBuilderProgram {
|
||||
const fileInfos = createMapFromTemplate(program.fileInfos);
|
||||
const state: ReusableBuilderProgramState = {
|
||||
fileInfos,
|
||||
compilerOptions: program.options,
|
||||
referencedMap: getMapOfReferencedSet(program.referencedMap),
|
||||
exportedModulesMap: getMapOfReferencedSet(program.exportedModulesMap),
|
||||
semanticDiagnosticsPerFile: program.semanticDiagnosticsPerFile && arrayToMap(program.semanticDiagnosticsPerFile, value => isString(value) ? value : value[0], value => isString(value) ? emptyArray : value[1]),
|
||||
hasReusableDiagnostic: true
|
||||
};
|
||||
return {
|
||||
getState: () => state,
|
||||
backupState: noop,
|
||||
restoreState: noop,
|
||||
getProgram: notImplemented,
|
||||
getProgramOrUndefined: returnUndefined,
|
||||
releaseProgram: noop,
|
||||
getCompilerOptions: () => state.compilerOptions,
|
||||
getSourceFile: notImplemented,
|
||||
getSourceFiles: notImplemented,
|
||||
getOptionsDiagnostics: notImplemented,
|
||||
getGlobalDiagnostics: notImplemented,
|
||||
getConfigFileParsingDiagnostics: notImplemented,
|
||||
getSyntacticDiagnostics: notImplemented,
|
||||
getDeclarationDiagnostics: notImplemented,
|
||||
getSemanticDiagnostics: notImplemented,
|
||||
emit: notImplemented,
|
||||
getAllDependencies: notImplemented,
|
||||
getCurrentDirectory: notImplemented,
|
||||
emitNextAffectedFile: notImplemented,
|
||||
getSemanticDiagnosticsOfNextAffectedFile: notImplemented,
|
||||
};
|
||||
}
|
||||
|
||||
export function createRedirectedBuilderProgram(state: { program: Program | undefined; compilerOptions: CompilerOptions; }, configFileParsingDiagnostics: ReadonlyArray<Diagnostic>): BuilderProgram {
|
||||
return {
|
||||
getState: notImplemented,
|
||||
backupState: noop,
|
||||
restoreState: noop,
|
||||
getProgram,
|
||||
getProgramOrUndefined: () => state.program,
|
||||
releaseProgram: () => state.program = undefined,
|
||||
getCompilerOptions: () => state.compilerOptions,
|
||||
getSourceFile: fileName => getProgram().getSourceFile(fileName),
|
||||
getSourceFiles: () => getProgram().getSourceFiles(),
|
||||
getOptionsDiagnostics: cancellationToken => getProgram().getOptionsDiagnostics(cancellationToken),
|
||||
getGlobalDiagnostics: cancellationToken => getProgram().getGlobalDiagnostics(cancellationToken),
|
||||
getConfigFileParsingDiagnostics: () => configFileParsingDiagnostics,
|
||||
getSyntacticDiagnostics: (sourceFile, cancellationToken) => getProgram().getSyntacticDiagnostics(sourceFile, cancellationToken),
|
||||
getDeclarationDiagnostics: (sourceFile, cancellationToken) => getProgram().getDeclarationDiagnostics(sourceFile, cancellationToken),
|
||||
getSemanticDiagnostics: (sourceFile, cancellationToken) => getProgram().getSemanticDiagnostics(sourceFile, cancellationToken),
|
||||
emit: (sourceFile, writeFile, cancellationToken, emitOnlyDts, customTransformers) => getProgram().emit(sourceFile, writeFile, cancellationToken, emitOnlyDts, customTransformers),
|
||||
getAllDependencies: notImplemented,
|
||||
getCurrentDirectory: () => getProgram().getCurrentDirectory(),
|
||||
};
|
||||
|
||||
function getProgram() {
|
||||
return Debug.assertDefined(state.program);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
namespace ts {
|
||||
@ -597,11 +1051,25 @@ namespace ts {
|
||||
*/
|
||||
export interface BuilderProgram {
|
||||
/*@internal*/
|
||||
getState(): BuilderProgramState;
|
||||
getState(): ReusableBuilderProgramState;
|
||||
/*@internal*/
|
||||
backupState(): void;
|
||||
/*@internal*/
|
||||
restoreState(): void;
|
||||
/**
|
||||
* Returns current program
|
||||
*/
|
||||
getProgram(): Program;
|
||||
/**
|
||||
* Returns current program that could be undefined if the program was released
|
||||
*/
|
||||
/*@internal*/
|
||||
getProgramOrUndefined(): Program | undefined;
|
||||
/**
|
||||
* Releases reference to the program, making all the other operations that need program to fail.
|
||||
*/
|
||||
/*@internal*/
|
||||
releaseProgram(): void;
|
||||
/**
|
||||
* Get compiler options of the program
|
||||
*/
|
||||
@ -630,10 +1098,15 @@ namespace ts {
|
||||
* Get the syntax diagnostics, for all source files if source file is not supplied
|
||||
*/
|
||||
getSyntacticDiagnostics(sourceFile?: SourceFile, cancellationToken?: CancellationToken): ReadonlyArray<Diagnostic>;
|
||||
/**
|
||||
* Get the declaration diagnostics, for all source files if source file is not supplied
|
||||
*/
|
||||
getDeclarationDiagnostics(sourceFile?: SourceFile, cancellationToken?: CancellationToken): ReadonlyArray<DiagnosticWithLocation>;
|
||||
/**
|
||||
* Get all the dependencies of the file
|
||||
*/
|
||||
getAllDependencies(sourceFile: SourceFile): ReadonlyArray<string>;
|
||||
|
||||
/**
|
||||
* Gets the semantic diagnostics from the program corresponding to this state of file (if provided) or whole program
|
||||
* The semantic diagnostics are cached and managed here
|
||||
@ -710,22 +1183,7 @@ namespace ts {
|
||||
export function createAbstractBuilder(newProgram: Program, host: BuilderProgramHost, oldProgram?: BuilderProgram, configFileParsingDiagnostics?: ReadonlyArray<Diagnostic>): BuilderProgram;
|
||||
export function createAbstractBuilder(rootNames: ReadonlyArray<string> | undefined, options: CompilerOptions | undefined, host?: CompilerHost, oldProgram?: BuilderProgram, configFileParsingDiagnostics?: ReadonlyArray<Diagnostic>, projectReferences?: ReadonlyArray<ProjectReference>): BuilderProgram;
|
||||
export function createAbstractBuilder(newProgramOrRootNames: Program | ReadonlyArray<string> | undefined, hostOrOptions: BuilderProgramHost | CompilerOptions | undefined, oldProgramOrHost?: CompilerHost | BuilderProgram, configFileParsingDiagnosticsOrOldProgram?: ReadonlyArray<Diagnostic> | BuilderProgram, configFileParsingDiagnostics?: ReadonlyArray<Diagnostic>, projectReferences?: ReadonlyArray<ProjectReference>): BuilderProgram {
|
||||
const { newProgram: program } = getBuilderCreationParameters(newProgramOrRootNames, hostOrOptions, oldProgramOrHost, configFileParsingDiagnosticsOrOldProgram, configFileParsingDiagnostics, projectReferences);
|
||||
return {
|
||||
// Only return program, all other methods are not implemented
|
||||
getProgram: () => program,
|
||||
getState: notImplemented,
|
||||
getCompilerOptions: notImplemented,
|
||||
getSourceFile: notImplemented,
|
||||
getSourceFiles: notImplemented,
|
||||
getOptionsDiagnostics: notImplemented,
|
||||
getGlobalDiagnostics: notImplemented,
|
||||
getConfigFileParsingDiagnostics: notImplemented,
|
||||
getSyntacticDiagnostics: notImplemented,
|
||||
getSemanticDiagnostics: notImplemented,
|
||||
emit: notImplemented,
|
||||
getAllDependencies: notImplemented,
|
||||
getCurrentDirectory: notImplemented
|
||||
};
|
||||
const { newProgram, configFileParsingDiagnostics: newConfigFileParsingDiagnostics } = getBuilderCreationParameters(newProgramOrRootNames, hostOrOptions, oldProgramOrHost, configFileParsingDiagnosticsOrOldProgram, configFileParsingDiagnostics, projectReferences);
|
||||
return createRedirectedBuilderProgram({ program: newProgram, compilerOptions: newProgram.getCompilerOptions() }, newConfigFileParsingDiagnostics);
|
||||
}
|
||||
}
|
||||
|
||||
@ -25,6 +25,24 @@ namespace ts {
|
||||
}
|
||||
}
|
||||
|
||||
export interface ReusableBuilderState {
|
||||
/**
|
||||
* Information of the file eg. its version, signature etc
|
||||
*/
|
||||
fileInfos: ReadonlyMap<BuilderState.FileInfo>;
|
||||
/**
|
||||
* Contains the map of ReferencedSet=Referenced files of the file if module emit is enabled
|
||||
* Otherwise undefined
|
||||
* Thus non undefined value indicates, module emit
|
||||
*/
|
||||
readonly referencedMap?: ReadonlyMap<BuilderState.ReferencedSet> | undefined;
|
||||
/**
|
||||
* Contains the map of exported modules ReferencedSet=exported module files from the file if module emit is enabled
|
||||
* Otherwise undefined
|
||||
*/
|
||||
readonly exportedModulesMap?: ReadonlyMap<BuilderState.ReferencedSet> | undefined;
|
||||
}
|
||||
|
||||
export interface BuilderState {
|
||||
/**
|
||||
* Information of the file eg. its version, signature etc
|
||||
@ -37,7 +55,7 @@ namespace ts {
|
||||
*/
|
||||
readonly referencedMap: ReadonlyMap<BuilderState.ReferencedSet> | undefined;
|
||||
/**
|
||||
* Contains the map of exported modules ReferencedSet=exorted module files from the file if module emit is enabled
|
||||
* Contains the map of exported modules ReferencedSet=exported module files from the file if module emit is enabled
|
||||
* Otherwise undefined
|
||||
*/
|
||||
readonly exportedModulesMap: Map<BuilderState.ReferencedSet> | undefined;
|
||||
@ -50,11 +68,15 @@ namespace ts {
|
||||
/**
|
||||
* Cache of all files excluding default library file for the current program
|
||||
*/
|
||||
allFilesExcludingDefaultLibraryFile: ReadonlyArray<SourceFile> | undefined;
|
||||
allFilesExcludingDefaultLibraryFile?: ReadonlyArray<SourceFile>;
|
||||
/**
|
||||
* Cache of all the file names
|
||||
*/
|
||||
allFileNames: ReadonlyArray<string> | undefined;
|
||||
allFileNames?: ReadonlyArray<string>;
|
||||
}
|
||||
|
||||
export function cloneMapOrUndefined<T>(map: ReadonlyMap<T> | undefined) {
|
||||
return map ? cloneMap(map) : undefined;
|
||||
}
|
||||
}
|
||||
|
||||
@ -192,14 +214,14 @@ namespace ts.BuilderState {
|
||||
/**
|
||||
* Returns true if oldState is reusable, that is the emitKind = module/non module has not changed
|
||||
*/
|
||||
export function canReuseOldState(newReferencedMap: ReadonlyMap<ReferencedSet> | undefined, oldState: Readonly<BuilderState> | undefined) {
|
||||
export function canReuseOldState(newReferencedMap: ReadonlyMap<ReferencedSet> | undefined, oldState: Readonly<ReusableBuilderState> | undefined) {
|
||||
return oldState && !oldState.referencedMap === !newReferencedMap;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates the state of file references and signature for the new program from oldState if it is safe
|
||||
*/
|
||||
export function create(newProgram: Program, getCanonicalFileName: GetCanonicalFileName, oldState?: Readonly<BuilderState>): BuilderState {
|
||||
export function create(newProgram: Program, getCanonicalFileName: GetCanonicalFileName, oldState?: Readonly<ReusableBuilderState>): BuilderState {
|
||||
const fileInfos = createMap<FileInfo>();
|
||||
const referencedMap = newProgram.getCompilerOptions().module !== ModuleKind.None ? createMap<ReferencedSet>() : undefined;
|
||||
const exportedModulesMap = referencedMap ? createMap<ReferencedSet>() : undefined;
|
||||
@ -230,9 +252,32 @@ namespace ts.BuilderState {
|
||||
fileInfos,
|
||||
referencedMap,
|
||||
exportedModulesMap,
|
||||
hasCalledUpdateShapeSignature,
|
||||
allFilesExcludingDefaultLibraryFile: undefined,
|
||||
allFileNames: undefined
|
||||
hasCalledUpdateShapeSignature
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Releases needed properties
|
||||
*/
|
||||
export function releaseCache(state: BuilderState) {
|
||||
state.allFilesExcludingDefaultLibraryFile = undefined;
|
||||
state.allFileNames = undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a clone of the state
|
||||
*/
|
||||
export function clone(state: Readonly<BuilderState>): BuilderState {
|
||||
const fileInfos = createMap<FileInfo>();
|
||||
state.fileInfos.forEach((value, key) => {
|
||||
fileInfos.set(key, { ...value });
|
||||
});
|
||||
// Dont need to backup allFiles info since its cache anyway
|
||||
return {
|
||||
fileInfos,
|
||||
referencedMap: cloneMapOrUndefined(state.referencedMap),
|
||||
exportedModulesMap: cloneMapOrUndefined(state.exportedModulesMap),
|
||||
hasCalledUpdateShapeSignature: cloneMap(state.hasCalledUpdateShapeSignature),
|
||||
};
|
||||
}
|
||||
|
||||
@ -241,9 +286,9 @@ namespace ts.BuilderState {
|
||||
*/
|
||||
export function getFilesAffectedBy(state: BuilderState, programOfThisState: Program, path: Path, cancellationToken: CancellationToken | undefined, computeHash: ComputeHash, cacheToUpdateSignature?: Map<string>, exportedModulesMapCache?: ComputingExportedModulesMap): ReadonlyArray<SourceFile> {
|
||||
// Since the operation could be cancelled, the signatures are always stored in the cache
|
||||
// They will be commited once it is safe to use them
|
||||
// They will be committed once it is safe to use them
|
||||
// eg when calling this api from tsserver, if there is no cancellation of the operation
|
||||
// In the other cases the affected files signatures are commited only after the iteration through the result is complete
|
||||
// In the other cases the affected files signatures are committed only after the iteration through the result is complete
|
||||
const signatureCache = cacheToUpdateSignature || createMap();
|
||||
const sourceFile = programOfThisState.getSourceFileByPath(path);
|
||||
if (!sourceFile) {
|
||||
@ -505,14 +550,14 @@ namespace ts.BuilderState {
|
||||
|
||||
// Start with the paths this file was referenced by
|
||||
seenFileNamesMap.set(sourceFileWithUpdatedShape.path, sourceFileWithUpdatedShape);
|
||||
const queue = getReferencedByPaths(state, sourceFileWithUpdatedShape.path);
|
||||
const queue = getReferencedByPaths(state, sourceFileWithUpdatedShape.resolvedPath);
|
||||
while (queue.length > 0) {
|
||||
const currentPath = queue.pop()!;
|
||||
if (!seenFileNamesMap.has(currentPath)) {
|
||||
const currentSourceFile = programOfThisState.getSourceFileByPath(currentPath)!;
|
||||
seenFileNamesMap.set(currentPath, currentSourceFile);
|
||||
if (currentSourceFile && updateShapeSignature(state, programOfThisState, currentSourceFile, cacheToUpdateSignature, cancellationToken, computeHash!, exportedModulesMapCache)) { // TODO: GH#18217
|
||||
queue.push(...getReferencedByPaths(state, currentPath));
|
||||
queue.push(...getReferencedByPaths(state, currentSourceFile.resolvedPath));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@ -15,6 +15,7 @@ namespace ts {
|
||||
["es2016", "lib.es2016.d.ts"],
|
||||
["es2017", "lib.es2017.d.ts"],
|
||||
["es2018", "lib.es2018.d.ts"],
|
||||
["es2019", "lib.es2019.d.ts"],
|
||||
["esnext", "lib.esnext.d.ts"],
|
||||
// Host only
|
||||
["dom", "lib.dom.d.ts"],
|
||||
@ -38,12 +39,16 @@ namespace ts {
|
||||
["es2017.string", "lib.es2017.string.d.ts"],
|
||||
["es2017.intl", "lib.es2017.intl.d.ts"],
|
||||
["es2017.typedarrays", "lib.es2017.typedarrays.d.ts"],
|
||||
["es2018.asynciterable", "lib.es2018.asynciterable.d.ts"],
|
||||
["es2018.intl", "lib.es2018.intl.d.ts"],
|
||||
["es2018.promise", "lib.es2018.promise.d.ts"],
|
||||
["es2018.regexp", "lib.es2018.regexp.d.ts"],
|
||||
["esnext.array", "lib.esnext.array.d.ts"],
|
||||
["esnext.symbol", "lib.esnext.symbol.d.ts"],
|
||||
["esnext.asynciterable", "lib.esnext.asynciterable.d.ts"],
|
||||
["es2019.array", "lib.es2019.array.d.ts"],
|
||||
["es2019.string", "lib.es2019.string.d.ts"],
|
||||
["es2019.symbol", "lib.es2019.symbol.d.ts"],
|
||||
["esnext.array", "lib.es2019.array.d.ts"],
|
||||
["esnext.symbol", "lib.es2019.symbol.d.ts"],
|
||||
["esnext.asynciterable", "lib.es2018.asynciterable.d.ts"],
|
||||
["esnext.intl", "lib.esnext.intl.d.ts"],
|
||||
["esnext.bigint", "lib.esnext.bigint.d.ts"]
|
||||
];
|
||||
@ -197,6 +202,7 @@ namespace ts {
|
||||
es2016: ScriptTarget.ES2016,
|
||||
es2017: ScriptTarget.ES2017,
|
||||
es2018: ScriptTarget.ES2018,
|
||||
es2019: ScriptTarget.ES2019,
|
||||
esnext: ScriptTarget.ESNext,
|
||||
}),
|
||||
affectsSourceFile: true,
|
||||
@ -204,7 +210,7 @@ namespace ts {
|
||||
paramType: Diagnostics.VERSION,
|
||||
showInSimplifiedHelpView: true,
|
||||
category: Diagnostics.Basic_Options,
|
||||
description: Diagnostics.Specify_ECMAScript_target_version_Colon_ES3_default_ES5_ES2015_ES2016_ES2017_ES2018_or_ESNEXT,
|
||||
description: Diagnostics.Specify_ECMAScript_target_version_Colon_ES3_default_ES5_ES2015_ES2016_ES2017_ES2018_ES2019_or_ESNEXT,
|
||||
},
|
||||
{
|
||||
name: "module",
|
||||
@ -325,6 +331,22 @@ namespace ts {
|
||||
category: Diagnostics.Basic_Options,
|
||||
description: Diagnostics.Enable_project_compilation,
|
||||
},
|
||||
{
|
||||
name: "incremental",
|
||||
type: "boolean",
|
||||
isTSConfigOnly: true,
|
||||
category: Diagnostics.Basic_Options,
|
||||
description: Diagnostics.Enable_incremental_compilation,
|
||||
},
|
||||
{
|
||||
name: "tsBuildInfoFile",
|
||||
type: "string",
|
||||
isFilePath: true,
|
||||
paramType: Diagnostics.FILE,
|
||||
isTSConfigOnly: true,
|
||||
category: Diagnostics.Basic_Options,
|
||||
description: Diagnostics.Specify_file_to_store_incremental_compilation_information,
|
||||
},
|
||||
{
|
||||
name: "removeComments",
|
||||
type: "boolean",
|
||||
@ -1421,6 +1443,7 @@ namespace ts {
|
||||
return _tsconfigRootOptions;
|
||||
}
|
||||
|
||||
/*@internal*/
|
||||
interface JsonConversionNotifier {
|
||||
/**
|
||||
* Notifies parent option object is being set with the optionKey and a valid optionValue
|
||||
@ -1935,7 +1958,7 @@ namespace ts {
|
||||
|
||||
function directoryOfCombinedPath(fileName: string, basePath: string) {
|
||||
// Use the `getNormalizedAbsolutePath` function to avoid canonicalizing the path, as it must remain noncanonical
|
||||
// until consistient casing errors are reported
|
||||
// until consistent casing errors are reported
|
||||
return getDirectoryPath(getNormalizedAbsolutePath(fileName, basePath));
|
||||
}
|
||||
|
||||
|
||||
@ -1,7 +1,7 @@
|
||||
namespace ts {
|
||||
// WARNING: The script `configureNightly.ts` uses a regexp to parse out these values.
|
||||
// If changing the text in this section, be sure to test `configureNightly` too.
|
||||
export const versionMajorMinor = "3.3";
|
||||
export const versionMajorMinor = "3.4";
|
||||
/** The version of the TypeScript compiler release */
|
||||
export const version = `${versionMajorMinor}.0-dev`;
|
||||
}
|
||||
@ -118,42 +118,105 @@ namespace ts {
|
||||
export const MapCtr = typeof Map !== "undefined" && "entries" in Map.prototype ? Map : shimMap();
|
||||
|
||||
// Keep the class inside a function so it doesn't get compiled if it's not used.
|
||||
function shimMap(): new <T>() => Map<T> {
|
||||
export function shimMap(): new <T>() => Map<T> {
|
||||
|
||||
interface MapEntry<T> {
|
||||
readonly key?: string;
|
||||
value?: T;
|
||||
|
||||
// Linked list references for iterators.
|
||||
nextEntry?: MapEntry<T>;
|
||||
previousEntry?: MapEntry<T>;
|
||||
|
||||
/**
|
||||
* Specifies if iterators should skip the next entry.
|
||||
* This will be set when an entry is deleted.
|
||||
* See https://github.com/Microsoft/TypeScript/pull/27292 for more information.
|
||||
*/
|
||||
skipNext?: boolean;
|
||||
}
|
||||
|
||||
class MapIterator<T, U extends (string | T | [string, T])> {
|
||||
private data: MapLike<T>;
|
||||
private keys: ReadonlyArray<string>;
|
||||
private index = 0;
|
||||
private selector: (data: MapLike<T>, key: string) => U;
|
||||
constructor(data: MapLike<T>, selector: (data: MapLike<T>, key: string) => U) {
|
||||
this.data = data;
|
||||
private currentEntry?: MapEntry<T>;
|
||||
private selector: (key: string, value: T) => U;
|
||||
|
||||
constructor(currentEntry: MapEntry<T>, selector: (key: string, value: T) => U) {
|
||||
this.currentEntry = currentEntry;
|
||||
this.selector = selector;
|
||||
this.keys = Object.keys(data);
|
||||
}
|
||||
|
||||
public next(): { value: U, done: false } | { value: never, done: true } {
|
||||
const index = this.index;
|
||||
if (index < this.keys.length) {
|
||||
this.index++;
|
||||
return { value: this.selector(this.data, this.keys[index]), done: false };
|
||||
// Navigate to the next entry.
|
||||
while (this.currentEntry) {
|
||||
const skipNext = !!this.currentEntry.skipNext;
|
||||
this.currentEntry = this.currentEntry.nextEntry;
|
||||
|
||||
if (!skipNext) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (this.currentEntry) {
|
||||
return { value: this.selector(this.currentEntry.key!, this.currentEntry.value!), done: false };
|
||||
}
|
||||
else {
|
||||
return { value: undefined as never, done: true };
|
||||
}
|
||||
return { value: undefined as never, done: true };
|
||||
}
|
||||
}
|
||||
|
||||
return class <T> implements Map<T> {
|
||||
private data = createDictionaryObject<T>();
|
||||
private data = createDictionaryObject<MapEntry<T>>();
|
||||
public size = 0;
|
||||
|
||||
// Linked list references for iterators.
|
||||
// See https://github.com/Microsoft/TypeScript/pull/27292
|
||||
// for more information.
|
||||
|
||||
/**
|
||||
* The first entry in the linked list.
|
||||
* Note that this is only a stub that serves as starting point
|
||||
* for iterators and doesn't contain a key and a value.
|
||||
*/
|
||||
private readonly firstEntry: MapEntry<T>;
|
||||
private lastEntry: MapEntry<T>;
|
||||
|
||||
constructor() {
|
||||
// Create a first (stub) map entry that will not contain a key
|
||||
// and value but serves as starting point for iterators.
|
||||
this.firstEntry = {};
|
||||
// When the map is empty, the last entry is the same as the
|
||||
// first one.
|
||||
this.lastEntry = this.firstEntry;
|
||||
}
|
||||
|
||||
get(key: string): T | undefined {
|
||||
return this.data[key];
|
||||
const entry = this.data[key] as MapEntry<T> | undefined;
|
||||
return entry && entry.value!;
|
||||
}
|
||||
|
||||
set(key: string, value: T): this {
|
||||
if (!this.has(key)) {
|
||||
this.size++;
|
||||
|
||||
// Create a new entry that will be appended at the
|
||||
// end of the linked list.
|
||||
const newEntry: MapEntry<T> = {
|
||||
key,
|
||||
value
|
||||
};
|
||||
this.data[key] = newEntry;
|
||||
|
||||
// Adjust the references.
|
||||
const previousLastEntry = this.lastEntry;
|
||||
previousLastEntry.nextEntry = newEntry;
|
||||
newEntry.previousEntry = previousLastEntry;
|
||||
this.lastEntry = newEntry;
|
||||
}
|
||||
this.data[key] = value;
|
||||
else {
|
||||
this.data[key].value = value;
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
@ -165,32 +228,81 @@ namespace ts {
|
||||
delete(key: string): boolean {
|
||||
if (this.has(key)) {
|
||||
this.size--;
|
||||
const entry = this.data[key];
|
||||
delete this.data[key];
|
||||
|
||||
// Adjust the linked list references of the neighbor entries.
|
||||
const previousEntry = entry.previousEntry!;
|
||||
previousEntry.nextEntry = entry.nextEntry;
|
||||
if (entry.nextEntry) {
|
||||
entry.nextEntry.previousEntry = previousEntry;
|
||||
}
|
||||
|
||||
// When the deleted entry was the last one, we need to
|
||||
// adust the lastEntry reference.
|
||||
if (this.lastEntry === entry) {
|
||||
this.lastEntry = previousEntry;
|
||||
}
|
||||
|
||||
// Adjust the forward reference of the deleted entry
|
||||
// in case an iterator still references it. This allows us
|
||||
// to throw away the entry, but when an active iterator
|
||||
// (which points to the current entry) continues, it will
|
||||
// navigate to the entry that originally came before the
|
||||
// current one and skip it.
|
||||
entry.previousEntry = undefined;
|
||||
entry.nextEntry = previousEntry;
|
||||
entry.skipNext = true;
|
||||
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
clear(): void {
|
||||
this.data = createDictionaryObject<T>();
|
||||
this.data = createDictionaryObject<MapEntry<T>>();
|
||||
this.size = 0;
|
||||
|
||||
// Reset the linked list. Note that we must adjust the forward
|
||||
// references of the deleted entries to ensure iterators stuck
|
||||
// in the middle of the list don't continue with deleted entries,
|
||||
// but can continue with new entries added after the clear()
|
||||
// operation.
|
||||
const firstEntry = this.firstEntry;
|
||||
let currentEntry = firstEntry.nextEntry;
|
||||
while (currentEntry) {
|
||||
const nextEntry = currentEntry.nextEntry;
|
||||
currentEntry.previousEntry = undefined;
|
||||
currentEntry.nextEntry = firstEntry;
|
||||
currentEntry.skipNext = true;
|
||||
|
||||
currentEntry = nextEntry;
|
||||
}
|
||||
firstEntry.nextEntry = undefined;
|
||||
this.lastEntry = firstEntry;
|
||||
}
|
||||
|
||||
keys(): Iterator<string> {
|
||||
return new MapIterator(this.data, (_data, key) => key);
|
||||
return new MapIterator(this.firstEntry, key => key);
|
||||
}
|
||||
|
||||
values(): Iterator<T> {
|
||||
return new MapIterator(this.data, (data, key) => data[key]);
|
||||
return new MapIterator(this.firstEntry, (_key, value) => value);
|
||||
}
|
||||
|
||||
entries(): Iterator<[string, T]> {
|
||||
return new MapIterator(this.data, (data, key) => [key, data[key]] as [string, T]);
|
||||
return new MapIterator(this.firstEntry, (key, value) => [key, value] as [string, T]);
|
||||
}
|
||||
|
||||
forEach(action: (value: T, key: string) => void): void {
|
||||
for (const key in this.data) {
|
||||
action(this.data[key], key);
|
||||
const iterator = this.entries();
|
||||
while (true) {
|
||||
const { value: entry, done } = iterator.next();
|
||||
if (done) {
|
||||
break;
|
||||
}
|
||||
|
||||
action(entry[1], entry[0]);
|
||||
}
|
||||
}
|
||||
};
|
||||
@ -805,7 +917,7 @@ namespace ts {
|
||||
|
||||
/**
|
||||
* Deduplicates an unsorted array.
|
||||
* @param equalityComparer An optional `EqualityComparer` used to determine if two values are duplicates.
|
||||
* @param equalityComparer An `EqualityComparer` used to determine if two values are duplicates.
|
||||
* @param comparer An optional `Comparer` used to sort entries before comparison, though the
|
||||
* result will remain in the original order in `array`.
|
||||
*/
|
||||
@ -884,8 +996,11 @@ namespace ts {
|
||||
/**
|
||||
* Compacts an array, removing any falsey elements.
|
||||
*/
|
||||
export function compact<T>(array: T[]): T[];
|
||||
export function compact<T>(array: ReadonlyArray<T>): ReadonlyArray<T>;
|
||||
export function compact<T>(array: (T | undefined | null | false | 0 | "")[]): T[];
|
||||
export function compact<T>(array: ReadonlyArray<T | undefined | null | false | 0 | "">): ReadonlyArray<T>;
|
||||
// TSLint thinks these can be combined with the above - they cannot; they'd produce higher-priority inferences and prevent the falsey types from being stripped
|
||||
export function compact<T>(array: T[]): T[]; // tslint:disable-line unified-signatures
|
||||
export function compact<T>(array: ReadonlyArray<T>): ReadonlyArray<T>; // tslint:disable-line unified-signatures
|
||||
export function compact<T>(array: T[]): T[] {
|
||||
let result: T[] | undefined;
|
||||
if (array) {
|
||||
@ -1058,6 +1173,21 @@ namespace ts {
|
||||
}};
|
||||
}
|
||||
|
||||
export function arrayReverseIterator<T>(array: ReadonlyArray<T>): Iterator<T> {
|
||||
let i = array.length;
|
||||
return {
|
||||
next: () => {
|
||||
if (i === 0) {
|
||||
return { value: undefined as never, done: true };
|
||||
}
|
||||
else {
|
||||
i--;
|
||||
return { value: array[i], done: false };
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Stable sort of an array. Elements equal to each other maintain their relative position in the array.
|
||||
*/
|
||||
@ -1281,9 +1411,10 @@ namespace ts {
|
||||
|
||||
export function assign<T extends object>(t: T, ...args: (T | undefined)[]) {
|
||||
for (const arg of args) {
|
||||
for (const p in arg!) {
|
||||
if (hasProperty(arg!, p)) {
|
||||
t![p] = arg![p]; // TODO: GH#23368
|
||||
if (arg === undefined) continue;
|
||||
for (const p in arg) {
|
||||
if (hasProperty(arg, p)) {
|
||||
t[p] = arg[p];
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1387,6 +1518,18 @@ namespace ts {
|
||||
return result;
|
||||
}
|
||||
|
||||
export function copyProperties<T1 extends T2, T2>(first: T1, second: T2) {
|
||||
for (const id in second) {
|
||||
if (hasOwnProperty.call(second, id)) {
|
||||
(first as any)[id] = second[id];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function maybeBind<T, A extends any[], R>(obj: T, fn: ((this: T, ...args: A) => R) | undefined): ((...args: A) => R) | undefined {
|
||||
return fn ? fn.bind(obj) : undefined;
|
||||
}
|
||||
|
||||
export interface MultiMap<T> extends Map<T[]> {
|
||||
/**
|
||||
* Adds the value to an array of values associated with the key, and returns the array.
|
||||
@ -1471,6 +1614,9 @@ namespace ts {
|
||||
/** Do nothing and return true */
|
||||
export function returnTrue(): true { return true; }
|
||||
|
||||
/** Do nothing and return undefined */
|
||||
export function returnUndefined(): undefined { return undefined; }
|
||||
|
||||
/** Returns its argument. */
|
||||
export function identity<T>(x: T) { return x; }
|
||||
|
||||
@ -1637,7 +1783,7 @@ namespace ts {
|
||||
}
|
||||
|
||||
export function assertNever(member: never, message = "Illegal value:", stackCrawlMark?: AnyFunction): never {
|
||||
const detail = "kind" in member && "pos" in member ? "SyntaxKind: " + showSyntaxKind(member as Node) : JSON.stringify(member);
|
||||
const detail = typeof member === "object" && "kind" in member && "pos" in member ? "SyntaxKind: " + showSyntaxKind(member as Node) : JSON.stringify(member);
|
||||
return fail(`${message} ${detail}`, stackCrawlMark || assertNever);
|
||||
}
|
||||
|
||||
|
||||
@ -659,10 +659,6 @@
|
||||
"category": "Error",
|
||||
"code": 1208
|
||||
},
|
||||
"Ambient const enums are not allowed when the '--isolatedModules' flag is provided.": {
|
||||
"category": "Error",
|
||||
"code": 1209
|
||||
},
|
||||
"Invalid use of '{0}'. Class definitions are automatically in strict mode.": {
|
||||
"category": "Error",
|
||||
"code": 1210
|
||||
@ -1023,6 +1019,14 @@
|
||||
"category": "Error",
|
||||
"code": 1353
|
||||
},
|
||||
"'readonly' type modifier is only permitted on array and tuple literal types.": {
|
||||
"category": "Error",
|
||||
"code": 1354
|
||||
},
|
||||
"A 'const' assertion can only be applied to a string, number, boolean, array, or object literal.": {
|
||||
"category": "Error",
|
||||
"code": 1355
|
||||
},
|
||||
|
||||
"Duplicate identifier '{0}'.": {
|
||||
"category": "Error",
|
||||
@ -1400,7 +1404,7 @@
|
||||
"category": "Error",
|
||||
"code": 2393
|
||||
},
|
||||
"Overload signature is not compatible with function implementation.": {
|
||||
"This overload signature is not compatible with its implementation signature.": {
|
||||
"category": "Error",
|
||||
"code": 2394
|
||||
},
|
||||
@ -1776,7 +1780,7 @@
|
||||
"category": "Error",
|
||||
"code": 2496
|
||||
},
|
||||
"Module '{0}' resolves to a non-module entity and cannot be imported using this construct.": {
|
||||
"This module can only be referenced with ECMAScript imports/exports by turning on the '{0}' flag and referencing its default export.": {
|
||||
"category": "Error",
|
||||
"code": 2497
|
||||
},
|
||||
@ -2056,10 +2060,6 @@
|
||||
"category": "Error",
|
||||
"code": 2567
|
||||
},
|
||||
"Type '{0}' is not an array type. Use compiler option '--downlevelIteration' to allow iterating of iterators.": {
|
||||
"category": "Error",
|
||||
"code": 2568
|
||||
},
|
||||
"Type '{0}' is not an array type or a string type. Use compiler option '--downlevelIteration' to allow iterating of iterators.": {
|
||||
"category": "Error",
|
||||
"code": 2569
|
||||
@ -2096,15 +2096,15 @@
|
||||
"category": "Error",
|
||||
"code": 2577
|
||||
},
|
||||
"Cannot find name '{0}'. Do you need to install type definitions for node? Try `npm i @types/node` and then add `node` to the types field in your tsconfig.": {
|
||||
"Cannot find name '{0}'. Do you need to install type definitions for node? Try `npm i @types/node`.": {
|
||||
"category": "Error",
|
||||
"code": 2580
|
||||
},
|
||||
"Cannot find name '{0}'. Do you need to install type definitions for jQuery? Try `npm i @types/jquery` and then add `jquery` to the types field in your tsconfig.": {
|
||||
"Cannot find name '{0}'. Do you need to install type definitions for jQuery? Try `npm i @types/jquery`.": {
|
||||
"category": "Error",
|
||||
"code": 2581
|
||||
},
|
||||
"Cannot find name '{0}'. Do you need to install type definitions for a test runner? Try `npm i @types/jest` or `npm i @types/mocha` and then add `jest` or `mocha` to the types field in your tsconfig.": {
|
||||
"Cannot find name '{0}'. Do you need to install type definitions for a test runner? Try `npm i @types/jest` or `npm i @types/mocha`.": {
|
||||
"category": "Error",
|
||||
"code": 2582
|
||||
},
|
||||
@ -2132,6 +2132,26 @@
|
||||
"category": "Error",
|
||||
"code": 2588
|
||||
},
|
||||
"Type instantiation is excessively deep and possibly infinite.": {
|
||||
"category": "Error",
|
||||
"code": 2589
|
||||
},
|
||||
"Expression produces a union type that is too complex to represent.": {
|
||||
"category": "Error",
|
||||
"code": 2590
|
||||
},
|
||||
"Cannot find name '{0}'. Do you need to install type definitions for node? Try `npm i @types/node` and then add `node` to the types field in your tsconfig.": {
|
||||
"category": "Error",
|
||||
"code": 2591
|
||||
},
|
||||
"Cannot find name '{0}'. Do you need to install type definitions for jQuery? Try `npm i @types/jquery` and then add `jquery` to the types field in your tsconfig.": {
|
||||
"category": "Error",
|
||||
"code": 2592
|
||||
},
|
||||
"Cannot find name '{0}'. Do you need to install type definitions for a test runner? Try `npm i @types/jest` or `npm i @types/mocha` and then add `jest` or `mocha` to the types field in your tsconfig.": {
|
||||
"category": "Error",
|
||||
"code": 2593
|
||||
},
|
||||
"JSX element attributes type '{0}' may not be a union type.": {
|
||||
"category": "Error",
|
||||
"code": 2600
|
||||
@ -2437,7 +2457,7 @@
|
||||
"category": "Error",
|
||||
"code": 2717
|
||||
},
|
||||
"Duplicate declaration '{0}'.": {
|
||||
"Duplicate property '{0}'.": {
|
||||
"category": "Error",
|
||||
"code": 2718
|
||||
},
|
||||
@ -2497,6 +2517,10 @@
|
||||
"category": "Error",
|
||||
"code": 2732
|
||||
},
|
||||
"Property '{0}' was also declared here.": {
|
||||
"category": "Error",
|
||||
"code": 2733
|
||||
},
|
||||
"It is highly likely that you are missing a semicolon.": {
|
||||
"category": "Error",
|
||||
"code": 2734
|
||||
@ -2541,6 +2565,42 @@
|
||||
"category": "Error",
|
||||
"code": 2744
|
||||
},
|
||||
"This JSX tag's '{0}' prop expects type '{1}' which requires multiple children, but only a single child was provided.": {
|
||||
"category": "Error",
|
||||
"code": 2745
|
||||
},
|
||||
"This JSX tag's '{0}' prop expects a single child of type '{1}', but multiple children were provided.": {
|
||||
"category": "Error",
|
||||
"code": 2746
|
||||
},
|
||||
"'{0}' components don't accept text as child elements. Text in JSX has the type 'string', but the expected type of '{1}' is '{2}'.": {
|
||||
"category": "Error",
|
||||
"code": 2747
|
||||
},
|
||||
"Cannot access ambient const enums when the '--isolatedModules' flag is provided.": {
|
||||
"category": "Error",
|
||||
"code": 2748
|
||||
},
|
||||
"'{0}' refers to a value, but is being used as a type here.": {
|
||||
"category": "Error",
|
||||
"code": 2749
|
||||
},
|
||||
"The implementation signature is declared here.": {
|
||||
"category": "Error",
|
||||
"code": 2750
|
||||
},
|
||||
"Circularity originates in type at this location.": {
|
||||
"category": "Error",
|
||||
"code": 2751
|
||||
},
|
||||
"The first export default is here.": {
|
||||
"category": "Error",
|
||||
"code": 2752
|
||||
},
|
||||
"Another export default is here.": {
|
||||
"category": "Error",
|
||||
"code": 2753
|
||||
},
|
||||
|
||||
"Import declaration '{0}' is using private name '{1}'.": {
|
||||
"category": "Error",
|
||||
@ -2875,6 +2935,10 @@
|
||||
"category": "Error",
|
||||
"code": 4102
|
||||
},
|
||||
"Type parameter '{0}' of exported mapped object type is using private name '{1}'.": {
|
||||
"category": "Error",
|
||||
"code": 4103
|
||||
},
|
||||
|
||||
"The current host does not support the '{0}' option.": {
|
||||
"category": "Error",
|
||||
@ -3069,7 +3133,7 @@
|
||||
"category": "Message",
|
||||
"code": 6014
|
||||
},
|
||||
"Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017','ES2018' or 'ESNEXT'.": {
|
||||
"Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'.": {
|
||||
"category": "Message",
|
||||
"code": 6015
|
||||
},
|
||||
@ -3880,7 +3944,6 @@
|
||||
"category": "Message",
|
||||
"code": 6353
|
||||
},
|
||||
|
||||
"Project '{0}' is up to date with .d.ts files from its dependencies": {
|
||||
"category": "Message",
|
||||
"code": 6354
|
||||
@ -3949,6 +4012,50 @@
|
||||
"category": "Error",
|
||||
"code": 6370
|
||||
},
|
||||
"Updating unchanged output timestamps of project '{0}'...": {
|
||||
"category": "Message",
|
||||
"code": 6371
|
||||
},
|
||||
"Project '{0}' is out of date because output of its dependency '{1}' has changed": {
|
||||
"category": "Message",
|
||||
"code": 6372
|
||||
},
|
||||
"Updating output of project '{0}'...": {
|
||||
"category": "Message",
|
||||
"code": 6373
|
||||
},
|
||||
"A non-dry build would update timestamps for output of project '{0}'": {
|
||||
"category": "Message",
|
||||
"code": 6374
|
||||
},
|
||||
"A non-dry build would update output of project '{0}'": {
|
||||
"category": "Message",
|
||||
"code": 6375
|
||||
},
|
||||
"Cannot update output of project '{0}' because there was error reading file '{1}'": {
|
||||
"category": "Message",
|
||||
"code": 6376
|
||||
},
|
||||
"Cannot write file '{0}' because it will overwrite '.tsbuildinfo' file generated by referenced project '{1}'": {
|
||||
"category": "Error",
|
||||
"code": 6377
|
||||
},
|
||||
"Enable incremental compilation": {
|
||||
"category": "Message",
|
||||
"code": 6378
|
||||
},
|
||||
"Composite projects may not disable incremental compilation.": {
|
||||
"category": "Error",
|
||||
"code": 6379
|
||||
},
|
||||
"Specify file to store incremental compilation information": {
|
||||
"category": "Message",
|
||||
"code": 6380
|
||||
},
|
||||
"Project '{0}' is out of date because output for it was generated with version '{1}' that differs with current version '{2}'": {
|
||||
"category": "Message",
|
||||
"code": 6381
|
||||
},
|
||||
|
||||
"The expected type comes from property '{0}' which is declared here on type '{1}'": {
|
||||
"category": "Message",
|
||||
@ -4097,7 +4204,7 @@
|
||||
"category": "Error",
|
||||
"code": 7040
|
||||
},
|
||||
"The containing arrow function captures the global value of 'this' which implicitly has type 'any'.": {
|
||||
"The containing arrow function captures the global value of 'this'.": {
|
||||
"category": "Error",
|
||||
"code": 7041
|
||||
},
|
||||
@ -4266,6 +4373,10 @@
|
||||
"category": "Error",
|
||||
"code": 8031
|
||||
},
|
||||
"Qualified name '{0}' is not allowed without a leading '@param {object} {1}'.": {
|
||||
"category": "Error",
|
||||
"code": 8032
|
||||
},
|
||||
"Only identifiers/qualified-names with optional type arguments are currently supported in a class 'extends' clause.": {
|
||||
"category": "Error",
|
||||
"code": 9002
|
||||
@ -4811,5 +4922,13 @@
|
||||
"Add names to all parameters without names": {
|
||||
"category": "Message",
|
||||
"code": 95073
|
||||
},
|
||||
"Enable the 'experimentalDecorators' option in your configuration file": {
|
||||
"category": "Message",
|
||||
"code": 95074
|
||||
},
|
||||
"Convert parameters to destructured object": {
|
||||
"category": "Message",
|
||||
"code": 95075
|
||||
}
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@ -89,10 +89,10 @@ namespace ts {
|
||||
return createLiteralFromNode(value);
|
||||
}
|
||||
|
||||
export function createNumericLiteral(value: string): NumericLiteral {
|
||||
export function createNumericLiteral(value: string, numericLiteralFlags: TokenFlags = TokenFlags.None): NumericLiteral {
|
||||
const node = <NumericLiteral>createSynthesizedNode(SyntaxKind.NumericLiteral);
|
||||
node.text = value;
|
||||
node.numericLiteralFlags = 0;
|
||||
node.numericLiteralFlags = numericLiteralFlags;
|
||||
return node;
|
||||
}
|
||||
|
||||
@ -143,8 +143,8 @@ namespace ts {
|
||||
export function updateIdentifier(node: Identifier, typeArguments: NodeArray<TypeNode | TypeParameterDeclaration> | undefined): Identifier; // tslint:disable-line unified-signatures
|
||||
export function updateIdentifier(node: Identifier, typeArguments?: NodeArray<TypeNode | TypeParameterDeclaration> | undefined): Identifier {
|
||||
return node.typeArguments !== typeArguments
|
||||
? updateNode(createIdentifier(idText(node), typeArguments), node)
|
||||
: node;
|
||||
? updateNode(createIdentifier(idText(node), typeArguments), node)
|
||||
: node;
|
||||
}
|
||||
|
||||
let nextAutoGenerateId = 0;
|
||||
@ -876,8 +876,8 @@ namespace ts {
|
||||
}
|
||||
|
||||
export function createTypeOperatorNode(type: TypeNode): TypeOperatorNode;
|
||||
export function createTypeOperatorNode(operator: SyntaxKind.KeyOfKeyword | SyntaxKind.UniqueKeyword, type: TypeNode): TypeOperatorNode;
|
||||
export function createTypeOperatorNode(operatorOrType: SyntaxKind.KeyOfKeyword | SyntaxKind.UniqueKeyword | TypeNode, type?: TypeNode) {
|
||||
export function createTypeOperatorNode(operator: SyntaxKind.KeyOfKeyword | SyntaxKind.UniqueKeyword | SyntaxKind.ReadonlyKeyword, type: TypeNode): TypeOperatorNode;
|
||||
export function createTypeOperatorNode(operatorOrType: SyntaxKind.KeyOfKeyword | SyntaxKind.UniqueKeyword | SyntaxKind.ReadonlyKeyword | TypeNode, type?: TypeNode) {
|
||||
const node = createSynthesizedNode(SyntaxKind.TypeOperator) as TypeOperatorNode;
|
||||
node.operator = typeof operatorOrType === "number" ? operatorOrType : SyntaxKind.KeyOfKeyword;
|
||||
node.type = parenthesizeElementTypeMember(typeof operatorOrType === "number" ? type! : operatorOrType);
|
||||
@ -2299,6 +2299,28 @@ namespace ts {
|
||||
return node;
|
||||
}
|
||||
|
||||
export function createJsxText(text: string, containsOnlyTriviaWhiteSpaces?: boolean) {
|
||||
const node = <JsxText>createSynthesizedNode(SyntaxKind.JsxText);
|
||||
node.text = text;
|
||||
node.containsOnlyTriviaWhiteSpaces = !!containsOnlyTriviaWhiteSpaces;
|
||||
return node;
|
||||
}
|
||||
|
||||
export function updateJsxText(node: JsxText, text: string, containsOnlyTriviaWhiteSpaces?: boolean) {
|
||||
return node.text !== text
|
||||
|| node.containsOnlyTriviaWhiteSpaces !== containsOnlyTriviaWhiteSpaces
|
||||
? updateNode(createJsxText(text, containsOnlyTriviaWhiteSpaces), node)
|
||||
: node;
|
||||
}
|
||||
|
||||
export function createJsxOpeningFragment() {
|
||||
return <JsxOpeningFragment>createSynthesizedNode(SyntaxKind.JsxOpeningFragment);
|
||||
}
|
||||
|
||||
export function createJsxJsxClosingFragment() {
|
||||
return <JsxClosingFragment>createSynthesizedNode(SyntaxKind.JsxClosingFragment);
|
||||
}
|
||||
|
||||
export function updateJsxFragment(node: JsxFragment, openingFragment: JsxOpeningFragment, children: ReadonlyArray<JsxChild>, closingFragment: JsxClosingFragment) {
|
||||
return node.openingFragment !== openingFragment
|
||||
|| node.children !== children
|
||||
@ -2629,42 +2651,301 @@ namespace ts {
|
||||
return node;
|
||||
}
|
||||
|
||||
export function createUnparsedSourceFile(text: string): UnparsedSource;
|
||||
export function createUnparsedSourceFile(text: string, mapPath: string | undefined, map: string | undefined): UnparsedSource;
|
||||
export function createUnparsedSourceFile(text: string, mapPath?: string, map?: string): UnparsedSource {
|
||||
let allUnscopedEmitHelpers: ReadonlyMap<UnscopedEmitHelper> | undefined;
|
||||
function getAllUnscopedEmitHelpers() {
|
||||
return allUnscopedEmitHelpers || (allUnscopedEmitHelpers = arrayToMap([
|
||||
valuesHelper,
|
||||
readHelper,
|
||||
spreadHelper,
|
||||
restHelper,
|
||||
decorateHelper,
|
||||
metadataHelper,
|
||||
paramHelper,
|
||||
awaiterHelper,
|
||||
assignHelper,
|
||||
awaitHelper,
|
||||
asyncGeneratorHelper,
|
||||
asyncDelegator,
|
||||
asyncValues,
|
||||
extendsHelper,
|
||||
templateObjectHelper,
|
||||
generatorHelper,
|
||||
importStarHelper,
|
||||
importDefaultHelper
|
||||
], helper => helper.name));
|
||||
}
|
||||
|
||||
function createUnparsedSource() {
|
||||
const node = <UnparsedSource>createNode(SyntaxKind.UnparsedSource);
|
||||
node.text = text;
|
||||
node.sourceMapPath = mapPath;
|
||||
node.sourceMapText = map;
|
||||
node.prologues = emptyArray;
|
||||
node.referencedFiles = emptyArray;
|
||||
node.libReferenceDirectives = emptyArray;
|
||||
node.getLineAndCharacterOfPosition = pos => getLineAndCharacterOfPosition(node, pos);
|
||||
return node;
|
||||
}
|
||||
|
||||
export function createUnparsedSourceFile(text: string): UnparsedSource;
|
||||
export function createUnparsedSourceFile(inputFile: InputFiles, type: "js" | "dts", stripInternal?: boolean): UnparsedSource;
|
||||
export function createUnparsedSourceFile(text: string, mapPath: string | undefined, map: string | undefined): UnparsedSource;
|
||||
export function createUnparsedSourceFile(textOrInputFiles: string | InputFiles, mapPathOrType?: string, mapTextOrStripInternal?: string | boolean): UnparsedSource {
|
||||
const node = createUnparsedSource();
|
||||
let stripInternal: boolean | undefined;
|
||||
let bundleFileInfo: BundleFileInfo | undefined;
|
||||
if (!isString(textOrInputFiles)) {
|
||||
Debug.assert(mapPathOrType === "js" || mapPathOrType === "dts");
|
||||
node.fileName = (mapPathOrType === "js" ? textOrInputFiles.javascriptPath : textOrInputFiles.declarationPath) || "";
|
||||
node.sourceMapPath = mapPathOrType === "js" ? textOrInputFiles.javascriptMapPath : textOrInputFiles.declarationMapPath;
|
||||
Object.defineProperties(node, {
|
||||
text: { get() { return mapPathOrType === "js" ? textOrInputFiles.javascriptText : textOrInputFiles.declarationText; } },
|
||||
sourceMapText: { get() { return mapPathOrType === "js" ? textOrInputFiles.javascriptMapText : textOrInputFiles.declarationMapText; } },
|
||||
});
|
||||
|
||||
|
||||
if (textOrInputFiles.buildInfo && textOrInputFiles.buildInfo.bundle) {
|
||||
node.oldFileOfCurrentEmit = textOrInputFiles.oldFileOfCurrentEmit;
|
||||
Debug.assert(mapTextOrStripInternal === undefined || typeof mapTextOrStripInternal === "boolean");
|
||||
stripInternal = mapTextOrStripInternal as boolean | undefined;
|
||||
bundleFileInfo = mapPathOrType === "js" ? textOrInputFiles.buildInfo.bundle.js : textOrInputFiles.buildInfo.bundle.dts;
|
||||
if (node.oldFileOfCurrentEmit) {
|
||||
parseOldFileOfCurrentEmit(node, Debug.assertDefined(bundleFileInfo));
|
||||
return node;
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
node.fileName = "";
|
||||
node.text = textOrInputFiles;
|
||||
node.sourceMapPath = mapPathOrType;
|
||||
node.sourceMapText = mapTextOrStripInternal as string;
|
||||
}
|
||||
Debug.assert(!node.oldFileOfCurrentEmit);
|
||||
parseUnparsedSourceFile(node, bundleFileInfo, stripInternal);
|
||||
return node;
|
||||
}
|
||||
|
||||
function parseUnparsedSourceFile(node: UnparsedSource, bundleFileInfo: BundleFileInfo | undefined, stripInternal: boolean | undefined) {
|
||||
let prologues: UnparsedPrologue[] | undefined;
|
||||
let helpers: UnscopedEmitHelper[] | undefined;
|
||||
let referencedFiles: FileReference[] | undefined;
|
||||
let typeReferenceDirectives: string[] | undefined;
|
||||
let libReferenceDirectives: FileReference[] | undefined;
|
||||
let texts: UnparsedSourceText[] | undefined;
|
||||
|
||||
for (const section of bundleFileInfo ? bundleFileInfo.sections : emptyArray) {
|
||||
switch (section.kind) {
|
||||
case BundleFileSectionKind.Prologue:
|
||||
(prologues || (prologues = [])).push(createUnparsedNode(section, node) as UnparsedPrologue);
|
||||
break;
|
||||
case BundleFileSectionKind.EmitHelpers:
|
||||
(helpers || (helpers = [])).push(getAllUnscopedEmitHelpers().get(section.data)!);
|
||||
break;
|
||||
case BundleFileSectionKind.NoDefaultLib:
|
||||
node.hasNoDefaultLib = true;
|
||||
break;
|
||||
case BundleFileSectionKind.Reference:
|
||||
(referencedFiles || (referencedFiles = [])).push({ pos: -1, end: -1, fileName: section.data });
|
||||
break;
|
||||
case BundleFileSectionKind.Type:
|
||||
(typeReferenceDirectives || (typeReferenceDirectives = [])).push(section.data);
|
||||
break;
|
||||
case BundleFileSectionKind.Lib:
|
||||
(libReferenceDirectives || (libReferenceDirectives = [])).push({ pos: -1, end: -1, fileName: section.data });
|
||||
break;
|
||||
case BundleFileSectionKind.Prepend:
|
||||
const prependNode = createUnparsedNode(section, node) as UnparsedPrepend;
|
||||
let prependTexts: UnparsedTextLike[] | undefined;
|
||||
for (const text of section.texts) {
|
||||
if (!stripInternal || text.kind !== BundleFileSectionKind.Internal) {
|
||||
(prependTexts || (prependTexts = [])).push(createUnparsedNode(text, node) as UnparsedTextLike);
|
||||
}
|
||||
}
|
||||
prependNode.texts = prependTexts || emptyArray;
|
||||
(texts || (texts = [])).push(prependNode);
|
||||
break;
|
||||
case BundleFileSectionKind.Internal:
|
||||
if (stripInternal) break;
|
||||
// falls through
|
||||
case BundleFileSectionKind.Text:
|
||||
(texts || (texts = [])).push(createUnparsedNode(section, node) as UnparsedTextLike);
|
||||
break;
|
||||
default:
|
||||
Debug.assertNever(section);
|
||||
}
|
||||
}
|
||||
|
||||
node.prologues = prologues || emptyArray;
|
||||
node.helpers = helpers;
|
||||
node.referencedFiles = referencedFiles || emptyArray;
|
||||
node.typeReferenceDirectives = typeReferenceDirectives;
|
||||
node.libReferenceDirectives = libReferenceDirectives || emptyArray;
|
||||
node.texts = texts || [<UnparsedTextLike>createUnparsedNode({ kind: BundleFileSectionKind.Text, pos: 0, end: node.text.length }, node)];
|
||||
}
|
||||
|
||||
function parseOldFileOfCurrentEmit(node: UnparsedSource, bundleFileInfo: BundleFileInfo) {
|
||||
Debug.assert(!!node.oldFileOfCurrentEmit);
|
||||
let texts: UnparsedTextLike[] | undefined;
|
||||
let syntheticReferences: UnparsedSyntheticReference[] | undefined;
|
||||
for (const section of bundleFileInfo.sections) {
|
||||
switch (section.kind) {
|
||||
case BundleFileSectionKind.Internal:
|
||||
case BundleFileSectionKind.Text:
|
||||
(texts || (texts = [])).push(createUnparsedNode(section, node) as UnparsedTextLike);
|
||||
break;
|
||||
|
||||
case BundleFileSectionKind.NoDefaultLib:
|
||||
case BundleFileSectionKind.Reference:
|
||||
case BundleFileSectionKind.Type:
|
||||
case BundleFileSectionKind.Lib:
|
||||
(syntheticReferences || (syntheticReferences = [])).push(createUnparsedSyntheticReference(section, node));
|
||||
break;
|
||||
|
||||
// Ignore
|
||||
case BundleFileSectionKind.Prologue:
|
||||
case BundleFileSectionKind.EmitHelpers:
|
||||
case BundleFileSectionKind.Prepend:
|
||||
break;
|
||||
|
||||
default:
|
||||
Debug.assertNever(section);
|
||||
}
|
||||
}
|
||||
node.texts = texts || emptyArray;
|
||||
node.helpers = map(bundleFileInfo.sources && bundleFileInfo.sources.helpers, name => getAllUnscopedEmitHelpers().get(name)!);
|
||||
node.syntheticReferences = syntheticReferences;
|
||||
return node;
|
||||
}
|
||||
|
||||
function mapBundleFileSectionKindToSyntaxKind(kind: BundleFileSectionKind): SyntaxKind {
|
||||
switch (kind) {
|
||||
case BundleFileSectionKind.Prologue: return SyntaxKind.UnparsedPrologue;
|
||||
case BundleFileSectionKind.Prepend: return SyntaxKind.UnparsedPrepend;
|
||||
case BundleFileSectionKind.Internal: return SyntaxKind.UnparsedInternalText;
|
||||
case BundleFileSectionKind.Text: return SyntaxKind.UnparsedText;
|
||||
|
||||
case BundleFileSectionKind.EmitHelpers:
|
||||
case BundleFileSectionKind.NoDefaultLib:
|
||||
case BundleFileSectionKind.Reference:
|
||||
case BundleFileSectionKind.Type:
|
||||
case BundleFileSectionKind.Lib:
|
||||
return Debug.fail(`BundleFileSectionKind: ${kind} not yet mapped to SyntaxKind`);
|
||||
|
||||
default:
|
||||
return Debug.assertNever(kind);
|
||||
}
|
||||
}
|
||||
|
||||
function createUnparsedNode(section: BundleFileSection, parent: UnparsedSource): UnparsedNode {
|
||||
const node = createNode(mapBundleFileSectionKindToSyntaxKind(section.kind), section.pos, section.end) as UnparsedNode;
|
||||
node.parent = parent;
|
||||
node.data = section.data;
|
||||
return node;
|
||||
}
|
||||
|
||||
function createUnparsedSyntheticReference(section: BundleFileHasNoDefaultLib | BundleFileReference, parent: UnparsedSource) {
|
||||
const node = createNode(SyntaxKind.UnparsedSyntheticReference, section.pos, section.end) as UnparsedSyntheticReference;
|
||||
node.parent = parent;
|
||||
node.data = section.data;
|
||||
node.section = section;
|
||||
return node;
|
||||
}
|
||||
|
||||
export function createInputFiles(
|
||||
javascript: string,
|
||||
declaration: string
|
||||
javascriptText: string,
|
||||
declarationText: string
|
||||
): InputFiles;
|
||||
export function createInputFiles(
|
||||
javascript: string,
|
||||
declaration: string,
|
||||
readFileText: (path: string) => string | undefined,
|
||||
javascriptPath: string,
|
||||
javascriptMapPath: string | undefined,
|
||||
declarationPath: string,
|
||||
declarationMapPath: string | undefined,
|
||||
buildInfoPath: string | undefined
|
||||
): InputFiles;
|
||||
export function createInputFiles(
|
||||
javascriptText: string,
|
||||
declarationText: string,
|
||||
javascriptMapPath: string | undefined,
|
||||
javascriptMapText: string | undefined,
|
||||
declarationMapPath: string | undefined,
|
||||
declarationMapText: string | undefined
|
||||
): InputFiles;
|
||||
/*@internal*/
|
||||
export function createInputFiles(
|
||||
javascript: string,
|
||||
declaration: string,
|
||||
javascriptText: string,
|
||||
declarationText: string,
|
||||
javascriptMapPath: string | undefined,
|
||||
javascriptMapText: string | undefined,
|
||||
declarationMapPath: string | undefined,
|
||||
declarationMapText: string | undefined,
|
||||
javascriptPath: string | undefined,
|
||||
declarationPath: string | undefined,
|
||||
buildInfoPath?: string | undefined,
|
||||
buildInfo?: BuildInfo,
|
||||
oldFileOfCurrentEmit?: boolean
|
||||
): InputFiles;
|
||||
export function createInputFiles(
|
||||
javascriptTextOrReadFileText: string | ((path: string) => string | undefined),
|
||||
declarationTextOrJavascriptPath: string,
|
||||
javascriptMapPath?: string,
|
||||
javascriptMapText?: string,
|
||||
javascriptMapTextOrDeclarationPath?: string,
|
||||
declarationMapPath?: string,
|
||||
declarationMapText?: string
|
||||
declarationMapTextOrBuildInfoPath?: string,
|
||||
javascriptPath?: string | undefined,
|
||||
declarationPath?: string | undefined,
|
||||
buildInfoPath?: string | undefined,
|
||||
buildInfo?: BuildInfo,
|
||||
oldFileOfCurrentEmit?: boolean
|
||||
): InputFiles {
|
||||
const node = <InputFiles>createNode(SyntaxKind.InputFiles);
|
||||
node.javascriptText = javascript;
|
||||
node.javascriptMapPath = javascriptMapPath;
|
||||
node.javascriptMapText = javascriptMapText;
|
||||
node.declarationText = declaration;
|
||||
node.declarationMapPath = declarationMapPath;
|
||||
node.declarationMapText = declarationMapText;
|
||||
if (!isString(javascriptTextOrReadFileText)) {
|
||||
const cache = createMap<string | false>();
|
||||
const textGetter = (path: string | undefined) => {
|
||||
if (path === undefined) return undefined;
|
||||
let value = cache.get(path);
|
||||
if (value === undefined) {
|
||||
value = javascriptTextOrReadFileText(path);
|
||||
cache.set(path, value !== undefined ? value : false);
|
||||
}
|
||||
return value !== false ? value as string : undefined;
|
||||
};
|
||||
const definedTextGetter = (path: string) => {
|
||||
const result = textGetter(path);
|
||||
return result !== undefined ? result : `/* Input file ${path} was missing */\r\n`;
|
||||
};
|
||||
let buildInfo: BuildInfo | false;
|
||||
const getAndCacheBuildInfo = (getText: () => string | undefined) => {
|
||||
if (buildInfo === undefined) {
|
||||
const result = getText();
|
||||
buildInfo = result !== undefined ? getBuildInfo(result) : false;
|
||||
}
|
||||
return buildInfo || undefined;
|
||||
};
|
||||
node.javascriptPath = declarationTextOrJavascriptPath;
|
||||
node.javascriptMapPath = javascriptMapPath;
|
||||
node.declarationPath = Debug.assertDefined(javascriptMapTextOrDeclarationPath);
|
||||
node.declarationMapPath = declarationMapPath;
|
||||
node.buildInfoPath = declarationMapTextOrBuildInfoPath;
|
||||
Object.defineProperties(node, {
|
||||
javascriptText: { get() { return definedTextGetter(declarationTextOrJavascriptPath); } },
|
||||
javascriptMapText: { get() { return textGetter(javascriptMapPath); } }, // TODO:: if there is inline sourceMap in jsFile, use that
|
||||
declarationText: { get() { return definedTextGetter(Debug.assertDefined(javascriptMapTextOrDeclarationPath)); } },
|
||||
declarationMapText: { get() { return textGetter(declarationMapPath); } }, // TODO:: if there is inline sourceMap in dtsFile, use that
|
||||
buildInfo: { get() { return getAndCacheBuildInfo(() => textGetter(declarationMapTextOrBuildInfoPath)); } }
|
||||
});
|
||||
}
|
||||
else {
|
||||
node.javascriptText = javascriptTextOrReadFileText;
|
||||
node.javascriptMapPath = javascriptMapPath;
|
||||
node.javascriptMapText = javascriptMapTextOrDeclarationPath;
|
||||
node.declarationText = declarationTextOrJavascriptPath;
|
||||
node.declarationMapPath = declarationMapPath;
|
||||
node.declarationMapText = declarationMapTextOrBuildInfoPath;
|
||||
node.javascriptPath = javascriptPath;
|
||||
node.declarationPath = declarationPath,
|
||||
node.buildInfoPath = buildInfoPath;
|
||||
node.buildInfo = buildInfo;
|
||||
node.oldFileOfCurrentEmit = oldFileOfCurrentEmit;
|
||||
}
|
||||
return node;
|
||||
}
|
||||
|
||||
@ -2827,7 +3108,7 @@ namespace ts {
|
||||
return node.emitNode = { annotatedNodes: [node] } as EmitNode;
|
||||
}
|
||||
|
||||
const sourceFile = getSourceFileOfNode(node);
|
||||
const sourceFile = getSourceFileOfNode(getParseTreeNode(getSourceFileOfNode(node)));
|
||||
getOrCreateEmitNode(sourceFile).annotatedNodes!.push(node);
|
||||
}
|
||||
|
||||
@ -3124,7 +3405,7 @@ namespace ts {
|
||||
export const nullTransformationContext: TransformationContext = {
|
||||
enableEmitNotification: noop,
|
||||
enableSubstitution: noop,
|
||||
endLexicalEnvironment: () => undefined,
|
||||
endLexicalEnvironment: returnUndefined,
|
||||
getCompilerOptions: notImplemented,
|
||||
getEmitHost: notImplemented,
|
||||
getEmitResolver: notImplemented,
|
||||
@ -3325,7 +3606,7 @@ namespace ts {
|
||||
return setEmitFlags(createIdentifier(name), EmitFlags.HelperName | EmitFlags.AdviseOnEmitNode);
|
||||
}
|
||||
|
||||
const valuesHelper: EmitHelper = {
|
||||
export const valuesHelper: UnscopedEmitHelper = {
|
||||
name: "typescript:values",
|
||||
scoped: false,
|
||||
text: `
|
||||
@ -3353,7 +3634,7 @@ namespace ts {
|
||||
);
|
||||
}
|
||||
|
||||
const readHelper: EmitHelper = {
|
||||
export const readHelper: UnscopedEmitHelper = {
|
||||
name: "typescript:read",
|
||||
scoped: false,
|
||||
text: `
|
||||
@ -3389,7 +3670,7 @@ namespace ts {
|
||||
);
|
||||
}
|
||||
|
||||
const spreadHelper: EmitHelper = {
|
||||
export const spreadHelper: UnscopedEmitHelper = {
|
||||
name: "typescript:spread",
|
||||
scoped: false,
|
||||
text: `
|
||||
|
||||
@ -437,6 +437,8 @@ namespace ts.moduleSpecifiers {
|
||||
case Extension.Jsx:
|
||||
case Extension.Json:
|
||||
return ext;
|
||||
case Extension.TsBuildInfo:
|
||||
return Debug.fail(`Extension ${Extension.TsBuildInfo} is unsupported:: FileName:: ${fileName}`);
|
||||
default:
|
||||
return Debug.assertNever(ext);
|
||||
}
|
||||
|
||||
@ -1093,6 +1093,10 @@ namespace ts {
|
||||
return currentToken = scanner.reScanTemplateToken();
|
||||
}
|
||||
|
||||
function reScanLessThanToken(): SyntaxKind {
|
||||
return currentToken = scanner.reScanLessThanToken();
|
||||
}
|
||||
|
||||
function scanJsxIdentifier(): SyntaxKind {
|
||||
return currentToken = scanner.scanJsxIdentifier();
|
||||
}
|
||||
@ -2276,7 +2280,7 @@ namespace ts {
|
||||
function parseTypeReference(): TypeReferenceNode {
|
||||
const node = <TypeReferenceNode>createNode(SyntaxKind.TypeReference);
|
||||
node.typeName = parseEntityName(/*allowReservedWords*/ true, Diagnostics.Type_expected);
|
||||
if (!scanner.hasPrecedingLineBreak() && token() === SyntaxKind.LessThanToken) {
|
||||
if (!scanner.hasPrecedingLineBreak() && reScanLessThanToken() === SyntaxKind.LessThanToken) {
|
||||
node.typeArguments = parseBracketedList(ParsingContext.TypeArguments, parseType, SyntaxKind.LessThanToken, SyntaxKind.GreaterThanToken);
|
||||
}
|
||||
return finishNode(node);
|
||||
@ -2962,6 +2966,7 @@ namespace ts {
|
||||
case SyntaxKind.NumberKeyword:
|
||||
case SyntaxKind.BigIntKeyword:
|
||||
case SyntaxKind.BooleanKeyword:
|
||||
case SyntaxKind.ReadonlyKeyword:
|
||||
case SyntaxKind.SymbolKeyword:
|
||||
case SyntaxKind.UniqueKeyword:
|
||||
case SyntaxKind.VoidKeyword:
|
||||
@ -3051,7 +3056,7 @@ namespace ts {
|
||||
return finishNode(postfix);
|
||||
}
|
||||
|
||||
function parseTypeOperator(operator: SyntaxKind.KeyOfKeyword | SyntaxKind.UniqueKeyword) {
|
||||
function parseTypeOperator(operator: SyntaxKind.KeyOfKeyword | SyntaxKind.UniqueKeyword | SyntaxKind.ReadonlyKeyword) {
|
||||
const node = <TypeOperatorNode>createNode(SyntaxKind.TypeOperator);
|
||||
parseExpected(operator);
|
||||
node.operator = operator;
|
||||
@ -3073,6 +3078,7 @@ namespace ts {
|
||||
switch (operator) {
|
||||
case SyntaxKind.KeyOfKeyword:
|
||||
case SyntaxKind.UniqueKeyword:
|
||||
case SyntaxKind.ReadonlyKeyword:
|
||||
return parseTypeOperator(operator);
|
||||
case SyntaxKind.InferKeyword:
|
||||
return parseInferType();
|
||||
@ -4250,7 +4256,8 @@ namespace ts {
|
||||
|
||||
function parseJsxText(): JsxText {
|
||||
const node = <JsxText>createNode(SyntaxKind.JsxText);
|
||||
node.containsOnlyWhiteSpaces = currentToken === SyntaxKind.JsxTextAllWhiteSpaces;
|
||||
node.text = scanner.getTokenValue();
|
||||
node.containsOnlyTriviaWhiteSpaces = currentToken === SyntaxKind.JsxTextAllWhiteSpaces;
|
||||
currentToken = scanner.scanJsxToken();
|
||||
return finishNode(node);
|
||||
}
|
||||
@ -4523,7 +4530,8 @@ namespace ts {
|
||||
function parseCallExpressionRest(expression: LeftHandSideExpression): LeftHandSideExpression {
|
||||
while (true) {
|
||||
expression = parseMemberExpressionRest(expression);
|
||||
if (token() === SyntaxKind.LessThanToken) {
|
||||
// handle 'foo<<T>()'
|
||||
if (token() === SyntaxKind.LessThanToken || token() === SyntaxKind.LessThanLessThanToken) {
|
||||
// See if this is the start of a generic invocation. If so, consume it and
|
||||
// keep checking for postfix expressions. Otherwise, it's just a '<' that's
|
||||
// part of an arithmetic expression. Break out so we consume it higher in the
|
||||
@ -4565,9 +4573,10 @@ namespace ts {
|
||||
}
|
||||
|
||||
function parseTypeArgumentsInExpression() {
|
||||
if (!parseOptional(SyntaxKind.LessThanToken)) {
|
||||
if (reScanLessThanToken() !== SyntaxKind.LessThanToken) {
|
||||
return undefined;
|
||||
}
|
||||
nextToken();
|
||||
|
||||
const typeArguments = parseDelimitedList(ParsingContext.TypeArguments, parseType);
|
||||
if (!parseExpected(SyntaxKind.GreaterThanToken)) {
|
||||
@ -7754,6 +7763,7 @@ namespace ts {
|
||||
}
|
||||
}
|
||||
|
||||
/*@internal*/
|
||||
type PragmaDiagnosticReporter = (pos: number, length: number, message: DiagnosticMessage) => void;
|
||||
|
||||
/*@internal*/
|
||||
@ -7774,17 +7784,18 @@ namespace ts {
|
||||
const libReferenceDirectives = context.libReferenceDirectives;
|
||||
forEach(toArray(entryOrList), (arg: PragmaPseudoMap["reference"]) => {
|
||||
// TODO: GH#18217
|
||||
const { types, lib, path } = arg!.arguments;
|
||||
if (arg!.arguments["no-default-lib"]) {
|
||||
context.hasNoDefaultLib = true;
|
||||
}
|
||||
else if (arg!.arguments.types) {
|
||||
typeReferenceDirectives.push({ pos: arg!.arguments.types!.pos, end: arg!.arguments.types!.end, fileName: arg!.arguments.types!.value });
|
||||
else if (types) {
|
||||
typeReferenceDirectives.push({ pos: types.pos, end: types.end, fileName: types.value });
|
||||
}
|
||||
else if (arg!.arguments.lib) {
|
||||
libReferenceDirectives.push({ pos: arg!.arguments.lib!.pos, end: arg!.arguments.lib!.end, fileName: arg!.arguments.lib!.value });
|
||||
else if (lib) {
|
||||
libReferenceDirectives.push({ pos: lib.pos, end: lib.end, fileName: lib.value });
|
||||
}
|
||||
else if (arg!.arguments.path) {
|
||||
referencedFiles.push({ pos: arg!.arguments.path!.pos, end: arg!.arguments.path!.end, fileName: arg!.arguments.path!.value });
|
||||
else if (path) {
|
||||
referencedFiles.push({ pos: path.pos, end: path.end, fileName: path.value });
|
||||
}
|
||||
else {
|
||||
reportDiagnostic(arg!.range.pos, arg!.range.end - arg!.range.pos, Diagnostics.Invalid_reference_directive_syntax);
|
||||
|
||||
@ -69,6 +69,7 @@ namespace ts {
|
||||
export function createCompilerHost(options: CompilerOptions, setParentNodes?: boolean): CompilerHost {
|
||||
return createCompilerHostWorker(options, setParentNodes);
|
||||
}
|
||||
|
||||
/*@internal*/
|
||||
// TODO(shkamat): update this after reworking ts build API
|
||||
export function createCompilerHostWorker(options: CompilerOptions, setParentNodes?: boolean, system = sys): CompilerHost {
|
||||
@ -93,7 +94,6 @@ namespace ts {
|
||||
}
|
||||
text = "";
|
||||
}
|
||||
|
||||
return text !== undefined ? createSourceFile(fileName, text, languageVersion, setParentNodes) : undefined;
|
||||
}
|
||||
|
||||
@ -204,17 +204,25 @@ namespace ts {
|
||||
}
|
||||
|
||||
/*@internal*/
|
||||
export function changeCompilerHostToUseCache(
|
||||
host: CompilerHost,
|
||||
interface CompilerHostLikeForCache {
|
||||
fileExists(fileName: string): boolean;
|
||||
readFile(fileName: string, encoding?: string): string | undefined;
|
||||
directoryExists?(directory: string): boolean;
|
||||
createDirectory?(directory: string): void;
|
||||
writeFile?: WriteFileCallback;
|
||||
}
|
||||
|
||||
/*@internal*/
|
||||
export function changeCompilerHostLikeToUseCache(
|
||||
host: CompilerHostLikeForCache,
|
||||
toPath: (fileName: string) => Path,
|
||||
useCacheForSourceFile: boolean
|
||||
getSourceFile?: CompilerHost["getSourceFile"]
|
||||
) {
|
||||
const originalReadFile = host.readFile;
|
||||
const originalFileExists = host.fileExists;
|
||||
const originalDirectoryExists = host.directoryExists;
|
||||
const originalCreateDirectory = host.createDirectory;
|
||||
const originalWriteFile = host.writeFile;
|
||||
const originalGetSourceFile = host.getSourceFile;
|
||||
const readFileCache = createMap<string | false>();
|
||||
const fileExistsCache = createMap<boolean>();
|
||||
const directoryExistsCache = createMap<boolean>();
|
||||
@ -223,38 +231,37 @@ namespace ts {
|
||||
const readFileWithCache = (fileName: string): string | undefined => {
|
||||
const key = toPath(fileName);
|
||||
const value = readFileCache.get(key);
|
||||
if (value !== undefined) return value || undefined;
|
||||
if (value !== undefined) return value !== false ? value : undefined;
|
||||
return setReadFileCache(key, fileName);
|
||||
};
|
||||
const setReadFileCache = (key: Path, fileName: string) => {
|
||||
const newValue = originalReadFile.call(host, fileName);
|
||||
readFileCache.set(key, newValue || false);
|
||||
readFileCache.set(key, newValue !== undefined ? newValue : false);
|
||||
return newValue;
|
||||
};
|
||||
host.readFile = fileName => {
|
||||
const key = toPath(fileName);
|
||||
const value = readFileCache.get(key);
|
||||
if (value !== undefined) return value; // could be .d.ts from output
|
||||
if (!fileExtensionIs(fileName, Extension.Json)) {
|
||||
if (value !== undefined) return value !== false ? value : undefined; // could be .d.ts from output
|
||||
// Cache json or buildInfo
|
||||
if (!fileExtensionIs(fileName, Extension.Json) && !isBuildInfoFile(fileName)) {
|
||||
return originalReadFile.call(host, fileName);
|
||||
}
|
||||
|
||||
return setReadFileCache(key, fileName);
|
||||
};
|
||||
|
||||
if (useCacheForSourceFile) {
|
||||
host.getSourceFile = (fileName, languageVersion, onError, shouldCreateNewSourceFile) => {
|
||||
const key = toPath(fileName);
|
||||
const value = sourceFileCache.get(key);
|
||||
if (value) return value;
|
||||
const getSourceFileWithCache: CompilerHost["getSourceFile"] | undefined = getSourceFile ? (fileName, languageVersion, onError, shouldCreateNewSourceFile) => {
|
||||
const key = toPath(fileName);
|
||||
const value = sourceFileCache.get(key);
|
||||
if (value) return value;
|
||||
|
||||
const sourceFile = originalGetSourceFile.call(host, fileName, languageVersion, onError, shouldCreateNewSourceFile);
|
||||
if (sourceFile && (isDeclarationFileName(fileName) || fileExtensionIs(fileName, Extension.Json))) {
|
||||
sourceFileCache.set(key, sourceFile);
|
||||
}
|
||||
return sourceFile;
|
||||
};
|
||||
}
|
||||
const sourceFile = getSourceFile(fileName, languageVersion, onError, shouldCreateNewSourceFile);
|
||||
if (sourceFile && (isDeclarationFileName(fileName) || fileExtensionIs(fileName, Extension.Json))) {
|
||||
sourceFileCache.set(key, sourceFile);
|
||||
}
|
||||
return sourceFile;
|
||||
} : undefined;
|
||||
|
||||
// fileExists for any kind of extension
|
||||
host.fileExists = fileName => {
|
||||
@ -265,23 +272,25 @@ namespace ts {
|
||||
fileExistsCache.set(key, !!newValue);
|
||||
return newValue;
|
||||
};
|
||||
host.writeFile = (fileName, data, writeByteOrderMark, onError, sourceFiles) => {
|
||||
const key = toPath(fileName);
|
||||
fileExistsCache.delete(key);
|
||||
if (originalWriteFile) {
|
||||
host.writeFile = (fileName, data, writeByteOrderMark, onError, sourceFiles) => {
|
||||
const key = toPath(fileName);
|
||||
fileExistsCache.delete(key);
|
||||
|
||||
const value = readFileCache.get(key);
|
||||
if (value && value !== data) {
|
||||
readFileCache.delete(key);
|
||||
sourceFileCache.delete(key);
|
||||
}
|
||||
else if (useCacheForSourceFile) {
|
||||
const sourceFile = sourceFileCache.get(key);
|
||||
if (sourceFile && sourceFile.text !== data) {
|
||||
const value = readFileCache.get(key);
|
||||
if (value !== undefined && value !== data) {
|
||||
readFileCache.delete(key);
|
||||
sourceFileCache.delete(key);
|
||||
}
|
||||
}
|
||||
originalWriteFile.call(host, fileName, data, writeByteOrderMark, onError, sourceFiles);
|
||||
};
|
||||
else if (getSourceFileWithCache) {
|
||||
const sourceFile = sourceFileCache.get(key);
|
||||
if (sourceFile && sourceFile.text !== data) {
|
||||
sourceFileCache.delete(key);
|
||||
}
|
||||
}
|
||||
originalWriteFile.call(host, fileName, data, writeByteOrderMark, onError, sourceFiles);
|
||||
};
|
||||
}
|
||||
|
||||
// directoryExists
|
||||
if (originalDirectoryExists && originalCreateDirectory) {
|
||||
@ -306,7 +315,7 @@ namespace ts {
|
||||
originalDirectoryExists,
|
||||
originalCreateDirectory,
|
||||
originalWriteFile,
|
||||
originalGetSourceFile,
|
||||
getSourceFileWithCache,
|
||||
readFileWithCache
|
||||
};
|
||||
}
|
||||
@ -735,7 +744,7 @@ namespace ts {
|
||||
performance.mark("beforeProgram");
|
||||
|
||||
const host = createProgramOptions.host || createCompilerHost(options);
|
||||
const configParsingHost = parseConfigHostFromCompilerHost(host);
|
||||
const configParsingHost = parseConfigHostFromCompilerHostLike(host);
|
||||
|
||||
let skipDefaultLib = options.noLib;
|
||||
const getDefaultLibraryFileName = memoize(() => host.getDefaultLibFileName(options));
|
||||
@ -816,11 +825,16 @@ namespace ts {
|
||||
}
|
||||
if (rootNames.length) {
|
||||
for (const parsedRef of resolvedProjectReferences) {
|
||||
if (parsedRef) {
|
||||
const out = parsedRef.commandLine.options.outFile || parsedRef.commandLine.options.out;
|
||||
if (out) {
|
||||
const dtsOutfile = changeExtension(out, ".d.ts");
|
||||
processSourceFile(dtsOutfile, /*isDefaultLib*/ false, /*ignoreNoDefaultLib*/ false, /*packageId*/ undefined);
|
||||
if (!parsedRef) continue;
|
||||
const out = parsedRef.commandLine.options.outFile || parsedRef.commandLine.options.out;
|
||||
if (out) {
|
||||
processSourceFile(changeExtension(out, ".d.ts"), /*isDefaultLib*/ false, /*ignoreNoDefaultLib*/ false, /*packageId*/ undefined);
|
||||
}
|
||||
else if (getEmitModuleKind(parsedRef.commandLine.options) === ModuleKind.None) {
|
||||
for (const fileName of parsedRef.commandLine.fileNames) {
|
||||
if (!fileExtensionIs(fileName, Extension.Dts) && hasTSFileExtension(fileName)) {
|
||||
processSourceFile(getOutputDeclarationFileName(fileName, parsedRef.commandLine, !host.useCaseSensitiveFileNames()), /*isDefaultLib*/ false, /*ignoreNoDefaultLib*/ false, /*packageId*/ undefined);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -930,7 +944,8 @@ namespace ts {
|
||||
getProjectReferenceRedirect,
|
||||
getResolvedProjectReferenceToRedirect,
|
||||
getResolvedProjectReferenceByPath,
|
||||
forEachResolvedProjectReference
|
||||
forEachResolvedProjectReference,
|
||||
emitBuildInfo
|
||||
};
|
||||
|
||||
verifyCompilerOptions();
|
||||
@ -964,7 +979,7 @@ namespace ts {
|
||||
|
||||
function getCommonSourceDirectory() {
|
||||
if (commonSourceDirectory === undefined) {
|
||||
const emittedFiles = filter(files, file => sourceFileMayBeEmitted(file, options, isSourceFileFromExternalLibrary));
|
||||
const emittedFiles = filter(files, file => sourceFileMayBeEmitted(file, options, isSourceFileFromExternalLibrary, getResolvedProjectReferenceToRedirect));
|
||||
if (options.rootDir && checkSourceFilesBelongToPath(emittedFiles, options.rootDir)) {
|
||||
// If a rootDir is specified use it as the commonSourceDirectory
|
||||
commonSourceDirectory = getNormalizedAbsolutePath(options.rootDir, currentDirectory);
|
||||
@ -1410,6 +1425,7 @@ namespace ts {
|
||||
getSourceFiles: program.getSourceFiles,
|
||||
getLibFileFromReference: program.getLibFileFromReference,
|
||||
isSourceFileFromExternalLibrary,
|
||||
getResolvedProjectReferenceToRedirect,
|
||||
writeFile: writeFileCallback || (
|
||||
(fileName, data, writeByteOrderMark, onError, sourceFiles) => host.writeFile(fileName, data, writeByteOrderMark, onError, sourceFiles)),
|
||||
isEmitBlocked,
|
||||
@ -1424,9 +1440,28 @@ namespace ts {
|
||||
},
|
||||
...(host.directoryExists ? { directoryExists: f => host.directoryExists!(f) } : {}),
|
||||
useCaseSensitiveFileNames: () => host.useCaseSensitiveFileNames(),
|
||||
getProgramBuildInfo: () => program.getProgramBuildInfo && program.getProgramBuildInfo()
|
||||
};
|
||||
}
|
||||
|
||||
function emitBuildInfo(writeFileCallback?: WriteFileCallback): EmitResult {
|
||||
Debug.assert(!options.out && !options.outFile);
|
||||
performance.mark("beforeEmit");
|
||||
const emitResult = emitFiles(
|
||||
notImplementedResolver,
|
||||
getEmitHost(writeFileCallback),
|
||||
/*targetSourceFile*/ undefined,
|
||||
/*emitOnlyDtsFiles*/ false,
|
||||
/*transformers*/ undefined,
|
||||
/*declaraitonTransformers*/ undefined,
|
||||
/*onlyBuildInfo*/ true
|
||||
);
|
||||
|
||||
performance.mark("afterEmit");
|
||||
performance.measure("Emit", "beforeEmit", "afterEmit");
|
||||
return emitResult;
|
||||
}
|
||||
|
||||
function getResolvedProjectReferences() {
|
||||
return resolvedProjectReferences;
|
||||
}
|
||||
@ -1435,32 +1470,16 @@ namespace ts {
|
||||
return projectReferences;
|
||||
}
|
||||
|
||||
function getPrependNodes(): InputFiles[] {
|
||||
if (!projectReferences) {
|
||||
return emptyArray;
|
||||
}
|
||||
|
||||
const nodes: InputFiles[] = [];
|
||||
for (let i = 0; i < projectReferences.length; i++) {
|
||||
const ref = projectReferences[i];
|
||||
const resolvedRefOpts = resolvedProjectReferences![i]!.commandLine;
|
||||
if (ref.prepend && resolvedRefOpts && resolvedRefOpts.options) {
|
||||
const out = resolvedRefOpts.options.outFile || resolvedRefOpts.options.out;
|
||||
// Upstream project didn't have outFile set -- skip (error will have been issued earlier)
|
||||
if (!out) continue;
|
||||
|
||||
const dtsFilename = changeExtension(out, ".d.ts");
|
||||
const js = host.readFile(out) || `/* Input file ${out} was missing */\r\n`;
|
||||
const jsMapPath = out + ".map"; // TODO: try to read sourceMappingUrl comment from the file
|
||||
const jsMap = host.readFile(jsMapPath);
|
||||
const dts = host.readFile(dtsFilename) || `/* Input file ${dtsFilename} was missing */\r\n`;
|
||||
const dtsMapPath = dtsFilename + ".map";
|
||||
const dtsMap = host.readFile(dtsMapPath);
|
||||
const node = createInputFiles(js, dts, jsMap && jsMapPath, jsMap, dtsMap && dtsMapPath, dtsMap);
|
||||
nodes.push(node);
|
||||
function getPrependNodes() {
|
||||
return createPrependNodes(
|
||||
projectReferences,
|
||||
(_ref, index) => resolvedProjectReferences![index]!.commandLine,
|
||||
fileName => {
|
||||
const path = toPath(fileName);
|
||||
const sourceFile = getSourceFileByPath(path);
|
||||
return sourceFile ? sourceFile.text : filesByName.has(path) ? undefined : host.readFile(path);
|
||||
}
|
||||
}
|
||||
return nodes;
|
||||
);
|
||||
}
|
||||
|
||||
function isSourceFileFromExternalLibrary(file: SourceFile): boolean {
|
||||
@ -1557,7 +1576,7 @@ namespace ts {
|
||||
const emitResult = emitFiles(
|
||||
emitResolver,
|
||||
getEmitHost(writeFileCallback),
|
||||
sourceFile!, // TODO: GH#18217
|
||||
sourceFile,
|
||||
emitOnlyDtsFiles,
|
||||
transformers,
|
||||
customTransformers && customTransformers.afterDeclarations
|
||||
@ -2227,8 +2246,9 @@ namespace ts {
|
||||
processReferencedFiles(file, isDefaultLib);
|
||||
processTypeReferenceDirectives(file);
|
||||
}
|
||||
|
||||
processLibReferenceDirectives(file);
|
||||
if (!options.noLib) {
|
||||
processLibReferenceDirectives(file);
|
||||
}
|
||||
|
||||
modulesWithElidedImports.set(file.path, false);
|
||||
processImportedModules(file);
|
||||
@ -2315,8 +2335,10 @@ namespace ts {
|
||||
processReferencedFiles(file, isDefaultLib);
|
||||
processTypeReferenceDirectives(file);
|
||||
}
|
||||
if (!options.noLib) {
|
||||
processLibReferenceDirectives(file);
|
||||
}
|
||||
|
||||
processLibReferenceDirectives(file);
|
||||
|
||||
// always process imported modules to record module name resolutions
|
||||
processImportedModules(file);
|
||||
@ -2357,7 +2379,7 @@ namespace ts {
|
||||
const out = referencedProject.commandLine.options.outFile || referencedProject.commandLine.options.out;
|
||||
return out ?
|
||||
changeExtension(out, Extension.Dts) :
|
||||
getOutputDeclarationFileName(fileName, referencedProject.commandLine);
|
||||
getOutputDeclarationFileName(fileName, referencedProject.commandLine, !host.useCaseSensitiveFileNames());
|
||||
}
|
||||
|
||||
/**
|
||||
@ -2704,19 +2726,29 @@ namespace ts {
|
||||
if (options.declaration === false) {
|
||||
createDiagnosticForOptionName(Diagnostics.Composite_projects_may_not_disable_declaration_emit, "declaration");
|
||||
}
|
||||
if (options.incremental === false) {
|
||||
createDiagnosticForOptionName(Diagnostics.Composite_projects_may_not_disable_incremental_compilation, "declaration");
|
||||
}
|
||||
}
|
||||
|
||||
if (options.tsBuildInfoFile) {
|
||||
if (!isIncrementalCompilation(options)) {
|
||||
createDiagnosticForOptionName(Diagnostics.Option_0_cannot_be_specified_without_specifying_option_1_or_option_2, "tsBuildInfoFile", "incremental", "composite");
|
||||
}
|
||||
}
|
||||
|
||||
verifyProjectReferences();
|
||||
|
||||
// List of collected files is complete; validate exhautiveness if this is a project with a file list
|
||||
if (options.composite) {
|
||||
const sourceFiles = files.filter(f => !f.isDeclarationFile);
|
||||
if (rootNames.length < sourceFiles.length) {
|
||||
const normalizedRootNames = rootNames.map(r => normalizePath(r).toLowerCase());
|
||||
for (const file of sourceFiles.map(f => normalizePath(f.path).toLowerCase())) {
|
||||
if (normalizedRootNames.indexOf(file) === -1) {
|
||||
programDiagnostics.add(createCompilerDiagnostic(Diagnostics.File_0_is_not_in_project_file_list_Projects_must_list_all_files_or_use_an_include_pattern, file));
|
||||
}
|
||||
const rootPaths = rootNames.map(toPath);
|
||||
for (const file of files) {
|
||||
// Ignore declaration files
|
||||
if (file.isDeclarationFile) continue;
|
||||
// Ignore json file thats from project reference
|
||||
if (isJsonSourceFile(file) && getResolvedProjectReferenceToRedirect(file.fileName)) continue;
|
||||
if (rootPaths.indexOf(file.path) === -1) {
|
||||
programDiagnostics.add(createCompilerDiagnostic(Diagnostics.File_0_is_not_in_project_file_list_Projects_must_list_all_files_or_use_an_include_pattern, file.fileName));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -2925,6 +2957,7 @@ namespace ts {
|
||||
}
|
||||
|
||||
function verifyProjectReferences() {
|
||||
const buildInfoPath = !options.noEmit && !options.suppressOutputPathCheck ? getOutputPathForBuildInfo(options) : undefined;
|
||||
forEachProjectReference(projectReferences, resolvedProjectReferences, (resolvedRef, index, parent) => {
|
||||
const ref = (parent ? parent.commandLine.projectReferences : projectReferences)![index];
|
||||
const parentFile = parent && parent.sourceFile as JsonSourceFile;
|
||||
@ -2951,6 +2984,10 @@ namespace ts {
|
||||
createDiagnosticForReference(parentFile, index, Diagnostics.Cannot_prepend_project_0_because_it_does_not_have_outFile_set, ref.path);
|
||||
}
|
||||
}
|
||||
if (!parent && buildInfoPath && buildInfoPath === getOutputPathForBuildInfo(options)) {
|
||||
createDiagnosticForReference(parentFile, index, Diagnostics.Cannot_write_file_0_because_it_will_overwrite_tsbuildinfo_file_generated_by_referenced_project_1, buildInfoPath, ref.path);
|
||||
hasEmitBlockingDiagnostics.set(toPath(buildInfoPath), true);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@ -3101,18 +3138,29 @@ namespace ts {
|
||||
}
|
||||
}
|
||||
|
||||
/*@internal*/
|
||||
interface CompilerHostLike {
|
||||
useCaseSensitiveFileNames(): boolean;
|
||||
getCurrentDirectory(): string;
|
||||
fileExists(fileName: string): boolean;
|
||||
readFile(fileName: string): string | undefined;
|
||||
readDirectory?(rootDir: string, extensions: ReadonlyArray<string>, excludes: ReadonlyArray<string> | undefined, includes: ReadonlyArray<string>, depth?: number): string[];
|
||||
trace?(s: string): void;
|
||||
onUnRecoverableConfigFileDiagnostic?: DiagnosticReporter;
|
||||
}
|
||||
|
||||
/* @internal */
|
||||
export function parseConfigHostFromCompilerHost(host: CompilerHost): ParseConfigFileHost {
|
||||
export function parseConfigHostFromCompilerHostLike(host: CompilerHostLike, directoryStructureHost: DirectoryStructureHost = host): ParseConfigFileHost {
|
||||
return {
|
||||
fileExists: f => host.fileExists(f),
|
||||
fileExists: f => directoryStructureHost.fileExists(f),
|
||||
readDirectory(root, extensions, excludes, includes, depth) {
|
||||
Debug.assertDefined(host.readDirectory, "'CompilerHost.readDirectory' must be implemented to correctly process 'projectReferences'");
|
||||
return host.readDirectory!(root, extensions, excludes, includes, depth);
|
||||
Debug.assertDefined(directoryStructureHost.readDirectory, "'CompilerHost.readDirectory' must be implemented to correctly process 'projectReferences'");
|
||||
return directoryStructureHost.readDirectory!(root, extensions, excludes, includes, depth);
|
||||
},
|
||||
readFile: f => host.readFile(f),
|
||||
readFile: f => directoryStructureHost.readFile(f),
|
||||
useCaseSensitiveFileNames: host.useCaseSensitiveFileNames(),
|
||||
getCurrentDirectory: () => host.getCurrentDirectory(),
|
||||
onUnRecoverableConfigFileDiagnostic: () => undefined,
|
||||
onUnRecoverableConfigFileDiagnostic: host.onUnRecoverableConfigFileDiagnostic || returnUndefined,
|
||||
trace: host.trace ? (s) => host.trace!(s) : undefined
|
||||
};
|
||||
}
|
||||
@ -3122,6 +3170,25 @@ namespace ts {
|
||||
fileExists(fileName: string): boolean;
|
||||
}
|
||||
|
||||
/* @internal */
|
||||
export function createPrependNodes(projectReferences: ReadonlyArray<ProjectReference> | undefined, getCommandLine: (ref: ProjectReference, index: number) => ParsedCommandLine | undefined, readFile: (path: string) => string | undefined) {
|
||||
if (!projectReferences) return emptyArray;
|
||||
let nodes: InputFiles[] | undefined;
|
||||
for (let i = 0; i < projectReferences.length; i++) {
|
||||
const ref = projectReferences[i];
|
||||
const resolvedRefOpts = getCommandLine(ref, i);
|
||||
if (ref.prepend && resolvedRefOpts && resolvedRefOpts.options) {
|
||||
const out = resolvedRefOpts.options.outFile || resolvedRefOpts.options.out;
|
||||
// Upstream project didn't have outFile set -- skip (error will have been issued earlier)
|
||||
if (!out) continue;
|
||||
|
||||
const { jsFilePath, sourceMapFilePath, declarationFilePath, declarationMapPath, buildInfoPath } = getOutputPathsForBundle(resolvedRefOpts.options, /*forceDtsPaths*/ true);
|
||||
const node = createInputFiles(readFile, jsFilePath!, sourceMapFilePath, declarationFilePath!, declarationMapPath, buildInfoPath);
|
||||
(nodes || (nodes = [])).push(node);
|
||||
}
|
||||
}
|
||||
return nodes || emptyArray;
|
||||
}
|
||||
/**
|
||||
* Returns the target config filename of a project reference.
|
||||
* Note: The file might not exist.
|
||||
|
||||
@ -71,8 +71,8 @@ namespace ts {
|
||||
nonRecursive?: boolean;
|
||||
}
|
||||
|
||||
export function isPathInNodeModulesStartingWithDot(path: Path) {
|
||||
return stringContains(path, "/node_modules/.");
|
||||
export function isPathIgnored(path: Path) {
|
||||
return some(ignoredPaths, searchPath => stringContains(path, searchPath));
|
||||
}
|
||||
|
||||
export const maxNumberOfFilesToIterateForInvalidation = 256;
|
||||
@ -696,7 +696,7 @@ namespace ts {
|
||||
}
|
||||
else {
|
||||
// If something to do with folder/file starting with "." in node_modules folder, skip it
|
||||
if (isPathInNodeModulesStartingWithDot(fileOrDirectoryPath)) return false;
|
||||
if (isPathIgnored(fileOrDirectoryPath)) return false;
|
||||
|
||||
// Some file or directory in the watching directory is created
|
||||
// Return early if it does not have any of the watching extension or not the custom failed lookup path
|
||||
|
||||
@ -31,6 +31,7 @@ namespace ts {
|
||||
scanJsxIdentifier(): SyntaxKind;
|
||||
scanJsxAttributeValue(): SyntaxKind;
|
||||
reScanJsxToken(): JsxTokenSyntaxKind;
|
||||
reScanLessThanToken(): SyntaxKind;
|
||||
scanJsxToken(): JsxTokenSyntaxKind;
|
||||
scanJSDocToken(): JsDocSyntaxKind;
|
||||
scan(): SyntaxKind;
|
||||
@ -622,13 +623,15 @@ namespace ts {
|
||||
|
||||
const shebangTriviaRegex = /^#!.*/;
|
||||
|
||||
function isShebangTrivia(text: string, pos: number) {
|
||||
/*@internal*/
|
||||
export function isShebangTrivia(text: string, pos: number) {
|
||||
// Shebangs check must only be done at the start of the file
|
||||
Debug.assert(pos === 0);
|
||||
return shebangTriviaRegex.test(text);
|
||||
}
|
||||
|
||||
function scanShebangTrivia(text: string, pos: number) {
|
||||
/*@internal*/
|
||||
export function scanShebangTrivia(text: string, pos: number) {
|
||||
const shebang = shebangTriviaRegex.exec(text)![0];
|
||||
pos = pos + shebang.length;
|
||||
return pos;
|
||||
@ -660,8 +663,15 @@ namespace ts {
|
||||
let pendingKind!: CommentKind;
|
||||
let pendingHasTrailingNewLine!: boolean;
|
||||
let hasPendingCommentRange = false;
|
||||
let collecting = trailing || pos === 0;
|
||||
let collecting = trailing;
|
||||
let accumulator = initial;
|
||||
if (pos === 0) {
|
||||
collecting = true;
|
||||
const shebang = getShebang(text);
|
||||
if (shebang) {
|
||||
pos = shebang.length;
|
||||
}
|
||||
}
|
||||
scan: while (pos >= 0 && pos < text.length) {
|
||||
const ch = text.charCodeAt(pos);
|
||||
switch (ch) {
|
||||
@ -874,6 +884,7 @@ namespace ts {
|
||||
scanJsxIdentifier,
|
||||
scanJsxAttributeValue,
|
||||
reScanJsxToken,
|
||||
reScanLessThanToken,
|
||||
scanJsxToken,
|
||||
scanJSDocToken,
|
||||
scan,
|
||||
@ -1939,6 +1950,14 @@ namespace ts {
|
||||
return token = scanJsxToken();
|
||||
}
|
||||
|
||||
function reScanLessThanToken(): SyntaxKind {
|
||||
if (token === SyntaxKind.LessThanLessThanToken) {
|
||||
pos = tokenPos + 1;
|
||||
return token = SyntaxKind.LessThanToken;
|
||||
}
|
||||
return token;
|
||||
}
|
||||
|
||||
function scanJsxToken(): JsxTokenSyntaxKind {
|
||||
startPos = tokenPos = pos;
|
||||
|
||||
@ -1994,6 +2013,7 @@ namespace ts {
|
||||
pos++;
|
||||
}
|
||||
|
||||
tokenValue = text.substring(startPos, pos);
|
||||
return firstNonWhitespace === -1 ? SyntaxKind.JsxTextAllWhiteSpaces : SyntaxKind.JsxText;
|
||||
}
|
||||
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user