Mirror of https://github.com/microsoft/TypeScript.git (synced 2025-12-12 11:50:54 -06:00)
Revert "Merge pull request #11354 from Microsoft/map4"
This reverts commit adfdae0dc47db5ad8248d26929a7b31cb895a539, reversing changes made to aad663cebf6a89f7029af57d8cb6a0a011854978.
This commit is contained in:
parent adfdae0dc4
commit 12f6dcefa1
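The change being reverted (the map4 branch) had moved the compiler to a two-parameter ts.Map<K, V> with get/set/has accessors, backed by native Map or by the collections.ts shim whose deletion appears below; the revert restores the one-parameter ts.Map<T>, a string-keyed dictionary-mode object accessed with bracket indexing. The following is a minimal illustrative sketch of the two shapes as they appear throughout the hunks; the interface names are simplified stand-ins, not the compiler's actual declarations.

// Illustrative sketch only (simplified stand-ins, not the compiler's declarations).

// Shape used by the reverted map4 branch: key/value semantics behind get/set.
interface KeyedMap<K, V> {
    get(key: K): V | undefined;
    set(key: K, value: V): void;
    has(key: K): boolean;
    delete(key: K): boolean;
    forEach(action: (value: V, key: K) => void): void;
}

// Shape restored by this revert: a string-keyed dictionary object.
interface StringMap<T> {
    [key: string]: T;
}

declare const keyed: KeyedMap<string, number>;
declare const plain: StringMap<number>;

keyed.set("answer", 42);            // map4 style (the side being removed)
const fromKeyed = keyed.get("answer");

plain["answer"] = 42;               // restored style (the side being kept)
const fromPlain = plain["answer"];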
@@ -57,7 +57,6 @@ function measure(marker) {
}

var compilerSources = [
"collections.ts",
"core.ts",
"performance.ts",
"sys.ts",
@@ -94,7 +93,6 @@ var compilerSources = [
});

var servicesSources = [
"collections.ts",
"core.ts",
"performance.ts",
"sys.ts",
@@ -27,7 +27,7 @@ function main(): void {

var inputFilePath = sys.args[0].replace(/\\/g, "/");
var inputStr = sys.readFile(inputFilePath);


var diagnosticMessages: InputDiagnosticMessageTable = JSON.parse(inputStr);

var names = Utilities.getObjectKeys(diagnosticMessages);
@@ -44,7 +44,7 @@ function main(): void {
function checkForUniqueCodes(messages: string[], diagnosticTable: InputDiagnosticMessageTable) {
const originalMessageForCode: string[] = [];
let numConflicts = 0;


for (const currentMessage of messages) {
const code = diagnosticTable[currentMessage].code;
@@ -68,19 +68,19 @@ function checkForUniqueCodes(messages: string[], diagnosticTable: InputDiagnosti
}
}

function buildUniqueNameMap(names: string[]): ts.Map<string, string> {
var nameMap = ts.createMap<string, string>();
function buildUniqueNameMap(names: string[]): ts.Map<string> {
var nameMap = ts.createMap<string>();

var uniqueNames = NameGenerator.ensureUniqueness(names, /* isCaseSensitive */ false, /* isFixed */ undefined);

for (var i = 0; i < names.length; i++) {
nameMap.set(names[i], uniqueNames[i]);
nameMap[names[i]] = uniqueNames[i];
}

return nameMap;
}

function buildInfoFileOutput(messageTable: InputDiagnosticMessageTable, nameMap: ts.Map<string, string>): string {
function buildInfoFileOutput(messageTable: InputDiagnosticMessageTable, nameMap: ts.Map<string>): string {
var result =
'// <auto-generated />\r\n' +
'/// <reference path="types.ts" />\r\n' +
@@ -91,7 +91,7 @@ function buildInfoFileOutput(messageTable: InputDiagnosticMessageTable, nameMap:
for (var i = 0; i < names.length; i++) {
var name = names[i];
var diagnosticDetails = messageTable[name];
var propName = convertPropertyName(nameMap.get(name));
var propName = convertPropertyName(nameMap[name]);

result +=
' ' + propName +
@@ -107,14 +107,14 @@ function buildInfoFileOutput(messageTable: InputDiagnosticMessageTable, nameMap:
return result;
}

function buildDiagnosticMessageOutput(messageTable: InputDiagnosticMessageTable, nameMap: ts.Map<string, string>): string {
function buildDiagnosticMessageOutput(messageTable: InputDiagnosticMessageTable, nameMap: ts.Map<string>): string {
var result =
'{';
var names = Utilities.getObjectKeys(messageTable);
for (var i = 0; i < names.length; i++) {
var name = names[i];
var diagnosticDetails = messageTable[name];
var propName = convertPropertyName(nameMap.get(name));
var propName = convertPropertyName(nameMap[name]);

result += '\r\n "' + createKey(propName, diagnosticDetails.code) + '"' + ' : "' + name.replace(/[\"]/g, '\\"') + '"';
if (i !== names.length - 1) {
@@ -133,7 +133,7 @@ namespace ts {

let symbolCount = 0;
let Symbol: { new (flags: SymbolFlags, name: string): Symbol };
let classifiableNames: Set<string>;
let classifiableNames: Map<string>;

const unreachableFlow: FlowNode = { flags: FlowFlags.Unreachable };
const reportedUnreachableFlow: FlowNode = { flags: FlowFlags.Unreachable };
@@ -147,7 +147,7 @@ namespace ts {
options = opts;
languageVersion = getEmitScriptTarget(options);
inStrictMode = bindInStrictMode(file, opts);
classifiableNames = createSet();
classifiableNames = createMap<string>();
symbolCount = 0;
skipTransformFlagAggregation = isDeclarationFile(file);

@@ -207,11 +207,11 @@ namespace ts {
symbol.declarations.push(node);

if (symbolFlags & SymbolFlags.HasExports && !symbol.exports) {
symbol.exports = createMap<string, Symbol>();
symbol.exports = createMap<Symbol>();
}

if (symbolFlags & SymbolFlags.HasMembers && !symbol.members) {
symbol.members = createMap<string, Symbol>();
symbol.members = createMap<Symbol>();
}

if (symbolFlags & SymbolFlags.Value) {
@@ -349,17 +349,17 @@ namespace ts {
// Otherwise, we'll be merging into a compatible existing symbol (for example when
// you have multiple 'vars' with the same name in the same container). In this case
// just add this node into the declarations list of the symbol.
symbol = getOrUpdate(symbolTable, name, name => createSymbol(SymbolFlags.None, name));
symbol = symbolTable[name] || (symbolTable[name] = createSymbol(SymbolFlags.None, name));

if (name && (includes & SymbolFlags.Classifiable)) {
classifiableNames.add(name);
classifiableNames[name] = name;
}

if (symbol.flags & excludes) {
if (symbol.isReplaceableByMethod) {
// Javascript constructor-declared symbols can be discarded in favor of
// prototype symbols like methods.
symbol = setAndReturn(symbolTable, name, createSymbol(SymbolFlags.None, name));
symbol = symbolTable[name] = createSymbol(SymbolFlags.None, name);
}
else {
if (node.name) {
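The symbolTable lines in the hunk above are the same "get or create" pattern expressed once per style. A hedged generic sketch of the two idioms follows; the helper names here are invented for illustration (compare getOrUpdate in the deleted collections.ts further down).

// Sketch only: generic forms of the two idioms shown in the hunk above.

// map4 style: a helper over a real Map, like getOrUpdate in the deleted shim file.
function getOrCreate<K, V>(map: Map<K, V>, key: K, make: (key: K) => V): V {
    const existing = map.get(key);
    if (existing !== undefined) {
        return existing;
    }
    const created = make(key);
    map.set(key, created);
    return created;
}

// Restored style: bracket access on a string-keyed object, using || to detect "missing".
function getOrCreateByName<V>(table: { [name: string]: V }, name: string, make: (name: string) => V): V {
    return table[name] || (table[name] = make(name));
}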
@ -484,7 +484,7 @@ namespace ts {
|
||||
if (containerFlags & ContainerFlags.IsContainer) {
|
||||
container = blockScopeContainer = node;
|
||||
if (containerFlags & ContainerFlags.HasLocals) {
|
||||
container.locals = createMap<string, Symbol>();
|
||||
container.locals = createMap<Symbol>();
|
||||
}
|
||||
addToContainerChain(container);
|
||||
}
|
||||
@ -1525,7 +1525,8 @@ namespace ts {
|
||||
|
||||
const typeLiteralSymbol = createSymbol(SymbolFlags.TypeLiteral, "__type");
|
||||
addDeclarationToSymbol(typeLiteralSymbol, node, SymbolFlags.TypeLiteral);
|
||||
typeLiteralSymbol.members = createMap([[symbol.name, symbol]]);
|
||||
typeLiteralSymbol.members = createMap<Symbol>();
|
||||
typeLiteralSymbol.members[symbol.name] = symbol;
|
||||
}
|
||||
|
||||
function bindObjectLiteralExpression(node: ObjectLiteralExpression) {
|
||||
@ -1535,7 +1536,7 @@ namespace ts {
|
||||
}
|
||||
|
||||
if (inStrictMode) {
|
||||
const seen = createMap<string, ElementKind>();
|
||||
const seen = createMap<ElementKind>();
|
||||
|
||||
for (const prop of node.properties) {
|
||||
if (prop.name.kind !== SyntaxKind.Identifier) {
|
||||
@ -1556,9 +1557,9 @@ namespace ts {
|
||||
? ElementKind.Property
|
||||
: ElementKind.Accessor;
|
||||
|
||||
const existingKind = seen.get(identifier.text);
|
||||
const existingKind = seen[identifier.text];
|
||||
if (!existingKind) {
|
||||
seen.set(identifier.text, currentKind);
|
||||
seen[identifier.text] = currentKind;
|
||||
continue;
|
||||
}
|
||||
|
||||
@ -1591,7 +1592,7 @@ namespace ts {
|
||||
// fall through.
|
||||
default:
|
||||
if (!blockScopeContainer.locals) {
|
||||
blockScopeContainer.locals = createMap<string, Symbol>();
|
||||
blockScopeContainer.locals = createMap<Symbol>();
|
||||
addToContainerChain(blockScopeContainer);
|
||||
}
|
||||
declareSymbol(blockScopeContainer.locals, undefined, node, symbolFlags, symbolExcludes);
|
||||
@ -2071,7 +2072,7 @@ namespace ts {
|
||||
}
|
||||
}
|
||||
|
||||
file.symbol.globalExports = file.symbol.globalExports || createMap<string, Symbol>();
|
||||
file.symbol.globalExports = file.symbol.globalExports || createMap<Symbol>();
|
||||
declareSymbol(file.symbol.globalExports, file.symbol, node, SymbolFlags.Alias, SymbolFlags.AliasExcludes);
|
||||
}
|
||||
|
||||
@ -2118,7 +2119,7 @@ namespace ts {
|
||||
Debug.assert(isInJavaScriptFile(node));
|
||||
// Declare a 'member' if the container is an ES5 class or ES6 constructor
|
||||
if (container.kind === SyntaxKind.FunctionDeclaration || container.kind === SyntaxKind.FunctionExpression) {
|
||||
container.symbol.members = container.symbol.members || createMap<string, Symbol>();
|
||||
container.symbol.members = container.symbol.members || createMap<Symbol>();
|
||||
// It's acceptable for multiple 'this' assignments of the same identifier to occur
|
||||
declareSymbol(container.symbol.members, container.symbol, node, SymbolFlags.Property, SymbolFlags.PropertyExcludes & ~SymbolFlags.Property);
|
||||
}
|
||||
@ -2150,14 +2151,14 @@ namespace ts {
|
||||
constructorFunction.parent = classPrototype;
|
||||
classPrototype.parent = leftSideOfAssignment;
|
||||
|
||||
const funcSymbol = container.locals.get(constructorFunction.text);
|
||||
const funcSymbol = container.locals[constructorFunction.text];
|
||||
if (!funcSymbol || !(funcSymbol.flags & SymbolFlags.Function || isDeclarationOfFunctionExpression(funcSymbol))) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Set up the members collection if it doesn't exist already
|
||||
if (!funcSymbol.members) {
|
||||
funcSymbol.members = createMap<string, Symbol>();
|
||||
funcSymbol.members = createMap<Symbol>();
|
||||
}
|
||||
|
||||
// Declare the method/property
|
||||
@ -2190,7 +2191,7 @@ namespace ts {
|
||||
bindAnonymousDeclaration(node, SymbolFlags.Class, bindingName);
|
||||
// Add name of class expression into the map for semantic classifier
|
||||
if (node.name) {
|
||||
classifiableNames.add(node.name.text);
|
||||
classifiableNames[node.name.text] = node.name.text;
|
||||
}
|
||||
}
|
||||
|
||||
@ -2206,15 +2207,14 @@ namespace ts {
|
||||
// module might have an exported variable called 'prototype'. We can't allow that as
|
||||
// that would clash with the built-in 'prototype' for the class.
|
||||
const prototypeSymbol = createSymbol(SymbolFlags.Property | SymbolFlags.Prototype, "prototype");
|
||||
const symbolExport = symbol.exports.get(prototypeSymbol.name);
|
||||
if (symbolExport) {
|
||||
if (symbol.exports[prototypeSymbol.name]) {
|
||||
if (node.name) {
|
||||
node.name.parent = node;
|
||||
}
|
||||
file.bindDiagnostics.push(createDiagnosticForNode(symbolExport.declarations[0],
|
||||
file.bindDiagnostics.push(createDiagnosticForNode(symbol.exports[prototypeSymbol.name].declarations[0],
|
||||
Diagnostics.Duplicate_identifier_0, prototypeSymbol.name));
|
||||
}
|
||||
symbol.exports.set(prototypeSymbol.name, prototypeSymbol);
|
||||
symbol.exports[prototypeSymbol.name] = prototypeSymbol;
|
||||
prototypeSymbol.parent = symbol;
|
||||
}
|
||||
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,609 +0,0 @@
// NumberMap, StringMap, and StringSet shims
/* @internal */
namespace ts {
// The global Map object. This may not be available, so we must test for it.
// Non-ES6 native maps don't support constructor arguments, so `createMap` must provide that functionality.
declare const Map: { new<K, V>(): Map<K, V> } | undefined;
const usingNativeMaps = typeof Map !== "undefined";
// tslint:disable-next-line:no-in-operator
const usingES6NativeMaps = usingNativeMaps && "keys" in Map.prototype && "values" in Map.prototype && "entries" in Map.prototype;

/** Extra Map methods that may not be available, so we must provide fallbacks. */
interface ES6Map<K, V> extends Map<K, V> {
keys(): Iterator<K>;
values(): Iterator<V>;
entries(): Iterator<[K, V]>;
}

/** Simplified ES6 Iterator interface. */
interface Iterator<T> {
next(): { value: T, done: false } | { value: never, done: true };
}
|
||||
/**
|
||||
* Provides Map-like functionality for ES5 runtimes.
|
||||
* This is intentionally *not* a full Map shim, and doesn't provide iterators (which aren't available for IE Maps anyway).
|
||||
* We can only efficiently support strings and number keys, and iteration will always yield stringified keys.
|
||||
*/
|
||||
class ShimMap<K extends string | number, V> implements Map<K, V> {
|
||||
private data = createDictionaryModeObject<V>();
|
||||
|
||||
/*
|
||||
So long as `K extends string | number`, we can cast `key as string` and insert it into the map.
|
||||
However, `forEach` will iterate over strings because values are stringified before being put in the map.
|
||||
*/
|
||||
|
||||
constructor() {}
|
||||
|
||||
clear(): void {
|
||||
this.data = createDictionaryModeObject<V>();
|
||||
}
|
||||
|
||||
delete(key: K): boolean {
|
||||
const had = this.has(key);
|
||||
if (had) {
|
||||
delete this.data[key as string];
|
||||
}
|
||||
return had;
|
||||
}
|
||||
|
||||
get(key: K): V {
|
||||
return this.data[key as string];
|
||||
}
|
||||
|
||||
has(key: K): boolean {
|
||||
// tslint:disable-next-line:no-in-operator
|
||||
return (key as string) in this.data;
|
||||
}
|
||||
|
||||
set(key: K, value: V): void {
|
||||
this.data[key as string] = value;
|
||||
}
|
||||
|
||||
forEach(action: (value: V, key: string) => void): void {
|
||||
for (const key in this.data) {
|
||||
action(this.data[key], key);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const MapCtr = usingNativeMaps ? Map : ShimMap;
|
||||
/**
|
||||
* In runtimes without Maps, this is implemented using an object.
|
||||
* `pairs` is an optional list of entries to add to the new map.
|
||||
*/
|
||||
export function createMap<K extends string | number, V>(pairs?: [K, V][]): Map<K, V> {
|
||||
const map = new MapCtr<K, V>();
|
||||
|
||||
if (pairs) {
|
||||
for (const [key, value] of pairs) {
|
||||
map.set(key, value);
|
||||
}
|
||||
}
|
||||
|
||||
return map;
|
||||
}
|
||||
|
||||
const createObject = Object.create;
|
||||
function createDictionaryModeObject<T>(): MapLike<T> {
|
||||
const map = createObject(null); // tslint:disable-line:no-null-keyword
|
||||
|
||||
// Using 'delete' on an object causes V8 to put the object in dictionary mode.
|
||||
// This disables creation of hidden classes, which are expensive when an object is
|
||||
// constantly changing shape.
|
||||
map["__"] = undefined;
|
||||
delete map["__"];
|
||||
|
||||
return map;
|
||||
}
|
||||
|
||||
/**
|
||||
* Iterates over entries in the map, returning the first output of `getResult` that is not `undefined`.
|
||||
* Only works for strings because shims iterate with `for-in`.
|
||||
*/
|
||||
export const findInMap: <V, U>(map: Map<string, V>, getResult: (value: V, key: string) => U | undefined) => U | undefined = usingES6NativeMaps
|
||||
? <V, U>(map: ES6Map<string, V>, f: (value: V, key: string) => U | undefined) => {
|
||||
const iter = map.entries();
|
||||
while (true) {
|
||||
const { value: pair, done } = iter.next();
|
||||
if (done) {
|
||||
return undefined;
|
||||
}
|
||||
const [key, value] = pair;
|
||||
const result = f(value, key);
|
||||
if (result !== undefined) {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
: <V, U>(map: Map<string, V>, f: (value: V, key: string) => U | undefined) => {
|
||||
let result: U | undefined;
|
||||
map.forEach((value, key) => {
|
||||
if (result === undefined)
|
||||
result = f(value, key);
|
||||
});
|
||||
return result;
|
||||
};
|
||||
|
||||
/**
|
||||
* Whether `predicate` is true for at least one entry in the map.
|
||||
* Only works for strings because shims iterate with `for-in`.
|
||||
*/
|
||||
export const someInMap: <V>(map: Map<string, V>, predicate: (value: V, key: string) => boolean) => boolean = usingES6NativeMaps
|
||||
? <V>(map: ES6Map<string, V>, predicate: (value: V, key: string) => boolean) =>
|
||||
someInIterator(map.entries(), ([key, value]) => predicate(value, key))
|
||||
: <V>(map: Map<string, V>, predicate: (value: V, key: string) => boolean) => {
|
||||
let found = false;
|
||||
map.forEach((value, key) => {
|
||||
found = found || predicate(value, key);
|
||||
});
|
||||
return found;
|
||||
};
|
||||
|
||||
/**
|
||||
* Whether `predicate` is true for at least one key in the map.
|
||||
* Only works for strings because shims iterate with `for-in`.
|
||||
*/
|
||||
export const someKeyInMap: (map: Map<string, any>, predicate: (key: string) => boolean) => boolean = usingES6NativeMaps
|
||||
? (map: ES6Map<string, any>, predicate: (key: string) => boolean) => someInIterator(map.keys(), predicate)
|
||||
: (map: Map<string, any>, predicate: (key: string) => boolean) =>
|
||||
someInMap(map, (_value, key) => predicate(key));
|
||||
|
||||
/** Whether `predicate` is true for at least one value in the map. */
|
||||
export const someValueInMap: <T>(map: Map<any, T>, predicate: (value: T) => boolean) => boolean = usingES6NativeMaps
|
||||
? <T>(map: ES6Map<any, T>, predicate: (value: T) => boolean) =>
|
||||
someInIterator(map.values(), predicate)
|
||||
: someInMap;
|
||||
|
||||
function someInIterator<T>(iterator: Iterator<T>, predicate: (value: T) => boolean): boolean {
|
||||
while (true) {
|
||||
const { value, done } = iterator.next();
|
||||
if (done) {
|
||||
return false;
|
||||
}
|
||||
if (predicate(value)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Equivalent to the ES6 code:
|
||||
* `for (const key of map.keys()) action(key);`
|
||||
* Only works for strings because shims iterate with `for-in`.
|
||||
*/
|
||||
export const forEachKeyInMap: (map: Map<string, any>, action: (key: string) => void) => void = usingES6NativeMaps
|
||||
? (map: ES6Map<string, any>, action: (key: string) => void) => {
|
||||
const iter: Iterator<string> = map.keys();
|
||||
while (true) {
|
||||
const { value: key, done } = iter.next();
|
||||
if (done) {
|
||||
return;
|
||||
}
|
||||
action(key);
|
||||
}
|
||||
}
|
||||
: (map: Map<string, any>, action: (key: string) => void) => {
|
||||
map.forEach((_value, key) => action(key));
|
||||
};
|
||||
|
||||
/** Size of a map. */
|
||||
export const mapSize: (map: Map<any, any>) => number = usingNativeMaps
|
||||
? map => (map as any).size
|
||||
: map => {
|
||||
let size = 0;
|
||||
map.forEach(() => { size++; });
|
||||
return size;
|
||||
};
|
||||
|
||||
/** Convert a Map to a MapLike. */
|
||||
export function mapLikeOfMap<T>(map: Map<string, T>): MapLike<T> {
|
||||
const obj = createDictionaryModeObject<T>();
|
||||
map.forEach((value, key) => {
|
||||
obj[key] = value;
|
||||
});
|
||||
return obj;
|
||||
}
|
||||
|
||||
/** Create a map from a MapLike. This is useful for writing large maps as object literals. */
|
||||
export function mapOfMapLike<T>(object: MapLike<T>): Map<string, T> {
|
||||
const map = createMap<string, T>();
|
||||
// Copies keys/values from template. Note that for..in will not throw if
|
||||
// template is undefined, and instead will just exit the loop.
|
||||
for (const key in object) if (hasProperty(object, key)) {
|
||||
map.set(key, object[key]);
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
class ShimStringSet implements Set<string> {
|
||||
private data = createDictionaryModeObject<true>();
|
||||
|
||||
constructor() {}
|
||||
|
||||
add(value: string) {
|
||||
this.data[value] = true;
|
||||
}
|
||||
|
||||
clear() {
|
||||
this.data = createDictionaryModeObject<true>();
|
||||
}
|
||||
|
||||
delete(value: string): boolean {
|
||||
const had = this.has(value);
|
||||
if (had) {
|
||||
delete this.data[value];
|
||||
}
|
||||
return had;
|
||||
}
|
||||
|
||||
forEach(action: (value: string) => void) {
|
||||
for (const value in this.data) {
|
||||
action(value);
|
||||
}
|
||||
}
|
||||
|
||||
has(value: string) {
|
||||
// tslint:disable-next-line:no-in-operator
|
||||
return value in this.data;
|
||||
}
|
||||
|
||||
isEmpty() {
|
||||
for (const _ in this.data) {
|
||||
// TODO: GH#11734
|
||||
_;
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
declare const Set: { new(): Set<string> } | undefined;
|
||||
const usingNativeSets = typeof Set !== "undefined";
|
||||
|
||||
const SetCtr = usingNativeSets ? Set : ShimStringSet;
|
||||
export function createSet(): Set<string> {
|
||||
return new SetCtr();
|
||||
}
|
||||
|
||||
/** False if there are any values in the set. */
|
||||
export const setIsEmpty: (set: Set<string>) => boolean = usingNativeSets
|
||||
? set => (set as any).size === 0
|
||||
: (set: ShimStringSet) => set.isEmpty();
|
||||
|
||||
// Map utilities
|
||||
|
||||
/** Set a value in a map, then return that value. */
|
||||
export function setAndReturn<K, V>(map: Map<K, V>, key: K, value: V): V {
|
||||
map.set(key, value);
|
||||
return value;
|
||||
}
|
||||
|
||||
/** False if there are any entries in the map. */
|
||||
export function mapIsEmpty(map: Map<any, any>): boolean {
|
||||
return !someKeyInMap(map, () => true);
|
||||
}
|
||||
|
||||
/** Create a new copy of a Map. */
|
||||
export function cloneMap<T>(map: Map<string, T>) {
|
||||
const clone = createMap<string, T>();
|
||||
copyMapEntriesFromTo(map, clone);
|
||||
return clone;
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs a shallow copy of the properties from a source Map to a target Map
|
||||
*
|
||||
* @param source A map from which properties should be copied.
|
||||
* @param target A map to which properties should be copied.
|
||||
*/
|
||||
export function copyMapEntriesFromTo<K, V>(source: Map<K, V>, target: Map<K, V>): void {
|
||||
source.forEach((value: V, key: K) => {
|
||||
target.set(key, value);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Equivalent to `Array.from(map.keys())`.
|
||||
* Only works for strings because shims iterate with `for-in`.
|
||||
*/
|
||||
export function keysOfMap(map: Map<string, any>): string[] {
|
||||
const keys: string[] = [];
|
||||
forEachKeyInMap(map, key => { keys.push(key); });
|
||||
return keys;
|
||||
}
|
||||
|
||||
/** Equivalent to `Array.from(map.values())`. */
|
||||
export function valuesOfMap<V>(map: Map<any, V>): V[] {
|
||||
const values: V[] = [];
|
||||
map.forEach((value) => { values.push(value); });
|
||||
return values;
|
||||
}
|
||||
|
||||
/** Return a new map with each key transformed by `getNewKey`. */
|
||||
export function transformKeys<T>(map: Map<string, T>, getNewKey: (key: string) => string): Map<string, T> {
|
||||
const newMap = createMap<string, T>();
|
||||
map.forEach((value, key) => {
|
||||
newMap.set(getNewKey(key), value);
|
||||
});
|
||||
return newMap;
|
||||
}
|
||||
|
||||
/** Replace each value with the result of calling `getNewValue`. */
|
||||
export function updateMapValues<V>(map: Map<any, V>, getNewValue: (value: V) => V): void {
|
||||
map.forEach((value, key) => {
|
||||
map.set(key, getNewValue(value));
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Change the value at `key` by applying the given function to it.
|
||||
* If there is no value at `key` then `getNewValue` will be passed `undefined`.
|
||||
*/
|
||||
export function modifyValue<K, V>(map: Map<K, V>, key: K, getNewValue: (value: V) => V) {
|
||||
map.set(key, getNewValue(map.get(key)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a value in the map, or if not already present, set and return it.
|
||||
* Treats entries set to `undefined` as equivalent to not being set (saving a call to `has`).
|
||||
*/
|
||||
export function getOrUpdate<K, V>(map: Map<K, V>, key: K, getValue: (key: K) => V): V {
|
||||
const value = map.get(key);
|
||||
return value !== undefined ? value : setAndReturn(map, key, getValue(key));
|
||||
}
|
||||
|
||||
/** Like `getOrUpdate`, but recognizes `undefined` as having been already set. */
|
||||
export function getOrUpdateAndAllowUndefined<K, V>(map: Map<K, V>, key: K, getValue: (key: K) => V): V {
|
||||
return map.has(key) ? map.get(key) : setAndReturn(map, key, getValue(key));
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the value if the key is not already in the map.
|
||||
* Returns whether the value was set.
|
||||
*/
|
||||
export function setIfNotSet<K, V>(map: Map<K, V>, key: K, value: V): boolean {
|
||||
const shouldSet = !map.has(key);
|
||||
if (shouldSet) {
|
||||
map.set(key, value);
|
||||
}
|
||||
return shouldSet;
|
||||
}
|
||||
|
||||
/** Deletes an entry from a map and returns it; or returns undefined if the key was not in the map. */
|
||||
export function tryDelete<K, V>(map: Map<K, V>, key: K): V | undefined {
|
||||
const current = map.get(key);
|
||||
if (current !== undefined) {
|
||||
map.delete(key);
|
||||
return current;
|
||||
}
|
||||
else {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a map from the elements of an array.
|
||||
*
|
||||
* @param array the array of input elements.
|
||||
* @param makeKey a function that produces a key for a given element.
|
||||
*
|
||||
* This function makes no effort to avoid collisions; if any two elements produce
|
||||
* the same key with the given 'makeKey' function, then the element with the higher
|
||||
* index in the array will be the one associated with the produced key.
|
||||
*/
|
||||
export function arrayToMap<T>(array: T[], makeKey: (value: T) => string): Map<string, T>;
|
||||
export function arrayToMap<T, U>(array: T[], makeKey: (value: T) => string, makeValue: (value: T) => U): Map<string, U>;
|
||||
export function arrayToMap<T, U>(array: T[], makeKey: (value: T) => string, makeValue?: (value: T) => U): Map<string, T | U> {
|
||||
const result = createMap<string, T | U>();
|
||||
for (const value of array) {
|
||||
result.set(makeKey(value), makeValue ? makeValue(value) : value);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds the value to an array of values associated with the key, and returns the array.
|
||||
* Creates the array if it does not already exist.
|
||||
*/
|
||||
export function multiMapAdd<K, V>(map: Map<K, V[]>, key: K, value: V): V[] {
|
||||
const values = map.get(key);
|
||||
if (values) {
|
||||
values.push(value);
|
||||
return values;
|
||||
}
|
||||
else {
|
||||
return setAndReturn(map, key, [value]);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes a value from an array of values associated with the key.
|
||||
* Does not preserve the order of those values.
|
||||
* Does nothing if `key` is not in `map`, or `value` is not in `map[key]`.
|
||||
*/
|
||||
export function multiMapRemove<K, V>(map: Map<K, V[]>, key: K, value: V): void {
|
||||
const values = map.get(key);
|
||||
if (values) {
|
||||
unorderedRemoveItem(values, value);
|
||||
if (!values.length) {
|
||||
map.delete(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** True if the maps have the same keys and values. */
|
||||
export function mapsAreEqual<V>(left: Map<string, V>, right: Map<string, V>, valuesAreEqual?: (left: V, right: V) => boolean): boolean {
|
||||
if (left === right) return true;
|
||||
if (!left || !right) return false;
|
||||
const someInLeftHasNoMatch = someInMap(left, (leftValue, leftKey) => {
|
||||
if (!right.has(leftKey)) return true;
|
||||
const rightValue = right.get(leftKey);
|
||||
return !(valuesAreEqual ? valuesAreEqual(leftValue, rightValue) : leftValue === rightValue);
|
||||
});
|
||||
if (someInLeftHasNoMatch) return false;
|
||||
const someInRightHasNoMatch = someKeyInMap(right, rightKey => !left.has(rightKey));
|
||||
return !someInRightHasNoMatch;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a sorted array of keys.
|
||||
* Sorts keys according to the iteration order they would have if they were in an object, instead of from a Map.
|
||||
* This is so that tests run consistently whether or not we have a Map shim in place.
|
||||
* The difference between Map iteration order and V8 object insertion order is that V8 moves natural-number-like keys to the front.
|
||||
*/
|
||||
export function sortInV8ObjectInsertionOrder<T>(values: T[], toKey: (t: T) => string): T[] {
|
||||
const naturalNumberKeys: T[] = [];
|
||||
const allOtherKeys: T[] = [];
|
||||
for (const value of values) {
|
||||
// "0" looks like a natural but "08" doesn't.
|
||||
const looksLikeNatural = /^(0|([1-9]\d*))$/.test(toKey(value));
|
||||
(looksLikeNatural ? naturalNumberKeys : allOtherKeys).push(value);
|
||||
}
|
||||
function toInt(value: T): number {
|
||||
return parseInt(toKey(value), 10);
|
||||
}
|
||||
naturalNumberKeys.sort((a, b) => toInt(a) - toInt(b));
|
||||
return naturalNumberKeys.concat(allOtherKeys);
|
||||
}
|
||||
|
||||
// Set utilities
|
||||
|
||||
/** Union of the `getSet` of each element in the array. */
|
||||
export function setAggregate<T>(array: T[], getSet: (t: T) => Set<string>): Set<string> {
|
||||
const result = createSet();
|
||||
for (const value of array) {
|
||||
copySetValuesFromTo(getSet(value), result);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/** Adds all values in `source` to `target`. */
|
||||
function copySetValuesFromTo<T>(source: Set<T>, target: Set<T>): void {
|
||||
source.forEach(value => target.add(value));
|
||||
}
|
||||
|
||||
/** Returns the values in `set` satisfying `predicate`. */
|
||||
export function filterSetToArray<T>(set: Set<T>, predicate: (value: T) => boolean): T[] {
|
||||
const result: T[] = [];
|
||||
set.forEach(value => {
|
||||
if (predicate(value)) {
|
||||
result.push(value);
|
||||
}
|
||||
});
|
||||
return result;
|
||||
}
|
||||
|
||||
// MapLike utilities
|
||||
|
||||
const hasOwnProperty = Object.prototype.hasOwnProperty;
|
||||
|
||||
export function clone<T>(object: T): T {
|
||||
const result: any = {};
|
||||
for (const id in object) {
|
||||
if (hasOwnProperty.call(object, id)) {
|
||||
result[id] = (<any>object)[id];
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Indicates whether a map-like contains an own property with the specified key.
|
||||
*
|
||||
* NOTE: This is intended for use only with MapLike<T> objects. For Map<T> objects, use
|
||||
* the 'in' operator.
|
||||
*
|
||||
* @param map A map-like.
|
||||
* @param key A property key.
|
||||
*/
|
||||
export function hasProperty<T>(map: MapLike<T>, key: string): boolean {
|
||||
return hasOwnProperty.call(map, key);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the value of an owned property in a map-like.
|
||||
*
|
||||
* NOTE: This is intended for use only with MapLike<T> objects. For Map<T> objects, use
|
||||
* an indexer.
|
||||
*
|
||||
* @param map A map-like.
|
||||
* @param key A property key.
|
||||
*/
|
||||
export function getProperty<T>(map: MapLike<T>, key: string): T | undefined {
|
||||
return hasOwnProperty.call(map, key) ? map[key] : undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the owned, enumerable property keys of a map-like.
|
||||
*
|
||||
* NOTE: This is intended for use with MapLike<T> objects. For Map<T> objects, use
|
||||
* Object.keys instead as it offers better performance.
|
||||
*
|
||||
* @param map A map-like.
|
||||
*/
|
||||
export function getOwnKeys<T>(map: MapLike<T>): string[] {
|
||||
const keys: string[] = [];
|
||||
for (const key in map) if (hasOwnProperty.call(map, key)) {
|
||||
keys.push(key);
|
||||
}
|
||||
return keys;
|
||||
}
|
||||
|
||||
export function assign<T1 extends MapLike<{}>, T2, T3>(t: T1, arg1: T2, arg2: T3): T1 & T2 & T3;
|
||||
export function assign<T1 extends MapLike<{}>, T2>(t: T1, arg1: T2): T1 & T2;
|
||||
export function assign<T1 extends MapLike<{}>>(t: T1, ...args: any[]): any;
|
||||
export function assign<T1 extends MapLike<{}>>(t: T1, ...args: any[]) {
|
||||
for (const arg of args) {
|
||||
for (const p of getOwnKeys(arg)) {
|
||||
t[p] = arg[p];
|
||||
}
|
||||
}
|
||||
return t;
|
||||
}
|
||||
|
||||
/**
|
||||
* Reduce the properties defined on a map-like (but not from its prototype chain).
|
||||
*
|
||||
* @param map The map-like to reduce
|
||||
* @param callback An aggregation function that is called for each entry in the map
|
||||
* @param initial The initial value for the reduction.
|
||||
*/
|
||||
export function reduceOwnProperties<T, U>(map: MapLike<T>, callback: (aggregate: U, value: T, key: string) => U, initial: U): U {
|
||||
let result = initial;
|
||||
for (const key in map) if (hasOwnProperty.call(map, key)) {
|
||||
result = callback(result, map[key], String(key));
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs a shallow equality comparison of the contents of two map-likes.
|
||||
*
|
||||
* @param left A map-like whose properties should be compared.
|
||||
* @param right A map-like whose properties should be compared.
|
||||
*/
|
||||
export function equalOwnProperties<T>(left: MapLike<T>, right: MapLike<T>, equalityComparer?: (left: T, right: T) => boolean) {
|
||||
if (left === right) return true;
|
||||
if (!left || !right) return false;
|
||||
for (const key in left) if (hasOwnProperty.call(left, key)) {
|
||||
if (!hasOwnProperty.call(right, key) === undefined) return false;
|
||||
if (equalityComparer ? !equalityComparer(left[key], right[key]) : left[key] !== right[key]) return false;
|
||||
}
|
||||
for (const key in right) if (hasOwnProperty.call(right, key)) {
|
||||
if (!hasOwnProperty.call(left, key)) return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
export function extend<T1, T2>(first: T1, second: T2): T1 & T2 {
|
||||
const result: T1 & T2 = <any>{};
|
||||
for (const id in second) if (hasOwnProperty.call(second, id)) {
|
||||
(result as any)[id] = (second as any)[id];
|
||||
}
|
||||
for (const id in first) if (hasOwnProperty.call(first, id)) {
|
||||
(result as any)[id] = (first as any)[id];
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
@ -65,7 +65,7 @@ namespace ts {
|
||||
},
|
||||
{
|
||||
name: "jsx",
|
||||
type: mapOfMapLike({
|
||||
type: createMap({
|
||||
"preserve": JsxEmit.Preserve,
|
||||
"react": JsxEmit.React
|
||||
}),
|
||||
@ -95,7 +95,7 @@ namespace ts {
|
||||
{
|
||||
name: "module",
|
||||
shortName: "m",
|
||||
type: mapOfMapLike({
|
||||
type: createMap({
|
||||
"none": ModuleKind.None,
|
||||
"commonjs": ModuleKind.CommonJS,
|
||||
"amd": ModuleKind.AMD,
|
||||
@ -109,7 +109,7 @@ namespace ts {
|
||||
},
|
||||
{
|
||||
name: "newLine",
|
||||
type: mapOfMapLike({
|
||||
type: createMap({
|
||||
"crlf": NewLineKind.CarriageReturnLineFeed,
|
||||
"lf": NewLineKind.LineFeed
|
||||
}),
|
||||
@ -258,7 +258,7 @@ namespace ts {
|
||||
{
|
||||
name: "target",
|
||||
shortName: "t",
|
||||
type: mapOfMapLike({
|
||||
type: createMap({
|
||||
"es3": ScriptTarget.ES3,
|
||||
"es5": ScriptTarget.ES5,
|
||||
"es6": ScriptTarget.ES2015,
|
||||
@ -294,7 +294,7 @@ namespace ts {
|
||||
},
|
||||
{
|
||||
name: "moduleResolution",
|
||||
type: mapOfMapLike({
|
||||
type: createMap({
|
||||
"node": ModuleResolutionKind.NodeJs,
|
||||
"classic": ModuleResolutionKind.Classic,
|
||||
}),
|
||||
@ -403,7 +403,7 @@ namespace ts {
|
||||
type: "list",
|
||||
element: {
|
||||
name: "lib",
|
||||
type: mapOfMapLike({
|
||||
type: createMap({
|
||||
// JavaScript only
|
||||
"es5": "lib.es5.d.ts",
|
||||
"es6": "lib.es2015.d.ts",
|
||||
@ -480,8 +480,8 @@ namespace ts {
|
||||
|
||||
/* @internal */
|
||||
export interface OptionNameMap {
|
||||
optionNameMap: Map<string, CommandLineOption>;
|
||||
shortOptionNames: Map<string, string>;
|
||||
optionNameMap: Map<CommandLineOption>;
|
||||
shortOptionNames: Map<string>;
|
||||
}
|
||||
|
||||
/* @internal */
|
||||
@ -500,12 +500,12 @@ namespace ts {
|
||||
return optionNameMapCache;
|
||||
}
|
||||
|
||||
const optionNameMap = createMap<string, CommandLineOption>();
|
||||
const shortOptionNames = createMap<string, string>();
|
||||
const optionNameMap = createMap<CommandLineOption>();
|
||||
const shortOptionNames = createMap<string>();
|
||||
forEach(optionDeclarations, option => {
|
||||
optionNameMap.set(option.name.toLowerCase(), option);
|
||||
optionNameMap[option.name.toLowerCase()] = option;
|
||||
if (option.shortName) {
|
||||
shortOptionNames.set(option.shortName, option.name);
|
||||
shortOptionNames[option.shortName] = option.name;
|
||||
}
|
||||
});
|
||||
|
||||
@ -515,16 +515,16 @@ namespace ts {
|
||||
|
||||
/* @internal */
|
||||
export function createCompilerDiagnosticForInvalidCustomType(opt: CommandLineOptionOfCustomType): Diagnostic {
|
||||
const namesOfType = keysOfMap(opt.type).map(key => `'${key}'`).join(", ");
|
||||
const namesOfType = Object.keys(opt.type).map(key => `'${key}'`).join(", ");
|
||||
return createCompilerDiagnostic(Diagnostics.Argument_for_0_option_must_be_Colon_1, `--${opt.name}`, namesOfType);
|
||||
}
|
||||
|
||||
/* @internal */
|
||||
export function parseCustomTypeOption(opt: CommandLineOptionOfCustomType, value: string, errors: Diagnostic[]) {
|
||||
const key = trimString((value || "")).toLowerCase();
|
||||
const customType = opt.type.get(key);
|
||||
if (customType !== undefined) {
|
||||
return customType;
|
||||
const map = opt.type;
|
||||
if (key in map) {
|
||||
return map[key];
|
||||
}
|
||||
else {
|
||||
errors.push(createCompilerDiagnosticForInvalidCustomType(opt));
|
||||
@ -577,13 +577,13 @@ namespace ts {
|
||||
s = s.slice(s.charCodeAt(1) === CharacterCodes.minus ? 2 : 1).toLowerCase();
|
||||
|
||||
// Try to translate short option names to their full equivalents.
|
||||
const short = shortOptionNames.get(s);
|
||||
if (short !== undefined) {
|
||||
s = short;
|
||||
if (s in shortOptionNames) {
|
||||
s = shortOptionNames[s];
|
||||
}
|
||||
|
||||
const opt = optionNameMap.get(s);
|
||||
if (opt !== undefined) {
|
||||
if (s in optionNameMap) {
|
||||
const opt = optionNameMap[s];
|
||||
|
||||
if (opt.isTSConfigOnly) {
|
||||
errors.push(createCompilerDiagnostic(Diagnostics.Option_0_can_only_be_specified_in_tsconfig_json_file, opt.name));
|
||||
}
|
||||
@ -706,7 +706,7 @@ namespace ts {
|
||||
* @param fileNames array of filenames to be generated into tsconfig.json
|
||||
*/
|
||||
/* @internal */
|
||||
export function generateTSConfig(options: CompilerOptions, fileNames: string[]): { compilerOptions: MapLike<CompilerOptionsValue> } {
|
||||
export function generateTSConfig(options: CompilerOptions, fileNames: string[]): { compilerOptions: Map<CompilerOptionsValue> } {
|
||||
const compilerOptions = extend(options, defaultInitCompilerOptions);
|
||||
const configurations: any = {
|
||||
compilerOptions: serializeCompilerOptions(compilerOptions)
|
||||
@ -718,7 +718,7 @@ namespace ts {
|
||||
|
||||
return configurations;
|
||||
|
||||
function getCustomTypeMapOfCommandLineOption(optionDefinition: CommandLineOption): Map<string, string | number> | undefined {
|
||||
function getCustomTypeMapOfCommandLineOption(optionDefinition: CommandLineOption): Map<string | number> | undefined {
|
||||
if (optionDefinition.type === "string" || optionDefinition.type === "number" || optionDefinition.type === "boolean") {
|
||||
// this is of a type CommandLineOptionOfPrimitiveType
|
||||
return undefined;
|
||||
@ -731,17 +731,18 @@ namespace ts {
|
||||
}
|
||||
}
|
||||
|
||||
function getNameOfCompilerOptionValue(value: CompilerOptionsValue, customTypeMap: Map<string, string | number>): string | undefined {
|
||||
function getNameOfCompilerOptionValue(value: CompilerOptionsValue, customTypeMap: MapLike<string | number>): string | undefined {
|
||||
// There is a typeMap associated with this command-line option so use it to map value back to its name
|
||||
return findInMap(customTypeMap, (customValue, key) => {
|
||||
if (customValue === value) {
|
||||
for (const key in customTypeMap) {
|
||||
if (customTypeMap[key] === value) {
|
||||
return key;
|
||||
}
|
||||
});
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function serializeCompilerOptions(options: CompilerOptions): MapLike<CompilerOptionsValue> {
|
||||
const result = createMap<string, CompilerOptionsValue>();
|
||||
function serializeCompilerOptions(options: CompilerOptions): Map<CompilerOptionsValue> {
|
||||
const result = createMap<CompilerOptionsValue>();
|
||||
const optionsNameMap = getOptionNameMap().optionNameMap;
|
||||
|
||||
for (const name in options) {
|
||||
@ -757,13 +758,13 @@ namespace ts {
|
||||
break;
|
||||
default:
|
||||
const value = options[name];
|
||||
let optionDefinition = optionsNameMap.get(name.toLowerCase());
|
||||
let optionDefinition = optionsNameMap[name.toLowerCase()];
|
||||
if (optionDefinition) {
|
||||
const customTypeMap = getCustomTypeMapOfCommandLineOption(optionDefinition);
|
||||
if (!customTypeMap) {
|
||||
// There is no map associated with this compiler option then use the value as-is
|
||||
// This is the case if the value is expected to be string, number, boolean or list of string
|
||||
result.set(name, value);
|
||||
result[name] = value;
|
||||
}
|
||||
else {
|
||||
if (optionDefinition.type === "list") {
|
||||
@ -771,11 +772,11 @@ namespace ts {
|
||||
for (const element of value as (string | number)[]) {
|
||||
convertedValue.push(getNameOfCompilerOptionValue(element, customTypeMap));
|
||||
}
|
||||
result.set(name, convertedValue);
|
||||
result[name] = convertedValue;
|
||||
}
|
||||
else {
|
||||
// There is a typeMap associated with this command-line option so use it to map value back to its name
|
||||
result.set(name, getNameOfCompilerOptionValue(value, customTypeMap));
|
||||
result[name] = getNameOfCompilerOptionValue(value, customTypeMap);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -783,7 +784,7 @@ namespace ts {
|
||||
}
|
||||
}
|
||||
}
|
||||
return mapLikeOfMap(result);
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
@ -1024,8 +1025,8 @@ namespace ts {
|
||||
const optionNameMap = arrayToMap(optionDeclarations, opt => opt.name);
|
||||
|
||||
for (const id in jsonOptions) {
|
||||
const opt = optionNameMap.get(id);
|
||||
if (opt !== undefined) {
|
||||
if (id in optionNameMap) {
|
||||
const opt = optionNameMap[id];
|
||||
defaultOptions[opt.name] = convertJsonOption(opt, jsonOptions[id], basePath, errors);
|
||||
}
|
||||
else {
|
||||
@ -1061,9 +1062,8 @@ namespace ts {
|
||||
|
||||
function convertJsonOptionOfCustomType(opt: CommandLineOptionOfCustomType, value: string, errors: Diagnostic[]) {
|
||||
const key = value.toLowerCase();
|
||||
const val = opt.type.get(key);
|
||||
if (val !== undefined) {
|
||||
return val;
|
||||
if (key in opt.type) {
|
||||
return opt.type[key];
|
||||
}
|
||||
else {
|
||||
errors.push(createCompilerDiagnosticForInvalidCustomType(opt));
|
||||
@ -1172,12 +1172,12 @@ namespace ts {
|
||||
// Literal file names (provided via the "files" array in tsconfig.json) are stored in a
|
||||
// file map with a possibly case insensitive key. We use this map later when including
|
||||
// wildcard paths.
|
||||
const literalFileMap = createMap<string, string>();
|
||||
const literalFileMap = createMap<string>();
|
||||
|
||||
// Wildcard paths (provided via the "includes" array in tsconfig.json) are stored in a
|
||||
// file map with a possibly case insensitive key. We use this map to store paths matched
|
||||
// via wildcard, and to handle extension priority.
|
||||
const wildcardFileMap = createMap<string, string>();
|
||||
const wildcardFileMap = createMap<string>();
|
||||
|
||||
if (include) {
|
||||
include = validateSpecs(include, errors, /*allowTrailingRecursion*/ false);
|
||||
@ -1191,7 +1191,7 @@ namespace ts {
|
||||
// file map that marks whether it was a regular wildcard match (with a `*` or `?` token),
|
||||
// or a recursive directory. This information is used by filesystem watchers to monitor for
|
||||
// new entries in these paths.
|
||||
const wildcardDirectories: Map<string, WatchDirectoryFlags> = getWildcardDirectories(include, exclude, basePath, host.useCaseSensitiveFileNames);
|
||||
const wildcardDirectories: Map<WatchDirectoryFlags> = getWildcardDirectories(include, exclude, basePath, host.useCaseSensitiveFileNames);
|
||||
|
||||
// Rather than requery this for each file and filespec, we query the supported extensions
|
||||
// once and store it on the expansion context.
|
||||
@ -1202,7 +1202,7 @@ namespace ts {
|
||||
if (fileNames) {
|
||||
for (const fileName of fileNames) {
|
||||
const file = combinePaths(basePath, fileName);
|
||||
literalFileMap.set(keyMapper(file), file);
|
||||
literalFileMap[keyMapper(file)] = file;
|
||||
}
|
||||
}
|
||||
|
||||
@ -1225,17 +1225,18 @@ namespace ts {
|
||||
removeWildcardFilesWithLowerPriorityExtension(file, wildcardFileMap, supportedExtensions, keyMapper);
|
||||
|
||||
const key = keyMapper(file);
|
||||
if (!literalFileMap.has(key)) {
|
||||
setIfNotSet(wildcardFileMap, key, file);
|
||||
if (!(key in literalFileMap) && !(key in wildcardFileMap)) {
|
||||
wildcardFileMap[key] = file;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const literalFiles = valuesOfMap(literalFileMap);
|
||||
const wildcardFiles = valuesOfMap(wildcardFileMap).sort(host.useCaseSensitiveFileNames ? compareStrings : compareStringsCaseInsensitive);
|
||||
const literalFiles = reduceProperties(literalFileMap, addFileToOutput, []);
|
||||
const wildcardFiles = reduceProperties(wildcardFileMap, addFileToOutput, []);
|
||||
wildcardFiles.sort(host.useCaseSensitiveFileNames ? compareStrings : compareStringsCaseInsensitive);
|
||||
return {
|
||||
fileNames: literalFiles.concat(wildcardFiles),
|
||||
wildcardDirectories: mapLikeOfMap(wildcardDirectories)
|
||||
wildcardDirectories
|
||||
};
|
||||
}
|
||||
|
||||
@ -1276,7 +1277,7 @@ namespace ts {
|
||||
// /a/b/a?z - Watch /a/b directly to catch any new file matching a?z
|
||||
const rawExcludeRegex = getRegularExpressionForWildcard(exclude, path, "exclude");
|
||||
const excludeRegex = rawExcludeRegex && new RegExp(rawExcludeRegex, useCaseSensitiveFileNames ? "" : "i");
|
||||
const wildcardDirectories = createMap<string, WatchDirectoryFlags>();
|
||||
const wildcardDirectories = createMap<WatchDirectoryFlags>();
|
||||
if (include !== undefined) {
|
||||
const recursiveKeys: string[] = [];
|
||||
for (const file of include) {
|
||||
@ -1289,9 +1290,9 @@ namespace ts {
|
||||
if (match) {
|
||||
const key = useCaseSensitiveFileNames ? match[0] : match[0].toLowerCase();
|
||||
const flags = watchRecursivePattern.test(name) ? WatchDirectoryFlags.Recursive : WatchDirectoryFlags.None;
|
||||
const existingFlags = wildcardDirectories.get(key);
|
||||
const existingFlags = wildcardDirectories[key];
|
||||
if (existingFlags === undefined || existingFlags < flags) {
|
||||
wildcardDirectories.set(key, flags);
|
||||
wildcardDirectories[key] = flags;
|
||||
if (flags === WatchDirectoryFlags.Recursive) {
|
||||
recursiveKeys.push(key);
|
||||
}
|
||||
@ -1300,13 +1301,13 @@ namespace ts {
|
||||
}
|
||||
|
||||
// Remove any subpaths under an existing recursively watched directory.
|
||||
forEachKeyInMap(wildcardDirectories, key => {
|
||||
for (const key in wildcardDirectories) {
|
||||
for (const recursiveKey of recursiveKeys) {
|
||||
if (key !== recursiveKey && containsPath(recursiveKey, key, path, !useCaseSensitiveFileNames)) {
|
||||
wildcardDirectories.delete(key);
|
||||
delete wildcardDirectories[key];
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return wildcardDirectories;
|
||||
@ -1320,13 +1321,13 @@ namespace ts {
|
||||
* @param extensionPriority The priority of the extension.
|
||||
* @param context The expansion context.
|
||||
*/
|
||||
function hasFileWithHigherPriorityExtension(file: string, literalFiles: Map<string, string>, wildcardFiles: Map<string, string>, extensions: string[], keyMapper: (value: string) => string) {
|
||||
function hasFileWithHigherPriorityExtension(file: string, literalFiles: Map<string>, wildcardFiles: Map<string>, extensions: string[], keyMapper: (value: string) => string) {
|
||||
const extensionPriority = getExtensionPriority(file, extensions);
|
||||
const adjustedExtensionPriority = adjustExtensionPriority(extensionPriority);
|
||||
for (let i = ExtensionPriority.Highest; i < adjustedExtensionPriority; i++) {
|
||||
const higherPriorityExtension = extensions[i];
|
||||
const higherPriorityPath = keyMapper(changeExtension(file, higherPriorityExtension));
|
||||
if (literalFiles.has(higherPriorityPath) || wildcardFiles.has(higherPriorityPath)) {
|
||||
if (higherPriorityPath in literalFiles || higherPriorityPath in wildcardFiles) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
@ -1342,16 +1343,27 @@ namespace ts {
|
||||
* @param extensionPriority The priority of the extension.
|
||||
* @param context The expansion context.
|
||||
*/
|
||||
function removeWildcardFilesWithLowerPriorityExtension(file: string, wildcardFiles: Map<string, string>, extensions: string[], keyMapper: (value: string) => string) {
|
||||
function removeWildcardFilesWithLowerPriorityExtension(file: string, wildcardFiles: Map<string>, extensions: string[], keyMapper: (value: string) => string) {
|
||||
const extensionPriority = getExtensionPriority(file, extensions);
|
||||
const nextExtensionPriority = getNextLowestExtensionPriority(extensionPriority);
|
||||
for (let i = nextExtensionPriority; i < extensions.length; i++) {
|
||||
const lowerPriorityExtension = extensions[i];
|
||||
const lowerPriorityPath = keyMapper(changeExtension(file, lowerPriorityExtension));
|
||||
wildcardFiles.delete(lowerPriorityPath);
|
||||
delete wildcardFiles[lowerPriorityPath];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a file to an array of files.
|
||||
*
|
||||
* @param output The output array.
|
||||
* @param file The file path.
|
||||
*/
|
||||
function addFileToOutput(output: string[], file: string) {
|
||||
output.push(file);
|
||||
return output;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets a case sensitive key.
|
||||
*
|
||||
|
||||
@ -1,5 +1,4 @@
|
||||
/// <reference path="collections.ts" />
|
||||
/// <reference path="types.ts"/>
|
||||
/// <reference path="types.ts"/>
|
||||
/// <reference path="performance.ts" />
|
||||
|
||||
/* @internal */
|
||||
@ -19,11 +18,31 @@ namespace ts {
|
||||
True = -1
|
||||
}
|
||||
|
||||
const createObject = Object.create;
|
||||
|
||||
// More efficient to create a collator once and use its `compare` than to call `a.localeCompare(b)` many times.
|
||||
export const collator: { compare(a: string, b: string): number } = typeof Intl === "object" && typeof Intl.Collator === "function" ? new Intl.Collator() : undefined;
|
||||
|
||||
export function createMap<T>(template?: MapLike<T>): Map<T> {
|
||||
const map: Map<T> = createObject(null); // tslint:disable-line:no-null-keyword
|
||||
|
||||
// Using 'delete' on an object causes V8 to put the object in dictionary mode.
|
||||
// This disables creation of hidden classes, which are expensive when an object is
|
||||
// constantly changing shape.
|
||||
map["__"] = undefined;
|
||||
delete map["__"];
|
||||
|
||||
// Copies keys/values from template. Note that for..in will not throw if
|
||||
// template is undefined, and instead will just exit the loop.
|
||||
for (const key in template) if (hasOwnProperty.call(template, key)) {
|
||||
map[key] = template[key];
|
||||
}
|
||||
|
||||
return map;
|
||||
}
|
||||
|
||||
export function createFileMap<T>(keyMapper?: (key: string) => string): FileMap<T> {
|
||||
const files = createMap<string, T>();
|
||||
let files = createMap<T>();
|
||||
return {
|
||||
get,
|
||||
set,
|
||||
@ -35,33 +54,39 @@ namespace ts {
|
||||
};
|
||||
|
||||
function forEachValueInMap(f: (key: Path, value: T) => void) {
|
||||
files.forEach((value, key) => f(key as Path, value));
|
||||
for (const key in files) {
|
||||
f(<Path>key, files[key]);
|
||||
}
|
||||
}
|
||||
|
||||
function getKeys(): Path[] {
|
||||
return keysOfMap(files) as Path[];
|
||||
function getKeys() {
|
||||
const keys: Path[] = [];
|
||||
for (const key in files) {
|
||||
keys.push(<Path>key);
|
||||
}
|
||||
return keys;
|
||||
}
|
||||
|
||||
// path should already be well-formed so it does not need to be normalized
|
||||
function get(path: Path): T {
|
||||
return files.get(toKey(path));
|
||||
return files[toKey(path)];
|
||||
}
|
||||
|
||||
function set(path: Path, value: T) {
|
||||
files.set(toKey(path), value);
|
||||
files[toKey(path)] = value;
|
||||
}
|
||||
|
||||
function contains(path: Path) {
|
||||
return files.has(toKey(path));
|
||||
return toKey(path) in files;
|
||||
}
|
||||
|
||||
function remove(path: Path) {
|
||||
const key = toKey(path);
|
||||
files.delete(key);
|
||||
delete files[key];
|
||||
}
|
||||
|
||||
function clear() {
|
||||
files.clear();
|
||||
files = createMap<T>();
|
||||
}
|
||||
|
||||
function toKey(path: Path): string {
|
||||
@ -682,6 +707,242 @@ namespace ts {
|
||||
return initial;
|
||||
}
|
||||
|
||||
const hasOwnProperty = Object.prototype.hasOwnProperty;
|
||||
|
||||
/**
|
||||
* Indicates whether a map-like contains an own property with the specified key.
|
||||
*
|
||||
* NOTE: This is intended for use only with MapLike<T> objects. For Map<T> objects, use
|
||||
* the 'in' operator.
|
||||
*
|
||||
* @param map A map-like.
|
||||
* @param key A property key.
|
||||
*/
|
||||
export function hasProperty<T>(map: MapLike<T>, key: string): boolean {
|
||||
return hasOwnProperty.call(map, key);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the value of an owned property in a map-like.
|
||||
*
|
||||
* NOTE: This is intended for use only with MapLike<T> objects. For Map<T> objects, use
|
||||
* an indexer.
|
||||
*
|
||||
* @param map A map-like.
|
||||
* @param key A property key.
|
||||
*/
|
||||
export function getProperty<T>(map: MapLike<T>, key: string): T | undefined {
|
||||
return hasOwnProperty.call(map, key) ? map[key] : undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the owned, enumerable property keys of a map-like.
|
||||
*
|
||||
* NOTE: This is intended for use with MapLike<T> objects. For Map<T> objects, use
|
||||
* Object.keys instead as it offers better performance.
|
||||
*
|
||||
* @param map A map-like.
|
||||
*/
|
||||
export function getOwnKeys<T>(map: MapLike<T>): string[] {
|
||||
const keys: string[] = [];
|
||||
for (const key in map) if (hasOwnProperty.call(map, key)) {
|
||||
keys.push(key);
|
||||
}
|
||||
return keys;
|
||||
}
|
||||
|
||||
/**
|
||||
* Enumerates the properties of a Map<T>, invoking a callback and returning the first truthy result.
|
||||
*
|
||||
* @param map A map for which properties should be enumerated.
|
||||
* @param callback A callback to invoke for each property.
|
||||
*/
|
||||
export function forEachProperty<T, U>(map: Map<T>, callback: (value: T, key: string) => U): U {
|
||||
let result: U;
|
||||
for (const key in map) {
|
||||
if (result = callback(map[key], key)) break;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if a Map<T> has some matching property.
|
||||
*
|
||||
* @param map A map whose properties should be tested.
|
||||
* @param predicate An optional callback used to test each property.
|
||||
*/
|
||||
export function someProperties<T>(map: Map<T>, predicate?: (value: T, key: string) => boolean) {
|
||||
for (const key in map) {
|
||||
if (!predicate || predicate(map[key], key)) return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs a shallow copy of the properties from a source Map<T> to a target MapLike<T>
|
||||
*
|
||||
* @param source A map from which properties should be copied.
|
||||
* @param target A map to which properties should be copied.
|
||||
*/
|
||||
export function copyProperties<T>(source: Map<T>, target: MapLike<T>): void {
|
||||
for (const key in source) {
|
||||
target[key] = source[key];
|
||||
}
|
||||
}
|
||||
|
||||
export function assign<T1 extends MapLike<{}>, T2, T3>(t: T1, arg1: T2, arg2: T3): T1 & T2 & T3;
|
||||
export function assign<T1 extends MapLike<{}>, T2>(t: T1, arg1: T2): T1 & T2;
|
||||
export function assign<T1 extends MapLike<{}>>(t: T1, ...args: any[]): any;
|
||||
export function assign<T1 extends MapLike<{}>>(t: T1, ...args: any[]) {
|
||||
for (const arg of args) {
|
||||
for (const p of getOwnKeys(arg)) {
|
||||
t[p] = arg[p];
|
||||
}
|
||||
}
|
||||
return t;
|
||||
}

/**
 * Reduce the properties of a map.
 *
 * NOTE: This is intended for use with Map<T> objects. For MapLike<T> objects, use
 * reduceOwnProperties instead as it offers better runtime safety.
 *
 * @param map The map to reduce
 * @param callback An aggregation function that is called for each entry in the map
 * @param initial The initial value for the reduction.
 */
export function reduceProperties<T, U>(map: Map<T>, callback: (aggregate: U, value: T, key: string) => U, initial: U): U {
    let result = initial;
    for (const key in map) {
        result = callback(result, map[key], String(key));
    }
    return result;
}

/**
 * Reduce the properties defined on a map-like (but not from its prototype chain).
 *
 * NOTE: This is intended for use with MapLike<T> objects. For Map<T> objects, use
 * reduceProperties instead as it offers better performance.
 *
 * @param map The map-like to reduce
 * @param callback An aggregation function that is called for each entry in the map
 * @param initial The initial value for the reduction.
 */
export function reduceOwnProperties<T, U>(map: MapLike<T>, callback: (aggregate: U, value: T, key: string) => U, initial: U): U {
    let result = initial;
    for (const key in map) if (hasOwnProperty.call(map, key)) {
        result = callback(result, map[key], String(key));
    }
    return result;
}

/**
 * Performs a shallow equality comparison of the contents of two map-likes.
 *
 * @param left A map-like whose properties should be compared.
 * @param right A map-like whose properties should be compared.
 */
export function equalOwnProperties<T>(left: MapLike<T>, right: MapLike<T>, equalityComparer?: (left: T, right: T) => boolean) {
    if (left === right) return true;
    if (!left || !right) return false;
    for (const key in left) if (hasOwnProperty.call(left, key)) {
        if (!hasOwnProperty.call(right, key)) return false;
        if (equalityComparer ? !equalityComparer(left[key], right[key]) : left[key] !== right[key]) return false;
    }
    for (const key in right) if (hasOwnProperty.call(right, key)) {
        if (!hasOwnProperty.call(left, key)) return false;
    }
    return true;
}
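
For illustration only (not part of the change), equalOwnProperties compares own keys in both directions and compares values either with === or with the optional comparer:

// Sketch: assumes equalOwnProperties from above is in scope.
equalOwnProperties({ a: 1 }, { a: 1 });       // true - same own keys and values
equalOwnProperties({ a: 1 }, { a: 1, b: 2 }); // false - extra own key on the right
equalOwnProperties({ a: "x" }, { a: "X" }, (l, r) => l.toLowerCase() === r.toLowerCase()); // true with a custom comparer
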

/**
 * Creates a map from the elements of an array.
 *
 * @param array the array of input elements.
 * @param makeKey a function that produces a key for a given element.
 *
 * This function makes no effort to avoid collisions; if any two elements produce
 * the same key with the given 'makeKey' function, then the element with the higher
 * index in the array will be the one associated with the produced key.
 */
export function arrayToMap<T>(array: T[], makeKey: (value: T) => string): Map<T>;
export function arrayToMap<T, U>(array: T[], makeKey: (value: T) => string, makeValue: (value: T) => U): Map<U>;
export function arrayToMap<T, U>(array: T[], makeKey: (value: T) => string, makeValue?: (value: T) => U): Map<T | U> {
    const result = createMap<T | U>();
    for (const value of array) {
        result[makeKey(value)] = makeValue ? makeValue(value) : value;
    }
    return result;
}
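
A usage sketch for arrayToMap (illustrative only; the file objects are hypothetical). With two arguments the elements themselves become the values; the optional makeValue callback projects them:

// Sketch: assumes arrayToMap and createMap from above are in scope.
const files = [{ fileName: "a.ts", text: "" }, { fileName: "b.ts", text: "// b" }];
const byName = arrayToMap(files, f => f.fileName);                  // map of file objects keyed by fileName
const textByName = arrayToMap(files, f => f.fileName, f => f.text); // map of just the text, same keys
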

export function isEmpty<T>(map: Map<T>) {
    for (const id in map) {
        if (hasProperty(map, id)) {
            return false;
        }
    }
    return true;
}

export function cloneMap<T>(map: Map<T>) {
    const clone = createMap<T>();
    copyProperties(map, clone);
    return clone;
}

export function clone<T>(object: T): T {
    const result: any = {};
    for (const id in object) {
        if (hasOwnProperty.call(object, id)) {
            result[id] = (<any>object)[id];
        }
    }
    return result;
}

export function extend<T1, T2>(first: T1, second: T2): T1 & T2 {
    const result: T1 & T2 = <any>{};
    for (const id in second) if (hasOwnProperty.call(second, id)) {
        (result as any)[id] = (second as any)[id];
    }
    for (const id in first) if (hasOwnProperty.call(first, id)) {
        (result as any)[id] = (first as any)[id];
    }
    return result;
}

/**
 * Adds the value to an array of values associated with the key, and returns the array.
 * Creates the array if it does not already exist.
 */
export function multiMapAdd<V>(map: Map<V[]>, key: string | number, value: V): V[] {
    const values = map[key];
    if (values) {
        values.push(value);
        return values;
    }
    else {
        return map[key] = [value];
    }
}

/**
 * Removes a value from an array of values associated with the key.
 * Does not preserve the order of those values.
 * Does nothing if `key` is not in `map`, or `value` is not in `map[key]`.
 */
export function multiMapRemove<V>(map: Map<V[]>, key: string, value: V): void {
    const values = map[key];
    if (values) {
        unorderedRemoveItem(values, value);
        if (!values.length) {
            delete map[key];
        }
    }
}
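
A hedged sketch of the multi-map helpers (illustrative only; the keys and values are hypothetical): multiMapAdd lazily creates the per-key array, and multiMapRemove deletes the key once its array becomes empty.

// Sketch: assumes createMap, multiMapAdd and multiMapRemove from above are in scope.
const diagnosticsByFile = createMap<string[]>();
multiMapAdd(diagnosticsByFile, "a.ts", "error 1");
multiMapAdd(diagnosticsByFile, "a.ts", "error 2");    // diagnosticsByFile["a.ts"] is ["error 1", "error 2"]
multiMapRemove(diagnosticsByFile, "a.ts", "error 1");
multiMapRemove(diagnosticsByFile, "a.ts", "error 2"); // last value gone, so the key is deleted as well
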

/**
 * Tests whether a value is an array.
 */
@ -780,10 +1041,10 @@ namespace ts {
        return text.replace(/{(\d+)}/g, (_match, index?) => args[+index + baseIndex]);
    }

    export let localizedDiagnosticMessages: Map<string, string> = undefined;
    export let localizedDiagnosticMessages: Map<string> = undefined;

    export function getLocaleSpecificMessage(message: DiagnosticMessage) {
        return localizedDiagnosticMessages && localizedDiagnosticMessages.get(message.key) || message.message;
        return localizedDiagnosticMessages && localizedDiagnosticMessages[message.key] || message.message;
    }

    export function createFileDiagnostic(file: SourceFile, start: number, length: number, message: DiagnosticMessage, ...args: (string | number)[]): Diagnostic;

@ -59,7 +59,7 @@ namespace ts {
    let resultHasExternalModuleIndicator: boolean;
    let currentText: string;
    let currentLineMap: number[];
    let currentIdentifiers: Map<string, string>;
    let currentIdentifiers: Map<string>;
    let isCurrentFileExternalModule: boolean;
    let reportedDeclarationError = false;
    let errorNameNode: DeclarationName;
@ -75,7 +75,7 @@ namespace ts {
    // and we could be collecting these paths from multiple files into single one with --out option
    let referencesOutput = "";

    let usedTypeDirectiveReferences: Set<string>;
    let usedTypeDirectiveReferences: Map<string>;

    // Emit references corresponding to each file
    const emittedReferencedFiles: SourceFile[] = [];
@ -156,9 +156,9 @@ namespace ts {
        });

        if (usedTypeDirectiveReferences) {
            usedTypeDirectiveReferences.forEach(directive => {
            for (const directive in usedTypeDirectiveReferences) {
                referencesOutput += `/// <reference types="${directive}" />${newLine}`;
            });
            }
        }
    }

    return {
@ -267,11 +267,11 @@ namespace ts {
    }

    if (!usedTypeDirectiveReferences) {
        usedTypeDirectiveReferences = createSet();
        usedTypeDirectiveReferences = createMap<string>();
    }
    for (const directive of typeReferenceDirectives) {
        if (!usedTypeDirectiveReferences.has(directive)) {
            usedTypeDirectiveReferences.add(directive);
        if (!(directive in usedTypeDirectiveReferences)) {
            usedTypeDirectiveReferences[directive] = directive;
        }
    }
}
@ -535,14 +535,14 @@ namespace ts {
    // do not need to keep track of created temp names.
    function getExportDefaultTempVariableName(): string {
        const baseName = "_default";
        if (!currentIdentifiers.has(baseName)) {
        if (!(baseName in currentIdentifiers)) {
            return baseName;
        }
        let count = 0;
        while (true) {
            count++;
            const name = baseName + "_" + count;
            if (!currentIdentifiers.has(name)) {
            if (!(name in currentIdentifiers)) {
                return name;
            }
        }

@ -219,11 +219,11 @@ const _super = (function (geti, seti) {

    let nodeIdToGeneratedName: string[];
    let autoGeneratedIdToGeneratedName: string[];
    let generatedNameSet: Set<string>;
    let generatedNameSet: Map<string>;
    let tempFlags: TempFlags;
    let currentSourceFile: SourceFile;
    let currentText: string;
    let currentFileIdentifiers: Map<string, string>;
    let currentFileIdentifiers: Map<string>;
    let extendsEmitted: boolean;
    let assignEmitted: boolean;
    let decorateEmitted: boolean;
@ -292,7 +292,7 @@ const _super = (function (geti, seti) {
        sourceMap.initialize(jsFilePath, sourceMapFilePath, sourceFiles, isBundledEmit);
        nodeIdToGeneratedName = [];
        autoGeneratedIdToGeneratedName = [];
        generatedNameSet = createSet();
        generatedNameSet = createMap<string>();
        isOwnFileEmit = !isBundledEmit;

        // Emit helpers from all the files
@ -2645,16 +2645,15 @@ const _super = (function (geti, seti) {

    function isUniqueName(name: string): boolean {
        return !resolver.hasGlobalName(name) &&
            !currentFileIdentifiers.has(name) &&
            !generatedNameSet.has(name);
            !hasProperty(currentFileIdentifiers, name) &&
            !hasProperty(generatedNameSet, name);
    }

    function isUniqueLocalName(name: string, container: Node): boolean {
        for (let node = container; isNodeDescendantOf(node, container); node = node.nextContainer) {
            if (node.locals) {
                const local = node.locals.get(name);
            if (node.locals && hasProperty(node.locals, name)) {
                // We conservatively include alias symbols to cover cases where they're emitted as locals
                if (local && local.flags & (SymbolFlags.Value | SymbolFlags.ExportValue | SymbolFlags.Alias)) {
                if (node.locals[name].flags & (SymbolFlags.Value | SymbolFlags.ExportValue | SymbolFlags.Alias)) {
                    return false;
                }
            }
@ -2703,8 +2702,7 @@ const _super = (function (geti, seti) {
        while (true) {
            const generatedName = baseName + i;
            if (isUniqueName(generatedName)) {
                generatedNameSet.add(generatedName);
                return generatedName;
                return generatedNameSet[generatedName] = generatedName;
            }
            i++;
        }

@ -1622,7 +1622,7 @@ namespace ts {
        // flag and setting a parent node.
        const react = createIdentifier(reactNamespace || "React");
        react.flags &= ~NodeFlags.Synthesized;
        // Set the parent that is in parse tree
        // Set the parent that is in parse tree
        // this makes sure that parent chain is intact for checker to traverse complete scope tree
        react.parent = getParseTreeNode(parent);
        return react;
@ -2805,9 +2805,9 @@ namespace ts {
        return destEmitNode;
    }

    function mergeTokenSourceMapRanges(sourceRanges: Map<SyntaxKind, TextRange>, destRanges: Map<SyntaxKind, TextRange>): Map<SyntaxKind, TextRange> {
        if (!destRanges) destRanges = createMap<SyntaxKind, TextRange>();
        copyMapEntriesFromTo(sourceRanges, destRanges);
    function mergeTokenSourceMapRanges(sourceRanges: Map<TextRange>, destRanges: Map<TextRange>) {
        if (!destRanges) destRanges = createMap<TextRange>();
        copyProperties(sourceRanges, destRanges);
        return destRanges;
    }

@ -2899,8 +2899,8 @@ namespace ts {
     */
    export function setTokenSourceMapRange<T extends Node>(node: T, token: SyntaxKind, range: TextRange) {
        const emitNode = getOrCreateEmitNode(node);
        const tokenSourceMapRanges = emitNode.tokenSourceMapRanges || (emitNode.tokenSourceMapRanges = createMap<SyntaxKind, TextRange>());
        tokenSourceMapRanges.set(token, range);
        const tokenSourceMapRanges = emitNode.tokenSourceMapRanges || (emitNode.tokenSourceMapRanges = createMap<TextRange>());
        tokenSourceMapRanges[token] = range;
        return node;
    }

@ -2941,7 +2941,7 @@ namespace ts {
    export function getTokenSourceMapRange(node: Node, token: SyntaxKind) {
        const emitNode = node.emitNode;
        const tokenSourceMapRanges = emitNode && emitNode.tokenSourceMapRanges;
        return tokenSourceMapRanges && tokenSourceMapRanges.get(token);
        return tokenSourceMapRanges && tokenSourceMapRanges[token];
    }

    /**
@ -3026,8 +3026,10 @@ namespace ts {
     * Here we check if alternative name was provided for a given moduleName and return it if possible.
     */
    function tryRenameExternalModule(moduleName: LiteralExpression, sourceFile: SourceFile) {
        const rename = sourceFile.renamedDependencies && sourceFile.renamedDependencies.get(moduleName.text);
        return rename && createLiteral(rename);
        if (sourceFile.renamedDependencies && hasProperty(sourceFile.renamedDependencies, moduleName.text)) {
            return createLiteral(sourceFile.renamedDependencies[moduleName.text]);
        }
        return undefined;
    }

    /**

@ -491,7 +491,7 @@ namespace ts {
    let currentToken: SyntaxKind;
    let sourceText: string;
    let nodeCount: number;
    let identifiers: Map<string, string>;
    let identifiers: Map<string>;
    let identifierCount: number;

    let parsingContext: ParsingContext;
@ -601,7 +601,7 @@ namespace ts {

        parseDiagnostics = [];
        parsingContext = 0;
        identifiers = createMap<string, string>();
        identifiers = createMap<string>();
        identifierCount = 0;
        nodeCount = 0;

@ -1104,7 +1104,7 @@ namespace ts {

        function internIdentifier(text: string): string {
            text = escapeIdentifier(text);
            return getOrUpdate(identifiers, text, text => text);
            return identifiers[text] || (identifiers[text] = text);
        }

        // An identifier that starts with two underscores has an extra underscore character prepended to it to avoid issues

@ -16,9 +16,9 @@ namespace ts.performance {

    let enabled = false;
    let profilerStart = 0;
    let counts: Map<string, number>;
    let marks: Map<string, number>;
    let measures: Map<string, number>;
    let counts: Map<number>;
    let marks: Map<number>;
    let measures: Map<number>;

    /**
     * Marks a performance event.
@ -27,8 +27,8 @@ namespace ts.performance {
     */
    export function mark(markName: string) {
        if (enabled) {
            marks.set(markName, timestamp());
            counts.set(markName, (counts.get(markName) || 0) + 1);
            marks[markName] = timestamp();
            counts[markName] = (counts[markName] || 0) + 1;
            profilerEvent(markName);
        }
    }
@ -44,9 +44,9 @@ namespace ts.performance {
     */
    export function measure(measureName: string, startMarkName?: string, endMarkName?: string) {
        if (enabled) {
            const end = endMarkName && marks.get(endMarkName) || timestamp();
            const start = startMarkName && marks.get(startMarkName) || profilerStart;
            measures.set(measureName, (measures.get(measureName) || 0) + (end - start));
            const end = endMarkName && marks[endMarkName] || timestamp();
            const start = startMarkName && marks[startMarkName] || profilerStart;
            measures[measureName] = (measures[measureName] || 0) + (end - start);
        }
    }

@ -56,7 +56,7 @@ namespace ts.performance {
     * @param markName The name of the mark.
     */
    export function getCount(markName: string) {
        return counts && counts.get(markName) || 0;
        return counts && counts[markName] || 0;
    }

    /**
@ -65,7 +65,7 @@ namespace ts.performance {
     * @param measureName The name of the measure whose durations should be accumulated.
     */
    export function getDuration(measureName: string) {
        return measures && measures.get(measureName) || 0;
        return measures && measures[measureName] || 0;
    }

    /**
@ -74,14 +74,16 @@ namespace ts.performance {
     * @param cb The action to perform for each measure
     */
    export function forEachMeasure(cb: (measureName: string, duration: number) => void) {
        measures.forEach((duration, measureName) => cb(measureName, duration));
        for (const key in measures) {
            cb(key, measures[key]);
        }
    }

    /** Enables (and resets) performance measurements for the compiler. */
    export function enable() {
        counts = createMap<string, number>();
        marks = createMap<string, number>();
        measures = createMap<string, number>();
        counts = createMap<number>();
        marks = createMap<number>();
        measures = createMap<number>();
        enabled = true;
        profilerStart = timestamp();
    }
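
As an aside, a usage sketch of the performance counters above (illustrative only, not part of this diff; the mark names are arbitrary strings):

// Sketch: assumes the ts.performance namespace from above is available.
ts.performance.enable();
ts.performance.mark("beforeBind");
// ... do some work ...
ts.performance.mark("afterBind");
ts.performance.measure("Bind", "beforeBind", "afterBind");
const bindTime = ts.performance.getDuration("Bind"); // accumulated milliseconds between the two marks
ts.performance.forEachMeasure((name, duration) => console.log(`${name}: ${duration}ms`));
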

@ -82,7 +82,7 @@ namespace ts {
    }

    export function createCompilerHost(options: CompilerOptions, setParentNodes?: boolean): CompilerHost {
        const existingDirectories = createSet();
        const existingDirectories = createMap<boolean>();

        function getCanonicalFileName(fileName: string): string {
            // if underlying system can distinguish between two files whose names differs only in cases then file name already in canonical form.
@ -114,11 +114,11 @@ namespace ts {
        }

        function directoryExists(directoryPath: string): boolean {
            if (existingDirectories.has(directoryPath)) {
            if (directoryPath in existingDirectories) {
                return true;
            }
            if (sys.directoryExists(directoryPath)) {
                existingDirectories.add(directoryPath);
                existingDirectories[directoryPath] = true;
                return true;
            }
            return false;
@ -132,21 +132,21 @@ namespace ts {
            }
        }

        let outputFingerprints: Map<string, OutputFingerprint>;
        let outputFingerprints: Map<OutputFingerprint>;

        function writeFileIfUpdated(fileName: string, data: string, writeByteOrderMark: boolean): void {
            if (!outputFingerprints) {
                outputFingerprints = createMap<string, OutputFingerprint>();
                outputFingerprints = createMap<OutputFingerprint>();
            }

            const hash = sys.createHash(data);
            const mtimeBefore = sys.getModifiedTime(fileName);

            if (mtimeBefore) {
                const fingerprint = outputFingerprints.get(fileName);
            if (mtimeBefore && fileName in outputFingerprints) {
                const fingerprint = outputFingerprints[fileName];

                // If output has not been changed, and the file has no external modification
                if (fingerprint && fingerprint.byteOrderMark === writeByteOrderMark &&
                if (fingerprint.byteOrderMark === writeByteOrderMark &&
                    fingerprint.hash === hash &&
                    fingerprint.mtime.getTime() === mtimeBefore.getTime()) {
                    return;
@ -157,11 +157,11 @@ namespace ts {

            const mtimeAfter = sys.getModifiedTime(fileName);

            outputFingerprints.set(fileName, {
            outputFingerprints[fileName] = {
                hash,
                byteOrderMark: writeByteOrderMark,
                mtime: mtimeAfter
            });
            };
        }

        function writeFile(fileName: string, data: string, writeByteOrderMark: boolean, onError?: (message: string) => void) {
@ -279,11 +279,11 @@ namespace ts {
            return [];
        }
        const resolutions: T[] = [];
        const cache = createMap<string, T>();
        const cache = createMap<T>();
        for (const name of names) {
            const result = cache.has(name)
                ? cache.get(name)
                : setAndReturn(cache, name, loader(name, containingFile));
            const result = name in cache
                ? cache[name]
                : cache[name] = loader(name, containingFile);
            resolutions.push(result);
        }
        return resolutions;
@ -295,9 +295,9 @@ namespace ts {
        let commonSourceDirectory: string;
        let diagnosticsProducingTypeChecker: TypeChecker;
        let noDiagnosticsTypeChecker: TypeChecker;
        let classifiableNames: Set<string>;
        let classifiableNames: Map<string>;

        let resolvedTypeReferenceDirectives = createMap<string, ResolvedTypeReferenceDirective>();
        let resolvedTypeReferenceDirectives = createMap<ResolvedTypeReferenceDirective>();
        let fileProcessingDiagnostics = createDiagnosticCollection();

        // The below settings are to track if a .js file should be add to the program if loaded via searching under node_modules.
@ -312,10 +312,10 @@ namespace ts {

        // If a module has some of its imports skipped due to being at the depth limit under node_modules, then track
        // this, as it may be imported at a shallower depth later, and then it will need its skipped imports processed.
        const modulesWithElidedImports = createMap<string, boolean>();
        const modulesWithElidedImports = createMap<boolean>();

        // Track source files that are source files found by searching under node_modules, as these shouldn't be compiled.
        const sourceFilesFoundSearchingNodeModules = createMap<string, boolean>();
        const sourceFilesFoundSearchingNodeModules = createMap<boolean>();

        performance.mark("beforeProgram");

@ -448,11 +448,15 @@ namespace ts {
            return commonSourceDirectory;
        }

        function getClassifiableNames(): Set<string> {
        function getClassifiableNames() {
            if (!classifiableNames) {
                // Initialize a checker so that all our files are bound.
                getTypeChecker();
                classifiableNames = setAggregate(files, sourceFile => sourceFile.classifiableNames);
                classifiableNames = createMap<string>();

                for (const sourceFile of files) {
                    copyProperties(sourceFile.classifiableNames, classifiableNames);
                }
            }

            return classifiableNames;
@ -588,7 +592,7 @@ namespace ts {
            getSourceFile: program.getSourceFile,
            getSourceFileByPath: program.getSourceFileByPath,
            getSourceFiles: program.getSourceFiles,
            isSourceFileFromExternalLibrary: (file: SourceFile) => !!sourceFilesFoundSearchingNodeModules.get(file.path),
            isSourceFileFromExternalLibrary: (file: SourceFile) => !!sourceFilesFoundSearchingNodeModules[file.path],
            writeFile: writeFileCallback || (
                (fileName, data, writeByteOrderMark, onError, sourceFiles) => host.writeFile(fileName, data, writeByteOrderMark, onError, sourceFiles)),
            isEmitBlocked,
@ -1128,8 +1132,8 @@ namespace ts {

        // Get source file from normalized fileName
        function findSourceFile(fileName: string, path: Path, isDefaultLib: boolean, refFile?: SourceFile, refPos?: number, refEnd?: number): SourceFile {
            let file = filesByName.get(path);
            if (file !== undefined) {
            if (filesByName.contains(path)) {
                const file = filesByName.get(path);
                // try to check if we've already seen this file but with a different casing in path
                // NOTE: this only makes sense for case-insensitive file systems
                if (file && options.forceConsistentCasingInFileNames && getNormalizedAbsolutePath(file.fileName, currentDirectory) !== getNormalizedAbsolutePath(fileName, currentDirectory)) {
@ -1138,20 +1142,20 @@ namespace ts {

                // If the file was previously found via a node_modules search, but is now being processed as a root file,
                // then everything it sucks in may also be marked incorrectly, and needs to be checked again.
                if (file && sourceFilesFoundSearchingNodeModules.get(file.path) && currentNodeModulesDepth == 0) {
                    sourceFilesFoundSearchingNodeModules.set(file.path, false);
                if (file && sourceFilesFoundSearchingNodeModules[file.path] && currentNodeModulesDepth == 0) {
                    sourceFilesFoundSearchingNodeModules[file.path] = false;
                    if (!options.noResolve) {
                        processReferencedFiles(file, isDefaultLib);
                        processTypeReferenceDirectives(file);
                    }

                    modulesWithElidedImports.set(file.path, false);
                    modulesWithElidedImports[file.path] = false;
                    processImportedModules(file);
                }
                // See if we need to reprocess the imports due to prior skipped imports
                else if (file && modulesWithElidedImports.get(file.path)) {
                else if (file && modulesWithElidedImports[file.path]) {
                    if (currentNodeModulesDepth < maxNodeModuleJsDepth) {
                        modulesWithElidedImports.set(file.path, false);
                        modulesWithElidedImports[file.path] = false;
                        processImportedModules(file);
                    }
                }
@ -1160,7 +1164,7 @@ namespace ts {
            }

            // We haven't looked for this file, do so now and cache result
            file = host.getSourceFile(fileName, options.target, hostErrorMessage => {
            const file = host.getSourceFile(fileName, options.target, hostErrorMessage => {
                if (refFile !== undefined && refPos !== undefined && refEnd !== undefined) {
                    fileProcessingDiagnostics.add(createFileDiagnostic(refFile, refPos, refEnd - refPos,
                        Diagnostics.Cannot_read_file_0_Colon_1, fileName, hostErrorMessage));
@ -1172,7 +1176,7 @@ namespace ts {

            filesByName.set(path, file);
            if (file) {
                sourceFilesFoundSearchingNodeModules.set(path, currentNodeModulesDepth > 0);
                sourceFilesFoundSearchingNodeModules[path] = (currentNodeModulesDepth > 0);
                file.path = path;

                if (host.useCaseSensitiveFileNames()) {
@ -1233,7 +1237,7 @@ namespace ts {
            refFile?: SourceFile, refPos?: number, refEnd?: number): void {

            // If we already found this library as a primary reference - nothing to do
            const previousResolution = resolvedTypeReferenceDirectives.get(typeReferenceDirective);
            const previousResolution = resolvedTypeReferenceDirectives[typeReferenceDirective];
            if (previousResolution && previousResolution.primary) {
                return;
            }
@ -1270,7 +1274,7 @@ namespace ts {
            }

            if (saveResolution) {
                resolvedTypeReferenceDirectives.set(typeReferenceDirective, resolvedTypeReferenceDirective);
                resolvedTypeReferenceDirectives[typeReferenceDirective] = resolvedTypeReferenceDirective;
            }
        }

@ -1290,7 +1294,7 @@ namespace ts {
        function processImportedModules(file: SourceFile) {
            collectExternalModuleReferences(file);
            if (file.imports.length || file.moduleAugmentations.length) {
                file.resolvedModules = createMap<string, ResolvedModuleFull>();
                file.resolvedModules = createMap<ResolvedModuleFull>();
                const moduleNames = map(concatenate(file.imports, file.moduleAugmentations), getTextOfLiteral);
                const resolutions = resolveModuleNamesWorker(moduleNames, getNormalizedAbsolutePath(file.fileName, currentDirectory));
                Debug.assert(resolutions.length === moduleNames.length);
@ -1321,7 +1325,7 @@ namespace ts {
                const shouldAddFile = resolvedFileName && !getResolutionDiagnostic(options, resolution) && !options.noResolve && i < file.imports.length && !elideImport;

                if (elideImport) {
                    modulesWithElidedImports.set(file.path, true);
                    modulesWithElidedImports[file.path] = true;
                }
                else if (shouldAddFile) {
                    const path = toPath(resolvedFileName, currentDirectory, getCanonicalFileName);

@ -56,7 +56,7 @@ namespace ts {
        tryScan<T>(callback: () => T): T;
    }

    const textToToken = mapOfMapLike({
    const textToToken = createMap({
        "abstract": SyntaxKind.AbstractKeyword,
        "any": SyntaxKind.AnyKeyword,
        "as": SyntaxKind.AsKeyword,
@ -272,11 +272,11 @@ namespace ts {
            lookupInUnicodeMap(code, unicodeES3IdentifierPart);
    }

    function makeReverseMap(source: Map<string, number>): string[] {
    function makeReverseMap(source: Map<number>): string[] {
        const result: string[] = [];
        source.forEach((num, name) => {
            result[num] = name;
        });
        for (const name in source) {
            result[source[name]] = name;
        }
        return result;
    }

@ -288,7 +288,7 @@ namespace ts {

    /* @internal */
    export function stringToToken(s: string): SyntaxKind {
        return textToToken.get(s);
        return textToToken[s];
    }

    /* @internal */
@ -362,6 +362,8 @@ namespace ts {
        return computeLineAndCharacterOfPosition(getLineStarts(sourceFile), position);
    }

    const hasOwnProperty = Object.prototype.hasOwnProperty;

    export function isWhiteSpace(ch: number): boolean {
        return isWhiteSpaceSingleLine(ch) || isLineBreak(ch);
    }
@ -1180,11 +1182,8 @@ namespace ts {
            const len = tokenValue.length;
            if (len >= 2 && len <= 11) {
                const ch = tokenValue.charCodeAt(0);
                if (ch >= CharacterCodes.a && ch <= CharacterCodes.z) {
                    token = textToToken.get(tokenValue);
                    if (token !== undefined) {
                        return token;
                    }
                if (ch >= CharacterCodes.a && ch <= CharacterCodes.z && hasOwnProperty.call(textToToken, tokenValue)) {
                    return token = textToToken[tokenValue];
                }
            }
            return token = SyntaxKind.Identifier;

@ -362,7 +362,7 @@ namespace ts {

        const emitNode = node && node.emitNode;
        const emitFlags = emitNode && emitNode.flags;
        const range = emitNode && emitNode.tokenSourceMapRanges && emitNode.tokenSourceMapRanges.get(token);
        const range = emitNode && emitNode.tokenSourceMapRanges && emitNode.tokenSourceMapRanges[token];

        tokenPos = skipTrivia(currentSourceText, range ? range.pos : tokenPos);
        if ((emitFlags & EmitFlags.NoTokenLeadingSourceMaps) === 0 && tokenPos >= 0) {

@ -237,25 +237,25 @@ namespace ts {
        const useNonPollingWatchers = process.env["TSC_NONPOLLING_WATCHER"];

        function createWatchedFileSet() {
            const dirWatchers = createMap<string, DirectoryWatcher>();
            const dirWatchers = createMap<DirectoryWatcher>();
            // One file can have multiple watchers
            const fileWatcherCallbacks = createMap<string, FileWatcherCallback[]>();
            const fileWatcherCallbacks = createMap<FileWatcherCallback[]>();
            return { addFile, removeFile };

            function reduceDirWatcherRefCountForFile(fileName: string) {
                const dirName = getDirectoryPath(fileName);
                const watcher = dirWatchers.get(dirName);
                const watcher = dirWatchers[dirName];
                if (watcher) {
                    watcher.referenceCount -= 1;
                    if (watcher.referenceCount <= 0) {
                        watcher.close();
                        dirWatchers.delete(dirName);
                        delete dirWatchers[dirName];
                    }
                }
            }

            function addDirWatcher(dirPath: string): void {
                let watcher = dirWatchers.get(dirPath);
                let watcher = dirWatchers[dirPath];
                if (watcher) {
                    watcher.referenceCount += 1;
                    return;
@ -266,7 +266,7 @@ namespace ts {
                    (eventName: string, relativeFileName: string) => fileEventHandler(eventName, relativeFileName, dirPath)
                );
                watcher.referenceCount = 1;
                dirWatchers.set(dirPath, watcher);
                dirWatchers[dirPath] = watcher;
                return;
            }

@ -296,12 +296,9 @@ namespace ts {
                    ? undefined
                    : ts.getNormalizedAbsolutePath(relativeFileName, baseDirPath);
                // Some applications save a working file via rename operations
                if ((eventName === "change" || eventName === "rename")) {
                    const callbacks = fileWatcherCallbacks.get(fileName);
                    if (callbacks) {
                        for (const fileCallback of callbacks) {
                            fileCallback(fileName);
                        }
                if ((eventName === "change" || eventName === "rename") && fileWatcherCallbacks[fileName]) {
                    for (const fileCallback of fileWatcherCallbacks[fileName]) {
                        fileCallback(fileName);
                    }
                }
            }

@ -12,14 +12,14 @@

/* @internal */
namespace ts {
    const moduleTransformerMap = createMap<ModuleKind, Transformer>([
        [ModuleKind.ES2015, transformES2015Module],
        [ModuleKind.System, transformSystemModule],
        [ModuleKind.AMD, transformModule],
        [ModuleKind.CommonJS, transformModule],
        [ModuleKind.UMD, transformModule],
        [ModuleKind.None, transformModule],
    ]);
    const moduleTransformerMap = createMap<Transformer>({
        [ModuleKind.ES2015]: transformES2015Module,
        [ModuleKind.System]: transformSystemModule,
        [ModuleKind.AMD]: transformModule,
        [ModuleKind.CommonJS]: transformModule,
        [ModuleKind.UMD]: transformModule,
        [ModuleKind.None]: transformModule,
    });

    const enum SyntaxKindFeatureFlags {
        Substitution = 1 << 0,
@ -129,7 +129,7 @@ namespace ts {
            transformers.push(transformGenerators);
        }

        transformers.push(moduleTransformerMap.get(moduleKind) || moduleTransformerMap.get(ModuleKind.None));
        transformers.push(moduleTransformerMap[moduleKind] || moduleTransformerMap[ModuleKind.None]);

        // The ES5 transformer is last so that it can substitute expressions like `exports.default`
        // for ES3.

@ -70,15 +70,15 @@ namespace ts {
         * set of labels that occurred inside the converted loop
         * used to determine if labeled jump can be emitted as is or it should be dispatched to calling code
         */
        labels?: Map<string, string>;
        labels?: Map<string>;
        /*
         * collection of labeled jumps that transfer control outside the converted loop.
         * maps store association 'label -> labelMarker' where
         * - label - value of label as it appear in code
         * - label marker - return value that should be interpreted by calling code as 'jump to <label>'
         */
        labeledNonLocalBreaks?: Map<string, string>;
        labeledNonLocalContinues?: Map<string, string>;
        labeledNonLocalBreaks?: Map<string>;
        labeledNonLocalContinues?: Map<string>;

        /*
         * set of non-labeled jumps that transfer control outside the converted loop
@ -279,7 +279,7 @@ namespace ts {
            else if (node.transformFlags & TransformFlags.ContainsES2015 || (isInConstructorWithCapturedSuper && !isExpression(node))) {
                // we want to dive in this branch either if node has children with ES2015 specific syntax
                // or we are inside constructor that captures result of the super call so all returns without expression should be
                // rewritten. Note: we skip expressions since returns should never appear there
                // rewritten. Note: we skip expressions since returns should never appear there
                return visitEachChild(node, visitor, context);
            }
            else {
@ -543,7 +543,7 @@ namespace ts {
            // - break/continue is non-labeled and located in non-converted loop/switch statement
            const jump = node.kind === SyntaxKind.BreakStatement ? Jump.Break : Jump.Continue;
            const canUseBreakOrContinue =
                (node.label && convertedLoopState.labels && convertedLoopState.labels.get(node.label.text)) ||
                (node.label && convertedLoopState.labels && convertedLoopState.labels[node.label.text]) ||
                (!node.label && (convertedLoopState.allowedNonLabeledJumps & jump));

            if (!canUseBreakOrContinue) {
@ -1880,9 +1880,9 @@ namespace ts {
        function visitLabeledStatement(node: LabeledStatement): VisitResult<Statement> {
            if (convertedLoopState) {
                if (!convertedLoopState.labels) {
                    convertedLoopState.labels = createMap<string, string>();
                    convertedLoopState.labels = createMap<string>();
                }
                convertedLoopState.labels.set(node.label.text, node.label.text);
                convertedLoopState.labels[node.label.text] = node.label.text;
            }

            let result: VisitResult<Statement>;
@ -1894,7 +1894,7 @@ namespace ts {
            }

            if (convertedLoopState) {
                convertedLoopState.labels.set(node.label.text, undefined);
                convertedLoopState.labels[node.label.text] = undefined;
            }

            return result;
@ -2497,28 +2497,29 @@ namespace ts {
        function setLabeledJump(state: ConvertedLoopState, isBreak: boolean, labelText: string, labelMarker: string): void {
            if (isBreak) {
                if (!state.labeledNonLocalBreaks) {
                    state.labeledNonLocalBreaks = createMap<string, string>();
                    state.labeledNonLocalBreaks = createMap<string>();
                }
                state.labeledNonLocalBreaks.set(labelText, labelMarker);
                state.labeledNonLocalBreaks[labelText] = labelMarker;
            }
            else {
                if (!state.labeledNonLocalContinues) {
                    state.labeledNonLocalContinues = createMap<string, string>();
                    state.labeledNonLocalContinues = createMap<string>();
                }
                state.labeledNonLocalContinues.set(labelText, labelMarker);
                state.labeledNonLocalContinues[labelText] = labelMarker;
            }
        }

        function processLabeledJumps(table: Map<string, string>, isBreak: boolean, loopResultName: Identifier, outerLoop: ConvertedLoopState, caseClauses: CaseClause[]): void {
        function processLabeledJumps(table: Map<string>, isBreak: boolean, loopResultName: Identifier, outerLoop: ConvertedLoopState, caseClauses: CaseClause[]): void {
            if (!table) {
                return;
            }
            table.forEach((labelMarker, labelText) => {
            for (const labelText in table) {
                const labelMarker = table[labelText];
                const statements: Statement[] = [];
                // if there are no outer converted loop or outer label in question is located inside outer converted loop
                // then emit labeled break\continue
                // otherwise propagate pair 'label -> marker' to outer converted loop and emit 'return labelMarker' so outer loop can later decide what to do
                if (!outerLoop || (outerLoop.labels && outerLoop.labels.get(labelText))) {
                if (!outerLoop || (outerLoop.labels && outerLoop.labels[labelText])) {
                    const label = createIdentifier(labelText);
                    statements.push(isBreak ? createBreak(label) : createContinue(label));
                }
@ -2527,7 +2528,7 @@ namespace ts {
                    statements.push(createReturn(loopResultName));
                }
                caseClauses.push(createCaseClause(createLiteral(labelMarker), statements));
            });
            }
        }

        function processLoopVariableDeclaration(decl: VariableDeclaration | BindingElement, loopParameters: ParameterDeclaration[], loopOutParameters: LoopOutParameter[]) {

@ -217,13 +217,13 @@ namespace ts {
        Endfinally = 7,
    }

    const instructionNames = createMap<Instruction, string>([
        [Instruction.Return, "return"],
        [Instruction.Break, "break"],
        [Instruction.Yield, "yield"],
        [Instruction.YieldStar, "yield*"],
        [Instruction.Endfinally, "endfinally"],
    ]);
    const instructionNames = createMap<string>({
        [Instruction.Return]: "return",
        [Instruction.Break]: "break",
        [Instruction.Yield]: "yield",
        [Instruction.YieldStar]: "yield*",
        [Instruction.Endfinally]: "endfinally",
    });

    export function transformGenerators(context: TransformationContext) {
        const {
@ -240,8 +240,8 @@ namespace ts {
        context.onSubstituteNode = onSubstituteNode;

        let currentSourceFile: SourceFile;
        let renamedCatchVariables: Set<string>;
        let renamedCatchVariableDeclarations: Map<number, Identifier>;
        let renamedCatchVariables: Map<boolean>;
        let renamedCatchVariableDeclarations: Map<Identifier>;

        let inGeneratorFunctionBody: boolean;
        let inStatementContainingYield: boolean;
@ -1908,12 +1908,12 @@ namespace ts {
        }

        function substituteExpressionIdentifier(node: Identifier) {
            if (renamedCatchVariables && renamedCatchVariables.has(node.text)) {
            if (renamedCatchVariables && hasProperty(renamedCatchVariables, node.text)) {
                const original = getOriginalNode(node);
                if (isIdentifier(original) && original.parent) {
                    const declaration = resolver.getReferencedValueDeclaration(original);
                    if (declaration) {
                        const name = renamedCatchVariableDeclarations.get(getOriginalNodeId(declaration));
                        const name = getProperty(renamedCatchVariableDeclarations, String(getOriginalNodeId(declaration)));
                        if (name) {
                            const clone = getMutableClone(name);
                            setSourceMapRange(clone, node);
@ -2078,13 +2078,13 @@ namespace ts {
            const name = declareLocal(text);

            if (!renamedCatchVariables) {
                renamedCatchVariables = createSet();
                renamedCatchVariableDeclarations = createMap<number, Identifier>();
                renamedCatchVariables = createMap<boolean>();
                renamedCatchVariableDeclarations = createMap<Identifier>();
                context.enableSubstitution(SyntaxKind.Identifier);
            }

            renamedCatchVariables.add(text);
            renamedCatchVariableDeclarations.set(getOriginalNodeId(variable), name);
            renamedCatchVariables[text] = true;
            renamedCatchVariableDeclarations[getOriginalNodeId(variable)] = name;

            const exception = <ExceptionBlock>peekBlock();
            Debug.assert(exception.state < ExceptionBlockState.Catch);
@ -2388,7 +2388,7 @@ namespace ts {
         */
        function createInstruction(instruction: Instruction): NumericLiteral {
            const literal = createLiteral(instruction);
            literal.trailingComment = instructionNames.get(instruction);
            literal.trailingComment = instructionNames[instruction];
            return literal;
        }

@ -3,7 +3,7 @@

/*@internal*/
namespace ts {
    const entities: Map<string, number> = createEntitiesMap();
    const entities: Map<number> = createEntitiesMap();

    export function transformJsx(context: TransformationContext) {
        const compilerOptions = context.getCompilerOptions();
@ -227,7 +227,7 @@ namespace ts {
                return String.fromCharCode(parseInt(hex, 16));
            }
            else {
                const ch = entities.get(word);
                const ch = entities[word];
                // If this is not a valid entity, then just use `match` (replace it with itself, i.e. don't replace)
                return ch ? String.fromCharCode(ch) : match;
            }
@ -275,8 +275,8 @@ namespace ts {
        }
    }

    function createEntitiesMap(): Map<string, number> {
        return mapOfMapLike<number>({
    function createEntitiesMap(): Map<number> {
        return createMap<number>({
            "quot": 0x0022,
            "amp": 0x0026,
            "apos": 0x0027,

@ -10,12 +10,12 @@ namespace ts {
        importAliasNames: ParameterDeclaration[];
    }

    const transformModuleDelegates = createMap<ModuleKind, (node: SourceFile) => SourceFile>([
        [ModuleKind.None, transformCommonJSModule],
        [ModuleKind.CommonJS, transformCommonJSModule],
        [ModuleKind.AMD, transformAMDModule],
        [ModuleKind.UMD, transformUMDModule],
    ]);
    const transformModuleDelegates = createMap<(node: SourceFile) => SourceFile>({
        [ModuleKind.None]: transformCommonJSModule,
        [ModuleKind.CommonJS]: transformCommonJSModule,
        [ModuleKind.AMD]: transformAMDModule,
        [ModuleKind.UMD]: transformUMDModule,
    });

    const {
        startLexicalEnvironment,
@ -32,7 +32,6 @@ namespace ts {
        const previousOnEmitNode = context.onEmitNode;
        context.onSubstituteNode = onSubstituteNode;
        context.onEmitNode = onEmitNode;

        context.enableSubstitution(SyntaxKind.Identifier); // Substitutes expression identifiers with imported/exported symbols.
        context.enableSubstitution(SyntaxKind.BinaryExpression); // Substitutes assignments to exported symbols.
        context.enableSubstitution(SyntaxKind.PrefixUnaryExpression); // Substitutes updates to exported symbols.
@ -40,12 +39,12 @@ namespace ts {
        context.enableSubstitution(SyntaxKind.ShorthandPropertyAssignment); // Substitutes shorthand property assignments for imported/exported symbols.
        context.enableEmitNotification(SyntaxKind.SourceFile); // Restore state when substituting nodes in a file.

        const moduleInfoMap = createMap<number, ExternalModuleInfo>(); // The ExternalModuleInfo for each file.
        const deferredExports = createMap<number, Statement[]>(); // Exports to defer until an EndOfDeclarationMarker is found.
        const moduleInfoMap = createMap<ExternalModuleInfo>(); // The ExternalModuleInfo for each file.
        const deferredExports = createMap<Statement[]>(); // Exports to defer until an EndOfDeclarationMarker is found.

        let currentSourceFile: SourceFile; // The current file.
        let currentModuleInfo: ExternalModuleInfo; // The ExternalModuleInfo for the current file.
        let noSubstitution: Map<number, boolean>; // Set of nodes for which substitution rules should be ignored.
        let noSubstitution: Map<boolean>; // Set of nodes for which substitution rules should be ignored.

        return transformSourceFile;

@ -62,10 +61,10 @@ namespace ts {
            }

            currentSourceFile = node;
            currentModuleInfo = setAndReturn(moduleInfoMap, getOriginalNodeId(node), collectExternalModuleInfo(node, resolver));
            currentModuleInfo = moduleInfoMap[getOriginalNodeId(node)] = collectExternalModuleInfo(node, resolver);

            // Perform the transformation.
            const transformModule = transformModuleDelegates.get(moduleKind) || transformModuleDelegates.get(ModuleKind.None);
            const transformModule = transformModuleDelegates[moduleKind] || transformModuleDelegates[ModuleKind.None];
            const updated = transformModule(node);

            currentSourceFile = undefined;
@ -445,7 +444,7 @@ namespace ts {
            if (hasAssociatedEndOfDeclarationMarker(node)) {
                // Defer exports until we encounter an EndOfDeclarationMarker node
                const id = getOriginalNodeId(node);
                deferredExports.set(id, appendExportsOfImportDeclaration(deferredExports.get(id), node));
                deferredExports[id] = appendExportsOfImportDeclaration(deferredExports[id], node);
            }
            else {
                statements = appendExportsOfImportDeclaration(statements, node);
@ -524,7 +523,7 @@ namespace ts {
            if (hasAssociatedEndOfDeclarationMarker(node)) {
                // Defer exports until we encounter an EndOfDeclarationMarker node
                const id = getOriginalNodeId(node);
                deferredExports.set(id, appendExportsOfImportEqualsDeclaration(deferredExports.get(id), node));
                deferredExports[id] = appendExportsOfImportEqualsDeclaration(deferredExports[id], node);
            }
            else {
                statements = appendExportsOfImportEqualsDeclaration(statements, node);
@ -611,7 +610,7 @@ namespace ts {
            if (original && hasAssociatedEndOfDeclarationMarker(original)) {
                // Defer exports until we encounter an EndOfDeclarationMarker node
                const id = getOriginalNodeId(node);
                deferredExports.set(id, appendExportStatement(deferredExports.get(id), createIdentifier("default"), node.expression, /*location*/ node, /*allowComments*/ true));
                deferredExports[id] = appendExportStatement(deferredExports[id], createIdentifier("default"), node.expression, /*location*/ node, /*allowComments*/ true);
            }
            else {
                statements = appendExportStatement(statements, createIdentifier("default"), node.expression, /*location*/ node, /*allowComments*/ true);
@ -652,7 +651,7 @@ namespace ts {
            if (hasAssociatedEndOfDeclarationMarker(node)) {
                // Defer exports until we encounter an EndOfDeclarationMarker node
                const id = getOriginalNodeId(node);
                deferredExports.set(id, appendExportsOfHoistedDeclaration(deferredExports.get(id), node));
                deferredExports[id] = appendExportsOfHoistedDeclaration(deferredExports[id], node);
            }
            else {
                statements = appendExportsOfHoistedDeclaration(statements, node);
@ -691,7 +690,7 @@ namespace ts {
            if (hasAssociatedEndOfDeclarationMarker(node)) {
                // Defer exports until we encounter an EndOfDeclarationMarker node
                const id = getOriginalNodeId(node);
                deferredExports.set(id, appendExportsOfHoistedDeclaration(deferredExports.get(id), node));
                deferredExports[id] = appendExportsOfHoistedDeclaration(deferredExports[id], node);
            }
            else {
                statements = appendExportsOfHoistedDeclaration(statements, node);
@ -742,7 +741,7 @@ namespace ts {
            if (hasAssociatedEndOfDeclarationMarker(node)) {
                // Defer exports until we encounter an EndOfDeclarationMarker node
                const id = getOriginalNodeId(node);
                deferredExports.set(id, appendExportsOfVariableStatement(deferredExports.get(id), node));
                deferredExports[id] = appendExportsOfVariableStatement(deferredExports[id], node);
            }
            else {
                statements = appendExportsOfVariableStatement(statements, node);
@ -792,7 +791,7 @@ namespace ts {
            // statement.
            if (hasAssociatedEndOfDeclarationMarker(node) && node.original.kind === SyntaxKind.VariableStatement) {
                const id = getOriginalNodeId(node);
                deferredExports.set(id, appendExportsOfVariableStatement(deferredExports.get(id), <VariableStatement>node.original));
                deferredExports[id] = appendExportsOfVariableStatement(deferredExports[id], <VariableStatement>node.original);
            }

            return node;
@ -818,9 +817,9 @@ namespace ts {
            // end of the transformed declaration. We use this marker to emit any deferred exports
            // of the declaration.
            const id = getOriginalNodeId(node);
            const statements = deferredExports.get(id);
            const statements = deferredExports[id];
            if (statements) {
                deferredExports.delete(id);
                delete deferredExports[id];
                return append(statements, node);
            }

@ -971,7 +970,7 @@ namespace ts {
         */
        function appendExportsOfDeclaration(statements: Statement[] | undefined, decl: Declaration): Statement[] | undefined {
            const name = getDeclarationName(decl);
            const exportSpecifiers = currentModuleInfo.exportSpecifiers.get(name.text);
            const exportSpecifiers = currentModuleInfo.exportSpecifiers[name.text];
            if (exportSpecifiers) {
                for (const exportSpecifier of exportSpecifiers) {
                    statements = appendExportStatement(statements, exportSpecifier.name, name, /*location*/ exportSpecifier.name);
@ -995,7 +994,7 @@ namespace ts {
        function appendExportStatement(statements: Statement[] | undefined, exportName: Identifier, expression: Expression, location?: TextRange, allowComments?: boolean): Statement[] | undefined {
            if (exportName.text === "default") {
                const sourceFile = getOriginalNode(currentSourceFile, isSourceFile);
                if (sourceFile && !sourceFile.symbol.exports.get("___esModule")) {
                if (sourceFile && !sourceFile.symbol.exports["___esModule"]) {
                    if (languageVersion === ScriptTarget.ES3) {
                        statements = append(statements,
                            createStatement(
@ -1100,8 +1099,8 @@ namespace ts {
        function onEmitNode(emitContext: EmitContext, node: Node, emitCallback: (emitContext: EmitContext, node: Node) => void): void {
            if (node.kind === SyntaxKind.SourceFile) {
                currentSourceFile = <SourceFile>node;
                currentModuleInfo = moduleInfoMap.get(getOriginalNodeId(currentSourceFile));
                noSubstitution = createMap<number, boolean>();
                currentModuleInfo = moduleInfoMap[getOriginalNodeId(currentSourceFile)];
                noSubstitution = createMap<boolean>();

                previousOnEmitNode(emitContext, node, emitCallback);

@ -1126,7 +1125,7 @@ namespace ts {
         */
        function onSubstituteNode(emitContext: EmitContext, node: Node) {
            node = previousOnSubstituteNode(emitContext, node);
            if (node.id && noSubstitution.get(node.id)) {
            if (node.id && noSubstitution[node.id]) {
                return node;
            }

@ -1244,7 +1243,7 @@ namespace ts {
            let expression: Expression = node;
            for (const exportName of exportedNames) {
                // Mark the node to prevent triggering this rule again.
                noSubstitution.set(getNodeId(expression), true);
                noSubstitution[getNodeId(expression)] = true;
                expression = createExportExpression(exportName, expression, /*location*/ node);
            }

@ -1286,7 +1285,7 @@ namespace ts {
                : node;
            for (const exportName of exportedNames) {
                // Mark the node to prevent triggering this rule again.
                noSubstitution.set(getNodeId(expression), true);
                noSubstitution[getNodeId(expression)] = true;
                expression = createExportExpression(exportName, expression);
            }

@ -1308,7 +1307,7 @@ namespace ts {
                || resolver.getReferencedValueDeclaration(name);
            if (valueDeclaration) {
                return currentModuleInfo
                    && currentModuleInfo.exportedBindings.get(getOriginalNodeId(valueDeclaration));
                    && currentModuleInfo.exportedBindings[getOriginalNodeId(valueDeclaration)];
            }
        }
    }

@ -28,10 +28,10 @@ namespace ts {
|
||||
context.enableSubstitution(SyntaxKind.PostfixUnaryExpression); // Substitutes updates to exported symbols.
|
||||
context.enableEmitNotification(SyntaxKind.SourceFile); // Restore state when substituting nodes in a file.
|
||||
|
||||
const moduleInfoMap = createMap<number, ExternalModuleInfo>(); // The ExternalModuleInfo for each file.
|
||||
const deferredExports = createMap<number, Statement[]>(); // Exports to defer until an EndOfDeclarationMarker is found.
|
||||
const exportFunctionsMap = createMap<number, Identifier>(); // The export function associated with a source file.
|
||||
const noSubstitutionMap = createMap<number, Map<number, boolean>>(); // Set of nodes for which substitution rules should be ignored for each file.
|
||||
const moduleInfoMap = createMap<ExternalModuleInfo>(); // The ExternalModuleInfo for each file.
|
||||
const deferredExports = createMap<Statement[]>(); // Exports to defer until an EndOfDeclarationMarker is found.
|
||||
const exportFunctionsMap = createMap<Identifier>(); // The export function associated with a source file.
|
||||
const noSubstitutionMap = createMap<Map<boolean>>(); // Set of nodes for which substitution rules should be ignored for each file.
|
||||
|
||||
let currentSourceFile: SourceFile; // The current file.
|
||||
let moduleInfo: ExternalModuleInfo; // ExternalModuleInfo for the current file.
|
||||
@ -39,7 +39,7 @@ namespace ts {
|
||||
let contextObject: Identifier; // The context object for the current file.
|
||||
let hoistedStatements: Statement[];
|
||||
let enclosingBlockScopedContainer: Node;
|
||||
let noSubstitution: Map<number, boolean>; // Set of nodes for which substitution rules should be ignored.
|
||||
let noSubstitution: Map<boolean>; // Set of nodes for which substitution rules should be ignored.
|
||||
|
||||
return transformSourceFile;
|
||||
|
||||
@ -73,11 +73,11 @@ namespace ts {
|
||||
// see comment to 'substitutePostfixUnaryExpression' for more details
|
||||
|
||||
// Collect information about the external module and dependency groups.
|
||||
moduleInfo = setAndReturn(moduleInfoMap, id, collectExternalModuleInfo(node, resolver));
|
||||
moduleInfo = moduleInfoMap[id] = collectExternalModuleInfo(node, resolver);
|
||||
|
||||
// Make sure that the name of the 'exports' function does not conflict with
|
||||
// existing identifiers.
|
||||
exportFunction = setAndReturn(exportFunctionsMap, id, createUniqueName("exports"));
|
||||
exportFunction = exportFunctionsMap[id] = createUniqueName("exports");
|
||||
contextObject = createUniqueName("context");
|
||||
|
||||
// Add the body of the module.
|
||||
@ -118,7 +118,7 @@ namespace ts {
|
||||
setEmitFlags(updated, getEmitFlags(node) & ~EmitFlags.EmitEmitHelpers);
|
||||
|
||||
if (noSubstitution) {
|
||||
noSubstitutionMap.set(id, noSubstitution);
|
||||
noSubstitutionMap[id] = noSubstitution;
|
||||
noSubstitution = undefined;
|
||||
}
|
||||
|
||||
@ -138,19 +138,19 @@ namespace ts {
|
||||
* @param externalImports The imports for the file.
|
||||
*/
|
||||
function collectDependencyGroups(externalImports: (ImportDeclaration | ImportEqualsDeclaration | ExportDeclaration)[]) {
|
||||
const groupIndices = createMap<string, number>();
|
||||
const groupIndices = createMap<number>();
|
||||
const dependencyGroups: DependencyGroup[] = [];
|
||||
for (let i = 0; i < externalImports.length; i++) {
|
||||
const externalImport = externalImports[i];
|
||||
const externalModuleName = getExternalModuleNameLiteral(externalImport, currentSourceFile, host, resolver, compilerOptions);
|
||||
const text = externalModuleName.text;
|
||||
const groupIndex = groupIndices.get(text);
|
||||
if (groupIndex !== undefined) {
|
||||
if (hasProperty(groupIndices, text)) {
|
||||
// deduplicate/group entries in dependency list by the dependency name
|
||||
const groupIndex = groupIndices[text];
|
||||
dependencyGroups[groupIndex].externalImports.push(externalImport);
|
||||
}
|
||||
else {
|
||||
groupIndices.set(text, dependencyGroups.length);
|
||||
groupIndices[text] = dependencyGroups.length;
|
||||
dependencyGroups.push({
|
||||
name: externalModuleName,
|
||||
externalImports: [externalImport]
|
||||
@ -301,8 +301,7 @@ namespace ts {
|
||||
// this set is used to filter names brought by star exports.
|
||||
|
||||
// local names set should only be added if we have anything exported
|
||||
|
||||
if (!moduleInfo.exportedNames && mapIsEmpty(moduleInfo.exportSpecifiers)) {
|
||||
if (!moduleInfo.exportedNames && isEmpty(moduleInfo.exportSpecifiers)) {
|
||||
// no exported declarations (export var ...) or export specifiers (export {x})
|
||||
// check if we have any non star export declarations.
|
||||
let hasExportDeclarationWithExportClause = false;
|
||||
@ -599,7 +598,7 @@ namespace ts {
|
||||
if (hasAssociatedEndOfDeclarationMarker(node)) {
|
||||
// Defer exports until we encounter an EndOfDeclarationMarker node
|
||||
const id = getOriginalNodeId(node);
|
||||
deferredExports.set(id, appendExportsOfImportDeclaration(deferredExports.get(id), node));
|
||||
deferredExports[id] = appendExportsOfImportDeclaration(deferredExports[id], node);
|
||||
}
|
||||
else {
|
||||
statements = appendExportsOfImportDeclaration(statements, node);
|
||||
@ -622,7 +621,7 @@ namespace ts {
|
||||
if (hasAssociatedEndOfDeclarationMarker(node)) {
|
||||
// Defer exports until we encounter an EndOfDeclarationMarker node
|
||||
const id = getOriginalNodeId(node);
|
||||
deferredExports.set(id, appendExportsOfImportEqualsDeclaration(deferredExports.get(id), node));
|
||||
deferredExports[id] = appendExportsOfImportEqualsDeclaration(deferredExports[id], node);
|
||||
}
|
||||
else {
|
||||
statements = appendExportsOfImportEqualsDeclaration(statements, node);
|
||||
@ -647,7 +646,7 @@ namespace ts {
|
||||
if (original && hasAssociatedEndOfDeclarationMarker(original)) {
|
||||
// Defer exports until we encounter an EndOfDeclarationMarker node
|
||||
const id = getOriginalNodeId(node);
|
||||
deferredExports.set(id, appendExportStatement(deferredExports.get(id), createIdentifier("default"), expression, /*allowComments*/ true));
|
||||
deferredExports[id] = appendExportStatement(deferredExports[id], createIdentifier("default"), expression, /*allowComments*/ true);
|
||||
}
|
||||
else {
|
||||
return createExportStatement(createIdentifier("default"), expression, /*allowComments*/ true);
|
||||
@ -679,7 +678,7 @@ namespace ts {
|
||||
if (hasAssociatedEndOfDeclarationMarker(node)) {
|
||||
// Defer exports until we encounter an EndOfDeclarationMarker node
|
||||
const id = getOriginalNodeId(node);
|
||||
deferredExports.set(id, appendExportsOfHoistedDeclaration(deferredExports.get(id), node));
|
||||
deferredExports[id] = appendExportsOfHoistedDeclaration(deferredExports[id], node);
|
||||
}
|
||||
else {
|
||||
hoistedStatements = appendExportsOfHoistedDeclaration(hoistedStatements, node);
|
||||
@ -721,7 +720,7 @@ namespace ts {
|
||||
if (hasAssociatedEndOfDeclarationMarker(node)) {
|
||||
// Defer exports until we encounter an EndOfDeclarationMarker node
|
||||
const id = getOriginalNodeId(node);
|
||||
deferredExports.set(id, appendExportsOfHoistedDeclaration(deferredExports.get(id), node));
|
||||
deferredExports[id] = appendExportsOfHoistedDeclaration(deferredExports[id], node);
|
||||
}
|
||||
else {
|
||||
statements = appendExportsOfHoistedDeclaration(statements, node);
|
||||
@ -761,7 +760,7 @@ namespace ts {
|
||||
if (isMarkedDeclaration) {
|
||||
// Defer exports until we encounter an EndOfDeclarationMarker node
|
||||
const id = getOriginalNodeId(node);
|
||||
deferredExports.set(id, appendExportsOfVariableStatement(deferredExports.get(id), node, isExportedDeclaration));
|
||||
deferredExports[id] = appendExportsOfVariableStatement(deferredExports[id], node, isExportedDeclaration);
|
||||
}
|
||||
else {
|
||||
statements = appendExportsOfVariableStatement(statements, node, /*exportSelf*/ false);
|
||||
@ -867,7 +866,7 @@ namespace ts {
|
||||
if (hasAssociatedEndOfDeclarationMarker(node) && node.original.kind === SyntaxKind.VariableStatement) {
|
||||
const id = getOriginalNodeId(node);
|
||||
const isExportedDeclaration = hasModifier(node.original, ModifierFlags.Export);
|
||||
deferredExports.set(id, appendExportsOfVariableStatement(deferredExports.get(id), <VariableStatement>node.original, isExportedDeclaration));
|
||||
deferredExports[id] = appendExportsOfVariableStatement(deferredExports[id], <VariableStatement>node.original, isExportedDeclaration);
|
||||
}
|
||||
|
||||
return node;
|
||||
@ -893,9 +892,9 @@ namespace ts {
|
||||
// end of the transformed declaration. We use this marker to emit any deferred exports
|
||||
// of the declaration.
|
||||
const id = getOriginalNodeId(node);
|
||||
const statements = deferredExports.get(id);
|
||||
const statements = deferredExports[id];
|
||||
if (statements) {
|
||||
deferredExports.delete(id);
|
||||
delete deferredExports[id];
|
||||
return append(statements, node);
|
||||
}
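The hunks in this area all follow the same defer-and-flush pattern: exports are appended to a list keyed by the original node id, then emitted when the matching EndOfDeclarationMarker is visited. A minimal sketch of that pattern with the reverted string-indexed map (illustrative only: deferExport and flushDeferredExports are hypothetical helper names, while append and Map<Statement[]> come from the surrounding code):

function deferExport(deferredExports: Map<Statement[]>, id: number, statement: Statement): void {
    // the numeric id is coerced to a string property name by the index signature
    deferredExports[id] = append(deferredExports[id], statement);
}

function flushDeferredExports(deferredExports: Map<Statement[]>, id: number, marker: Statement): Statement | Statement[] {
    const pending = deferredExports[id];
    if (pending) {
        delete deferredExports[id];       // each declaration's exports are flushed once
        return append(pending, marker);
    }
    return marker;
}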
|
||||
|
||||
@ -1064,7 +1063,7 @@ namespace ts {
|
||||
}
|
||||
|
||||
const name = getDeclarationName(decl);
|
||||
const exportSpecifiers = moduleInfo.exportSpecifiers.get(name.text);
|
||||
const exportSpecifiers = moduleInfo.exportSpecifiers[name.text];
|
||||
if (exportSpecifiers) {
|
||||
for (const exportSpecifier of exportSpecifiers) {
|
||||
if (exportSpecifier.name.text !== excludeName) {
|
||||
@ -1532,12 +1531,12 @@ namespace ts {
|
||||
if (node.kind === SyntaxKind.SourceFile) {
|
||||
const id = getOriginalNodeId(node);
|
||||
currentSourceFile = <SourceFile>node;
|
||||
moduleInfo = moduleInfoMap.get(id);
|
||||
exportFunction = exportFunctionsMap.get(id);
|
||||
noSubstitution = noSubstitutionMap.get(id);
|
||||
moduleInfo = moduleInfoMap[id];
|
||||
exportFunction = exportFunctionsMap[id];
|
||||
noSubstitution = noSubstitutionMap[id];
|
||||
|
||||
if (noSubstitution) {
|
||||
noSubstitutionMap.delete(id);
|
||||
delete noSubstitutionMap[id];
|
||||
}
|
||||
|
||||
previousOnEmitNode(emitContext, node, emitCallback);
|
||||
@ -1726,7 +1725,7 @@ namespace ts {
|
||||
exportedNames = append(exportedNames, getDeclarationName(valueDeclaration));
|
||||
}
|
||||
|
||||
exportedNames = addRange(exportedNames, moduleInfo && moduleInfo.exportedBindings.get(getOriginalNodeId(valueDeclaration)));
|
||||
exportedNames = addRange(exportedNames, moduleInfo && moduleInfo.exportedBindings[getOriginalNodeId(valueDeclaration)]);
|
||||
}
|
||||
}
|
||||
|
||||
@ -1739,8 +1738,8 @@ namespace ts {
|
||||
* @param node The node which should not be substituted.
|
||||
*/
|
||||
function preventSubstitution<T extends Node>(node: T): T {
|
||||
if (noSubstitution === undefined) noSubstitution = createMap<number, boolean>();
|
||||
noSubstitution.set(getNodeId(node), true);
|
||||
if (noSubstitution === undefined) noSubstitution = createMap<boolean>();
|
||||
noSubstitution[getNodeId(node)] = true;
|
||||
return node;
|
||||
}
|
||||
|
||||
@ -1750,7 +1749,7 @@ namespace ts {
|
||||
* @param node The node to test.
|
||||
*/
|
||||
function isSubstitutionPrevented(node: Node) {
|
||||
return noSubstitution && node.id && noSubstitution.get(node.id);
|
||||
return noSubstitution && node.id && noSubstitution[node.id];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -47,7 +47,7 @@ namespace ts {
|
||||
let currentNamespace: ModuleDeclaration;
|
||||
let currentNamespaceContainerName: Identifier;
|
||||
let currentScope: SourceFile | Block | ModuleBlock | CaseBlock;
|
||||
let currentScopeFirstDeclarationsOfName: Map<string, Node>;
|
||||
let currentScopeFirstDeclarationsOfName: Map<Node>;
|
||||
let currentExternalHelpersModuleName: Identifier;
|
||||
|
||||
/**
|
||||
@ -60,7 +60,7 @@ namespace ts {
|
||||
* A map that keeps track of aliases created for classes with decorators to avoid issues
|
||||
* with the double-binding behavior of classes.
|
||||
*/
|
||||
let classAliases: Map<number, Identifier>;
|
||||
let classAliases: Map<Identifier>;
|
||||
|
||||
/**
|
||||
* Keeps track of whether we are within any containing namespaces when performing
|
||||
@ -783,7 +783,7 @@ namespace ts {
|
||||
if (resolver.getNodeCheckFlags(node) & NodeCheckFlags.ClassWithConstructorReference) {
|
||||
// record an alias as the class name is not in scope for statics.
|
||||
enableSubstitutionForClassAliases();
|
||||
classAliases.set(getOriginalNodeId(node), getSynthesizedClone(temp));
|
||||
classAliases[getOriginalNodeId(node)] = getSynthesizedClone(temp);
|
||||
}
|
||||
|
||||
// To preserve the behavior of the old emitter, we explicitly indent
|
||||
@ -1454,7 +1454,7 @@ namespace ts {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const classAlias = classAliases && classAliases.get(getOriginalNodeId(node));
|
||||
const classAlias = classAliases && classAliases[getOriginalNodeId(node)];
|
||||
const localName = getLocalName(node, /*allowComments*/ false, /*allowSourceMaps*/ true);
|
||||
const decorate = createDecorateHelper(currentExternalHelpersModuleName, decoratorExpressions, localName);
|
||||
const expression = createAssignment(localName, classAlias ? createAssignment(classAlias, decorate) : decorate);
|
||||
@ -2245,7 +2245,7 @@ namespace ts {
|
||||
const savedCurrentScope = currentScope;
|
||||
const savedCurrentScopeFirstDeclarationsOfName = currentScopeFirstDeclarationsOfName;
|
||||
currentScope = body;
|
||||
currentScopeFirstDeclarationsOfName = createMap<string, Node>();
|
||||
currentScopeFirstDeclarationsOfName = createMap<Node>();
|
||||
startLexicalEnvironment();
|
||||
|
||||
const statements = visitNodes(body.statements, visitor, isStatement, start);
|
||||
@ -2630,10 +2630,12 @@ namespace ts {
|
||||
const name = node.symbol && node.symbol.name;
|
||||
if (name) {
|
||||
if (!currentScopeFirstDeclarationsOfName) {
|
||||
currentScopeFirstDeclarationsOfName = createMap<string, Node>();
|
||||
currentScopeFirstDeclarationsOfName = createMap<Node>();
|
||||
}
|
||||
|
||||
setIfNotSet(currentScopeFirstDeclarationsOfName, name, node);
|
||||
if (!(name in currentScopeFirstDeclarationsOfName)) {
|
||||
currentScopeFirstDeclarationsOfName[name] = node;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -2645,7 +2647,7 @@ namespace ts {
|
||||
if (currentScopeFirstDeclarationsOfName) {
|
||||
const name = node.symbol && node.symbol.name;
|
||||
if (name) {
|
||||
return currentScopeFirstDeclarationsOfName.get(name) === node;
|
||||
return currentScopeFirstDeclarationsOfName[name] === node;
|
||||
}
|
||||
}
|
||||
|
||||
@ -3186,7 +3188,7 @@ namespace ts {
|
||||
if (resolver.getNodeCheckFlags(node) & NodeCheckFlags.ClassWithConstructorReference) {
|
||||
enableSubstitutionForClassAliases();
|
||||
const classAlias = createUniqueName(node.name && !isGeneratedIdentifier(node.name) ? node.name.text : "default");
|
||||
classAliases.set(getOriginalNodeId(node), classAlias);
|
||||
classAliases[getOriginalNodeId(node)] = classAlias;
|
||||
hoistVariableDeclaration(classAlias);
|
||||
return classAlias;
|
||||
}
|
||||
@ -3218,7 +3220,7 @@ namespace ts {
|
||||
context.enableSubstitution(SyntaxKind.Identifier);
|
||||
|
||||
// Keep track of class aliases.
|
||||
classAliases = createMap<number, Identifier>();
|
||||
classAliases = createMap<Identifier>();
|
||||
}
|
||||
}
|
||||
|
||||
@ -3331,7 +3333,7 @@ namespace ts {
|
||||
// constructor references in static property initializers.
|
||||
const declaration = resolver.getReferencedValueDeclaration(node);
|
||||
if (declaration) {
|
||||
const classAlias = classAliases.get(declaration.id);
|
||||
const classAlias = classAliases[declaration.id];
|
||||
if (classAlias) {
|
||||
const clone = getSynthesizedClone(classAlias);
|
||||
setSourceMapRange(clone, node);
|
||||
|
||||
@ -93,7 +93,7 @@ namespace ts {
|
||||
return false;
|
||||
}
|
||||
try {
|
||||
ts.localizedDiagnosticMessages = mapOfMapLike<string>(JSON.parse(fileContents));
|
||||
ts.localizedDiagnosticMessages = JSON.parse(fileContents);
|
||||
}
|
||||
catch (e) {
|
||||
errors.push(createCompilerDiagnostic(Diagnostics.Corrupted_locale_file_0, filePath));
|
||||
@ -127,11 +127,11 @@ namespace ts {
|
||||
const gutterSeparator = " ";
|
||||
const resetEscapeSequence = "\u001b[0m";
|
||||
const ellipsis = "...";
|
||||
const categoryFormatMap = createMap<DiagnosticCategory, string>([
|
||||
[DiagnosticCategory.Warning, yellowForegroundEscapeSequence],
|
||||
[DiagnosticCategory.Error, redForegroundEscapeSequence],
|
||||
[DiagnosticCategory.Message, blueForegroundEscapeSequence],
|
||||
]);
|
||||
const categoryFormatMap = createMap<string>({
|
||||
[DiagnosticCategory.Warning]: yellowForegroundEscapeSequence,
|
||||
[DiagnosticCategory.Error]: redForegroundEscapeSequence,
|
||||
[DiagnosticCategory.Message]: blueForegroundEscapeSequence,
|
||||
});
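Worth noting for the hunk above: with the string-indexed Map<T>, the computed enum keys in the object literal and the later categoryFormatMap[diagnostic.category] lookup coerce the numeric DiagnosticCategory value to the same property-name string, so the literal form behaves like the old Map<DiagnosticCategory, string>. A small illustrative sketch (SampleCategory and the color strings are made up for the example):

enum SampleCategory { Warning, Error, Message }

const sampleFormatMap = createMap<string>({
    [SampleCategory.Warning]: "yellow",   // stored under the property name "0"
    [SampleCategory.Error]: "red",        // stored under "1"
    [SampleCategory.Message]: "blue",     // stored under "2"
});

const errorColor = sampleFormatMap[SampleCategory.Error]; // numeric index, looks up "1" and yields "red"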
|
||||
|
||||
function formatAndReset(text: string, formatStyle: string) {
|
||||
return formatStyle + text + resetEscapeSequence;
|
||||
@ -199,7 +199,7 @@ namespace ts {
|
||||
output += `${ relativeFileName }(${ firstLine + 1 },${ firstLineChar + 1 }): `;
|
||||
}
|
||||
|
||||
const categoryColor = categoryFormatMap.get(diagnostic.category);
|
||||
const categoryColor = categoryFormatMap[diagnostic.category];
|
||||
const category = DiagnosticCategory[diagnostic.category].toLowerCase();
|
||||
output += `${ formatAndReset(category, categoryColor) } TS${ diagnostic.code }: ${ flattenDiagnosticMessageText(diagnostic.messageText, sys.newLine) }`;
|
||||
output += sys.newLine + sys.newLine;
|
||||
@ -255,7 +255,7 @@ namespace ts {
|
||||
|
||||
// This map stores and reuses results of fileExists checks that happen inside 'createProgram'
|
||||
// This allows us to save time in module-resolution-heavy scenarios when the existence of the same file might be checked multiple times.
|
||||
let cachedExistingFiles: Map<string, boolean>;
|
||||
let cachedExistingFiles: Map<boolean>;
|
||||
let hostFileExists: typeof compilerHost.fileExists;
|
||||
|
||||
if (commandLine.options.locale) {
|
||||
@ -425,7 +425,7 @@ namespace ts {
|
||||
}
|
||||
|
||||
// reset the cache of existing files
|
||||
cachedExistingFiles = createMap<string, boolean>();
|
||||
cachedExistingFiles = createMap<boolean>();
|
||||
|
||||
const compileResult = compile(rootFileNames, compilerOptions, compilerHost);
|
||||
|
||||
@ -438,7 +438,9 @@ namespace ts {
|
||||
}
|
||||
|
||||
function cachedFileExists(fileName: string): boolean {
|
||||
return getOrUpdate(cachedExistingFiles, fileName, hostFileExists);
|
||||
return fileName in cachedExistingFiles
|
||||
? cachedExistingFiles[fileName]
|
||||
: cachedExistingFiles[fileName] = hostFileExists(fileName);
|
||||
}
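The cachedFileExists change above restores a plain-object cache in front of the host's fileExists. A minimal sketch of the same pattern in isolation (makeCachedFileExists is a hypothetical wrapper; the in-check-then-assign shape mirrors the hunk):

function makeCachedFileExists(realFileExists: (fileName: string) => boolean): (fileName: string) => boolean {
    const cache = createMap<boolean>();  // recreate this whenever the file system may have changed
    return fileName =>
        fileName in cache
            ? cache[fileName]                                // cache hit, including cached `false`
            : (cache[fileName] = realFileExists(fileName));  // first lookup goes to the host
}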
|
||||
|
||||
function getSourceFile(fileName: string, languageVersion: ScriptTarget, onError?: (message: string) => void) {
|
||||
@ -706,7 +708,7 @@ namespace ts {
|
||||
const usageColumn: string[] = []; // Things like "-d, --declaration" go in here.
|
||||
const descriptionColumn: string[] = [];
|
||||
|
||||
const optionsDescriptionMap = createMap<string, string[]>(); // Map between option.description and list of option.type if it is a kind
|
||||
const optionsDescriptionMap = createMap<string[]>(); // Map between option.description and list of option.type if it is a kind
|
||||
|
||||
for (let i = 0; i < optsList.length; i++) {
|
||||
const option = optsList[i];
|
||||
@ -734,11 +736,11 @@ namespace ts {
|
||||
description = getDiagnosticText(option.description);
|
||||
const options: string[] = [];
|
||||
const element = (<CommandLineOptionOfListType>option).element;
|
||||
const typeMap = <Map<string, number | string>>element.type;
|
||||
forEachKeyInMap(typeMap, key => {
|
||||
const typeMap = <Map<number | string>>element.type;
|
||||
for (const key in typeMap) {
|
||||
options.push(`'${key}'`);
|
||||
});
|
||||
optionsDescriptionMap.set(description, options);
|
||||
}
|
||||
optionsDescriptionMap[description] = options;
|
||||
}
|
||||
else {
|
||||
description = getDiagnosticText(option.description);
|
||||
@ -760,7 +762,7 @@ namespace ts {
|
||||
for (let i = 0; i < usageColumn.length; i++) {
|
||||
const usage = usageColumn[i];
|
||||
const description = descriptionColumn[i];
|
||||
const kindsList = optionsDescriptionMap.get(description);
|
||||
const kindsList = optionsDescriptionMap[description];
|
||||
output.push(usage + makePadding(marginLength - usage.length + 2) + description + sys.newLine);
|
||||
|
||||
if (kindsList) {
|
||||
|
||||
@ -15,7 +15,6 @@
|
||||
},
|
||||
"files": [
|
||||
"core.ts",
|
||||
"collections.ts",
|
||||
"performance.ts",
|
||||
"sys.ts",
|
||||
"types.ts",
|
||||
|
||||
@ -1,46 +1,11 @@
|
||||
namespace ts {
|
||||
/**
|
||||
* Type of objects whose values are all of the same type.
|
||||
* The `in` and `for-in` operators can *not* be safely used,
|
||||
* since `Object.prototype` may be modified by outside code.
|
||||
*/
|
||||
|
||||
export interface MapLike<T> {
|
||||
[index: string]: T;
|
||||
}
|
||||
|
||||
/**
|
||||
* This contains just the parts of ES6's `Map` interface that we allow.
|
||||
* Map can only be instantiated using NumberMap and StringMap, which come with shims.
|
||||
*
|
||||
* Internet Explorer does not support iterator-returning methods, so those are not allowed here.
|
||||
* But map-using functions in collections.ts check for these features and use them where possible.
|
||||
*/
|
||||
export interface Map<K, V> {
|
||||
clear(): void;
|
||||
delete(key: K): boolean;
|
||||
/**
|
||||
* Call `action` for each entry in the map.
|
||||
* Since we use a `for-in` loop for our shims, `key` may be a string.
|
||||
*/
|
||||
forEach(action: (value: V, key: K | string) => void): void;
|
||||
get(key: K): V;
|
||||
/**
|
||||
* Whether the key is in the map.
|
||||
* Note: It is better to ask forgiveness than permission. Consider calling `get` and checking if the result is undefined.
|
||||
*/
|
||||
has(key: K): boolean;
|
||||
set(key: K, value: V): void;
|
||||
}
|
||||
|
||||
/**
|
||||
* This contains just the parts of ES6's `Set` interface that we allow.
|
||||
*/
|
||||
export interface Set<T> {
|
||||
add(value: T): void;
|
||||
clear(): void;
|
||||
delete(value: T): boolean;
|
||||
forEach(action: (value: T) => void): void;
|
||||
has(value: T): boolean;
|
||||
export interface Map<T> extends MapLike<T> {
|
||||
__mapBrand: any;
|
||||
}
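This hunk is the heart of the revert: the two-parameter, ES6-style Map<K, V> (with get/set/has) and the Set<T> interface are dropped, and Map<T> goes back to being a branded, string-indexed MapLike<T>, so call sites elsewhere in the diff switch from method calls back to bracket indexing and in/for-in checks. A minimal sketch of how such a map is typically created and used after the revert (illustrative only; the real createMap lives in core.ts and is assumed here to build a prototype-free object):

function createMapSketch<T>(template?: MapLike<T>): Map<T> {
    const map: Map<T> = Object.create(null); // no keys inherited from Object.prototype
    for (const key in template) {
        map[key] = template[key];
    }
    return map;
}

const exportCounts = createMapSketch<number>({ foo: 1 });
exportCounts["bar"] = 2;              // instead of exportCounts.set("bar", 2)
const hasFoo = "foo" in exportCounts; // instead of exportCounts.has("foo")
const fooCount = exportCounts["foo"]; // instead of exportCounts.get("foo")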
|
||||
|
||||
// branded string type used to store absolute, normalized and canonicalized paths
|
||||
@ -2079,7 +2044,7 @@ namespace ts {
|
||||
|
||||
// this map is used by transpiler to supply alternative names for dependencies (i.e. in case of bundling)
|
||||
/* @internal */
|
||||
renamedDependencies?: Map<string, string>;
|
||||
renamedDependencies?: Map<string>;
|
||||
|
||||
/**
|
||||
* lib.d.ts should have a reference comment like
|
||||
@ -2099,7 +2064,7 @@ namespace ts {
|
||||
// The first node that causes this file to be a CommonJS module
|
||||
/* @internal */ commonJsModuleIndicator: Node;
|
||||
|
||||
/* @internal */ identifiers: Map<string, string>;
|
||||
/* @internal */ identifiers: Map<string>;
|
||||
/* @internal */ nodeCount: number;
|
||||
/* @internal */ identifierCount: number;
|
||||
/* @internal */ symbolCount: number;
|
||||
@ -2114,12 +2079,12 @@ namespace ts {
|
||||
// Stores a line map for the file.
|
||||
// This field should never be used directly to obtain line map, use getLineMap function instead.
|
||||
/* @internal */ lineMap: number[];
|
||||
/* @internal */ classifiableNames?: Set<string>;
|
||||
/* @internal */ classifiableNames?: Map<string>;
|
||||
// Stores a mapping 'external module reference text' -> 'resolved file name' | undefined
|
||||
// It is used to resolve module names in the checker.
|
||||
// Content of this field should never be used directly - use getResolvedModuleFileName/setResolvedModuleFileName functions instead
|
||||
/* @internal */ resolvedModules: Map<string, ResolvedModuleFull>;
|
||||
/* @internal */ resolvedTypeReferenceDirectiveNames: Map<string, ResolvedTypeReferenceDirective>;
|
||||
/* @internal */ resolvedModules: Map<ResolvedModuleFull>;
|
||||
/* @internal */ resolvedTypeReferenceDirectiveNames: Map<ResolvedTypeReferenceDirective>;
|
||||
/* @internal */ imports: LiteralExpression[];
|
||||
/* @internal */ moduleAugmentations: LiteralExpression[];
|
||||
/* @internal */ patternAmbientModules?: PatternAmbientModule[];
|
||||
@ -2203,7 +2168,7 @@ namespace ts {
|
||||
/* @internal */ getDiagnosticsProducingTypeChecker(): TypeChecker;
|
||||
/* @internal */ dropDiagnosticsProducingTypeChecker(): void;
|
||||
|
||||
/* @internal */ getClassifiableNames(): Set<string>;
|
||||
/* @internal */ getClassifiableNames(): Map<string>;
|
||||
|
||||
/* @internal */ getNodeCount(): number;
|
||||
/* @internal */ getIdentifierCount(): number;
|
||||
@ -2211,7 +2176,7 @@ namespace ts {
|
||||
/* @internal */ getTypeCount(): number;
|
||||
|
||||
/* @internal */ getFileProcessingDiagnostics(): DiagnosticCollection;
|
||||
/* @internal */ getResolvedTypeReferenceDirectives(): Map<string, ResolvedTypeReferenceDirective>;
|
||||
/* @internal */ getResolvedTypeReferenceDirectives(): Map<ResolvedTypeReferenceDirective>;
|
||||
// For testing purposes only.
|
||||
/* @internal */ structureIsReused?: boolean;
|
||||
}
|
||||
@ -2272,7 +2237,7 @@ namespace ts {
|
||||
|
||||
getSourceFiles(): SourceFile[];
|
||||
getSourceFile(fileName: string): SourceFile;
|
||||
getResolvedTypeReferenceDirectives(): Map<string, ResolvedTypeReferenceDirective>;
|
||||
getResolvedTypeReferenceDirectives(): Map<ResolvedTypeReferenceDirective>;
|
||||
}
|
||||
|
||||
export interface TypeChecker {
|
||||
@ -2606,7 +2571,7 @@ namespace ts {
|
||||
declaredType?: Type; // Type of class, interface, enum, type alias, or type parameter
|
||||
typeParameters?: TypeParameter[]; // Type parameters of type alias (undefined if non-generic)
|
||||
inferredClassType?: Type; // Type of an inferred ES5 class
|
||||
instantiations?: Map<string, Type>; // Instantiations of generic type alias (undefined if non-generic)
|
||||
instantiations?: Map<Type>; // Instantiations of generic type alias (undefined if non-generic)
|
||||
mapper?: TypeMapper; // Type mapper for instantiation alias
|
||||
referenced?: boolean; // True if alias symbol has been referenced as a value
|
||||
containingType?: UnionOrIntersectionType; // Containing union or intersection type for synthetic property
|
||||
@ -2623,7 +2588,7 @@ namespace ts {
|
||||
/* @internal */
|
||||
export interface TransientSymbol extends Symbol, SymbolLinks { }
|
||||
|
||||
export type SymbolTable = Map<string, Symbol>;
|
||||
export type SymbolTable = Map<Symbol>;
|
||||
|
||||
/** Represents a "prefix*suffix" pattern. */
|
||||
/* @internal */
|
||||
@ -2765,7 +2730,7 @@ namespace ts {
|
||||
|
||||
// Enum types (TypeFlags.Enum)
|
||||
export interface EnumType extends Type {
|
||||
memberTypes: { [enumMemberValue: number]: EnumLiteralType };
|
||||
memberTypes: Map<EnumLiteralType>;
|
||||
}
|
||||
|
||||
// Enum types (TypeFlags.EnumLiteral)
|
||||
@ -2827,7 +2792,7 @@ namespace ts {
|
||||
// Generic class and interface types
|
||||
export interface GenericType extends InterfaceType, TypeReference {
|
||||
/* @internal */
|
||||
instantiations: Map<string, TypeReference>; // Generic instantiation cache
|
||||
instantiations: Map<TypeReference>; // Generic instantiation cache
|
||||
}
|
||||
|
||||
export interface UnionOrIntersectionType extends Type {
|
||||
@ -3115,7 +3080,7 @@ namespace ts {
|
||||
fileNames: string[]; // The file names that belong to the same project.
|
||||
projectRootPath: string; // The path to the project root directory
|
||||
safeListPath: string; // The path used to retrieve the safe list
|
||||
packageNameToTypingLocation: MapLike<string>; // The map of package names to their cached typing locations
|
||||
packageNameToTypingLocation: Map<string>; // The map of package names to their cached typing locations
|
||||
typingOptions: TypingOptions; // Used to customize the typing inference process
|
||||
compilerOptions: CompilerOptions; // Used as a source for typing inference
|
||||
unresolvedImports: ReadonlyArray<string>; // List of unresolved module ids from imports
|
||||
@ -3200,7 +3165,7 @@ namespace ts {
|
||||
/* @internal */
|
||||
export interface CommandLineOptionBase {
|
||||
name: string;
|
||||
type: "string" | "number" | "boolean" | "object" | "list" | Map<string, number | string>; // a value of a primitive type, or an object literal mapping named values to actual values
|
||||
type: "string" | "number" | "boolean" | "object" | "list" | Map<number | string>; // a value of a primitive type, or an object literal mapping named values to actual values
|
||||
isFilePath?: boolean; // True if option value is a path or fileName
|
||||
shortName?: string; // A short mnemonic for convenience - for instance, 'h' can be used in place of 'help'
|
||||
description?: DiagnosticMessage; // The message describing what the command line switch does
|
||||
@ -3216,7 +3181,7 @@ namespace ts {
|
||||
|
||||
/* @internal */
|
||||
export interface CommandLineOptionOfCustomType extends CommandLineOptionBase {
|
||||
type: Map<string, number | string>; // an object literal mapping named values to actual values
|
||||
type: Map<number | string>; // an object literal mapping named values to actual values
|
||||
}
|
||||
|
||||
/* @internal */
|
||||
@ -3543,7 +3508,7 @@ namespace ts {
|
||||
flags?: EmitFlags;
|
||||
commentRange?: TextRange;
|
||||
sourceMapRange?: TextRange;
|
||||
tokenSourceMapRanges?: Map<SyntaxKind, TextRange>;
|
||||
tokenSourceMapRanges?: Map<TextRange>;
|
||||
annotatedNodes?: Node[]; // Tracks Parse-tree nodes with EmitNodes for eventual cleanup.
|
||||
constantValue?: number;
|
||||
}
|
||||
|
||||
@ -84,27 +84,27 @@ namespace ts {
|
||||
}
|
||||
|
||||
export function hasResolvedModule(sourceFile: SourceFile, moduleNameText: string): boolean {
|
||||
return !!(sourceFile && sourceFile.resolvedModules && sourceFile.resolvedModules.get(moduleNameText));
|
||||
return !!(sourceFile && sourceFile.resolvedModules && sourceFile.resolvedModules[moduleNameText]);
|
||||
}
|
||||
|
||||
export function getResolvedModule(sourceFile: SourceFile, moduleNameText: string): ResolvedModuleFull {
|
||||
return hasResolvedModule(sourceFile, moduleNameText) ? sourceFile.resolvedModules.get(moduleNameText) : undefined;
|
||||
return hasResolvedModule(sourceFile, moduleNameText) ? sourceFile.resolvedModules[moduleNameText] : undefined;
|
||||
}
|
||||
|
||||
export function setResolvedModule(sourceFile: SourceFile, moduleNameText: string, resolvedModule: ResolvedModuleFull): void {
|
||||
if (!sourceFile.resolvedModules) {
|
||||
sourceFile.resolvedModules = createMap<string, ResolvedModuleFull>();
|
||||
sourceFile.resolvedModules = createMap<ResolvedModuleFull>();
|
||||
}
|
||||
|
||||
sourceFile.resolvedModules.set(moduleNameText, resolvedModule);
|
||||
sourceFile.resolvedModules[moduleNameText] = resolvedModule;
|
||||
}
|
||||
|
||||
export function setResolvedTypeReferenceDirective(sourceFile: SourceFile, typeReferenceDirectiveName: string, resolvedTypeReferenceDirective: ResolvedTypeReferenceDirective): void {
|
||||
if (!sourceFile.resolvedTypeReferenceDirectiveNames) {
|
||||
sourceFile.resolvedTypeReferenceDirectiveNames = createMap<string, ResolvedTypeReferenceDirective>();
|
||||
sourceFile.resolvedTypeReferenceDirectiveNames = createMap<ResolvedTypeReferenceDirective>();
|
||||
}
|
||||
|
||||
sourceFile.resolvedTypeReferenceDirectiveNames.set(typeReferenceDirectiveName, resolvedTypeReferenceDirective);
|
||||
sourceFile.resolvedTypeReferenceDirectiveNames[typeReferenceDirectiveName] = resolvedTypeReferenceDirective;
|
||||
}
|
||||
|
||||
/* @internal */
|
||||
@ -120,13 +120,13 @@ namespace ts {
|
||||
}
|
||||
|
||||
/* @internal */
|
||||
export function hasChangesInResolutions<T>(names: string[], newResolutions: T[], oldResolutions: Map<string, T>, comparer: (oldResolution: T, newResolution: T) => boolean): boolean {
|
||||
export function hasChangesInResolutions<T>(names: string[], newResolutions: T[], oldResolutions: Map<T>, comparer: (oldResolution: T, newResolution: T) => boolean): boolean {
|
||||
if (names.length !== newResolutions.length) {
|
||||
return false;
|
||||
}
|
||||
for (let i = 0; i < names.length; i++) {
|
||||
const newResolution = newResolutions[i];
|
||||
const oldResolution = oldResolutions && oldResolutions.get(names[i]);
|
||||
const oldResolution = oldResolutions && oldResolutions[names[i]];
|
||||
const changed =
|
||||
oldResolution
|
||||
? !newResolution || !comparer(oldResolution, newResolution)
|
||||
@ -2201,7 +2201,7 @@ namespace ts {
|
||||
|
||||
export function createDiagnosticCollection(): DiagnosticCollection {
|
||||
let nonFileDiagnostics: Diagnostic[] = [];
|
||||
const fileDiagnostics = createMap<string, Diagnostic[]>();
|
||||
const fileDiagnostics = createMap<Diagnostic[]>();
|
||||
|
||||
let diagnosticsModified = false;
|
||||
let modificationCount = 0;
|
||||
@ -2219,12 +2219,11 @@ namespace ts {
|
||||
}
|
||||
|
||||
function reattachFileDiagnostics(newFile: SourceFile): void {
|
||||
const diagnostics = fileDiagnostics.get(newFile.fileName);
|
||||
if (!diagnostics) {
|
||||
if (!hasProperty(fileDiagnostics, newFile.fileName)) {
|
||||
return;
|
||||
}
|
||||
|
||||
for (const diagnostic of diagnostics) {
|
||||
for (const diagnostic of fileDiagnostics[newFile.fileName]) {
|
||||
diagnostic.file = newFile;
|
||||
}
|
||||
}
|
||||
@ -2232,10 +2231,10 @@ namespace ts {
|
||||
function add(diagnostic: Diagnostic): void {
|
||||
let diagnostics: Diagnostic[];
|
||||
if (diagnostic.file) {
|
||||
diagnostics = fileDiagnostics.get(diagnostic.file.fileName);
|
||||
diagnostics = fileDiagnostics[diagnostic.file.fileName];
|
||||
if (!diagnostics) {
|
||||
diagnostics = [];
|
||||
fileDiagnostics.set(diagnostic.file.fileName, diagnostics);
|
||||
fileDiagnostics[diagnostic.file.fileName] = diagnostics;
|
||||
}
|
||||
}
|
||||
else {
|
||||
@ -2255,7 +2254,7 @@ namespace ts {
|
||||
function getDiagnostics(fileName?: string): Diagnostic[] {
|
||||
sortAndDeduplicate();
|
||||
if (fileName) {
|
||||
return fileDiagnostics.get(fileName) || [];
|
||||
return fileDiagnostics[fileName] || [];
|
||||
}
|
||||
|
||||
const allDiagnostics: Diagnostic[] = [];
|
||||
@ -2265,9 +2264,9 @@ namespace ts {
|
||||
|
||||
forEach(nonFileDiagnostics, pushDiagnostic);
|
||||
|
||||
fileDiagnostics.forEach(diagnostics => {
|
||||
forEach(diagnostics, pushDiagnostic);
|
||||
});
|
||||
for (const key in fileDiagnostics) {
|
||||
forEach(fileDiagnostics[key], pushDiagnostic);
|
||||
}
|
||||
|
||||
return sortAndDeduplicateDiagnostics(allDiagnostics);
|
||||
}
|
||||
@ -2280,7 +2279,9 @@ namespace ts {
|
||||
diagnosticsModified = false;
|
||||
nonFileDiagnostics = sortAndDeduplicateDiagnostics(nonFileDiagnostics);
|
||||
|
||||
updateMapValues(fileDiagnostics, sortAndDeduplicateDiagnostics);
|
||||
for (const key in fileDiagnostics) {
|
||||
fileDiagnostics[key] = sortAndDeduplicateDiagnostics(fileDiagnostics[key]);
|
||||
}
|
||||
}
|
||||
}
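The diagnostic collection above uses the usual get-or-create shape for string-indexed multi-maps: look up the bucket by file name, create it on first use, then push. The multiMapAdd helper used later in this diff provides the same behavior; a minimal sketch, not the actual core.ts implementation:

function multiMapAddSketch<T>(map: Map<T[]>, key: string, value: T): T[] {
    const values = map[key];
    if (values) {
        values.push(value);    // existing bucket: append in place
        return values;
    }
    return map[key] = [value]; // first value for this key creates the bucket
}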
|
||||
|
||||
@ -2290,7 +2291,7 @@ namespace ts {
|
||||
// the map below must be updated. Note that this regexp *does not* include the 'delete' character.
|
||||
// There is no reason for this other than that JSON.stringify does not handle it either.
|
||||
const escapedCharsRegExp = /[\\\"\u0000-\u001f\t\v\f\b\r\n\u2028\u2029\u0085]/g;
|
||||
const escapedCharsMap = mapOfMapLike({
|
||||
const escapedCharsMap = createMap({
|
||||
"\0": "\\0",
|
||||
"\t": "\\t",
|
||||
"\v": "\\v",
|
||||
@ -2317,7 +2318,7 @@ namespace ts {
|
||||
return s;
|
||||
|
||||
function getReplacement(c: string) {
|
||||
return escapedCharsMap.get(c) || get16BitUnicodeEscapeSequence(c.charCodeAt(0));
|
||||
return escapedCharsMap[c] || get16BitUnicodeEscapeSequence(c.charCodeAt(0));
|
||||
}
|
||||
}
|
||||
|
||||
@ -3349,19 +3350,18 @@ namespace ts {
|
||||
return false;
|
||||
}
|
||||
|
||||
const syntaxKindCache = createMap<SyntaxKind, string>();
|
||||
const syntaxKindCache = createMap<string>();
|
||||
|
||||
export function formatSyntaxKind(kind: SyntaxKind): string {
|
||||
const syntaxKindEnum = (<any>ts).SyntaxKind;
|
||||
if (syntaxKindEnum) {
|
||||
const cached = syntaxKindCache.get(kind);
|
||||
if (cached !== undefined) {
|
||||
return cached;
|
||||
if (syntaxKindCache[kind]) {
|
||||
return syntaxKindCache[kind];
|
||||
}
|
||||
|
||||
for (const name in syntaxKindEnum) {
|
||||
if (syntaxKindEnum[name] === kind) {
|
||||
return setAndReturn(syntaxKindCache, kind, `${kind}(${name})`);
|
||||
return syntaxKindCache[kind] = kind.toString() + " (" + name + ")";
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -3498,8 +3498,8 @@ namespace ts {
|
||||
|
||||
export interface ExternalModuleInfo {
|
||||
externalImports: (ImportDeclaration | ImportEqualsDeclaration | ExportDeclaration)[]; // imports of other external modules
|
||||
exportSpecifiers: Map<string, ExportSpecifier[]>; // export specifiers by name
|
||||
exportedBindings: Map<number, Identifier[]>; // exported names of local declarations
|
||||
exportSpecifiers: Map<ExportSpecifier[]>; // export specifiers by name
|
||||
exportedBindings: Map<Identifier[]>; // exported names of local declarations
|
||||
exportedNames: Identifier[]; // all exported names local to module
|
||||
exportEquals: ExportAssignment | undefined; // an export= declaration if one was present
|
||||
hasExportStarsToExportValues: boolean; // whether this module contains export*
|
||||
@ -3507,9 +3507,9 @@ namespace ts {
|
||||
|
||||
export function collectExternalModuleInfo(sourceFile: SourceFile, resolver: EmitResolver): ExternalModuleInfo {
|
||||
const externalImports: (ImportDeclaration | ImportEqualsDeclaration | ExportDeclaration)[] = [];
|
||||
const exportSpecifiers = createMap<string, ExportSpecifier[]>();
|
||||
const exportedBindings = createMap<number, Identifier[]>();
|
||||
const uniqueExports = createMap<string, Identifier>();
|
||||
const exportSpecifiers = createMap<ExportSpecifier[]>();
|
||||
const exportedBindings = createMap<Identifier[]>();
|
||||
const uniqueExports = createMap<Identifier>();
|
||||
let hasExportDefault = false;
|
||||
let exportEquals: ExportAssignment = undefined;
|
||||
let hasExportStarsToExportValues = false;
|
||||
@ -3546,7 +3546,7 @@ namespace ts {
|
||||
else {
|
||||
// export { x, y }
|
||||
for (const specifier of (<ExportDeclaration>node).exportClause.elements) {
|
||||
if (!uniqueExports.has(specifier.name.text)) {
|
||||
if (!uniqueExports[specifier.name.text]) {
|
||||
const name = specifier.propertyName || specifier.name;
|
||||
multiMapAdd(exportSpecifiers, name.text, specifier);
|
||||
|
||||
@ -3557,7 +3557,7 @@ namespace ts {
|
||||
multiMapAdd(exportedBindings, getOriginalNodeId(decl), specifier.name);
|
||||
}
|
||||
|
||||
uniqueExports.set(specifier.name.text, specifier.name);
|
||||
uniqueExports[specifier.name.text] = specifier.name;
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -3590,9 +3590,9 @@ namespace ts {
|
||||
else {
|
||||
// export function x() { }
|
||||
const name = (<FunctionDeclaration>node).name;
|
||||
if (!uniqueExports.has(name.text)) {
|
||||
if (!uniqueExports[name.text]) {
|
||||
multiMapAdd(exportedBindings, getOriginalNodeId(node), name);
|
||||
uniqueExports.set(name.text, name);
|
||||
uniqueExports[name.text] = name;
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -3610,9 +3610,9 @@ namespace ts {
|
||||
else {
|
||||
// export class x { }
|
||||
const name = (<ClassDeclaration>node).name;
|
||||
if (!uniqueExports.has(name.text)) {
|
||||
if (!uniqueExports[name.text]) {
|
||||
multiMapAdd(exportedBindings, getOriginalNodeId(node), name);
|
||||
uniqueExports.set(name.text, name);
|
||||
uniqueExports[name.text] = name;
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -3621,14 +3621,14 @@ namespace ts {
|
||||
}
|
||||
|
||||
let exportedNames: Identifier[];
|
||||
uniqueExports.forEach(exported => {
|
||||
exportedNames = ts.append(exportedNames, exported);
|
||||
});
|
||||
for (const key in uniqueExports) {
|
||||
exportedNames = ts.append(exportedNames, uniqueExports[key]);
|
||||
}
|
||||
|
||||
return { externalImports, exportSpecifiers, exportEquals, hasExportStarsToExportValues, exportedBindings, exportedNames };
|
||||
}
|
||||
|
||||
function collectExportedVariableInfo(decl: VariableDeclaration | BindingElement, uniqueExports: Map<string, Identifier>) {
|
||||
function collectExportedVariableInfo(decl: VariableDeclaration | BindingElement, uniqueExports: Map<Identifier>) {
|
||||
if (isBindingPattern(decl.name)) {
|
||||
for (const element of decl.name.elements) {
|
||||
if (!isOmittedExpression(element)) {
|
||||
@ -3637,8 +3637,8 @@ namespace ts {
|
||||
}
|
||||
}
|
||||
else if (!isGeneratedIdentifier(decl.name)) {
|
||||
if (!uniqueExports.has(decl.name.text)) {
|
||||
uniqueExports.set(decl.name.text, decl.name);
|
||||
if (!uniqueExports[decl.name.text]) {
|
||||
uniqueExports[decl.name.text] = decl.name;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -46,54 +46,54 @@ namespace ts {
|
||||
* supplant the existing `forEachChild` implementation if performance is not
|
||||
* significantly impacted.
|
||||
*/
|
||||
const nodeEdgeTraversalMap = createMap<SyntaxKind, NodeTraversalPath>([
|
||||
[SyntaxKind.QualifiedName, [
|
||||
const nodeEdgeTraversalMap = createMap<NodeTraversalPath>({
|
||||
[SyntaxKind.QualifiedName]: [
|
||||
{ name: "left", test: isEntityName },
|
||||
{ name: "right", test: isIdentifier }
|
||||
]],
|
||||
[SyntaxKind.Decorator, [
|
||||
],
|
||||
[SyntaxKind.Decorator]: [
|
||||
{ name: "expression", test: isLeftHandSideExpression }
|
||||
]],
|
||||
[SyntaxKind.TypeAssertionExpression, [
|
||||
],
|
||||
[SyntaxKind.TypeAssertionExpression]: [
|
||||
{ name: "type", test: isTypeNode },
|
||||
{ name: "expression", test: isUnaryExpression }
|
||||
]],
|
||||
[SyntaxKind.AsExpression, [
|
||||
],
|
||||
[SyntaxKind.AsExpression]: [
|
||||
{ name: "expression", test: isExpression },
|
||||
{ name: "type", test: isTypeNode }
|
||||
]],
|
||||
[SyntaxKind.NonNullExpression, [
|
||||
],
|
||||
[SyntaxKind.NonNullExpression]: [
|
||||
{ name: "expression", test: isLeftHandSideExpression }
|
||||
]],
|
||||
[SyntaxKind.EnumDeclaration, [
|
||||
],
|
||||
[SyntaxKind.EnumDeclaration]: [
|
||||
{ name: "decorators", test: isDecorator },
|
||||
{ name: "modifiers", test: isModifier },
|
||||
{ name: "name", test: isIdentifier },
|
||||
{ name: "members", test: isEnumMember }
|
||||
]],
|
||||
[SyntaxKind.ModuleDeclaration, [
|
||||
],
|
||||
[SyntaxKind.ModuleDeclaration]: [
|
||||
{ name: "decorators", test: isDecorator },
|
||||
{ name: "modifiers", test: isModifier },
|
||||
{ name: "name", test: isModuleName },
|
||||
{ name: "body", test: isModuleBody }
|
||||
]],
|
||||
[SyntaxKind.ModuleBlock, [
|
||||
],
|
||||
[SyntaxKind.ModuleBlock]: [
|
||||
{ name: "statements", test: isStatement }
|
||||
]],
|
||||
[SyntaxKind.ImportEqualsDeclaration, [
|
||||
],
|
||||
[SyntaxKind.ImportEqualsDeclaration]: [
|
||||
{ name: "decorators", test: isDecorator },
|
||||
{ name: "modifiers", test: isModifier },
|
||||
{ name: "name", test: isIdentifier },
|
||||
{ name: "moduleReference", test: isModuleReference }
|
||||
]],
|
||||
[SyntaxKind.ExternalModuleReference, [
|
||||
],
|
||||
[SyntaxKind.ExternalModuleReference]: [
|
||||
{ name: "expression", test: isExpression, optional: true }
|
||||
]],
|
||||
[SyntaxKind.EnumMember, [
|
||||
],
|
||||
[SyntaxKind.EnumMember]: [
|
||||
{ name: "name", test: isPropertyName },
|
||||
{ name: "initializer", test: isExpression, optional: true, parenthesize: parenthesizeExpressionForList }
|
||||
]]
|
||||
]);
|
||||
]
|
||||
});
|
||||
|
||||
function reduceNode<T>(node: Node, f: (memo: T, node: Node) => T, initial: T) {
|
||||
return node ? f(initial, node) : initial;
|
||||
@ -520,7 +520,7 @@ namespace ts {
|
||||
break;
|
||||
|
||||
default:
|
||||
const edgeTraversalPath = nodeEdgeTraversalMap.get(kind);
|
||||
const edgeTraversalPath = nodeEdgeTraversalMap[kind];
|
||||
if (edgeTraversalPath) {
|
||||
for (const edge of edgeTraversalPath) {
|
||||
const value = (<MapLike<any>>node)[edge.name];
|
||||
@ -1142,10 +1142,10 @@ namespace ts {
|
||||
|
||||
default:
|
||||
let updated: Node & MapLike<any>;
|
||||
const edgeTraversalPath = nodeEdgeTraversalMap.get(kind);
|
||||
const edgeTraversalPath = nodeEdgeTraversalMap[kind];
|
||||
if (edgeTraversalPath) {
|
||||
for (const edge of edgeTraversalPath) {
|
||||
const value = <Node | NodeArray<Node>>(<Node & MapLike<any>>node)[edge.name];
|
||||
const value = <Node | NodeArray<Node>>(<Node & Map<any>>node)[edge.name];
|
||||
if (value !== undefined) {
|
||||
const visited = isArray(value)
|
||||
? visitNodes(value, visitor, edge.test, 0, value.length, edge.parenthesize, node)
|
||||
|
||||
@ -86,7 +86,7 @@ namespace FourSlash {
|
||||
|
||||
export import IndentStyle = ts.IndentStyle;
|
||||
|
||||
const entityMap = ts.mapOfMapLike({
|
||||
const entityMap = ts.createMap({
|
||||
"&": "&amp;",
|
||||
"\"": "&quot;",
|
||||
"'": "&#39;",
|
||||
@ -96,7 +96,7 @@ namespace FourSlash {
|
||||
});
|
||||
|
||||
export function escapeXmlAttributeValue(s: string) {
|
||||
return s.replace(/[&<>"'\/]/g, ch => entityMap.get(ch));
|
||||
return s.replace(/[&<>"'\/]/g, ch => entityMap[ch]);
|
||||
}
|
||||
|
||||
// Name of testcase metadata including ts.CompilerOptions properties that will be used by globalOptions
|
||||
@ -189,7 +189,7 @@ namespace FourSlash {
|
||||
|
||||
public formatCodeSettings: ts.FormatCodeSettings;
|
||||
|
||||
private inputFiles = ts.createMap<string, string>(); // Map between inputFile's fileName and its content for easily looking up when resolving references
|
||||
private inputFiles = ts.createMap<string>(); // Map between inputFile's fileName and its content for easily looking up when resolving references
|
||||
|
||||
private static getDisplayPartsJson(displayParts: ts.SymbolDisplayPart[]) {
|
||||
let result = "";
|
||||
@ -222,7 +222,7 @@ namespace FourSlash {
|
||||
}
|
||||
|
||||
function tryAdd(path: string) {
|
||||
const inputFile = inputFiles.get(path);
|
||||
const inputFile = inputFiles[path];
|
||||
if (inputFile && !Harness.isDefaultLibraryFile(path)) {
|
||||
languageServiceAdapterHost.addScript(path, inputFile, /*isRootFile*/ true);
|
||||
return true;
|
||||
@ -256,7 +256,7 @@ namespace FourSlash {
|
||||
|
||||
ts.forEach(testData.files, file => {
|
||||
// Create map between fileName and its content for easily looking up when resolveReference flag is specified
|
||||
this.inputFiles.set(file.fileName, file.content);
|
||||
this.inputFiles[file.fileName] = file.content;
|
||||
|
||||
if (ts.getBaseFileName(file.fileName).toLowerCase() === "tsconfig.json") {
|
||||
const configJson = ts.parseConfigFileTextToJson(file.fileName, file.content);
|
||||
@ -322,11 +322,11 @@ namespace FourSlash {
|
||||
}
|
||||
else {
|
||||
// If the resolveReference file-option is not specified, do not resolve any files and include all inputFiles
|
||||
this.inputFiles.forEach((inputFile, fileName) => {
|
||||
for (const fileName in this.inputFiles) {
|
||||
if (!Harness.isDefaultLibraryFile(fileName)) {
|
||||
this.languageServiceAdapterHost.addScript(fileName, inputFile, /*isRootFile*/ true);
|
||||
this.languageServiceAdapterHost.addScript(fileName, this.inputFiles[fileName], /*isRootFile*/ true);
|
||||
}
|
||||
});
|
||||
}
|
||||
this.languageServiceAdapterHost.addScript(Harness.Compiler.defaultLibFileName,
|
||||
Harness.Compiler.getDefaultLibrarySourceFile().text, /*isRootFile*/ false);
|
||||
}
|
||||
@ -674,11 +674,13 @@ namespace FourSlash {
|
||||
|
||||
public noItemsWithSameNameButDifferentKind(): void {
|
||||
const completions = this.getCompletionListAtCaret();
|
||||
const uniqueItems = ts.createMap<string, string>();
|
||||
const uniqueItems = ts.createMap<string>();
|
||||
for (const item of completions.entries) {
|
||||
if (!ts.setIfNotSet(uniqueItems, item.name, item.kind)) {
|
||||
const uniqueItem = uniqueItems.get(item.name);
|
||||
assert.equal(item.kind, uniqueItem, `Items should have the same kind, got ${item.kind} and ${uniqueItem}`);
|
||||
if (!(item.name in uniqueItems)) {
|
||||
uniqueItems[item.name] = item.kind;
|
||||
}
|
||||
else {
|
||||
assert.equal(item.kind, uniqueItems[item.name], `Items should have the same kind, got ${item.kind} and ${uniqueItems[item.name]}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -856,7 +858,7 @@ namespace FourSlash {
|
||||
}
|
||||
|
||||
public verifyRangesWithSameTextReferenceEachOther() {
|
||||
this.rangesByTextMap().forEach(ranges => this.verifyRangesReferenceEachOther(ranges));
|
||||
ts.forEachProperty(this.rangesByText(), ranges => this.verifyRangesReferenceEachOther(ranges));
|
||||
}
|
||||
|
||||
public verifyDisplayPartsOfReferencedSymbol(expected: ts.SymbolDisplayPart[]) {
|
||||
@ -932,8 +934,7 @@ namespace FourSlash {
|
||||
}
|
||||
|
||||
public verifyQuickInfos(namesAndTexts: { [name: string]: string | [string, string] }) {
|
||||
for (const name in namesAndTexts) {
|
||||
const text = namesAndTexts[name];
|
||||
ts.forEachProperty(ts.createMap(namesAndTexts), (text, name) => {
|
||||
if (text instanceof Array) {
|
||||
assert(text.length === 2);
|
||||
const [expectedText, expectedDocumentation] = text;
|
||||
@ -942,7 +943,7 @@ namespace FourSlash {
|
||||
else {
|
||||
this.verifyQuickInfoAt(name, text);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public verifyQuickInfoString(expectedText: string, expectedDocumentation?: string) {
|
||||
@ -1776,8 +1777,8 @@ namespace FourSlash {
|
||||
return this.testData.ranges;
|
||||
}
|
||||
|
||||
private rangesByTextMap(): ts.Map<string, Range[]> {
|
||||
const result = ts.createMap<string, Range[]>();
|
||||
public rangesByText(): ts.Map<Range[]> {
|
||||
const result = ts.createMap<Range[]>();
|
||||
for (const range of this.getRanges()) {
|
||||
const text = this.rangeText(range);
|
||||
ts.multiMapAdd(result, text, range);
|
||||
@ -1785,10 +1786,6 @@ namespace FourSlash {
|
||||
return result;
|
||||
}
|
||||
|
||||
public rangesByText(): ts.MapLike<Range[]> {
|
||||
return ts.mapLikeOfMap(this.rangesByTextMap());
|
||||
}
|
||||
|
||||
private rangeText({fileName, start, end}: Range): string {
|
||||
return this.getFileContent(fileName).slice(start, end);
|
||||
}
|
||||
@ -2069,7 +2066,7 @@ namespace FourSlash {
|
||||
|
||||
public verifyBraceCompletionAtPosition(negative: boolean, openingBrace: string) {
|
||||
|
||||
const openBraceMap = ts.mapOfMapLike<ts.CharacterCodes>({
|
||||
const openBraceMap = ts.createMap<ts.CharacterCodes>({
|
||||
"(": ts.CharacterCodes.openParen,
|
||||
"{": ts.CharacterCodes.openBrace,
|
||||
"[": ts.CharacterCodes.openBracket,
|
||||
@ -2079,7 +2076,7 @@ namespace FourSlash {
|
||||
"<": ts.CharacterCodes.lessThan
|
||||
});
|
||||
|
||||
const charCode = openBraceMap.get(openingBrace);
|
||||
const charCode = openBraceMap[openingBrace];
|
||||
|
||||
if (!charCode) {
|
||||
this.raiseError(`Invalid openingBrace '${openingBrace}' specified.`);
|
||||
@ -2954,7 +2951,7 @@ namespace FourSlashInterface {
|
||||
return this.state.getRanges();
|
||||
}
|
||||
|
||||
public rangesByText(): ts.MapLike<FourSlash.Range[]> {
|
||||
public rangesByText(): ts.Map<FourSlash.Range[]> {
|
||||
return this.state.rangesByText();
|
||||
}
|
||||
|
||||
|
||||
@ -922,19 +922,19 @@ namespace Harness {
|
||||
export const defaultLibFileName = "lib.d.ts";
|
||||
export const es2015DefaultLibFileName = "lib.es2015.d.ts";
|
||||
|
||||
const libFileNameSourceFileMap = ts.createMap<string, ts.SourceFile>([[
|
||||
defaultLibFileName,
|
||||
createSourceFileAndAssertInvariants(defaultLibFileName, IO.readFile(libFolder + "lib.es5.d.ts"), /*languageVersion*/ ts.ScriptTarget.Latest)
|
||||
]]);
|
||||
const libFileNameSourceFileMap = ts.createMap<ts.SourceFile>({
|
||||
[defaultLibFileName]: createSourceFileAndAssertInvariants(defaultLibFileName, IO.readFile(libFolder + "lib.es5.d.ts"), /*languageVersion*/ ts.ScriptTarget.Latest)
|
||||
});
|
||||
|
||||
export function getDefaultLibrarySourceFile(fileName = defaultLibFileName): ts.SourceFile {
|
||||
if (!isDefaultLibraryFile(fileName)) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const sourceFile = libFileNameSourceFileMap.get(fileName);
|
||||
return sourceFile || ts.setAndReturn(libFileNameSourceFileMap, fileName,
|
||||
createSourceFileAndAssertInvariants(fileName, IO.readFile(libFolder + fileName), ts.ScriptTarget.Latest));
|
||||
if (!libFileNameSourceFileMap[fileName]) {
|
||||
libFileNameSourceFileMap[fileName] = createSourceFileAndAssertInvariants(fileName, IO.readFile(libFolder + fileName), ts.ScriptTarget.Latest);
|
||||
}
|
||||
return libFileNameSourceFileMap[fileName];
|
||||
}
|
||||
|
||||
export function getDefaultLibFileName(options: ts.CompilerOptions): string {
|
||||
@ -1086,16 +1086,16 @@ namespace Harness {
|
||||
{ name: "symlink", type: "string" }
|
||||
];
|
||||
|
||||
let optionsIndex: ts.Map<string, ts.CommandLineOption>;
|
||||
let optionsIndex: ts.Map<ts.CommandLineOption>;
|
||||
function getCommandLineOption(name: string): ts.CommandLineOption {
|
||||
if (!optionsIndex) {
|
||||
optionsIndex = ts.createMap<string, ts.CommandLineOption>();
|
||||
optionsIndex = ts.createMap<ts.CommandLineOption>();
|
||||
const optionDeclarations = harnessOptionDeclarations.concat(ts.optionDeclarations);
|
||||
for (const option of optionDeclarations) {
|
||||
optionsIndex.set(option.name.toLowerCase(), option);
|
||||
optionsIndex[option.name.toLowerCase()] = option;
|
||||
}
|
||||
}
|
||||
return optionsIndex.get(name.toLowerCase());
|
||||
return optionsIndex[name.toLowerCase()];
|
||||
}
|
||||
|
||||
export function setCompilerOptionsFromHarnessSetting(settings: Harness.TestCaseParser.CompilerSettings, options: ts.CompilerOptions & HarnessOptions): void {
|
||||
@ -1452,10 +1452,10 @@ namespace Harness {
|
||||
|
||||
const fullWalker = new TypeWriterWalker(program, /*fullTypeCheck*/ true);
|
||||
|
||||
const fullResults = ts.createMap<string, TypeWriterResult[]>();
|
||||
const fullResults = ts.createMap<TypeWriterResult[]>();
|
||||
|
||||
for (const sourceFile of allFiles) {
|
||||
fullResults.set(sourceFile.unitName, fullWalker.getTypeAndSymbols(sourceFile.unitName));
|
||||
fullResults[sourceFile.unitName] = fullWalker.getTypeAndSymbols(sourceFile.unitName);
|
||||
}
|
||||
|
||||
// Produce baselines. The first gives the types for all expressions.
|
||||
@ -1502,13 +1502,13 @@ namespace Harness {
|
||||
Harness.Baseline.runBaseline(outputFileName, () => fullBaseLine, opts);
|
||||
}
|
||||
|
||||
function generateBaseLine(typeWriterResults: ts.Map<string, TypeWriterResult[]>, isSymbolBaseline: boolean): string {
|
||||
function generateBaseLine(typeWriterResults: ts.Map<TypeWriterResult[]>, isSymbolBaseline: boolean): string {
|
||||
const typeLines: string[] = [];
|
||||
const typeMap: { [fileName: string]: { [lineNum: number]: string[]; } } = {};
|
||||
|
||||
allFiles.forEach(file => {
|
||||
const codeLines = file.content.split("\n");
|
||||
typeWriterResults.get(file.unitName).forEach(result => {
|
||||
typeWriterResults[file.unitName].forEach(result => {
|
||||
if (isSymbolBaseline && !result.symbol) {
|
||||
return;
|
||||
}
|
||||
|
||||
@ -262,7 +262,7 @@ namespace Harness.LanguageService {
|
||||
this.getModuleResolutionsForFile = (fileName) => {
|
||||
const scriptInfo = this.getScriptInfo(fileName);
|
||||
const preprocessInfo = ts.preProcessFile(scriptInfo.content, /*readImportFiles*/ true);
|
||||
const imports: ts.MapLike<string> = {};
|
||||
const imports = ts.createMap<string>();
|
||||
for (const module of preprocessInfo.importedFiles) {
|
||||
const resolutionInfo = ts.resolveModuleName(module.fileName, fileName, compilerOptions, moduleResolutionHost);
|
||||
if (resolutionInfo.resolvedModule) {
|
||||
@ -275,7 +275,7 @@ namespace Harness.LanguageService {
|
||||
const scriptInfo = this.getScriptInfo(fileName);
|
||||
if (scriptInfo) {
|
||||
const preprocessInfo = ts.preProcessFile(scriptInfo.content, /*readImportFiles*/ false);
|
||||
const resolutions: ts.MapLike<ts.ResolvedTypeReferenceDirective> = {};
|
||||
const resolutions = ts.createMap<ts.ResolvedTypeReferenceDirective>();
|
||||
const settings = this.nativeHost.getCompilationSettings();
|
||||
for (const typeReferenceDirective of preprocessInfo.typeReferenceDirectives) {
|
||||
const resolutionInfo = ts.resolveTypeReferenceDirective(typeReferenceDirective.fileName, fileName, settings, moduleResolutionHost);
|
||||
|
||||
@ -91,11 +91,13 @@ namespace Playback {
|
||||
}
|
||||
|
||||
function memoize<T>(func: (s: string) => T): Memoized<T> {
|
||||
const lookup = ts.createMap<string, T>();
|
||||
const run: Memoized<T> = <Memoized<T>>((s: string) =>
|
||||
ts.getOrUpdateAndAllowUndefined(lookup, s, func));
|
||||
let lookup: { [s: string]: T } = {};
|
||||
const run: Memoized<T> = <Memoized<T>>((s: string) => {
|
||||
if (lookup.hasOwnProperty(s)) return lookup[s];
|
||||
return lookup[s] = func(s);
|
||||
});
|
||||
run.reset = () => {
|
||||
lookup.clear();
|
||||
lookup = undefined;
|
||||
};
|
||||
|
||||
return run;
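A hypothetical usage of the reverted memoize helper above: the wrapped function runs once per distinct string argument, and reset() simply discards the lookup object, so as written the wrapper is not meant to be called again afterwards. The file path and ts.sys.readFile call are for illustration only:

const readFileMemoized = memoize((path: string) => ts.sys.readFile(path));
const first = readFileMemoized("/tmp/example.ts");  // invokes ts.sys.readFile
const second = readFileMemoized("/tmp/example.ts"); // returned from the lookup object
readFileMemoized.reset();                           // lookup becomes undefined; do not call the wrapper again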
|
||||
|
||||
@ -256,20 +256,17 @@ class ProjectRunner extends RunnerBase {
|
||||
// Set the values specified using json
|
||||
const optionNameMap = ts.arrayToMap(ts.optionDeclarations, option => option.name);
|
||||
for (const name in testCase) {
|
||||
if (name !== "mapRoot" && name !== "sourceRoot") {
|
||||
const option = optionNameMap.get(name);
|
||||
if (option !== undefined) {
|
||||
const optType = option.type;
|
||||
let value = <any>testCase[name];
|
||||
if (typeof optType !== "string") {
|
||||
const key = value.toLowerCase();
|
||||
const translation = optType.get(key);
|
||||
if (translation !== undefined) {
|
||||
value = translation;
|
||||
}
|
||||
if (name !== "mapRoot" && name !== "sourceRoot" && name in optionNameMap) {
|
||||
const option = optionNameMap[name];
|
||||
const optType = option.type;
|
||||
let value = <any>testCase[name];
|
||||
if (typeof optType !== "string") {
|
||||
const key = value.toLowerCase();
|
||||
if (key in optType) {
|
||||
value = optType[key];
|
||||
}
|
||||
compilerOptions[option.name] = value;
|
||||
}
|
||||
compilerOptions[option.name] = value;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -6,25 +6,24 @@ namespace ts {
|
||||
content: string;
|
||||
}
|
||||
|
||||
function createDefaultServerHost(fileMap: Map<string, File>): server.ServerHost {
|
||||
const existingDirectories = createSet();
|
||||
forEachKeyInMap(fileMap, name => {
|
||||
function createDefaultServerHost(fileMap: Map<File>): server.ServerHost {
|
||||
const existingDirectories = createMap<boolean>();
|
||||
for (const name in fileMap) {
|
||||
let dir = getDirectoryPath(name);
|
||||
let previous: string;
|
||||
do {
|
||||
existingDirectories.add(dir);
|
||||
existingDirectories[dir] = true;
|
||||
previous = dir;
|
||||
dir = getDirectoryPath(dir);
|
||||
} while (dir !== previous);
|
||||
});
|
||||
}
|
||||
return {
|
||||
args: <string[]>[],
|
||||
newLine: "\r\n",
|
||||
useCaseSensitiveFileNames: false,
|
||||
write: noop,
|
||||
readFile: (path: string): string => {
|
||||
const file = fileMap.get(path);
|
||||
return file !== undefined ? file.content : undefined;
|
||||
return path in fileMap ? fileMap[path].content : undefined;
|
||||
},
|
||||
writeFile: (_path: string, _data: string, _writeByteOrderMark?: boolean) => {
|
||||
return ts.notImplemented();
|
||||
@ -33,10 +32,10 @@ namespace ts {
|
||||
return ts.notImplemented();
|
||||
},
|
||||
fileExists: (path: string): boolean => {
|
||||
return fileMap.has(path);
|
||||
return path in fileMap;
|
||||
},
|
||||
directoryExists: (path: string): boolean => {
|
||||
return existingDirectories.has(path);
|
||||
return existingDirectories[path] || false;
|
||||
},
|
||||
createDirectory: noop,
|
||||
getExecutingFilePath: (): string => {
|
||||
@ -99,7 +98,7 @@ namespace ts {
|
||||
content: `foo()`
|
||||
};
|
||||
|
||||
const serverHost = createDefaultServerHost(mapOfMapLike({ [root.name]: root, [imported.name]: imported }));
|
||||
const serverHost = createDefaultServerHost(createMap({ [root.name]: root, [imported.name]: imported }));
|
||||
const { project, rootScriptInfo } = createProject(root.name, serverHost);
|
||||
|
||||
// ensure that imported file was found
|
||||
@ -183,7 +182,7 @@ namespace ts {
|
||||
content: `export var y = 1`
|
||||
};
|
||||
|
||||
const fileMap = mapOfMapLike({ [root.name]: root });
|
||||
const fileMap = createMap({ [root.name]: root });
|
||||
const serverHost = createDefaultServerHost(fileMap);
|
||||
const originalFileExists = serverHost.fileExists;
|
||||
|
||||
@ -207,7 +206,7 @@ namespace ts {
|
||||
assert.isTrue(typeof diags[0].messageText === "string" && ((<string>diags[0].messageText).indexOf("Cannot find module") === 0), "should be 'cannot find module' message");
|
||||
|
||||
// assert that import will success once file appear on disk
|
||||
fileMap.set(imported.name, imported);
|
||||
fileMap[imported.name] = imported;
|
||||
fileExistsCalledForBar = false;
|
||||
rootScriptInfo.editContent(0, root.content.length, `import {y} from "bar"`);
|
||||
|
||||
|
||||
@ -32,11 +32,11 @@ namespace ts {
|
||||
const map = arrayToMap(files, f => f.name);
|
||||
|
||||
if (hasDirectoryExists) {
|
||||
const directories = createSet();
|
||||
const directories = createMap<string>();
|
||||
for (const f of files) {
|
||||
let name = getDirectoryPath(f.name);
|
||||
while (true) {
|
||||
directories.add(name);
|
||||
directories[name] = name;
|
||||
const baseName = getDirectoryPath(name);
|
||||
if (baseName === name) {
|
||||
break;
|
||||
@ -46,19 +46,20 @@ namespace ts {
|
||||
}
|
||||
return {
|
||||
readFile,
|
||||
directoryExists: path => directories.has(path),
|
||||
directoryExists: path => {
|
||||
return path in directories;
|
||||
},
|
||||
fileExists: path => {
|
||||
assert.isTrue(directories.has(getDirectoryPath(path)), `'fileExists' '${path}' request in non-existing directory`);
|
||||
return map.has(path);
|
||||
assert.isTrue(getDirectoryPath(path) in directories, `'fileExists' '${path}' request in non-existing directory`);
|
||||
return path in map;
|
||||
}
|
||||
};
|
||||
}
|
||||
else {
|
||||
return { readFile, fileExists: path => map.has(path) };
|
||||
return { readFile, fileExists: path => path in map, };
|
||||
}
|
||||
function readFile(path: string): string {
|
||||
const file = map.get(path);
|
||||
return file !== undefined ? file.content : undefined;
|
||||
return path in map ? map[path].content : undefined;
|
||||
}
|
||||
}
|
||||
|
||||
@ -314,13 +315,12 @@ namespace ts {
|
||||
});
|
||||
|
||||
describe("Module resolution - relative imports", () => {
|
||||
function test(files: Map<string, string>, currentDirectory: string, rootFiles: string[], expectedFilesCount: number, relativeNamesToCheck: string[]) {
|
||||
function test(files: Map<string>, currentDirectory: string, rootFiles: string[], expectedFilesCount: number, relativeNamesToCheck: string[]) {
|
||||
const options: CompilerOptions = { module: ModuleKind.CommonJS };
|
||||
const host: CompilerHost = {
|
||||
getSourceFile: (fileName: string, languageVersion: ScriptTarget) => {
|
||||
const path = normalizePath(combinePaths(currentDirectory, fileName));
|
||||
const file = files.get(path);
|
||||
return file !== undefined ? createSourceFile(fileName, file, languageVersion) : undefined;
|
||||
return path in files ? createSourceFile(fileName, files[path], languageVersion) : undefined;
|
||||
},
|
||||
getDefaultLibFileName: () => "lib.d.ts",
|
||||
writeFile: notImplemented,
|
||||
@ -331,7 +331,7 @@ namespace ts {
|
||||
useCaseSensitiveFileNames: () => false,
|
||||
fileExists: fileName => {
|
||||
const path = normalizePath(combinePaths(currentDirectory, fileName));
|
||||
return files.has(path);
|
||||
return path in files;
|
||||
},
|
||||
readFile: notImplemented
|
||||
};
|
||||
@ -351,7 +351,7 @@ namespace ts {
|
||||
}
|
||||
|
||||
it("should find all modules", () => {
|
||||
const files = mapOfMapLike({
|
||||
const files = createMap({
|
||||
"/a/b/c/first/shared.ts": `
|
||||
class A {}
|
||||
export = A`,
|
||||
@ -370,7 +370,7 @@ export = C;
|
||||
});
|
||||
|
||||
it("should find modules in node_modules", () => {
|
||||
const files = mapOfMapLike({
|
||||
const files = createMap({
|
||||
"/parent/node_modules/mod/index.d.ts": "export var x",
|
||||
"/parent/app/myapp.ts": `import {x} from "mod"`
|
||||
});
|
||||
@ -378,7 +378,7 @@ export = C;
|
||||
});
|
||||
|
||||
it("should find file referenced via absolute and relative names", () => {
|
||||
const files = mapOfMapLike({
|
||||
const files = createMap({
|
||||
"/a/b/c.ts": `/// <reference path="b.ts"/>`,
|
||||
"/a/b/b.ts": "var x"
|
||||
});
|
||||
@ -388,10 +388,10 @@ export = C;
|
||||
|
||||
describe("Files with different casing", () => {
|
||||
const library = createSourceFile("lib.d.ts", "", ScriptTarget.ES5);
|
||||
function test(files: Map<string, string>, options: CompilerOptions, currentDirectory: string, useCaseSensitiveFileNames: boolean, rootFiles: string[], diagnosticCodes: number[]): void {
|
||||
function test(files: Map<string>, options: CompilerOptions, currentDirectory: string, useCaseSensitiveFileNames: boolean, rootFiles: string[], diagnosticCodes: number[]): void {
|
||||
const getCanonicalFileName = createGetCanonicalFileName(useCaseSensitiveFileNames);
|
||||
if (!useCaseSensitiveFileNames) {
|
||||
files = transformKeys(files, getCanonicalFileName);
|
||||
files = reduceProperties(files, (files, file, fileName) => (files[getCanonicalFileName(fileName)] = file, files), createMap<string>());
|
||||
}
|
||||
|
||||
const host: CompilerHost = {
|
||||
@ -400,8 +400,7 @@ export = C;
|
||||
return library;
|
||||
}
|
||||
const path = getCanonicalFileName(normalizePath(combinePaths(currentDirectory, fileName)));
|
||||
const file = files.get(path);
|
||||
return file !== undefined ? createSourceFile(fileName, file, languageVersion) : undefined;
|
||||
return path in files ? createSourceFile(fileName, files[path], languageVersion) : undefined;
|
||||
},
|
||||
getDefaultLibFileName: () => "lib.d.ts",
|
||||
writeFile: notImplemented,
|
||||
@ -412,7 +411,7 @@ export = C;
|
||||
useCaseSensitiveFileNames: () => useCaseSensitiveFileNames,
|
||||
fileExists: fileName => {
|
||||
const path = getCanonicalFileName(normalizePath(combinePaths(currentDirectory, fileName)));
|
||||
return files.has(path);
|
||||
return path in files;
|
||||
},
|
||||
readFile: notImplemented
|
||||
};
|
||||
@ -425,7 +424,7 @@ export = C;
|
||||
}
|
||||
|
||||
it("should succeed when the same file is referenced using absolute and relative names", () => {
|
||||
const files = mapOfMapLike({
|
||||
const files = createMap({
|
||||
"/a/b/c.ts": `/// <reference path="d.ts"/>`,
|
||||
"/a/b/d.ts": "var x"
|
||||
});
|
||||
@ -433,7 +432,7 @@ export = C;
|
||||
});
|
||||
|
||||
it("should fail when two files used in program differ only in casing (tripleslash references)", () => {
|
||||
const files = mapOfMapLike({
|
||||
const files = createMap({
|
||||
"/a/b/c.ts": `/// <reference path="D.ts"/>`,
|
||||
"/a/b/d.ts": "var x"
|
||||
});
|
||||
@ -441,7 +440,7 @@ export = C;
|
||||
});
|
||||
|
||||
it("should fail when two files used in program differ only in casing (imports)", () => {
|
||||
const files = mapOfMapLike({
|
||||
const files = createMap({
|
||||
"/a/b/c.ts": `import {x} from "D"`,
|
||||
"/a/b/d.ts": "export var x"
|
||||
});
|
||||
@ -449,7 +448,7 @@ export = C;
|
||||
});
|
||||
|
||||
it("should fail when two files used in program differ only in casing (imports, relative module names)", () => {
|
||||
const files = mapOfMapLike({
|
||||
const files = createMap({
|
||||
"moduleA.ts": `import {x} from "./ModuleB"`,
|
||||
"moduleB.ts": "export var x"
|
||||
});
|
||||
@ -457,7 +456,7 @@ export = C;
|
||||
});
|
||||
|
||||
it("should fail when two files exist on disk that differs only in casing", () => {
|
||||
const files = mapOfMapLike({
|
||||
const files = createMap({
|
||||
"/a/b/c.ts": `import {x} from "D"`,
|
||||
"/a/b/D.ts": "export var x",
|
||||
"/a/b/d.ts": "export var y"
|
||||
@ -466,7 +465,7 @@ export = C;
|
||||
});
|
||||
|
||||
it("should fail when module name in 'require' calls has inconsistent casing", () => {
|
||||
const files = mapOfMapLike({
|
||||
const files = createMap({
|
||||
"moduleA.ts": `import a = require("./ModuleC")`,
|
||||
"moduleB.ts": `import a = require("./moduleC")`,
|
||||
"moduleC.ts": "export var x"
|
||||
@ -475,7 +474,7 @@ export = C;
|
||||
});
|
||||
|
||||
it("should fail when module names in 'require' calls has inconsistent casing and current directory has uppercase chars", () => {
|
||||
const files = mapOfMapLike({
|
||||
const files = createMap({
|
||||
"/a/B/c/moduleA.ts": `import a = require("./ModuleC")`,
|
||||
"/a/B/c/moduleB.ts": `import a = require("./moduleC")`,
|
||||
"/a/B/c/moduleC.ts": "export var x",
|
||||
@ -487,7 +486,7 @@ import b = require("./moduleB");
|
||||
test(files, { module: ts.ModuleKind.CommonJS, forceConsistentCasingInFileNames: true }, "/a/B/c", /*useCaseSensitiveFileNames*/ false, ["moduleD.ts"], [1149]);
|
||||
});
|
||||
it("should not fail when module names in 'require' calls has consistent casing and current directory has uppercase chars", () => {
|
||||
const files = mapOfMapLike({
|
||||
const files = createMap({
|
||||
"/a/B/c/moduleA.ts": `import a = require("./moduleC")`,
|
||||
"/a/B/c/moduleB.ts": `import a = require("./moduleC")`,
|
||||
"/a/B/c/moduleC.ts": "export var x",
|
||||
@ -1045,8 +1044,8 @@ import b = require("./moduleB");
|
||||
const names = map(files, f => f.name);
|
||||
const sourceFiles = arrayToMap(map(files, f => createSourceFile(f.name, f.content, ScriptTarget.ES2015)), f => f.fileName);
|
||||
const compilerHost: CompilerHost = {
|
||||
fileExists : fileName => sourceFiles.has(fileName),
|
||||
getSourceFile: fileName => sourceFiles.get(fileName),
|
||||
fileExists : fileName => fileName in sourceFiles,
|
||||
getSourceFile: fileName => sourceFiles[fileName],
|
||||
getDefaultLibFileName: () => "lib.d.ts",
|
||||
writeFile: notImplemented,
|
||||
getCurrentDirectory: () => "/",
|
||||
@ -1054,10 +1053,7 @@ import b = require("./moduleB");
|
||||
getCanonicalFileName: f => f.toLowerCase(),
|
||||
getNewLine: () => "\r\n",
|
||||
useCaseSensitiveFileNames: () => false,
|
||||
readFile: fileName => {
|
||||
const file = sourceFiles.get(fileName);
|
||||
return file !== undefined ? file.text : undefined;
|
||||
}
|
||||
readFile: fileName => fileName in sourceFiles ? sourceFiles[fileName].text : undefined
|
||||
};
|
||||
const program1 = createProgram(names, {}, compilerHost);
|
||||
const diagnostics1 = program1.getFileProcessingDiagnostics().getDiagnostics();
|
||||
|
||||
@ -106,7 +106,7 @@ namespace ts {
|
||||
|
||||
return {
|
||||
getSourceFile(fileName): SourceFile {
|
||||
return files.get(fileName);
|
||||
return files[fileName];
|
||||
},
|
||||
getDefaultLibFileName(): string {
|
||||
return "lib.d.ts";
|
||||
@ -127,10 +127,9 @@ namespace ts {
|
||||
getNewLine(): string {
|
||||
return sys ? sys.newLine : newLine;
|
||||
},
|
||||
fileExists: fileName => files.has(fileName),
|
||||
fileExists: fileName => fileName in files,
|
||||
readFile: fileName => {
|
||||
const file = files.get(fileName);
|
||||
return file !== undefined ? file.text : undefined;
|
||||
return fileName in files ? files[fileName].text : undefined;
|
||||
}
|
||||
};
|
||||
}
|
||||
@ -162,7 +161,7 @@ namespace ts {
|
||||
return false;
|
||||
}
|
||||
|
||||
function checkCache<T>(caption: string, program: Program, fileName: string, expectedContent: Map<string, T>, getCache: (f: SourceFile) => Map<string, T>, entryChecker: (expected: T, original: T) => boolean): void {
|
||||
function checkCache<T>(caption: string, program: Program, fileName: string, expectedContent: Map<T>, getCache: (f: SourceFile) => Map<T>, entryChecker: (expected: T, original: T) => boolean): void {
|
||||
const file = program.getSourceFile(fileName);
|
||||
assert.isTrue(file !== undefined, `cannot find file ${fileName}`);
|
||||
const cache = getCache(file);
|
||||
@ -171,15 +170,15 @@ namespace ts {
|
||||
}
|
||||
else {
|
||||
assert.isTrue(cache !== undefined, `expected ${caption} to be set`);
|
||||
assert.isTrue(mapsAreEqual(expectedContent, cache, entryChecker), `contents of ${caption} did not match the expected contents.`);
|
||||
assert.isTrue(equalOwnProperties(expectedContent, cache, entryChecker), `contents of ${caption} did not match the expected contents.`);
|
||||
}
|
||||
}
|
||||
|
||||
function checkResolvedModulesCache(program: Program, fileName: string, expectedContent: Map<string, ResolvedModule>): void {
|
||||
function checkResolvedModulesCache(program: Program, fileName: string, expectedContent: Map<ResolvedModule>): void {
|
||||
checkCache("resolved modules", program, fileName, expectedContent, f => f.resolvedModules, checkResolvedModule);
|
||||
}
|
||||
|
||||
function checkResolvedTypeDirectivesCache(program: Program, fileName: string, expectedContent: Map<string, ResolvedTypeReferenceDirective>): void {
|
||||
function checkResolvedTypeDirectivesCache(program: Program, fileName: string, expectedContent: Map<ResolvedTypeReferenceDirective>): void {
|
||||
checkCache("resolved type directives", program, fileName, expectedContent, f => f.resolvedTypeReferenceDirectiveNames, checkResolvedTypeDirective);
|
||||
}
|
||||
|
||||
@ -290,7 +289,7 @@ namespace ts {
|
||||
const options: CompilerOptions = { target };
|
||||
|
||||
const program_1 = newProgram(files, ["a.ts"], options);
|
||||
checkResolvedModulesCache(program_1, "a.ts", mapOfMapLike({ "b": createResolvedModule("b.ts") }));
|
||||
checkResolvedModulesCache(program_1, "a.ts", createMap({ "b": createResolvedModule("b.ts") }));
|
||||
checkResolvedModulesCache(program_1, "b.ts", undefined);
|
||||
|
||||
const program_2 = updateProgram(program_1, ["a.ts"], options, files => {
|
||||
@ -299,7 +298,7 @@ namespace ts {
|
||||
assert.isTrue(program_1.structureIsReused);
|
||||
|
||||
// content of resolution cache should not change
|
||||
checkResolvedModulesCache(program_1, "a.ts", mapOfMapLike({ "b": createResolvedModule("b.ts") }));
|
||||
checkResolvedModulesCache(program_1, "a.ts", createMap({ "b": createResolvedModule("b.ts") }));
|
||||
checkResolvedModulesCache(program_1, "b.ts", undefined);
|
||||
|
||||
// imports has changed - program is not reused
|
||||
@ -316,7 +315,7 @@ namespace ts {
|
||||
files[0].text = files[0].text.updateImportsAndExports(newImports);
|
||||
});
|
||||
assert.isTrue(!program_3.structureIsReused);
|
||||
checkResolvedModulesCache(program_4, "a.ts", mapOfMapLike({ "b": createResolvedModule("b.ts"), "c": undefined }));
|
||||
checkResolvedModulesCache(program_4, "a.ts", createMap({ "b": createResolvedModule("b.ts"), "c": undefined }));
|
||||
});
|
||||
|
||||
it("resolved type directives cache follows type directives", () => {
|
||||
@ -327,7 +326,7 @@ namespace ts {
|
||||
const options: CompilerOptions = { target, typeRoots: ["/types"] };
|
||||
|
||||
const program_1 = newProgram(files, ["/a.ts"], options);
|
||||
checkResolvedTypeDirectivesCache(program_1, "/a.ts", mapOfMapLike({ "typedefs": { resolvedFileName: "/types/typedefs/index.d.ts", primary: true } }));
|
||||
checkResolvedTypeDirectivesCache(program_1, "/a.ts", createMap({ "typedefs": { resolvedFileName: "/types/typedefs/index.d.ts", primary: true } }));
|
||||
checkResolvedTypeDirectivesCache(program_1, "/types/typedefs/index.d.ts", undefined);
|
||||
|
||||
const program_2 = updateProgram(program_1, ["/a.ts"], options, files => {
|
||||
@ -336,7 +335,7 @@ namespace ts {
|
||||
assert.isTrue(program_1.structureIsReused);
|
||||
|
||||
// content of resolution cache should not change
|
||||
checkResolvedTypeDirectivesCache(program_1, "/a.ts", mapOfMapLike({ "typedefs": { resolvedFileName: "/types/typedefs/index.d.ts", primary: true } }));
|
||||
checkResolvedTypeDirectivesCache(program_1, "/a.ts", createMap({ "typedefs": { resolvedFileName: "/types/typedefs/index.d.ts", primary: true } }));
|
||||
checkResolvedTypeDirectivesCache(program_1, "/types/typedefs/index.d.ts", undefined);
|
||||
|
||||
// type reference directives has changed - program is not reused
|
||||
@ -354,7 +353,7 @@ namespace ts {
|
||||
files[0].text = files[0].text.updateReferences(newReferences);
|
||||
});
|
||||
assert.isTrue(!program_3.structureIsReused);
|
||||
checkResolvedTypeDirectivesCache(program_1, "/a.ts", mapOfMapLike({ "typedefs": { resolvedFileName: "/types/typedefs/index.d.ts", primary: true } }));
|
||||
checkResolvedTypeDirectivesCache(program_1, "/a.ts", createMap({ "typedefs": { resolvedFileName: "/types/typedefs/index.d.ts", primary: true } }));
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@ -159,6 +159,9 @@ namespace ts.server {
|
||||
it("should not throw when commands are executed with invalid arguments", () => {
|
||||
let i = 0;
|
||||
for (const name in CommandNames) {
|
||||
if (!Object.prototype.hasOwnProperty.call(CommandNames, name)) {
|
||||
continue;
|
||||
}
|
||||
const req: protocol.Request = {
|
||||
command: name,
|
||||
seq: i,
|
||||
@ -411,15 +414,15 @@ namespace ts.server {
|
||||
class InProcClient {
|
||||
private server: InProcSession;
|
||||
private seq = 0;
|
||||
private callbacks = createMap<number, (resp: protocol.Response) => void>();
|
||||
private eventHandlers = createMap<string, (args: any) => void>();
|
||||
private callbacks = createMap<(resp: protocol.Response) => void>();
|
||||
private eventHandlers = createMap<(args: any) => void>();
|
||||
|
||||
handle(msg: protocol.Message): void {
|
||||
if (msg.type === "response") {
|
||||
const response = <protocol.Response>msg;
|
||||
const callback = tryDelete(this.callbacks, response.request_seq);
|
||||
if (callback !== undefined) {
|
||||
callback(response);
|
||||
if (response.request_seq in this.callbacks) {
|
||||
this.callbacks[response.request_seq](response);
|
||||
delete this.callbacks[response.request_seq];
|
||||
}
|
||||
}
|
||||
else if (msg.type === "event") {
|
||||
@ -429,14 +432,13 @@ namespace ts.server {
|
||||
}
|
||||
|
||||
emit(name: string, args: any): void {
|
||||
const handler = this.eventHandlers.get(name);
|
||||
if (handler !== undefined) {
|
||||
handler(args);
|
||||
if (name in this.eventHandlers) {
|
||||
this.eventHandlers[name](args);
|
||||
}
|
||||
}
|
||||
|
||||
on(name: string, handler: (args: any) => void): void {
|
||||
this.eventHandlers.set(name, handler);
|
||||
this.eventHandlers[name] = handler;
|
||||
}
|
||||
|
||||
connect(session: InProcSession): void {
|
||||
@ -454,7 +456,7 @@ namespace ts.server {
|
||||
command,
|
||||
arguments: args
|
||||
});
|
||||
this.callbacks.set(this.seq, callback);
|
||||
this.callbacks[this.seq] = callback;
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
@ -249,10 +249,10 @@ namespace ts.projectSystem {
|
||||
return entry;
|
||||
}
|
||||
|
||||
export function checkMapKeys(caption: string, map: Map<string, any>, expectedKeys: string[]) {
|
||||
assert.equal(mapSize(map), expectedKeys.length, `${caption}: incorrect size of map`);
|
||||
export function checkMapKeys(caption: string, map: Map<any>, expectedKeys: string[]) {
|
||||
assert.equal(reduceProperties(map, count => count + 1, 0), expectedKeys.length, `${caption}: incorrect size of map`);
|
||||
for (const name of expectedKeys) {
|
||||
assert.isTrue(map.has(name), `${caption} is expected to contain ${name}, actual keys: ${keysOfMap(map)}`);
|
||||
assert.isTrue(name in map, `${caption} is expected to contain ${name}, actual keys: ${Object.keys(map)}`);
|
||||
}
|
||||
}
|
||||
|
||||
@ -298,28 +298,38 @@ namespace ts.projectSystem {
|
||||
}
|
||||
|
||||
export class Callbacks {
|
||||
private map = createMap<number, TimeOutCallback>();
|
||||
private map: { [n: number]: TimeOutCallback } = {};
|
||||
private nextId = 1;
|
||||
|
||||
register(cb: (...args: any[]) => void, args: any[]) {
|
||||
const timeoutId = this.nextId;
|
||||
this.nextId++;
|
||||
this.map.set(timeoutId, cb.bind(undefined, ...args));
|
||||
this.map[timeoutId] = cb.bind(undefined, ...args);
|
||||
return timeoutId;
|
||||
}
|
||||
unregister(id: any) {
|
||||
if (typeof id === "number") {
|
||||
this.map.delete(id);
|
||||
delete this.map[id];
|
||||
}
|
||||
}
|
||||
|
||||
count() {
|
||||
return mapSize(this.map);
|
||||
let n = 0;
|
||||
for (const _ in this.map) {
|
||||
// TODO: GH#11734
|
||||
_;
|
||||
n++;
|
||||
}
|
||||
return n;
|
||||
}
|
||||
|
||||
invoke() {
|
||||
this.map.forEach(callback => { callback(); });
|
||||
this.map.clear();
|
||||
for (const id in this.map) {
|
||||
if (hasProperty(this.map, id)) {
|
||||
this.map[id]();
|
||||
}
|
||||
}
|
||||
this.map = {};
|
||||
}
|
||||
}
|
||||
|
||||
@ -335,9 +345,8 @@ namespace ts.projectSystem {
|
||||
private timeoutCallbacks = new Callbacks();
|
||||
private immediateCallbacks = new Callbacks();
|
||||
|
||||
readonly watchedDirectories = createMap<string, { cb: DirectoryWatcherCallback, recursive: boolean }[]>();
|
||||
readonly watchedFiles = createMap<string, FileWatcherCallback[]>();
|
||||
|
||||
readonly watchedDirectories = createMap<{ cb: DirectoryWatcherCallback, recursive: boolean }[]>();
|
||||
readonly watchedFiles = createMap<FileWatcherCallback[]>();
|
||||
|
||||
private filesOrFolders: FileOrFolder[];
|
||||
|
||||
@ -432,7 +441,7 @@ namespace ts.projectSystem {
|
||||
|
||||
triggerDirectoryWatcherCallback(directoryName: string, fileName: string): void {
|
||||
const path = this.toPath(directoryName);
|
||||
const callbacks = this.watchedDirectories.get(path);
|
||||
const callbacks = this.watchedDirectories[path];
|
||||
if (callbacks) {
|
||||
for (const callback of callbacks) {
|
||||
callback.cb(fileName);
|
||||
@ -442,7 +451,7 @@ namespace ts.projectSystem {
|
||||
|
||||
triggerFileWatcherCallback(fileName: string, removed?: boolean): void {
|
||||
const path = this.toPath(fileName);
|
||||
const callbacks = this.watchedFiles.get(path);
|
||||
const callbacks = this.watchedFiles[path];
|
||||
if (callbacks) {
|
||||
for (const callback of callbacks) {
|
||||
callback(path, removed);
|
||||
|
||||
@ -955,7 +955,7 @@ namespace ts.projectSystem {
|
||||
content: ""
|
||||
};
|
||||
const host = createServerHost([f]);
|
||||
const cache = createMap<string, string>();
|
||||
const cache = createMap<string>();
|
||||
for (const name of JsTyping.nodeCoreModuleList) {
|
||||
const result = JsTyping.discoverTypings(host, [f.path], getDirectoryPath(<Path>f.path), /*safeListPath*/ undefined, cache, { enableAutoDiscovery: true }, [name, "somename"]);
|
||||
assert.deepEqual(result.newTypingNames.sort(), ["node", "somename"]);
|
||||
@ -972,7 +972,7 @@ namespace ts.projectSystem {
|
||||
content: ""
|
||||
};
|
||||
const host = createServerHost([f, node]);
|
||||
const cache = mapOfMapLike({ "node": node.path });
|
||||
const cache = createMap<string>({ "node": node.path });
|
||||
const result = JsTyping.discoverTypings(host, [f.path], getDirectoryPath(<Path>f.path), /*safeListPath*/ undefined, cache, { enableAutoDiscovery: true }, ["fs", "bar"]);
|
||||
assert.deepEqual(result.cachedTypingPaths, [node.path]);
|
||||
assert.deepEqual(result.newTypingNames, ["bar"]);
|
||||
|
||||
@ -346,26 +346,25 @@ namespace ts.server {
|
||||
|
||||
// Use slice to clone the array to avoid manipulating in place
|
||||
const queue = fileInfo.referencedBy.slice(0);
|
||||
const fileNameSet = createMap<string, ScriptInfo>();
|
||||
fileNameSet.set(scriptInfo.fileName, scriptInfo);
|
||||
const fileNameSet = createMap<ScriptInfo>();
|
||||
fileNameSet[scriptInfo.fileName] = scriptInfo;
|
||||
while (queue.length > 0) {
|
||||
const processingFileInfo = queue.pop();
|
||||
if (processingFileInfo.updateShapeSignature() && processingFileInfo.referencedBy.length > 0) {
|
||||
for (const potentialFileInfo of processingFileInfo.referencedBy) {
|
||||
if (!fileNameSet.get(potentialFileInfo.scriptInfo.fileName)) {
|
||||
if (!fileNameSet[potentialFileInfo.scriptInfo.fileName]) {
|
||||
queue.push(potentialFileInfo);
|
||||
}
|
||||
}
|
||||
}
|
||||
fileNameSet.set(processingFileInfo.scriptInfo.fileName, processingFileInfo.scriptInfo);
|
||||
fileNameSet[processingFileInfo.scriptInfo.fileName] = processingFileInfo.scriptInfo;
|
||||
}
|
||||
|
||||
const result: string[] = [];
|
||||
fileNameSet.forEach((scriptInfo, fileName) => {
|
||||
if (shouldEmitFile(scriptInfo)) {
|
||||
for (const fileName in fileNameSet) {
|
||||
if (shouldEmitFile(fileNameSet[fileName])) {
|
||||
result.push(fileName);
|
||||
}
|
||||
});
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
@ -15,7 +15,7 @@ namespace ts.server {
|
||||
|
||||
export class SessionClient implements LanguageService {
|
||||
private sequence: number = 0;
|
||||
private lineMaps = ts.createMap<string, number[]>();
|
||||
private lineMaps: ts.Map<number[]> = ts.createMap<number[]>();
|
||||
private messages: string[] = [];
|
||||
private lastRenameEntry: RenameEntry;
|
||||
|
||||
@ -31,10 +31,10 @@ namespace ts.server {
|
||||
}
|
||||
|
||||
private getLineMap(fileName: string): number[] {
|
||||
let lineMap = this.lineMaps.get(fileName);
|
||||
let lineMap = this.lineMaps[fileName];
|
||||
if (!lineMap) {
|
||||
const scriptSnapshot = this.host.getScriptSnapshot(fileName);
|
||||
lineMap = setAndReturn(this.lineMaps, fileName, ts.computeLineStarts(scriptSnapshot.getText(0, scriptSnapshot.getLength())));
|
||||
lineMap = this.lineMaps[fileName] = ts.computeLineStarts(scriptSnapshot.getText(0, scriptSnapshot.getLength()));
|
||||
}
|
||||
return lineMap;
|
||||
}
|
||||
@ -140,7 +140,7 @@ namespace ts.server {
|
||||
|
||||
changeFile(fileName: string, start: number, end: number, newText: string): void {
|
||||
// clear the line map after an edit
|
||||
this.lineMaps.set(fileName, undefined);
|
||||
this.lineMaps[fileName] = undefined;
|
||||
|
||||
const lineOffset = this.positionToOneBasedLineOffset(fileName, start);
|
||||
const endLineOffset = this.positionToOneBasedLineOffset(fileName, end);
|
||||
|
||||
@ -17,23 +17,23 @@ namespace ts.server {
|
||||
(event: ProjectServiceEvent): void;
|
||||
}
|
||||
|
||||
function prepareConvertersForEnumLikeCompilerOptions(commandLineOptions: CommandLineOption[]): Map<string, Map<string, number>> {
|
||||
const map = createMap<string, Map<string, number>>();
|
||||
function prepareConvertersForEnumLikeCompilerOptions(commandLineOptions: CommandLineOption[]): Map<Map<number>> {
|
||||
const map: Map<Map<number>> = createMap<Map<number>>();
|
||||
for (const option of commandLineOptions) {
|
||||
if (typeof option.type === "object") {
|
||||
const optionMap = <Map<string, number>>option.type;
|
||||
const optionMap = <Map<number>>option.type;
|
||||
// verify that map contains only numbers
|
||||
optionMap.forEach(value => {
|
||||
Debug.assert(typeof value === "number");
|
||||
});
|
||||
map.set(option.name, optionMap);
|
||||
for (const id in optionMap) {
|
||||
Debug.assert(typeof optionMap[id] === "number");
|
||||
}
|
||||
map[option.name] = optionMap;
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
const compilerOptionConverters = prepareConvertersForEnumLikeCompilerOptions(optionDeclarations);
|
||||
const indentStyle = mapOfMapLike({
|
||||
const indentStyle = createMap({
|
||||
"none": IndentStyle.None,
|
||||
"block": IndentStyle.Block,
|
||||
"smart": IndentStyle.Smart
|
||||
@ -41,20 +41,20 @@ namespace ts.server {
|
||||
|
||||
export function convertFormatOptions(protocolOptions: protocol.FormatCodeSettings): FormatCodeSettings {
|
||||
if (typeof protocolOptions.indentStyle === "string") {
|
||||
protocolOptions.indentStyle = indentStyle.get(protocolOptions.indentStyle.toLowerCase());
|
||||
protocolOptions.indentStyle = indentStyle[protocolOptions.indentStyle.toLowerCase()];
|
||||
Debug.assert(protocolOptions.indentStyle !== undefined);
|
||||
}
|
||||
return <any>protocolOptions;
|
||||
}
|
||||
|
||||
export function convertCompilerOptions(protocolOptions: protocol.ExternalProjectCompilerOptions): CompilerOptions & protocol.CompileOnSaveMixin {
|
||||
forEachKeyInMap(compilerOptionConverters, id => {
|
||||
for (const id in compilerOptionConverters) {
|
||||
const propertyValue = protocolOptions[id];
|
||||
if (typeof propertyValue === "string") {
|
||||
const mappedValues = compilerOptionConverters.get(id);
|
||||
protocolOptions[id] = mappedValues.get(propertyValue.toLowerCase());
|
||||
const mappedValues = compilerOptionConverters[id];
|
||||
protocolOptions[id] = mappedValues[propertyValue.toLowerCase()];
|
||||
}
|
||||
});
|
||||
}
|
||||
return <any>protocolOptions;
|
||||
}
|
||||
|
||||
@ -159,24 +159,23 @@ namespace ts.server {
|
||||
/**
|
||||
* a path to directory watcher map that detects added tsconfig files
|
||||
**/
|
||||
private readonly directoryWatchersForTsconfig = createMap<string, FileWatcher>();
|
||||
private readonly directoryWatchersForTsconfig: Map<FileWatcher> = createMap<FileWatcher>();
|
||||
/**
|
||||
* count of how many projects are using the directory watcher.
|
||||
* If the number becomes 0 for a watcher, then we should close it.
|
||||
**/
|
||||
private readonly directoryWatchersRefCount = createMap<string, number>();
|
||||
private readonly directoryWatchersRefCount: Map<number> = createMap<number>();
|
||||
|
||||
constructor(private readonly projectService: ProjectService) {
|
||||
}
|
||||
|
||||
stopWatchingDirectory(directory: string) {
|
||||
// if the ref count for this directory watcher drops to 0, it's time to close it
|
||||
const refCount = this.directoryWatchersRefCount.get(directory) - 1;
|
||||
this.directoryWatchersRefCount.set(directory, refCount);
|
||||
if (refCount === 0) {
|
||||
this.directoryWatchersRefCount[directory]--;
|
||||
if (this.directoryWatchersRefCount[directory] === 0) {
|
||||
this.projectService.logger.info(`Close directory watcher for: ${directory}`);
|
||||
this.directoryWatchersForTsconfig.get(directory).close();
|
||||
this.directoryWatchersForTsconfig.delete(directory);
|
||||
this.directoryWatchersForTsconfig[directory].close();
|
||||
delete this.directoryWatchersForTsconfig[directory];
|
||||
}
|
||||
}
|
||||
|
||||
@ -184,13 +183,13 @@ namespace ts.server {
|
||||
let currentPath = getDirectoryPath(fileName);
|
||||
let parentPath = getDirectoryPath(currentPath);
|
||||
while (currentPath != parentPath) {
|
||||
if (!this.directoryWatchersForTsconfig.get(currentPath)) {
|
||||
if (!this.directoryWatchersForTsconfig[currentPath]) {
|
||||
this.projectService.logger.info(`Add watcher for: ${currentPath}`);
|
||||
this.directoryWatchersForTsconfig.set(currentPath, this.projectService.host.watchDirectory(currentPath, callback));
|
||||
this.directoryWatchersRefCount.set(currentPath, 1);
|
||||
this.directoryWatchersForTsconfig[currentPath] = this.projectService.host.watchDirectory(currentPath, callback);
|
||||
this.directoryWatchersRefCount[currentPath] = 1;
|
||||
}
|
||||
else {
|
||||
modifyValue(this.directoryWatchersRefCount, currentPath, count => count + 1);
|
||||
this.directoryWatchersRefCount[currentPath] += 1;
|
||||
}
|
||||
project.directoriesWatchedForTsconfig.push(currentPath);
|
||||
currentPath = parentPath;
|
||||
@ -212,7 +211,7 @@ namespace ts.server {
|
||||
/**
|
||||
* maps external project file name to list of config files that were the part of this project
|
||||
*/
|
||||
private readonly externalProjectToConfiguredProjectMap = createMap<string, NormalizedPath[]>();
|
||||
private readonly externalProjectToConfiguredProjectMap: Map<NormalizedPath[]> = createMap<NormalizedPath[]>();
|
||||
|
||||
/**
|
||||
* external projects (configuration and list of root files is not controlled by tsserver)
|
||||
@ -393,7 +392,7 @@ namespace ts.server {
|
||||
}
|
||||
else {
|
||||
if (info && (!info.isOpen)) {
|
||||
// file has been changed which might affect the set of referenced files in projects that include
|
||||
// file has been changed which might affect the set of referenced files in projects that include
|
||||
// this file and set of inferred projects
|
||||
info.reloadFromFile();
|
||||
this.updateProjectGraphs(info.containingProjects);
|
||||
@ -412,7 +411,7 @@ namespace ts.server {
|
||||
this.filenameToScriptInfo.remove(info.path);
|
||||
this.lastDeletedFile = info;
|
||||
|
||||
// capture list of projects since detachAllProjects will wipe out original list
|
||||
// capture list of projects since detachAllProjects will wipe out original list
|
||||
const containingProjects = info.containingProjects.slice();
|
||||
|
||||
info.detachAllProjects();
|
||||
@ -563,7 +562,7 @@ namespace ts.server {
|
||||
const inferredProject = this.createInferredProjectWithRootFileIfNecessary(info);
|
||||
if (!this.useSingleInferredProject) {
|
||||
// if useOneInferredProject is not set then try to fixup ownership of open files
|
||||
// check 'defaultProject !== inferredProject' is necessary to handle cases
|
||||
// check 'defaultProject !== inferredProject' is necessary to handle cases
|
||||
// when creation inferred project for some file has added other open files into this project (i.e. as referenced files)
|
||||
// we definitely don't want to delete the project that was just created
|
||||
for (const f of this.openFiles) {
|
||||
@ -573,7 +572,7 @@ namespace ts.server {
|
||||
}
|
||||
const defaultProject = f.getDefaultProject();
|
||||
if (isRootFileInInferredProject(info) && defaultProject !== inferredProject && inferredProject.containsScriptInfo(f)) {
|
||||
// open file used to be root in inferred project,
|
||||
// open file used to be root in inferred project,
|
||||
// this inferred project is different from the one we've just created for current file
|
||||
// and new inferred project references this open file.
|
||||
// We should delete old inferred project and attach open file to the new one
|
||||
@ -785,7 +784,7 @@ namespace ts.server {
|
||||
files: parsedCommandLine.fileNames,
|
||||
compilerOptions: parsedCommandLine.options,
|
||||
configHasFilesProperty: config["files"] !== undefined,
|
||||
wildcardDirectories: parsedCommandLine.wildcardDirectories,
|
||||
wildcardDirectories: createMap(parsedCommandLine.wildcardDirectories),
|
||||
typingOptions: parsedCommandLine.typingOptions,
|
||||
compileOnSave: parsedCommandLine.compileOnSave
|
||||
};
|
||||
@ -844,7 +843,7 @@ namespace ts.server {
|
||||
this.documentRegistry,
|
||||
projectOptions.configHasFilesProperty,
|
||||
projectOptions.compilerOptions,
|
||||
mapOfMapLike(projectOptions.wildcardDirectories),
|
||||
projectOptions.wildcardDirectories,
|
||||
/*languageServiceEnabled*/ !sizeLimitExceeded,
|
||||
projectOptions.compileOnSave === undefined ? false : projectOptions.compileOnSave);
|
||||
|
||||
@ -902,7 +901,7 @@ namespace ts.server {
|
||||
private updateNonInferredProject<T>(project: ExternalProject | ConfiguredProject, newUncheckedFiles: T[], propertyReader: FilePropertyReader<T>, newOptions: CompilerOptions, newTypingOptions: TypingOptions, compileOnSave: boolean, configFileErrors: Diagnostic[]) {
|
||||
const oldRootScriptInfos = project.getRootScriptInfos();
|
||||
const newRootScriptInfos: ScriptInfo[] = [];
|
||||
const newRootScriptInfoMap: Map<NormalizedPath, ScriptInfo> = createMap<string, ScriptInfo>();
|
||||
const newRootScriptInfoMap: NormalizedPathMap<ScriptInfo> = createNormalizedPathMap<ScriptInfo>();
|
||||
|
||||
let projectErrors: Diagnostic[];
|
||||
let rootFilesChanged = false;
|
||||
@ -930,7 +929,7 @@ namespace ts.server {
|
||||
let toAdd: ScriptInfo[];
|
||||
let toRemove: ScriptInfo[];
|
||||
for (const oldFile of oldRootScriptInfos) {
|
||||
if (!newRootScriptInfoMap.has(oldFile.fileName)) {
|
||||
if (!newRootScriptInfoMap.contains(oldFile.fileName)) {
|
||||
(toRemove || (toRemove = [])).push(oldFile);
|
||||
}
|
||||
}
|
||||
@ -947,7 +946,7 @@ namespace ts.server {
|
||||
if (toAdd) {
|
||||
for (const f of toAdd) {
|
||||
if (f.isOpen && isRootFileInInferredProject(f)) {
|
||||
// if file is already root in some inferred project
|
||||
// if file is already root in some inferred project
|
||||
// - remove the file from that project and delete the project if necessary
|
||||
const inferredProject = f.containingProjects[0];
|
||||
inferredProject.removeFile(f);
|
||||
@ -1096,7 +1095,7 @@ namespace ts.server {
|
||||
this.logger.info(`Host information ${args.hostInfo}`);
|
||||
}
|
||||
if (args.formatOptions) {
|
||||
mergeMapLikes(this.hostConfiguration.formatCodeOptions, convertFormatOptions(args.formatOptions));
|
||||
mergeMaps(this.hostConfiguration.formatCodeOptions, convertFormatOptions(args.formatOptions));
|
||||
this.logger.info("Format host information updated");
|
||||
}
|
||||
}
|
||||
@ -1218,7 +1217,7 @@ namespace ts.server {
|
||||
for (const file of changedFiles) {
|
||||
const scriptInfo = this.getScriptInfo(file.fileName);
|
||||
Debug.assert(!!scriptInfo);
|
||||
// apply changes in reverse order
|
||||
// apply changes in reverse order
|
||||
for (let i = file.changes.length - 1; i >= 0; i--) {
|
||||
const change = file.changes[i];
|
||||
scriptInfo.editContent(change.span.start, change.span.start + change.span.length, change.newText);
|
||||
@ -1255,7 +1254,7 @@ namespace ts.server {
|
||||
|
||||
closeExternalProject(uncheckedFileName: string, suppressRefresh = false): void {
|
||||
const fileName = toNormalizedPath(uncheckedFileName);
|
||||
const configFiles = this.externalProjectToConfiguredProjectMap.get(fileName);
|
||||
const configFiles = this.externalProjectToConfiguredProjectMap[fileName];
|
||||
if (configFiles) {
|
||||
let shouldRefreshInferredProjects = false;
|
||||
for (const configFile of configFiles) {
|
||||
@ -1263,7 +1262,7 @@ namespace ts.server {
|
||||
shouldRefreshInferredProjects = true;
|
||||
}
|
||||
}
|
||||
this.externalProjectToConfiguredProjectMap.delete(fileName);
|
||||
delete this.externalProjectToConfiguredProjectMap[fileName];
|
||||
if (shouldRefreshInferredProjects && !suppressRefresh) {
|
||||
this.refreshInferredProjects();
|
||||
}
|
||||
@ -1310,46 +1309,43 @@ namespace ts.server {
|
||||
// close existing project and later we'll open a set of configured projects for these files
|
||||
this.closeExternalProject(proj.projectFileName, /*suppressRefresh*/ true);
|
||||
}
|
||||
else {
|
||||
const oldConfigFiles = this.externalProjectToConfiguredProjectMap.get(proj.projectFileName);
|
||||
if (oldConfigFiles) {
|
||||
// this project used to include config files
|
||||
if (!tsConfigFiles) {
|
||||
// config files were removed from the project - close existing external project which in turn will close configured projects
|
||||
this.closeExternalProject(proj.projectFileName, /*suppressRefresh*/ true);
|
||||
else if (this.externalProjectToConfiguredProjectMap[proj.projectFileName]) {
|
||||
// this project used to include config files
|
||||
if (!tsConfigFiles) {
|
||||
// config files were removed from the project - close existing external project which in turn will close configured projects
|
||||
this.closeExternalProject(proj.projectFileName, /*suppressRefresh*/ true);
|
||||
}
|
||||
else {
|
||||
// project previously had some config files - compare them with new set of files and close all configured projects that correspond to unused files
|
||||
const oldConfigFiles = this.externalProjectToConfiguredProjectMap[proj.projectFileName];
|
||||
let iNew = 0;
|
||||
let iOld = 0;
|
||||
while (iNew < tsConfigFiles.length && iOld < oldConfigFiles.length) {
|
||||
const newConfig = tsConfigFiles[iNew];
|
||||
const oldConfig = oldConfigFiles[iOld];
|
||||
if (oldConfig < newConfig) {
|
||||
this.closeConfiguredProject(oldConfig);
|
||||
iOld++;
|
||||
}
|
||||
else if (oldConfig > newConfig) {
|
||||
iNew++;
|
||||
}
|
||||
else {
|
||||
// record existing config files so avoid extra add-refs
|
||||
(exisingConfigFiles || (exisingConfigFiles = [])).push(oldConfig);
|
||||
iOld++;
|
||||
iNew++;
|
||||
}
|
||||
}
|
||||
else {
|
||||
// project previously had some config files - compare them with new set of files and close all configured projects that correspond to unused files
|
||||
let iNew = 0;
|
||||
let iOld = 0;
|
||||
while (iNew < tsConfigFiles.length && iOld < oldConfigFiles.length) {
|
||||
const newConfig = tsConfigFiles[iNew];
|
||||
const oldConfig = oldConfigFiles[iOld];
|
||||
if (oldConfig < newConfig) {
|
||||
this.closeConfiguredProject(oldConfig);
|
||||
iOld++;
|
||||
}
|
||||
else if (oldConfig > newConfig) {
|
||||
iNew++;
|
||||
}
|
||||
else {
|
||||
// record existing config files so avoid extra add-refs
|
||||
(exisingConfigFiles || (exisingConfigFiles = [])).push(oldConfig);
|
||||
iOld++;
|
||||
iNew++;
|
||||
}
|
||||
}
|
||||
for (let i = iOld; i < oldConfigFiles.length; i++) {
|
||||
// projects for all remaining old config files should be closed
|
||||
this.closeConfiguredProject(oldConfigFiles[i]);
|
||||
}
|
||||
for (let i = iOld; i < oldConfigFiles.length; i++) {
|
||||
// projects for all remaining old config files should be closed
|
||||
this.closeConfiguredProject(oldConfigFiles[i]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (tsConfigFiles) {
|
||||
// store the list of tsconfig files that belong to the external project
|
||||
this.externalProjectToConfiguredProjectMap.set(proj.projectFileName, tsConfigFiles);
|
||||
this.externalProjectToConfiguredProjectMap[proj.projectFileName] = tsConfigFiles;
|
||||
for (const tsconfigFile of tsConfigFiles) {
|
||||
let project = this.findConfiguredProjectByProjectName(tsconfigFile);
|
||||
if (!project) {
|
||||
@ -1365,7 +1361,7 @@ namespace ts.server {
|
||||
}
|
||||
else {
|
||||
// no config files - remove the item from the collection
|
||||
this.externalProjectToConfiguredProjectMap.delete(proj.projectFileName);
|
||||
delete this.externalProjectToConfiguredProjectMap[proj.projectFileName];
|
||||
this.createAndAddExternalProject(proj.projectFileName, rootFiles, proj.options, proj.typingOptions);
|
||||
}
|
||||
this.refreshInferredProjects();
|
||||
|
||||
@ -5,8 +5,8 @@
|
||||
namespace ts.server {
|
||||
export class LSHost implements ts.LanguageServiceHost, ModuleResolutionHost, ServerLanguageServiceHost {
|
||||
private compilationSettings: ts.CompilerOptions;
|
||||
private readonly resolvedModuleNames = createFileMap<Map<string, ResolvedModuleWithFailedLookupLocations>>();
|
||||
private readonly resolvedTypeReferenceDirectives = createFileMap<Map<string, ResolvedTypeReferenceDirectiveWithFailedLookupLocations>>();
|
||||
private readonly resolvedModuleNames= createFileMap<Map<ResolvedModuleWithFailedLookupLocations>>();
|
||||
private readonly resolvedTypeReferenceDirectives = createFileMap<Map<ResolvedTypeReferenceDirectiveWithFailedLookupLocations>>();
|
||||
private readonly getCanonicalFileName: (fileName: string) => string;
|
||||
|
||||
private filesWithChangedSetOfUnresolvedImports: Path[];
|
||||
@ -54,7 +54,7 @@ namespace ts.server {
|
||||
private resolveNamesWithLocalCache<T extends { failedLookupLocations: string[] }, R>(
|
||||
names: string[],
|
||||
containingFile: string,
|
||||
cache: ts.FileMap<Map<string, T>>,
|
||||
cache: ts.FileMap<Map<T>>,
|
||||
loader: (name: string, containingFile: string, options: CompilerOptions, host: ModuleResolutionHost) => T,
|
||||
getResult: (s: T) => R,
|
||||
getResultFileName: (result: R) => string | undefined,
|
||||
@ -63,22 +63,22 @@ namespace ts.server {
|
||||
const path = toPath(containingFile, this.host.getCurrentDirectory(), this.getCanonicalFileName);
|
||||
const currentResolutionsInFile = cache.get(path);
|
||||
|
||||
const newResolutions = createMap<string, T>();
|
||||
const newResolutions: Map<T> = createMap<T>();
|
||||
const resolvedModules: R[] = [];
|
||||
const compilerOptions = this.getCompilationSettings();
|
||||
const lastDeletedFileName = this.project.projectService.lastDeletedFile && this.project.projectService.lastDeletedFile.fileName;
|
||||
|
||||
for (const name of names) {
|
||||
// check if this is a duplicate entry in the list
|
||||
let resolution = newResolutions.get(name);
|
||||
let resolution = newResolutions[name];
|
||||
if (!resolution) {
|
||||
const existingResolution = currentResolutionsInFile && currentResolutionsInFile.get(name);
|
||||
const existingResolution = currentResolutionsInFile && currentResolutionsInFile[name];
|
||||
if (moduleResolutionIsValid(existingResolution)) {
|
||||
// ok, it is safe to use existing name resolution results
|
||||
resolution = existingResolution;
|
||||
}
|
||||
else {
|
||||
newResolutions.set(name, resolution = loader(name, containingFile, compilerOptions, this));
|
||||
newResolutions[name] = resolution = loader(name, containingFile, compilerOptions, this);
|
||||
}
|
||||
if (logChanges && this.filesWithChangedSetOfUnresolvedImports && !resolutionIsEqualTo(existingResolution, resolution)) {
|
||||
this.filesWithChangedSetOfUnresolvedImports.push(path);
|
||||
|
||||
@ -104,7 +104,7 @@ namespace ts.server {
|
||||
/**
|
||||
* Set of files that was returned from the last call to getChangesSinceVersion.
|
||||
*/
|
||||
private lastReportedFileNames: Map<string, string>;
|
||||
private lastReportedFileNames: Map<string>;
|
||||
/**
|
||||
* Last version that was reported.
|
||||
*/
|
||||
@ -385,9 +385,9 @@ namespace ts.server {
|
||||
}
|
||||
let unresolvedImports: string[];
|
||||
if (file.resolvedModules) {
|
||||
file.resolvedModules.forEach((resolvedModule, name) => {
|
||||
for (const name in file.resolvedModules) {
|
||||
// pick unresolved non-relative names
|
||||
if (!resolvedModule && !isExternalModuleNameRelative(name)) {
|
||||
if (!file.resolvedModules[name] && !isExternalModuleNameRelative(name)) {
|
||||
// for non-scoped names extract part up-to the first slash
|
||||
// for scoped names - extract up to the second slash
|
||||
let trimmed = name.trim();
|
||||
@ -401,7 +401,7 @@ namespace ts.server {
|
||||
(unresolvedImports || (unresolvedImports = [])).push(trimmed);
|
||||
result.push(trimmed);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
this.cachedUnresolvedImportsPerFile.set(file.path, unresolvedImports || emptyArray);
|
||||
}
|
||||
@ -427,7 +427,7 @@ namespace ts.server {
|
||||
}
|
||||
|
||||
// 1. no changes in structure, no changes in unresolved imports - do nothing
|
||||
// 2. no changes in structure, unresolved imports were changed - collect unresolved imports for all files
|
||||
// 2. no changes in structure, unresolved imports were changed - collect unresolved imports for all files
|
||||
// (can reuse cached imports for files that were not changed)
|
||||
// 3. new files were added/removed, but compilation settings stays the same - collect unresolved imports for all new/modified files
|
||||
// (can reuse cached imports for files that were not changed)
|
||||
@ -568,16 +568,16 @@ namespace ts.server {
|
||||
|
||||
const added: string[] = [];
|
||||
const removed: string[] = [];
|
||||
forEachKeyInMap(currentFiles, id => {
|
||||
if (!lastReportedFileNames.has(id)) {
|
||||
for (const id in currentFiles) {
|
||||
if (!hasProperty(lastReportedFileNames, id)) {
|
||||
added.push(id);
|
||||
}
|
||||
});
|
||||
forEachKeyInMap(lastReportedFileNames, id => {
|
||||
if (!currentFiles.has(id)) {
|
||||
}
|
||||
for (const id in lastReportedFileNames) {
|
||||
if (!hasProperty(currentFiles, id)) {
|
||||
removed.push(id);
|
||||
}
|
||||
});
|
||||
}
|
||||
this.lastReportedFileNames = currentFiles;
|
||||
this.lastReportedVersion = this.projectStructureVersion;
|
||||
return { info, changes: { added, removed }, projectErrors: this.projectErrors };
|
||||
@ -603,7 +603,7 @@ namespace ts.server {
|
||||
// We need to use a set here since the code can contain the same import twice,
|
||||
// but that will only be one dependency.
|
||||
// To avoid invernal conversion, the key of the referencedFiles map must be of type Path
|
||||
const referencedFiles = createSet();
|
||||
const referencedFiles = createMap<boolean>();
|
||||
if (sourceFile.imports && sourceFile.imports.length > 0) {
|
||||
const checker: TypeChecker = this.program.getTypeChecker();
|
||||
for (const importName of sourceFile.imports) {
|
||||
@ -611,7 +611,7 @@ namespace ts.server {
|
||||
if (symbol && symbol.declarations && symbol.declarations[0]) {
|
||||
const declarationSourceFile = symbol.declarations[0].getSourceFile();
|
||||
if (declarationSourceFile) {
|
||||
referencedFiles.add(declarationSourceFile.path);
|
||||
referencedFiles[declarationSourceFile.path] = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -623,24 +623,26 @@ namespace ts.server {
|
||||
if (sourceFile.referencedFiles && sourceFile.referencedFiles.length > 0) {
|
||||
for (const referencedFile of sourceFile.referencedFiles) {
|
||||
const referencedPath = toPath(referencedFile.fileName, currentDirectory, getCanonicalFileName);
|
||||
referencedFiles.add(referencedPath);
|
||||
referencedFiles[referencedPath] = true;
|
||||
}
|
||||
}
|
||||
|
||||
// Handle type reference directives
|
||||
if (sourceFile.resolvedTypeReferenceDirectiveNames) {
|
||||
sourceFile.resolvedTypeReferenceDirectiveNames.forEach(resolvedTypeReferenceDirective => {
|
||||
for (const typeName in sourceFile.resolvedTypeReferenceDirectiveNames) {
|
||||
const resolvedTypeReferenceDirective = sourceFile.resolvedTypeReferenceDirectiveNames[typeName];
|
||||
if (!resolvedTypeReferenceDirective) {
|
||||
return;
|
||||
continue;
|
||||
}
|
||||
|
||||
const fileName = resolvedTypeReferenceDirective.resolvedFileName;
|
||||
const typeFilePath = toPath(fileName, currentDirectory, getCanonicalFileName);
|
||||
referencedFiles.add(typeFilePath);
|
||||
});
|
||||
referencedFiles[typeFilePath] = true;
|
||||
}
|
||||
}
|
||||
|
||||
return filterSetToArray(referencedFiles, file => this.projectService.host.fileExists(file)) as Path[];
|
||||
const allFileNames = map(Object.keys(referencedFiles), key => <Path>key);
|
||||
return filter(allFileNames, file => this.projectService.host.fileExists(file));
|
||||
}
|
||||
|
||||
// remove a root file from project
|
||||
@ -711,7 +713,7 @@ namespace ts.server {
|
||||
private typingOptions: TypingOptions;
|
||||
private projectFileWatcher: FileWatcher;
|
||||
private directoryWatcher: FileWatcher;
|
||||
private directoriesWatchedForWildcards: Map<string, FileWatcher>;
|
||||
private directoriesWatchedForWildcards: Map<FileWatcher>;
|
||||
private typeRootsWatchers: FileWatcher[];
|
||||
|
||||
/** Used for configured projects which may have multiple open roots */
|
||||
@ -722,7 +724,7 @@ namespace ts.server {
|
||||
documentRegistry: ts.DocumentRegistry,
|
||||
hasExplicitListOfFiles: boolean,
|
||||
compilerOptions: CompilerOptions,
|
||||
private wildcardDirectories: Map<string, WatchDirectoryFlags>,
|
||||
private wildcardDirectories: Map<WatchDirectoryFlags>,
|
||||
languageServiceEnabled: boolean,
|
||||
public compileOnSaveEnabled: boolean) {
|
||||
super(ProjectKind.Configured, projectService, documentRegistry, hasExplicitListOfFiles, languageServiceEnabled, compilerOptions, compileOnSaveEnabled);
|
||||
@ -777,19 +779,18 @@ namespace ts.server {
|
||||
return;
|
||||
}
|
||||
const configDirectoryPath = getDirectoryPath(this.configFileName);
|
||||
|
||||
this.directoriesWatchedForWildcards = createMap<string, FileWatcher>();
|
||||
this.wildcardDirectories.forEach((flag, directory) => {
|
||||
this.directoriesWatchedForWildcards = reduceProperties(this.wildcardDirectories, (watchers, flag, directory) => {
|
||||
if (comparePaths(configDirectoryPath, directory, ".", !this.projectService.host.useCaseSensitiveFileNames) !== Comparison.EqualTo) {
|
||||
const recursive = (flag & WatchDirectoryFlags.Recursive) !== 0;
|
||||
this.projectService.logger.info(`Add ${recursive ? "recursive " : ""}watcher for: ${directory}`);
|
||||
this.directoriesWatchedForWildcards.set(directory, this.projectService.host.watchDirectory(
|
||||
watchers[directory] = this.projectService.host.watchDirectory(
|
||||
directory,
|
||||
path => callback(this, path),
|
||||
recursive
|
||||
));
|
||||
);
|
||||
}
|
||||
});
|
||||
return watchers;
|
||||
}, <Map<FileWatcher>>{});
|
||||
}
|
||||
|
||||
stopWatchingDirectory() {
|
||||
@ -813,7 +814,9 @@ namespace ts.server {
|
||||
this.typeRootsWatchers = undefined;
|
||||
}
|
||||
|
||||
this.directoriesWatchedForWildcards.forEach(watcher => { watcher.close(); });
|
||||
for (const id in this.directoriesWatchedForWildcards) {
|
||||
this.directoriesWatchedForWildcards[id].close();
|
||||
}
|
||||
this.directoriesWatchedForWildcards = undefined;
|
||||
|
||||
this.stopWatchingDirectory();
|
||||
|
||||
@ -95,7 +95,7 @@ namespace ts.server {
|
||||
if (!this.formatCodeSettings) {
|
||||
this.formatCodeSettings = getDefaultFormatCodeSettings(this.host);
|
||||
}
|
||||
mergeMapLikes(this.formatCodeSettings, formatSettings);
|
||||
mergeMaps(this.formatCodeSettings, formatSettings);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -1351,7 +1351,7 @@ namespace ts.server {
|
||||
return { response, responseRequired: true };
|
||||
}
|
||||
|
||||
private handlers = mapOfMapLike<(request: protocol.Request) => { response?: any, responseRequired?: boolean }>({
|
||||
private handlers = createMap<(request: protocol.Request) => { response?: any, responseRequired?: boolean }>({
|
||||
[CommandNames.OpenExternalProject]: (request: protocol.OpenExternalProjectRequest) => {
|
||||
this.projectService.openExternalProject(request.arguments);
|
||||
// TODO: report errors
|
||||
@ -1597,14 +1597,14 @@ namespace ts.server {
|
||||
});
|
||||
|
||||
public addProtocolHandler(command: string, handler: (request: protocol.Request) => { response?: any, responseRequired: boolean }) {
|
||||
if (this.handlers.has(command)) {
|
||||
if (command in this.handlers) {
|
||||
throw new Error(`Protocol handler already exists for command "${command}"`);
|
||||
}
|
||||
this.handlers.set(command, handler);
|
||||
this.handlers[command] = handler;
|
||||
}
|
||||
|
||||
public executeCommand(request: protocol.Request): { response?: any, responseRequired?: boolean } {
|
||||
const handler = this.handlers.get(request.command);
|
||||
const handler = this.handlers[request.command];
|
||||
if (handler) {
|
||||
return handler(request);
|
||||
}
|
||||
|
||||
@ -18,7 +18,7 @@
|
||||
"utilities.ts",
|
||||
"scriptVersionCache.ts",
|
||||
"scriptInfo.ts",
|
||||
"lsHost.ts",
|
||||
"lshost.ts",
|
||||
"typingsCache.ts",
|
||||
"project.ts",
|
||||
"editorServices.ts",
|
||||
|
||||
@ -31,22 +31,21 @@ namespace ts.server {
|
||||
if ((arr1 || emptyArray).length === 0 && (arr2 || emptyArray).length === 0) {
|
||||
return true;
|
||||
}
|
||||
const set = createMap<string, boolean>();
|
||||
const set: Map<boolean> = createMap<boolean>();
|
||||
let unique = 0;
|
||||
|
||||
for (const v of arr1) {
|
||||
if (set.get(v) !== true) {
|
||||
set.set(v, true);
|
||||
if (set[v] !== true) {
|
||||
set[v] = true;
|
||||
unique++;
|
||||
}
|
||||
}
|
||||
for (const v of arr2) {
|
||||
const isSet = set.get(v);
|
||||
if (isSet === undefined) {
|
||||
if (!hasProperty(set, v)) {
|
||||
return false;
|
||||
}
|
||||
if (isSet === true) {
|
||||
set.set(v, false);
|
||||
if (set[v] === true) {
|
||||
set[v] = false;
|
||||
unique--;
|
||||
}
|
||||
}
|
||||
@ -72,7 +71,7 @@ namespace ts.server {
}

export class TypingsCache {
private readonly perProjectCache = createMap<string, TypingsCacheEntry>();
private readonly perProjectCache: Map<TypingsCacheEntry> = createMap<TypingsCacheEntry>();

constructor(private readonly installer: ITypingsInstaller) {
}
@ -84,7 +83,7 @@ namespace ts.server {
return <any>emptyArray;
}

const entry = this.perProjectCache.get(project.getProjectName());
const entry = this.perProjectCache[project.getProjectName()];
const result: SortedReadonlyArray<string> = entry ? entry.typings : <any>emptyArray;
if (forceRefresh ||
!entry ||
@ -93,13 +92,13 @@ namespace ts.server {
unresolvedImportsChanged(unresolvedImports, entry.unresolvedImports)) {
// Note: entry is now poisoned since it does not really contain typings for a given combination of compiler options\typings options.
// instead it acts as a placeholder to prevent issuing multiple requests
this.perProjectCache.set(project.getProjectName(), {
this.perProjectCache[project.getProjectName()] = {
compilerOptions: project.getCompilerOptions(),
typingOptions,
typings: result,
unresolvedImports,
poisoned: true
});
};
// something has been changed, issue a request to update typings
this.installer.enqueueInstallTypingsRequest(project, typingOptions, unresolvedImports);
}
@ -107,21 +106,21 @@ namespace ts.server {
}

updateTypingsForProject(projectName: string, compilerOptions: CompilerOptions, typingOptions: TypingOptions, unresolvedImports: SortedReadonlyArray<string>, newTypings: string[]) {
this.perProjectCache.set(projectName, {
this.perProjectCache[projectName] = {
compilerOptions,
typingOptions,
typings: toSortedReadonlyArray(newTypings),
unresolvedImports,
poisoned: false
});
};
}

deleteTypingsForProject(projectName: string) {
this.perProjectCache.delete(projectName);
delete this.perProjectCache[projectName];
}

onProjectClosed(project: Project) {
this.perProjectCache.delete(project.getProjectName());
delete this.perProjectCache[project.getProjectName()];
this.installer.onProjectClosed(project);
}
}

@ -78,10 +78,10 @@ namespace ts.server.typingsInstaller {
};

export abstract class TypingsInstaller {
private readonly packageNameToTypingLocation = createMap<string, string>();
private readonly missingTypingsSet = createSet();
private readonly knownCachesSet = createSet();
private readonly projectWatchers = createMap<string, FileWatcher[]>();
private readonly packageNameToTypingLocation: Map<string> = createMap<string>();
private readonly missingTypingsSet: Map<true> = createMap<true>();
private readonly knownCachesSet: Map<true> = createMap<true>();
private readonly projectWatchers: Map<FileWatcher[]> = createMap<FileWatcher[]>();
readonly pendingRunRequests: PendingRequest[] = [];

private installRunCount = 1;
@ -111,7 +111,7 @@ namespace ts.server.typingsInstaller {
if (this.log.isEnabled()) {
this.log.writeLine(`Closing file watchers for project '${projectName}'`);
}
const watchers = this.projectWatchers.get(projectName);
const watchers = this.projectWatchers[projectName];
if (!watchers) {
if (this.log.isEnabled()) {
this.log.writeLine(`No watchers are registered for project '${projectName}'`);
@ -122,7 +122,7 @@ namespace ts.server.typingsInstaller {
w.close();
}

this.projectWatchers.delete(projectName);
delete this.projectWatchers[projectName];

if (this.log.isEnabled()) {
this.log.writeLine(`Closing file watchers for project '${projectName}' - done.`);
@ -176,7 +176,7 @@ namespace ts.server.typingsInstaller {
if (this.log.isEnabled()) {
this.log.writeLine(`Processing cache location '${cacheLocation}'`);
}
if (this.knownCachesSet.has(cacheLocation)) {
if (this.knownCachesSet[cacheLocation]) {
if (this.log.isEnabled()) {
this.log.writeLine(`Cache location was already processed...`);
}
@ -202,7 +202,7 @@ namespace ts.server.typingsInstaller {
if (!typingFile) {
continue;
}
const existingTypingFile = this.packageNameToTypingLocation.get(packageName);
const existingTypingFile = this.packageNameToTypingLocation[packageName];
if (existingTypingFile === typingFile) {
continue;
}
@ -214,14 +214,14 @@ namespace ts.server.typingsInstaller {
if (this.log.isEnabled()) {
this.log.writeLine(`Adding entry into typings cache: '${packageName}' => '${typingFile}'`);
}
this.packageNameToTypingLocation.set(packageName, typingFile);
this.packageNameToTypingLocation[packageName] = typingFile;
}
}
}
if (this.log.isEnabled()) {
this.log.writeLine(`Finished processing cache location '${cacheLocation}'`);
}
this.knownCachesSet.add(cacheLocation);
this.knownCachesSet[cacheLocation] = true;
}

private filterTypings(typingsToInstall: string[]) {
@ -230,7 +230,7 @@ namespace ts.server.typingsInstaller {
}
const result: string[] = [];
for (const typing of typingsToInstall) {
if (this.missingTypingsSet.has(typing)) {
if (this.missingTypingsSet[typing]) {
continue;
}
const validationResult = validatePackageName(typing);
@ -239,7 +239,7 @@ namespace ts.server.typingsInstaller {
}
else {
// add typing name to missing set so we won't process it again
this.missingTypingsSet.add(typing);
this.missingTypingsSet[typing] = true;
if (this.log.isEnabled()) {
switch (validationResult) {
case PackageNameValidationResult.EmptyName:
@ -296,20 +296,20 @@ namespace ts.server.typingsInstaller {
if (this.log.isEnabled()) {
this.log.writeLine(`Requested to install typings ${JSON.stringify(typingsToInstall)}, installed typings ${JSON.stringify(installedTypings)}`);
}
const installedPackages = createSet();
const installedPackages: Map<true> = createMap<true>();
const installedTypingFiles: string[] = [];
for (const t of installedTypings) {
const packageName = getBaseFileName(t);
if (!packageName) {
continue;
}
installedPackages.add(packageName);
installedPackages[packageName] = true;
const typingFile = typingToFileName(cachePath, packageName, this.installTypingHost);
if (!typingFile) {
continue;
}
if (!this.packageNameToTypingLocation.get(packageName)) {
this.packageNameToTypingLocation.set(packageName, typingFile);
if (!this.packageNameToTypingLocation[packageName]) {
this.packageNameToTypingLocation[packageName] = typingFile;
}
installedTypingFiles.push(typingFile);
}
@ -317,11 +317,11 @@ namespace ts.server.typingsInstaller {
this.log.writeLine(`Installed typing files ${JSON.stringify(installedTypingFiles)}`);
}
for (const toInstall of typingsToInstall) {
if (!installedPackages.has(toInstall)) {
if (!installedPackages[toInstall]) {
if (this.log.isEnabled()) {
this.log.writeLine(`New missing typing package '${toInstall}'`);
}
this.missingTypingsSet.add(toInstall);
this.missingTypingsSet[toInstall] = true;
}
}

@ -395,7 +395,7 @@ namespace ts.server.typingsInstaller {
});
watchers.push(w);
}
this.projectWatchers.set(projectName, watchers);
this.projectWatchers[projectName] = watchers;
}

private createSetTypings(request: DiscoverTypings, typings: string[]): SetTypings {

@ -91,7 +91,7 @@ namespace ts.server {
};
}

export function mergeMapLikes(target: MapLike<any>, source: MapLike <any>): void {
export function mergeMaps(target: MapLike<any>, source: MapLike <any>): void {
for (const key in source) {
if (hasProperty(source, key)) {
target[key] = source[key];
@ -132,6 +132,32 @@ namespace ts.server {
return <NormalizedPath>fileName;
}

export interface NormalizedPathMap<T> {
get(path: NormalizedPath): T;
set(path: NormalizedPath, value: T): void;
contains(path: NormalizedPath): boolean;
remove(path: NormalizedPath): void;
}

export function createNormalizedPathMap<T>(): NormalizedPathMap<T> {
/* tslint:disable:no-null-keyword */
const map: Map<T> = Object.create(null);
/* tslint:enable:no-null-keyword */
return {
get(path) {
return map[path];
},
set(path, value) {
map[path] = value;
},
contains(path) {
return hasProperty(map, path);
},
remove(path) {
delete map[path];
}
};
}
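
The NormalizedPathMap shim restored above hides a prototype-free object behind a small get/set/contains/remove surface. A usage sketch, assuming the createNormalizedPathMap and NormalizedPath declarations shown in the hunk (illustrative only; the literal path and the cast stand in for real normalization):

// Usage sketch, assuming the declarations above.
const openFileVersions = createNormalizedPathMap<number>();
const path = <NormalizedPath>"/src/app.ts";
openFileVersions.set(path, 1);                    // plain property write underneath
if (openFileVersions.contains(path)) {            // hasProperty(map, path) underneath
    const version = openFileVersions.get(path);
    openFileVersions.remove(path);                // `delete map[path]` underneath
}
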
function throwLanguageServiceIsDisabledError(): never {
throw new Error("LanguageService is disabled");
}
@ -204,7 +230,7 @@ namespace ts.server {
* these fields can be present in the project file
**/
files?: string[];
wildcardDirectories?: MapLike<WatchDirectoryFlags>;
wildcardDirectories?: Map<WatchDirectoryFlags>;
compilerOptions?: CompilerOptions;
typingOptions?: TypingOptions;
compileOnSave?: boolean;
@ -225,22 +251,21 @@ namespace ts.server {
}

export class ThrottledOperations {
private pendingTimeouts = createMap<string, any>();
private pendingTimeouts: Map<any> = createMap<any>();
constructor(private readonly host: ServerHost) {
}

public schedule(operationId: string, delay: number, cb: () => void) {
const pendingTimeout = this.pendingTimeouts.get(operationId);
if (pendingTimeout !== undefined) {
if (hasProperty(this.pendingTimeouts, operationId)) {
// another operation was already scheduled for this id - cancel it
this.host.clearTimeout(pendingTimeout);
this.host.clearTimeout(this.pendingTimeouts[operationId]);
}
// schedule new operation, pass arguments
this.pendingTimeouts.set(operationId, this.host.setTimeout(ThrottledOperations.run, delay, this, operationId, cb));
this.pendingTimeouts[operationId] = this.host.setTimeout(ThrottledOperations.run, delay, this, operationId, cb);
}

private static run(self: ThrottledOperations, operationId: string, cb: () => void) {
self.pendingTimeouts.delete(operationId);
delete self.pendingTimeouts[operationId];
cb();
}
}
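
ThrottledOperations, shown above in both its map-based and object-keyed forms, coalesces repeated schedule calls for the same operationId: any pending timer is cancelled before a new one is armed, so only the latest callback runs. A self-contained sketch of the same pattern against plain global timers rather than a ServerHost (illustrative; Debouncer is a made-up name):

// Minimal sketch of the ThrottledOperations pattern, using global timers instead of a ServerHost.
class Debouncer {
    private pending: { [operationId: string]: any } = Object.create(null);

    schedule(operationId: string, delayMs: number, cb: () => void): void {
        if (this.pending[operationId] !== undefined) {
            clearTimeout(this.pending[operationId]);   // a newer request supersedes the older one
        }
        this.pending[operationId] = setTimeout(() => {
            delete this.pending[operationId];          // forget the timer before running the callback
            cb();
        }, delayMs);
    }
}

// Only one "refresh" actually runs, roughly 100 ms after the last call.
const ops = new Debouncer();
ops.schedule("refresh", 100, () => console.log("refreshed"));
ops.schedule("refresh", 100, () => console.log("refreshed"));
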
@ -462,7 +462,7 @@ namespace ts {
}

/* @internal */
export function getSemanticClassifications(typeChecker: TypeChecker, cancellationToken: CancellationToken, sourceFile: SourceFile, classifiableNames: Set<string>, span: TextSpan): ClassifiedSpan[] {
export function getSemanticClassifications(typeChecker: TypeChecker, cancellationToken: CancellationToken, sourceFile: SourceFile, classifiableNames: Map<string>, span: TextSpan): ClassifiedSpan[] {
return convertClassifications(getEncodedSemanticClassifications(typeChecker, cancellationToken, sourceFile, classifiableNames, span));
}

@ -487,7 +487,7 @@ namespace ts {
}

/* @internal */
export function getEncodedSemanticClassifications(typeChecker: TypeChecker, cancellationToken: CancellationToken, sourceFile: SourceFile, classifiableNames: Set<string>, span: TextSpan): Classifications {
export function getEncodedSemanticClassifications(typeChecker: TypeChecker, cancellationToken: CancellationToken, sourceFile: SourceFile, classifiableNames: Map<string>, span: TextSpan): Classifications {
const result: number[] = [];
processNode(sourceFile);

@ -557,7 +557,7 @@ namespace ts {
// Only bother calling into the typechecker if this is an identifier that
// could possibly resolve to a type name. This makes classification run
// in a third of the time it would normally take.
if (classifiableNames.has(identifier.text)) {
if (classifiableNames[identifier.text]) {
const symbol = typeChecker.getSymbolAtLocation(node);
if (symbol) {
const type = classifySymbol(symbol, getMeaningFromLocation(node));

@ -14,24 +14,25 @@ namespace ts {
}

export namespace codefix {
const codeFixes = createMap<number, CodeFix[]>();
const codeFixes = createMap<CodeFix[]>();

export function registerCodeFix(action: CodeFix) {
forEach(action.errorCodes, error => {
multiMapAdd(codeFixes, error, action);
let fixes = codeFixes[error];
if (!fixes) {
fixes = [];
codeFixes[error] = fixes;
}
fixes.push(action);
});
}

export function getSupportedErrorCodes() {
const supportedErrorCodes: string[] = [];
codeFixes.forEach((_, key) => {
supportedErrorCodes.push(key.toString());
});
return supportedErrorCodes;
return Object.keys(codeFixes);
}

export function getFixes(context: CodeFixContext): CodeAction[] {
const fixes = codeFixes.get(context.errorCode);
const fixes = codeFixes[context.errorCode];
let allActions: CodeAction[] = [];

forEach(fixes, f => {
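
The codefix hunk above replaces the multiMapAdd helper with an inline grow-or-create sequence on a string-keyed object. A standalone sketch of that grouping pattern (illustrative; FixLike is a stand-in interface, not the compiler's CodeFix type):

// Illustrative multi-map: group registered fixes by error code on a plain object.
interface FixLike { errorCodes: number[]; description: string; }

const fixesByCode: { [errorCode: string]: FixLike[] } = Object.create(null);

function register(fix: FixLike): void {
    for (const code of fix.errorCodes) {
        const bucket = fixesByCode[code] || (fixesByCode[code] = []); // create the bucket on first use
        bucket.push(fix);
    }
}

register({ errorCodes: [2304, 2552], description: "add missing import" });
register({ errorCodes: [2304], description: "declare missing variable" });
console.log(Object.keys(fixesByCode)); // ["2304", "2552"] - numeric keys become strings
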
@ -60,17 +60,18 @@ namespace ts.Completions {

return { isGlobalCompletion, isMemberCompletion, isNewIdentifierLocation: isNewIdentifierLocation, entries };

function getJavaScriptCompletionEntries(sourceFile: SourceFile, position: number, uniqueNames: Set<string>): CompletionEntry[] {
function getJavaScriptCompletionEntries(sourceFile: SourceFile, position: number, uniqueNames: Map<string>): CompletionEntry[] {
const entries: CompletionEntry[] = [];

getNameTable(sourceFile).forEach((nameTablePosition, name) => {
const nameTable = getNameTable(sourceFile);
for (const name in nameTable) {
// Skip identifiers produced only from the current location
if (nameTablePosition === position) {
return;
if (nameTable[name] === position) {
continue;
}

if (!uniqueNames.has(name)) {
uniqueNames.add(name);
if (!uniqueNames[name]) {
uniqueNames[name] = name;
const displayName = getCompletionEntryDisplayName(unescapeIdentifier(name), compilerOptions.target, /*performCharacterChecks*/ true);
if (displayName) {
const entry = {
@ -82,7 +83,7 @@ namespace ts.Completions {
entries.push(entry);
}
}
});
}

return entries;
}
@ -113,17 +114,17 @@ namespace ts.Completions {

}

function getCompletionEntriesFromSymbols(symbols: Symbol[], entries: CompletionEntry[], location: Node, performCharacterChecks: boolean): Set<string> {
function getCompletionEntriesFromSymbols(symbols: Symbol[], entries: CompletionEntry[], location: Node, performCharacterChecks: boolean): Map<string> {
const start = timestamp();
const uniqueNames = createSet();
const uniqueNames = createMap<string>();
if (symbols) {
for (const symbol of symbols) {
const entry = createCompletionEntry(symbol, location, performCharacterChecks);
if (entry) {
const id = escapeIdentifier(entry.name);
if (!uniqueNames.has(id)) {
if (!uniqueNames[id]) {
entries.push(entry);
uniqueNames.add(id);
uniqueNames[id] = id;
}
}
}
@ -363,7 +364,7 @@ namespace ts.Completions {
*
* both foo.ts and foo.tsx become foo
*/
const foundFiles = createSet();
const foundFiles = createMap<boolean>();
for (let filePath of files) {
filePath = normalizePath(filePath);
if (exclude && comparePaths(filePath, exclude, scriptPath, ignoreCase) === Comparison.EqualTo) {
@ -372,14 +373,14 @@ namespace ts.Completions {

const foundFileName = includeExtensions ? getBaseFileName(filePath) : removeFileExtension(getBaseFileName(filePath));

if (!foundFiles.has(foundFileName)) {
foundFiles.add(foundFileName);
if (!foundFiles[foundFileName]) {
foundFiles[foundFileName] = true;
}
}

foundFiles.forEach(foundFile => {
for (const foundFile in foundFiles) {
result.push(createCompletionEntryForModule(foundFile, ScriptElementKind.scriptElement, span));
});
}
}

// If possible, get folder completion as well
@ -417,7 +418,7 @@ namespace ts.Completions {

if (paths) {
for (const path in paths) {
if (hasProperty(paths, path)) {
if (paths.hasOwnProperty(path)) {
if (path === "*") {
if (paths[path]) {
for (const pattern of paths[path]) {
@ -1553,7 +1554,7 @@ namespace ts.Completions {
* do not occur at the current position and have not otherwise been typed.
*/
function filterNamedImportOrExportCompletionItems(exportsOfModule: Symbol[], namedImportsOrExports: ImportOrExportSpecifier[]): Symbol[] {
const existingImportsOrExports = createSet();
const existingImportsOrExports = createMap<boolean>();

for (const element of namedImportsOrExports) {
// If this is the current item we are editing right now, do not filter it out
@ -1562,14 +1563,14 @@ namespace ts.Completions {
}

const name = element.propertyName || element.name;
existingImportsOrExports.add(name.text);
existingImportsOrExports[name.text] = true;
}

if (setIsEmpty(existingImportsOrExports)) {
if (!someProperties(existingImportsOrExports)) {
return filter(exportsOfModule, e => e.name !== "default");
}

return filter(exportsOfModule, e => e.name !== "default" && !existingImportsOrExports.has(e.name));
return filter(exportsOfModule, e => e.name !== "default" && !existingImportsOrExports[e.name]);
}

/**
@ -1583,7 +1584,7 @@ namespace ts.Completions {
return contextualMemberSymbols;
}

const existingMemberNames = createSet();
const existingMemberNames = createMap<boolean>();
for (const m of existingMembers) {
// Ignore omitted expressions for missing members
if (m.kind !== SyntaxKind.PropertyAssignment &&
@ -1615,10 +1616,10 @@ namespace ts.Completions {
existingName = (<Identifier>m.name).text;
}

existingMemberNames.add(existingName);
existingMemberNames[existingName] = true;
}

return filter(contextualMemberSymbols, m => !existingMemberNames.has(m.name));
return filter(contextualMemberSymbols, m => !existingMemberNames[m.name]);
}

/**
@ -1628,7 +1629,7 @@ namespace ts.Completions {
* do not occur at the current position and have not otherwise been typed.
*/
function filterJsxAttributes(symbols: Symbol[], attributes: NodeArray<JsxAttribute | JsxSpreadAttribute>): Symbol[] {
const seenNames = createSet();
const seenNames = createMap<boolean>();
for (const attr of attributes) {
// If this is the current item we are editing right now, do not filter it out
if (attr.getStart() <= position && position <= attr.getEnd()) {
@ -1636,11 +1637,11 @@ namespace ts.Completions {
}

if (attr.kind === SyntaxKind.JsxAttribute) {
seenNames.add((<JsxAttribute>attr).name.text);
seenNames[(<JsxAttribute>attr).name.text] = true;
}
}

return filter(symbols, a => !seenNames.has(a.name));
return filter(symbols, a => !seenNames[a.name]);
}
}
@ -39,16 +39,16 @@ namespace ts.DocumentHighlights {
return undefined;
}

const fileNameToDocumentHighlights = createMap<string, DocumentHighlights>();
const fileNameToDocumentHighlights = createMap<DocumentHighlights>();
const result: DocumentHighlights[] = [];
for (const referencedSymbol of referencedSymbols) {
for (const referenceEntry of referencedSymbol.references) {
const fileName = referenceEntry.fileName;
let documentHighlights = fileNameToDocumentHighlights.get(fileName);
let documentHighlights = fileNameToDocumentHighlights[fileName];
if (!documentHighlights) {
documentHighlights = { fileName, highlightSpans: [] };

fileNameToDocumentHighlights.set(fileName, documentHighlights);
fileNameToDocumentHighlights[fileName] = documentHighlights;
result.push(documentHighlights);
}

@ -105,7 +105,7 @@ namespace ts {
export function createDocumentRegistry(useCaseSensitiveFileNames?: boolean, currentDirectory = ""): DocumentRegistry {
// Maps from compiler setting target (ES3, ES5, etc.) to all the cached documents we have
// for those settings.
const buckets = createMap<string, FileMap<DocumentRegistryEntry>>();
const buckets = createMap<FileMap<DocumentRegistryEntry>>();
const getCanonicalFileName = createGetCanonicalFileName(!!useCaseSensitiveFileNames);

function getKeyForCompilationSettings(settings: CompilerOptions): DocumentRegistryBucketKey {
@ -113,35 +113,30 @@ namespace ts {
}

function getBucketForCompilationSettings(key: DocumentRegistryBucketKey, createIfMissing: boolean): FileMap<DocumentRegistryEntry> {
let bucket = buckets.get(key);
let bucket = buckets[key];
if (!bucket && createIfMissing) {
buckets.set(key, bucket = createFileMap<DocumentRegistryEntry>());
buckets[key] = bucket = createFileMap<DocumentRegistryEntry>();
}
return bucket;
}

function reportStats() {
type Info = {
bucket: string;
sourceFiles: { name: string, refCount: number, references: string[] }[]
};

const bucketInfoArray: Info[] = [];
buckets.forEach((entries, name) => {
if (name && name.charAt(0) === "_") {
const sourceFiles: { name: string; refCount: number; references: string[]; }[] = [];
entries.forEachValue((key, entry) => {
sourceFiles.push({
name: key,
refCount: entry.languageServiceRefCount,
references: entry.owners
});
const bucketInfoArray = Object.keys(buckets).filter(name => name && name.charAt(0) === "_").map(name => {
const entries = buckets[name];
const sourceFiles: { name: string; refCount: number; references: string[]; }[] = [];
entries.forEachValue((key, entry) => {
sourceFiles.push({
name: key,
refCount: entry.languageServiceRefCount,
references: entry.owners.slice(0)
});
sourceFiles.sort((x, y) => y.refCount - x.refCount);
bucketInfoArray.push({ bucket: name, sourceFiles });
}
});
sourceFiles.sort((x, y) => y.refCount - x.refCount);
return {
bucket: name,
sourceFiles
};
});

return JSON.stringify(bucketInfoArray, undefined, 2);
}

@ -96,7 +96,7 @@ namespace ts.FindAllReferences {

const nameTable = getNameTable(sourceFile);

if (nameTable.get(internedName) !== undefined) {
if (nameTable[internedName] !== undefined) {
result = result || [];
getReferencesInNode(sourceFile, symbol, declaredName, node, searchMeaning, findInStrings, findInComments, result, symbolToIndex);
}
@ -378,7 +378,7 @@ namespace ts.FindAllReferences {
const possiblePositions = getPossibleSymbolReferencePositions(sourceFile, searchText, start, container.getEnd());

const parents = getParentSymbolsOfPropertyAccess();
const inheritsFromCache = createMap<string, boolean>();
const inheritsFromCache: Map<boolean> = createMap<boolean>();

if (possiblePositions.length) {
// Build the set of symbols to search for, initially it has only the current symbol
@ -501,14 +501,14 @@ namespace ts.FindAllReferences {
function findOwnConstructorCalls(classSymbol: Symbol): Node[] {
const result: Node[] = [];

for (const decl of classSymbol.members.get("__constructor").declarations) {
for (const decl of classSymbol.members["__constructor"].declarations) {
Debug.assert(decl.kind === SyntaxKind.Constructor);
const ctrKeyword = decl.getChildAt(0);
Debug.assert(ctrKeyword.kind === SyntaxKind.ConstructorKeyword);
result.push(ctrKeyword);
}

classSymbol.exports.forEach(member => {
forEachProperty(classSymbol.exports, member => {
const decl = member.valueDeclaration;
if (decl && decl.kind === SyntaxKind.MethodDeclaration) {
const body = (<MethodDeclaration>decl).body;
@ -528,7 +528,7 @@ namespace ts.FindAllReferences {
/** Find references to `super` in the constructor of an extending class. */
function superConstructorAccesses(cls: ClassLikeDeclaration): Node[] {
const symbol = cls.symbol;
const ctr = symbol.members.get("__constructor");
const ctr = symbol.members["__constructor"];
if (!ctr) {
return [];
}
@ -705,7 +705,7 @@ namespace ts.FindAllReferences {
* @param parent Another class or interface Symbol
* @param cachedResults A map of symbol id pairs (i.e. "child,parent") to booleans indicating previous results
*/
function explicitlyInheritsFrom(child: Symbol, parent: Symbol, cachedResults: Map<string, boolean>): boolean {
function explicitlyInheritsFrom(child: Symbol, parent: Symbol, cachedResults: Map<boolean>): boolean {
const parentIsInterface = parent.getFlags() & SymbolFlags.Interface;
return searchHierarchy(child);

@ -715,13 +715,12 @@ namespace ts.FindAllReferences {
}

const key = getSymbolId(symbol) + "," + getSymbolId(parent);
const cachedResult = cachedResults.get(key);
if (cachedResult !== undefined) {
return cachedResult;
if (key in cachedResults) {
return cachedResults[key];
}

// Set the key so that we don't infinitely recurse
cachedResults.set(key, false);
cachedResults[key] = false;

const inherits = forEach(symbol.getDeclarations(), declaration => {
if (isClassLike(declaration)) {
@ -745,7 +744,7 @@ namespace ts.FindAllReferences {
return false;
});

cachedResults.set(key, inherits);
cachedResults[key] = inherits;
return inherits;
}

@ -1047,7 +1046,7 @@ namespace ts.FindAllReferences {

// Add symbol of properties/methods of the same name in base classes and implemented interfaces definitions
if (!implementations && rootSymbol.parent && rootSymbol.parent.flags & (SymbolFlags.Class | SymbolFlags.Interface)) {
getPropertySymbolsFromBaseTypes(rootSymbol.parent, rootSymbol.getName(), result, /*previousIterationSymbolsCache*/ createMap<string, Symbol>());
getPropertySymbolsFromBaseTypes(rootSymbol.parent, rootSymbol.getName(), result, /*previousIterationSymbolsCache*/ createMap<Symbol>());
}
});

@ -1079,7 +1078,7 @@ namespace ts.FindAllReferences {
// the function will add any found symbol of the property-name, then its sub-routine will call
// getPropertySymbolsFromBaseTypes again to walk up any base types to prevent revisiting already
// visited symbol, interface "C", the sub-routine will pass the current symbol as previousIterationSymbol.
if (previousIterationSymbolsCache.has(symbol.name)) {
if (symbol.name in previousIterationSymbolsCache) {
return;
}

@ -1106,14 +1105,14 @@ namespace ts.FindAllReferences {
}

// Visit the typeReference as well to see if it directly or indirectly use that property
previousIterationSymbolsCache.set(symbol.name, symbol);
previousIterationSymbolsCache[symbol.name] = symbol;
getPropertySymbolsFromBaseTypes(type.symbol, propertyName, result, previousIterationSymbolsCache);
}
}
}
}

function getRelatedSymbol(searchSymbols: Symbol[], referenceSymbol: Symbol, referenceLocation: Node, searchLocationIsConstructor: boolean, parents: Symbol[] | undefined, cache: Map<string, boolean>): Symbol {
function getRelatedSymbol(searchSymbols: Symbol[], referenceSymbol: Symbol, referenceLocation: Node, searchLocationIsConstructor: boolean, parents: Symbol[] | undefined, cache: Map<boolean>): Symbol {
if (contains(searchSymbols, referenceSymbol)) {
// If we are searching for constructor uses, they must be 'new' expressions.
return (!searchLocationIsConstructor || isNewExpressionTarget(referenceLocation)) && referenceSymbol;
@ -1177,7 +1176,7 @@ namespace ts.FindAllReferences {
}

const result: Symbol[] = [];
getPropertySymbolsFromBaseTypes(rootSymbol.parent, rootSymbol.getName(), result, /*previousIterationSymbolsCache*/ createMap<string, Symbol>());
getPropertySymbolsFromBaseTypes(rootSymbol.parent, rootSymbol.getName(), result, /*previousIterationSymbolsCache*/ createMap<Symbol>());
return forEach(result, s => searchSymbols.indexOf(s) >= 0 ? s : undefined);
}
@ -4,7 +4,7 @@
namespace ts.formatting {
export class Rules {
public getRuleName(rule: Rule) {
const o: ts.MapLike<any> = <any>this;
const o: ts.Map<any> = <any>this;
for (const name in o) {
if (o[name] === rule) {
return name;

@ -14,7 +14,7 @@ namespace ts.GoToDefinition {
// Type reference directives
const typeReferenceDirective = findReferenceInPosition(sourceFile.typeReferenceDirectives, position);
if (typeReferenceDirective) {
const referenceFile = program.getResolvedTypeReferenceDirectives().get(typeReferenceDirective.fileName);
const referenceFile = program.getResolvedTypeReferenceDirectives()[typeReferenceDirective.fileName];
if (referenceFile && referenceFile.resolvedFileName) {
return [getDefinitionInfoForFileReference(typeReferenceDirective.fileName, referenceFile.resolvedFileName)];
}

@ -17,19 +17,19 @@ namespace ts.JsTyping {

interface PackageJson {
_requiredBy?: string[];
dependencies?: MapLike<string>;
devDependencies?: MapLike<string>;
dependencies?: Map<string>;
devDependencies?: Map<string>;
name?: string;
optionalDependencies?: MapLike<string>;
peerDependencies?: MapLike<string>;
optionalDependencies?: Map<string>;
peerDependencies?: Map<string>;
typings?: string;
};

// A map of loose file names to library names
// that we are confident require typings
let safeList: Map<string, string>;
let safeList: Map<string>;

const EmptySafeList = createMap<string, string>();
const EmptySafeList: Map<string> = createMap<string>();

/* @internal */
export const nodeCoreModuleList: ReadonlyArray<string> = [
@ -56,13 +56,13 @@ namespace ts.JsTyping {
fileNames: string[],
projectRootPath: Path,
safeListPath: Path,
packageNameToTypingLocation: Map<string, string>,
packageNameToTypingLocation: Map<string>,
typingOptions: TypingOptions,
unresolvedImports: ReadonlyArray<string>):
{ cachedTypingPaths: string[], newTypingNames: string[], filesToWatch: string[] } {

// A typing name to typing file path mapping
const inferredTypings = createMap<string, string | undefined>();
const inferredTypings = createMap<string>();

if (!typingOptions || !typingOptions.enableAutoDiscovery) {
return { cachedTypingPaths: [], newTypingNames: [], filesToWatch: [] };
@ -76,7 +76,7 @@ namespace ts.JsTyping {

if (!safeList) {
const result = readConfigFile(safeListPath, (path: string) => host.readFile(path));
safeList = result.config ? mapOfMapLike<string>(result.config) : EmptySafeList;
safeList = result.config ? createMap<string>(result.config) : EmptySafeList;
}

const filesToWatch: string[] = [];
@ -107,35 +107,34 @@ namespace ts.JsTyping {
// add typings for unresolved imports
if (unresolvedImports) {
for (const moduleId of unresolvedImports) {
const typingName = nodeCoreModules.has(moduleId) ? "node" : moduleId;
if (!inferredTypings.has(typingName)) {
inferredTypings.set(typingName, undefined);
const typingName = moduleId in nodeCoreModules ? "node" : moduleId;
if (!(typingName in inferredTypings)) {
inferredTypings[typingName] = undefined;
}
}
}
// Add the cached typing locations for inferred typings that are already installed
packageNameToTypingLocation.forEach((typingLocation, name) => {
if (inferredTypings.has(name) && inferredTypings.get(name) === undefined) {
inferredTypings.set(name, typingLocation);
for (const name in packageNameToTypingLocation) {
if (name in inferredTypings && !inferredTypings[name]) {
inferredTypings[name] = packageNameToTypingLocation[name];
}
});
}

// Remove typings that the user has added to the exclude list
for (const excludeTypingName of exclude) {
inferredTypings.delete(excludeTypingName);
delete inferredTypings[excludeTypingName];
}

const newTypingNames: string[] = [];
const cachedTypingPaths: string[] = [];

inferredTypings.forEach((inferredTyping, typing) => {
if (inferredTyping !== undefined) {
cachedTypingPaths.push(inferredTyping);
for (const typing in inferredTypings) {
if (inferredTypings[typing] !== undefined) {
cachedTypingPaths.push(inferredTypings[typing]);
}
else {
newTypingNames.push(typing);
}
});
}
return { cachedTypingPaths, newTypingNames, filesToWatch };

/**
@ -147,8 +146,8 @@ namespace ts.JsTyping {
}

for (const typing of typingNames) {
if (!inferredTypings.has(typing)) {
inferredTypings.set(typing, undefined);
if (!(typing in inferredTypings)) {
inferredTypings[typing] = undefined;
}
}
}
@ -190,7 +189,7 @@ namespace ts.JsTyping {
const cleanedTypingNames = map(inferredTypingNames, f => f.replace(/((?:\.|-)min(?=\.|$))|((?:-|\.)\d+)/g, ""));

if (safeList !== EmptySafeList) {
mergeTypings(filter(cleanedTypingNames, f => safeList.has(f)));
mergeTypings(filter(cleanedTypingNames, f => f in safeList));
}

const hasJsxFile = forEach(fileNames, f => ensureScriptKind(f, getScriptKindFromFileName(f)) === ScriptKind.JSX);
@ -237,7 +236,7 @@ namespace ts.JsTyping {
}
if (packageJson.typings) {
const absolutePath = getNormalizedAbsolutePath(packageJson.typings, getDirectoryPath(normalizedFileName));
inferredTypings.set(packageJson.name, absolutePath);
inferredTypings[packageJson.name] = absolutePath;
}
else {
typingNames.push(packageJson.name);
@ -7,22 +7,23 @@ namespace ts.NavigateTo {
let rawItems: RawNavigateToItem[] = [];

// Search the declarations in all files and output matched NavigateToItem into array of NavigateToItem[]
for (const sourceFile of sourceFiles) {
forEach(sourceFiles, sourceFile => {
cancellationToken.throwIfCancellationRequested();

if (excludeDtsFiles && fileExtensionIs(sourceFile.fileName, ".d.ts")) {
continue;
return;
}

// Use `someInMap` to break out early.
someInMap(sourceFile.getNamedDeclarations(), (declarations, name) => {
const nameToDeclarations = sourceFile.getNamedDeclarations();
for (const name in nameToDeclarations) {
const declarations = nameToDeclarations[name];
if (declarations) {
// First do a quick check to see if the name of the declaration matches the
// last portion of the (possibly) dotted name they're searching for.
let matches = patternMatcher.getMatchesForLastSegmentOfPattern(name);

if (!matches) {
return false;
continue;
}

for (const declaration of declarations) {
@ -31,13 +32,13 @@ namespace ts.NavigateTo {
if (patternMatcher.patternContainsDots) {
const containers = getContainers(declaration);
if (!containers) {
return true; // Go to the next source file.
return undefined;
}

matches = patternMatcher.getMatches(containers, name);

if (!matches) {
return false;
continue;
}
}

@ -46,8 +47,8 @@ namespace ts.NavigateTo {
rawItems.push({ name, fileName, matchKind, isCaseSensitive: allMatchesAreCaseSensitive(matches), declaration });
}
}
});
}
}
});

// Remove imports when the imported declaration is already in the list and has the same name.
rawItems = filter(rawItems, item => {

@ -239,7 +239,7 @@ namespace ts.NavigationBar {

/** Merge declarations of the same kind. */
function mergeChildren(children: NavigationBarNode[]): void {
const nameToItems = createMap<string, NavigationBarNode | NavigationBarNode[]>();
const nameToItems = createMap<NavigationBarNode | NavigationBarNode[]>();
filterMutate(children, child => {
const decl = <Declaration>child.node;
const name = decl.name && nodeText(decl.name);
@ -248,9 +248,9 @@ namespace ts.NavigationBar {
return true;
}

const itemsWithSameName = nameToItems.get(name);
const itemsWithSameName = nameToItems[name];
if (!itemsWithSameName) {
nameToItems.set(name, child);
nameToItems[name] = child;
return true;
}

@ -268,7 +268,7 @@ namespace ts.NavigationBar {
if (tryMerge(itemWithSameName, child)) {
return false;
}
nameToItems.set(name, [itemWithSameName, child]);
nameToItems[name] = [itemWithSameName, child];
return true;
}

@ -626,15 +626,15 @@ namespace ts.NavigationBar {

/**
* Matches all whitespace characters in a string. Eg:
*
*
* "app.
*
*
* onactivated"
*
*
* matches because of the newline, whereas
*
*
* "app.onactivated"
*
*
* does not match.
*/
const whiteSpaceRegex = /\s+/g;

@ -113,7 +113,7 @@ namespace ts {
// we see the name of a module that is used everywhere, or the name of an overload). As
// such, we cache the information we compute about the candidate for the life of this
// pattern matcher so we don't have to compute it multiple times.
const stringToWordSpans = createMap<string, TextSpan[]>();
const stringToWordSpans = createMap<TextSpan[]>();

pattern = pattern.trim();

@ -188,7 +188,11 @@ namespace ts {
}

function getWordSpans(word: string): TextSpan[] {
return getOrUpdate(stringToWordSpans, word, breakIntoWordSpans);
if (!(word in stringToWordSpans)) {
stringToWordSpans[word] = breakIntoWordSpans(word);
}

return stringToWordSpans[word];
}

function matchTextChunk(candidate: string, chunk: TextChunk, punctuationStripped: boolean): PatternMatch {
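
The getWordSpans hunk above swaps the getOrUpdate helper for an explicit check-then-fill on a string-keyed cache. A self-contained sketch of that memoization pattern (illustrative; computeSpans stands in for breakIntoWordSpans):

// Illustrative memoization over a string-keyed cache; computeSpans is a stand-in for breakIntoWordSpans.
interface Span { start: number; length: number; }

const cache: { [word: string]: Span[] } = Object.create(null);

function computeSpans(word: string): Span[] {
    // pretend this is expensive: split "fooBarBaz" into camelCase word spans
    return word.split(/(?=[A-Z])/).map((part, i, parts) => ({
        start: parts.slice(0, i).join("").length,
        length: part.length
    }));
}

function getWordSpansMemoized(word: string): Span[] {
    if (!(word in cache)) {                 // `in` distinguishes "missing" from a cached empty array
        cache[word] = computeSpans(word);
    }
    return cache[word];
}
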
@ -465,13 +465,13 @@ namespace ts {
public scriptKind: ScriptKind;
public languageVersion: ScriptTarget;
public languageVariant: LanguageVariant;
public identifiers: Map<string, string>;
public nameTable: Map<string, number>;
public resolvedModules: Map<string, ResolvedModuleFull>;
public resolvedTypeReferenceDirectiveNames: Map<string, ResolvedTypeReferenceDirective>;
public identifiers: Map<string>;
public nameTable: Map<number>;
public resolvedModules: Map<ResolvedModuleFull>;
public resolvedTypeReferenceDirectiveNames: Map<ResolvedTypeReferenceDirective>;
public imports: LiteralExpression[];
public moduleAugmentations: LiteralExpression[];
private namedDeclarations: Map<string, Declaration[]>;
private namedDeclarations: Map<Declaration[]>;

constructor(kind: SyntaxKind, pos: number, end: number) {
super(kind, pos, end);
@ -493,7 +493,7 @@ namespace ts {
return ts.getPositionOfLineAndCharacter(this, line, character);
}

public getNamedDeclarations(): Map<string, Declaration[]> {
public getNamedDeclarations(): Map<Declaration[]> {
if (!this.namedDeclarations) {
this.namedDeclarations = this.computeNamedDeclarations();
}
@ -501,8 +501,8 @@ namespace ts {
return this.namedDeclarations;
}

private computeNamedDeclarations(): Map<string, Declaration[]> {
const result = createMap<string, Declaration[]>();
private computeNamedDeclarations(): Map<Declaration[]> {
const result = createMap<Declaration[]>();

forEachChild(this, visit);

@ -516,7 +516,7 @@ namespace ts {
}

function getDeclarations(name: string) {
return getOrUpdate(result, name, () => []);
return result[name] || (result[name] = []);
}

function getDeclarationName(declaration: Declaration) {
@ -967,7 +967,7 @@ namespace ts {
const currentDirectory = host.getCurrentDirectory();
// Check if the localized messages json is set, otherwise query the host for it
if (!localizedDiagnosticMessages && host.getLocalizedDiagnosticMessages) {
localizedDiagnosticMessages = mapOfMapLike<string>(host.getLocalizedDiagnosticMessages());
localizedDiagnosticMessages = host.getLocalizedDiagnosticMessages();
}

function log(message: string) {
@ -1922,7 +1922,7 @@ namespace ts {
}

/* @internal */
export function getNameTable(sourceFile: SourceFile): Map<string, number> {
export function getNameTable(sourceFile: SourceFile): Map<number> {
if (!sourceFile.nameTable) {
initializeNameTable(sourceFile);
}
@ -1931,7 +1931,7 @@ namespace ts {
}

function initializeNameTable(sourceFile: SourceFile): void {
const nameTable = createMap<string, number>();
const nameTable = createMap<number>();

walk(sourceFile);
sourceFile.nameTable = nameTable;
@ -1939,7 +1939,7 @@ namespace ts {
function walk(node: Node) {
switch (node.kind) {
case SyntaxKind.Identifier:
nameTable.set((<Identifier>node).text, nameTable.get((<Identifier>node).text) === undefined ? node.pos : -1);
nameTable[(<Identifier>node).text] = nameTable[(<Identifier>node).text] === undefined ? node.pos : -1;
break;
case SyntaxKind.StringLiteral:
case SyntaxKind.NumericLiteral:
@ -1952,7 +1952,7 @@ namespace ts {
isArgumentOfElementAccessExpression(node) ||
isLiteralComputedPropertyDeclarationName(node)) {

nameTable.set((<LiteralExpression>node).text, nameTable.get((<LiteralExpression>node).text) === undefined ? node.pos : -1);
nameTable[(<LiteralExpression>node).text] = nameTable[(<LiteralExpression>node).text] === undefined ? node.pos : -1;
}
break;
default:
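
In both versions of the name-table walk above, a name maps to the position of its only occurrence, or to -1 once it has been seen more than once, which is enough to answer whether a name occurs anywhere other than the current location. A minimal sketch of that convention (illustrative; positions here are plain indices rather than node positions):

// Illustrative: record each name's sole position, or -1 when it occurs more than once.
const nameTable: { [name: string]: number } = Object.create(null);

function record(name: string, pos: number): void {
    nameTable[name] = nameTable[name] === undefined ? pos : -1;
}

["foo", "bar", "foo"].forEach((name, index) => record(name, index));
console.log(nameTable["bar"]); // 1  - unique, keeps its position
console.log(nameTable["foo"]); // -1 - seen twice, position no longer meaningful
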
@ -1167,7 +1167,7 @@ namespace ts {
info.fileNames,
toPath(info.projectRootPath, info.projectRootPath, getCanonicalFileName),
toPath(info.safeListPath, info.safeListPath, getCanonicalFileName),
mapOfMapLike(info.packageNameToTypingLocation),
info.packageNameToTypingLocation,
info.typingOptions,
info.unresolvedImports);
});

@ -237,7 +237,7 @@ namespace ts.SignatureHelp {
const typeChecker = program.getTypeChecker();
for (const sourceFile of program.getSourceFiles()) {
const nameToDeclarations = sourceFile.getNamedDeclarations();
const declarations = nameToDeclarations.get(name.text);
const declarations = nameToDeclarations[name.text];

if (declarations) {
for (const declaration of declarations) {

@ -63,7 +63,7 @@ namespace ts {
}

if (transpileOptions.renamedDependencies) {
sourceFile.renamedDependencies = mapOfMapLike(transpileOptions.renamedDependencies);
sourceFile.renamedDependencies = createMap(transpileOptions.renamedDependencies);
}

const newLine = getNewLineCharacter(options);
@ -126,7 +126,7 @@ namespace ts {
function fixupCompilerOptions(options: CompilerOptions, diagnostics: Diagnostic[]): CompilerOptions {
// Lazily create this value to fix module loading errors.
commandLineOptionsStringToEnum = commandLineOptionsStringToEnum || <CommandLineOptionOfCustomType[]>filter(optionDeclarations, o =>
typeof o.type === "object" && !someValueInMap(o.type, v => typeof v !== "number"));
typeof o.type === "object" && !forEachProperty(o.type, v => typeof v !== "number"));

options = clone(options);

@ -142,7 +142,7 @@ namespace ts {
options[opt.name] = parseCustomTypeOption(opt, value, diagnostics);
}
else {
if (!someValueInMap(opt.type, v => v === value)) {
if (!forEachProperty(opt.type, v => v === value)) {
// Supplied value isn't a valid enum value.
diagnostics.push(createCompilerDiagnosticForInvalidCustomType(opt));
}

@ -48,9 +48,9 @@ namespace ts {
export interface SourceFile {
/* @internal */ version: string;
/* @internal */ scriptSnapshot: IScriptSnapshot;
/* @internal */ nameTable: Map<string, number>;
/* @internal */ nameTable: Map<number>;

/* @internal */ getNamedDeclarations(): Map<string, Declaration[]>;
/* @internal */ getNamedDeclarations(): Map<Declaration[]>;

getLineAndCharacterOfPosition(pos: number): LineAndCharacter;
getLineStarts(): number[];

@ -47,7 +47,6 @@
"prefer-const": true,
"no-increment-decrement": true,
"object-literal-surrounding-space": true,
"no-type-assertion-whitespace": true,
"no-in-operator": true
"no-type-assertion-whitespace": true
}
}