Mirror of https://github.com/microsoft/TypeScript.git (synced 2026-02-15 11:35:42 -06:00)

Merge pull request #11715 from Microsoft/unused
Forbid unused locals/parameters in compiler

This commit is contained in: commit db0ee4f763
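Context for the change (not part of the diff): TypeScript's noUnusedLocals and noUnusedParameters compiler options (added around the 2.0 release) reject declarations and parameters that are never read, and this PR turns those checks on for the compiler's own sources. The hunks below follow two idioms: drop a parameter outright when no caller needs the slot, or rename it with a leading underscore, which the unused-parameter check ignores. A minimal sketch, with hypothetical function names, of what the flags reject and how each idiom satisfies them:

// tsconfig.json (sketch):
// { "compilerOptions": { "noUnusedLocals": true, "noUnusedParameters": true } }

// Error under --noUnusedParameters: 'kind' is declared but its value is never read.
function emitComment(pos: number, kind: number): void {
    console.log(pos);
}

// Idiom 1: remove the parameter entirely when the callers can be updated too.
function emitCommentTrimmed(pos: number): void {
    console.log(pos);
}

// Idiom 2: keep the slot but prefix the name with "_"; underscore-prefixed
// parameters are exempt from the check, which is useful when the signature
// must still match a callback type.
function emitCommentKept(pos: number, _kind: number): void {
    console.log(pos);
}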
@@ -1054,7 +1054,7 @@ namespace ts {
 if (node.moduleReference.kind === SyntaxKind.ExternalModuleReference) {
 return resolveExternalModuleSymbol(resolveExternalModuleName(node, getExternalModuleImportEqualsDeclarationExpression(node)));
 }
-return getSymbolOfPartOfRightHandSideOfImportEquals(<EntityName>node.moduleReference, node);
+return getSymbolOfPartOfRightHandSideOfImportEquals(<EntityName>node.moduleReference);
 }

 function getTargetOfImportClause(node: ImportClause): Symbol {

@@ -1267,7 +1267,7 @@ namespace ts {
 }

 // This function is only for imports with entity names
-function getSymbolOfPartOfRightHandSideOfImportEquals(entityName: EntityName, importDeclaration: ImportEqualsDeclaration, dontResolveAlias?: boolean): Symbol {
+function getSymbolOfPartOfRightHandSideOfImportEquals(entityName: EntityName, dontResolveAlias?: boolean): Symbol {
 // There are three things we might try to look for. In the following examples,
 // the search term is enclosed in |...|:
 //

@@ -2583,7 +2583,7 @@ namespace ts {
 }
 }

-function buildDisplayForTypeArgumentsAndDelimiters(typeParameters: TypeParameter[], mapper: TypeMapper, writer: SymbolWriter, enclosingDeclaration?: Node, flags?: TypeFormatFlags, symbolStack?: Symbol[]) {
+function buildDisplayForTypeArgumentsAndDelimiters(typeParameters: TypeParameter[], mapper: TypeMapper, writer: SymbolWriter, enclosingDeclaration?: Node) {
 if (typeParameters && typeParameters.length) {
 writePunctuation(writer, SyntaxKind.LessThanToken);
 let flags = TypeFormatFlags.InFirstTypeArgument;

@@ -4795,7 +4795,7 @@ namespace ts {
 const typeParameters = classType ? classType.localTypeParameters :
 declaration.typeParameters ? getTypeParametersFromDeclaration(declaration.typeParameters) :
 getTypeParametersFromJSDocTemplate(declaration);
-const returnType = getSignatureReturnTypeFromDeclaration(declaration, minArgumentCount, isJSConstructSignature, classType);
+const returnType = getSignatureReturnTypeFromDeclaration(declaration, isJSConstructSignature, classType);
 const typePredicate = declaration.type && declaration.type.kind === SyntaxKind.TypePredicate ?
 createTypePredicateFromTypePredicateNode(declaration.type as TypePredicateNode) :
 undefined;

@@ -4805,7 +4805,7 @@ namespace ts {
 return links.resolvedSignature;
 }

-function getSignatureReturnTypeFromDeclaration(declaration: SignatureDeclaration, minArgumentCount: number, isJSConstructSignature: boolean, classType: Type) {
+function getSignatureReturnTypeFromDeclaration(declaration: SignatureDeclaration, isJSConstructSignature: boolean, classType: Type) {
 if (isJSConstructSignature) {
 return getTypeFromTypeNode(declaration.parameters[0].type);
 }

@@ -5170,10 +5170,7 @@ namespace ts {
 return undefined;
 }

-function resolveTypeReferenceName(
-node: TypeReferenceNode | ExpressionWithTypeArguments | JSDocTypeReference,
-typeReferenceName: EntityNameExpression | EntityName) {
+function resolveTypeReferenceName(typeReferenceName: EntityNameExpression | EntityName) {
 if (!typeReferenceName) {
 return unknownSymbol;
 }

@@ -5211,7 +5208,7 @@ namespace ts {
 let type: Type;
 if (node.kind === SyntaxKind.JSDocTypeReference) {
 const typeReferenceName = getTypeReferenceName(node);
-symbol = resolveTypeReferenceName(node, typeReferenceName);
+symbol = resolveTypeReferenceName(typeReferenceName);
 type = getTypeReferenceType(node, symbol);
 }
 else {

@@ -6673,8 +6670,8 @@ namespace ts {
 }
 if (source.flags & TypeFlags.Union && target.flags & TypeFlags.Union ||
 source.flags & TypeFlags.Intersection && target.flags & TypeFlags.Intersection) {
-if (result = eachTypeRelatedToSomeType(<UnionOrIntersectionType>source, <UnionOrIntersectionType>target, /*reportErrors*/ false)) {
-if (result &= eachTypeRelatedToSomeType(<UnionOrIntersectionType>target, <UnionOrIntersectionType>source, /*reportErrors*/ false)) {
+if (result = eachTypeRelatedToSomeType(<UnionOrIntersectionType>source, <UnionOrIntersectionType>target)) {
+if (result &= eachTypeRelatedToSomeType(<UnionOrIntersectionType>target, <UnionOrIntersectionType>source)) {
 return result;
 }
 }
@@ -6735,7 +6732,7 @@ namespace ts {
 return false;
 }

-function eachTypeRelatedToSomeType(source: UnionOrIntersectionType, target: UnionOrIntersectionType, reportErrors: boolean): Ternary {
+function eachTypeRelatedToSomeType(source: UnionOrIntersectionType, target: UnionOrIntersectionType): Ternary {
 let result = Ternary.True;
 const sourceTypes = source.types;
 for (const sourceType of sourceTypes) {

@@ -11772,7 +11769,7 @@ namespace ts {
 // If the effective argument is 'undefined', then it is an argument that is present but is synthetic.
 if (arg === undefined || arg.kind !== SyntaxKind.OmittedExpression) {
 const paramType = getTypeAtPosition(signature, i);
-let argType = getEffectiveArgumentType(node, i, arg);
+let argType = getEffectiveArgumentType(node, i);

 // If the effective argument type is 'undefined', there is no synthetic type
 // for the argument. In that case, we should check the argument.

@@ -11858,7 +11855,7 @@ namespace ts {
 if (arg === undefined || arg.kind !== SyntaxKind.OmittedExpression) {
 // Check spread elements against rest type (from arity check we know spread argument corresponds to a rest parameter)
 const paramType = getTypeAtPosition(signature, i);
-let argType = getEffectiveArgumentType(node, i, arg);
+let argType = getEffectiveArgumentType(node, i);

 // If the effective argument type is 'undefined', there is no synthetic type
 // for the argument. In that case, we should check the argument.

@@ -12151,7 +12148,7 @@ namespace ts {
 /**
 * Gets the effective argument type for an argument in a call expression.
 */
-function getEffectiveArgumentType(node: CallLikeExpression, argIndex: number, arg: Expression): Type {
+function getEffectiveArgumentType(node: CallLikeExpression, argIndex: number): Type {
 // Decorators provide special arguments, a tagged template expression provides
 // a special first argument, and string literals get string literal types
 // unless we're reporting errors

@@ -13556,15 +13553,15 @@ namespace ts {
 return booleanType;
 }

-function checkObjectLiteralAssignment(node: ObjectLiteralExpression, sourceType: Type, contextualMapper?: TypeMapper): Type {
+function checkObjectLiteralAssignment(node: ObjectLiteralExpression, sourceType: Type): Type {
 const properties = node.properties;
 for (const p of properties) {
-checkObjectLiteralDestructuringPropertyAssignment(sourceType, p, contextualMapper);
+checkObjectLiteralDestructuringPropertyAssignment(sourceType, p);
 }
 return sourceType;
 }

-function checkObjectLiteralDestructuringPropertyAssignment(objectLiteralType: Type, property: ObjectLiteralElementLike, contextualMapper?: TypeMapper) {
+function checkObjectLiteralDestructuringPropertyAssignment(objectLiteralType: Type, property: ObjectLiteralElementLike) {
 if (property.kind === SyntaxKind.PropertyAssignment || property.kind === SyntaxKind.ShorthandPropertyAssignment) {
 const name = <PropertyName>(<PropertyAssignment>property).name;
 if (name.kind === SyntaxKind.ComputedPropertyName) {

@@ -13679,7 +13676,7 @@ namespace ts {
 target = (<BinaryExpression>target).left;
 }
 if (target.kind === SyntaxKind.ObjectLiteralExpression) {
-return checkObjectLiteralAssignment(<ObjectLiteralExpression>target, sourceType, contextualMapper);
+return checkObjectLiteralAssignment(<ObjectLiteralExpression>target, sourceType);
 }
 if (target.kind === SyntaxKind.ArrayLiteralExpression) {
 return checkArrayLiteralAssignment(<ArrayLiteralExpression>target, sourceType, contextualMapper);

@@ -18588,7 +18585,7 @@ namespace ts {
 // Since we already checked for ExportAssignment, this really could only be an Import
 const importEqualsDeclaration = <ImportEqualsDeclaration>getAncestor(entityName, SyntaxKind.ImportEqualsDeclaration);
 Debug.assert(importEqualsDeclaration !== undefined);
-return getSymbolOfPartOfRightHandSideOfImportEquals(<EntityName>entityName, importEqualsDeclaration, /*dontResolveAlias*/ true);
+return getSymbolOfPartOfRightHandSideOfImportEquals(<EntityName>entityName, /*dontResolveAlias*/ true);
 }

 if (isRightSideOfQualifiedNameOrPropertyAccess(entityName)) {
@@ -19971,7 +19968,7 @@ namespace ts {
 }
 }

-function checkGrammarTypeParameterList(node: FunctionLikeDeclaration, typeParameters: NodeArray<TypeParameterDeclaration>, file: SourceFile): boolean {
+function checkGrammarTypeParameterList(typeParameters: NodeArray<TypeParameterDeclaration>, file: SourceFile): boolean {
 if (checkGrammarForDisallowedTrailingComma(typeParameters)) {
 return true;
 }

@@ -20022,7 +20019,7 @@ namespace ts {
 function checkGrammarFunctionLikeDeclaration(node: FunctionLikeDeclaration): boolean {
 // Prevent cascading error by short-circuit
 const file = getSourceFileOfNode(node);
-return checkGrammarDecorators(node) || checkGrammarModifiers(node) || checkGrammarTypeParameterList(node, node.typeParameters, file) ||
+return checkGrammarDecorators(node) || checkGrammarModifiers(node) || checkGrammarTypeParameterList(node.typeParameters, file) ||
 checkGrammarParameterList(node.parameters) || checkGrammarArrowFunction(node, file);
 }

@@ -20208,7 +20205,7 @@ namespace ts {
 }
 }

-function checkGrammarForInvalidQuestionMark(node: Declaration, questionToken: Node, message: DiagnosticMessage): boolean {
+function checkGrammarForInvalidQuestionMark(questionToken: Node, message: DiagnosticMessage): boolean {
 if (questionToken) {
 return grammarErrorOnNode(questionToken, message);
 }

@@ -20254,7 +20251,7 @@ namespace ts {
 let currentKind: number;
 if (prop.kind === SyntaxKind.PropertyAssignment || prop.kind === SyntaxKind.ShorthandPropertyAssignment) {
 // Grammar checking for computedPropertyName and shorthandPropertyAssignment
-checkGrammarForInvalidQuestionMark(prop, (<PropertyAssignment>prop).questionToken, Diagnostics.An_object_member_cannot_be_declared_optional);
+checkGrammarForInvalidQuestionMark((<PropertyAssignment>prop).questionToken, Diagnostics.An_object_member_cannot_be_declared_optional);
 if (name.kind === SyntaxKind.NumericLiteral) {
 checkGrammarNumericLiteral(<NumericLiteral>name);
 }

@@ -20438,7 +20435,7 @@ namespace ts {
 }

 if (node.parent.kind === SyntaxKind.ObjectLiteralExpression) {
-if (checkGrammarForInvalidQuestionMark(node, node.questionToken, Diagnostics.An_object_member_cannot_be_declared_optional)) {
+if (checkGrammarForInvalidQuestionMark(node.questionToken, Diagnostics.An_object_member_cannot_be_declared_optional)) {
 return true;
 }
 else if (node.body === undefined) {

@@ -188,7 +188,7 @@ namespace ts {
 }
 }

-function emitLeadingComment(commentPos: number, commentEnd: number, kind: SyntaxKind, hasTrailingNewLine: boolean, rangePos: number) {
+function emitLeadingComment(commentPos: number, commentEnd: number, _kind: SyntaxKind, hasTrailingNewLine: boolean, rangePos: number) {
 if (!hasWrittenComment) {
 emitNewLineBeforeLeadingCommentOfPosition(currentLineMap, writer, rangePos, commentPos);
 hasWrittenComment = true;

@@ -211,7 +211,7 @@ namespace ts {
 forEachTrailingCommentToEmit(pos, emitTrailingComment);
 }

-function emitTrailingComment(commentPos: number, commentEnd: number, kind: SyntaxKind, hasTrailingNewLine: boolean) {
+function emitTrailingComment(commentPos: number, commentEnd: number, _kind: SyntaxKind, hasTrailingNewLine: boolean) {
 // trailing comments are emitted at space/*trailing comment1 */space/*trailing comment2*/
 if (!writer.isAtStartOfLine()) {
 writer.write(" ");

@@ -242,7 +242,7 @@ namespace ts {
 }
 }

-function emitTrailingCommentOfPosition(commentPos: number, commentEnd: number, kind: SyntaxKind, hasTrailingNewLine: boolean) {
+function emitTrailingCommentOfPosition(commentPos: number, commentEnd: number, _kind: SyntaxKind, hasTrailingNewLine: boolean) {
 // trailing comments of a position are emitted at /*trailing comment1 */space/*trailing comment*/space

 emitPos(commentPos);
@@ -903,7 +903,7 @@ namespace ts {
 return t => compose(a(t));
 }
 else {
-return t => u => u;
+return _ => u => u;
 }
 }

@@ -943,7 +943,7 @@ namespace ts {
 function formatStringFromArgs(text: string, args: { [index: number]: string; }, baseIndex?: number): string {
 baseIndex = baseIndex || 0;

-return text.replace(/{(\d+)}/g, (match, index?) => args[+index + baseIndex]);
+return text.replace(/{(\d+)}/g, (_match, index?) => args[+index + baseIndex]);
 }

 export let localizedDiagnosticMessages: Map<string> = undefined;

@@ -982,7 +982,7 @@ namespace ts {
 }

 /* internal */
-export function formatMessage(dummy: any, message: DiagnosticMessage): string {
+export function formatMessage(_dummy: any, message: DiagnosticMessage): string {
 let text = getLocaleSpecificMessage(message);

 if (arguments.length > 2) {

@@ -1623,7 +1623,7 @@ namespace ts {
 basePaths: string[];
 }

-export function getFileMatcherPatterns(path: string, extensions: string[], excludes: string[], includes: string[], useCaseSensitiveFileNames: boolean, currentDirectory: string): FileMatcherPatterns {
+export function getFileMatcherPatterns(path: string, excludes: string[], includes: string[], useCaseSensitiveFileNames: boolean, currentDirectory: string): FileMatcherPatterns {
 path = normalizePath(path);
 currentDirectory = normalizePath(currentDirectory);
 const absolutePath = combinePaths(currentDirectory, path);

@@ -1640,7 +1640,7 @@ namespace ts {
 path = normalizePath(path);
 currentDirectory = normalizePath(currentDirectory);

-const patterns = getFileMatcherPatterns(path, extensions, excludes, includes, useCaseSensitiveFileNames, currentDirectory);
+const patterns = getFileMatcherPatterns(path, excludes, includes, useCaseSensitiveFileNames, currentDirectory);

 const regexFlag = useCaseSensitiveFileNames ? "" : "i";
 const includeFileRegex = patterns.includeFilePattern && new RegExp(patterns.includeFilePattern, regexFlag);

@@ -1874,11 +1874,11 @@ namespace ts {
 this.declarations = undefined;
 }

-function Type(this: Type, checker: TypeChecker, flags: TypeFlags) {
+function Type(this: Type, _checker: TypeChecker, flags: TypeFlags) {
 this.flags = flags;
 }

-function Signature(checker: TypeChecker) {
+function Signature() {
 }

 function Node(this: Node, kind: SyntaxKind, pos: number, end: number) {

@@ -1911,9 +1911,6 @@ namespace ts {
 }

 export namespace Debug {
-declare var process: any;
-declare var require: any;
-
 export let currentAssertionLevel = AssertionLevel.None;

 export function shouldAssert(level: AssertionLevel): boolean {
@@ -63,7 +63,7 @@ namespace ts {
 let isCurrentFileExternalModule: boolean;
 let reportedDeclarationError = false;
 let errorNameNode: DeclarationName;
-const emitJsDocComments = compilerOptions.removeComments ? function (declaration: Node) { } : writeJsDocComments;
+const emitJsDocComments = compilerOptions.removeComments ? () => {} : writeJsDocComments;
 const emit = compilerOptions.stripInternal ? stripInternal : emitNode;
 let noDeclare: boolean;

@@ -580,7 +580,7 @@ namespace ts {
 writeAsynchronousModuleElements(nodes);
 }

-function getDefaultExportAccessibilityDiagnostic(diagnostic: SymbolAccessibilityResult): SymbolAccessibilityDiagnostic {
+function getDefaultExportAccessibilityDiagnostic(): SymbolAccessibilityDiagnostic {
 return {
 diagnosticMessage: Diagnostics.Default_export_of_the_module_has_or_is_using_private_name_0,
 errorNode: node

@@ -710,7 +710,7 @@ namespace ts {
 }
 writer.writeLine();

-function getImportEntityNameVisibilityError(symbolAccessibilityResult: SymbolAccessibilityResult): SymbolAccessibilityDiagnostic {
+function getImportEntityNameVisibilityError(): SymbolAccessibilityDiagnostic {
 return {
 diagnosticMessage: Diagnostics.Import_declaration_0_is_using_private_name_1,
 errorNode: node,

@@ -888,7 +888,7 @@ namespace ts {
 writeLine();
 enclosingDeclaration = prevEnclosingDeclaration;

-function getTypeAliasDeclarationVisibilityError(symbolAccessibilityResult: SymbolAccessibilityResult): SymbolAccessibilityDiagnostic {
+function getTypeAliasDeclarationVisibilityError(): SymbolAccessibilityDiagnostic {
 return {
 diagnosticMessage: Diagnostics.Exported_type_alias_0_has_or_is_using_private_name_1,
 errorNode: node.type,

@@ -955,7 +955,7 @@ namespace ts {
 }
 }

-function getTypeParameterConstraintVisibilityError(symbolAccessibilityResult: SymbolAccessibilityResult): SymbolAccessibilityDiagnostic {
+function getTypeParameterConstraintVisibilityError(): SymbolAccessibilityDiagnostic {
 // Type parameter constraints are named by user so we should always be able to name it
 let diagnosticMessage: DiagnosticMessage;
 switch (node.parent.kind) {

@@ -1029,7 +1029,7 @@ namespace ts {
 resolver.writeBaseConstructorTypeOfClass(<ClassLikeDeclaration>enclosingDeclaration, enclosingDeclaration, TypeFormatFlags.UseTypeOfFunction | TypeFormatFlags.UseTypeAliasValue, writer);
 }

-function getHeritageClauseVisibilityError(symbolAccessibilityResult: SymbolAccessibilityResult): SymbolAccessibilityDiagnostic {
+function getHeritageClauseVisibilityError(): SymbolAccessibilityDiagnostic {
 let diagnosticMessage: DiagnosticMessage;
 // Heritage clause is written by user so it can always be named
 if (node.parent.parent.kind === SyntaxKind.ClassDeclaration) {

@@ -1743,7 +1743,7 @@ namespace ts {
 }
 return addedBundledEmitReference;

-function getDeclFileName(emitFileNames: EmitFileNames, sourceFiles: SourceFile[], isBundledEmit: boolean) {
+function getDeclFileName(emitFileNames: EmitFileNames, _sourceFiles: SourceFile[], isBundledEmit: boolean) {
 // Dont add reference path to this file if it is a bundled emit and caller asked not emit bundled file path
 if (isBundledEmit && !addBundledFileReference) {
 return;

@@ -14,7 +14,7 @@ namespace ts {
 }

 const id = (s: SourceFile) => s;
-const nullTransformers: Transformer[] = [ctx => id];
+const nullTransformers: Transformer[] = [_ => id];

 // targetSourceFile is when users only want one file in entire project to be emitted. This is used in compileOnSave feature
 export function emitFiles(resolver: EmitResolver, host: EmitHost, targetSourceFile: SourceFile, emitOnlyDtsFiles?: boolean): EmitResult {
@@ -593,7 +593,7 @@ const _super = (function (geti, seti) {
 case SyntaxKind.ExpressionWithTypeArguments:
 return emitExpressionWithTypeArguments(<ExpressionWithTypeArguments>node);
 case SyntaxKind.ThisType:
-return emitThisType(<ThisTypeNode>node);
+return emitThisType();
 case SyntaxKind.LiteralType:
 return emitLiteralType(<LiteralTypeNode>node);

@@ -609,7 +609,7 @@ const _super = (function (geti, seti) {
 case SyntaxKind.TemplateSpan:
 return emitTemplateSpan(<TemplateSpan>node);
 case SyntaxKind.SemicolonClassElement:
-return emitSemicolonClassElement(<SemicolonClassElement>node);
+return emitSemicolonClassElement();

 // Statements
 case SyntaxKind.Block:

@@ -617,7 +617,7 @@ const _super = (function (geti, seti) {
 case SyntaxKind.VariableStatement:
 return emitVariableStatement(<VariableStatement>node);
 case SyntaxKind.EmptyStatement:
-return emitEmptyStatement(<EmptyStatement>node);
+return emitEmptyStatement();
 case SyntaxKind.ExpressionStatement:
 return emitExpressionStatement(<ExpressionStatement>node);
 case SyntaxKind.IfStatement:

@@ -1014,7 +1014,7 @@ const _super = (function (geti, seti) {
 write(";");
 }

-function emitSemicolonClassElement(node: SemicolonClassElement) {
+function emitSemicolonClassElement() {
 write(";");
 }

@@ -1084,7 +1084,7 @@ const _super = (function (geti, seti) {
 write(")");
 }

-function emitThisType(node: ThisTypeNode) {
+function emitThisType() {
 write("this");
 }

@@ -1397,7 +1397,7 @@ const _super = (function (geti, seti) {
 // Statements
 //

-function emitBlock(node: Block, format?: ListFormat) {
+function emitBlock(node: Block) {
 if (isSingleLineEmptyBlock(node)) {
 writeToken(SyntaxKind.OpenBraceToken, node.pos, /*contextNode*/ node);
 write(" ");

@@ -1425,7 +1425,7 @@ const _super = (function (geti, seti) {
 write(";");
 }

-function emitEmptyStatement(node: EmptyStatement) {
+function emitEmptyStatement() {
 write(";");
 }
@@ -1623,13 +1623,13 @@ const _super = (function (geti, seti) {

 if (getEmitFlags(node) & EmitFlags.ReuseTempVariableScope) {
 emitSignatureHead(node);
-emitBlockFunctionBody(node, body);
+emitBlockFunctionBody(body);
 }
 else {
 const savedTempFlags = tempFlags;
 tempFlags = 0;
 emitSignatureHead(node);
-emitBlockFunctionBody(node, body);
+emitBlockFunctionBody(body);
 tempFlags = savedTempFlags;
 }

@@ -1656,7 +1656,7 @@ const _super = (function (geti, seti) {
 emitWithPrefix(": ", node.type);
 }

-function shouldEmitBlockFunctionBodyOnSingleLine(parentNode: Node, body: Block) {
+function shouldEmitBlockFunctionBodyOnSingleLine(body: Block) {
 // We must emit a function body as a single-line body in the following case:
 // * The body has NodeEmitFlags.SingleLine specified.

@@ -1694,12 +1694,12 @@ const _super = (function (geti, seti) {
 return true;
 }

-function emitBlockFunctionBody(parentNode: Node, body: Block) {
+function emitBlockFunctionBody(body: Block) {
 write(" {");
 increaseIndent();

 emitBodyWithDetachedComments(body, body.statements,
-shouldEmitBlockFunctionBodyOnSingleLine(parentNode, body)
+shouldEmitBlockFunctionBodyOnSingleLine(body)
 ? emitBlockFunctionBodyOnSingleLine
 : emitBlockFunctionBodyWorker);

@@ -5,7 +5,7 @@ namespace ts {

 /* @internal */
 export function trace(host: ModuleResolutionHost, message: DiagnosticMessage, ...args: any[]): void;
-export function trace(host: ModuleResolutionHost, message: DiagnosticMessage): void {
+export function trace(host: ModuleResolutionHost): void {
 host.trace(formatMessage.apply(undefined, arguments));
 }

@@ -572,10 +572,10 @@ namespace ts {
 // attached to the EOF token.
 let parseErrorBeforeNextFinishedNode = false;

-export function parseSourceFile(fileName: string, _sourceText: string, languageVersion: ScriptTarget, _syntaxCursor: IncrementalParser.SyntaxCursor, setParentNodes?: boolean, scriptKind?: ScriptKind): SourceFile {
+export function parseSourceFile(fileName: string, sourceText: string, languageVersion: ScriptTarget, syntaxCursor: IncrementalParser.SyntaxCursor, setParentNodes?: boolean, scriptKind?: ScriptKind): SourceFile {
 scriptKind = ensureScriptKind(fileName, scriptKind);

-initializeState(fileName, _sourceText, languageVersion, _syntaxCursor, scriptKind);
+initializeState(sourceText, languageVersion, syntaxCursor, scriptKind);

 const result = parseSourceFileWorker(fileName, languageVersion, setParentNodes, scriptKind);

@@ -589,7 +589,7 @@ namespace ts {
 return scriptKind === ScriptKind.TSX || scriptKind === ScriptKind.JSX || scriptKind === ScriptKind.JS ? LanguageVariant.JSX : LanguageVariant.Standard;
 }

-function initializeState(fileName: string, _sourceText: string, languageVersion: ScriptTarget, _syntaxCursor: IncrementalParser.SyntaxCursor, scriptKind: ScriptKind) {
+function initializeState(_sourceText: string, languageVersion: ScriptTarget, _syntaxCursor: IncrementalParser.SyntaxCursor, scriptKind: ScriptKind) {
 NodeConstructor = objectAllocator.getNodeConstructor();
 TokenConstructor = objectAllocator.getTokenConstructor();
 IdentifierConstructor = objectAllocator.getIdentifierConstructor();
@@ -5259,7 +5259,7 @@ namespace ts {
 parseExpected(SyntaxKind.ClassKeyword);
 node.name = parseNameOfClassDeclarationOrExpression();
 node.typeParameters = parseTypeParameters();
-node.heritageClauses = parseHeritageClauses(/*isClassHeritageClause*/ true);
+node.heritageClauses = parseHeritageClauses();

 if (parseExpected(SyntaxKind.OpenBraceToken)) {
 // ClassTail[Yield,Await] : (Modified) See 14.5

@@ -5289,7 +5289,7 @@ namespace ts {
 return token() === SyntaxKind.ImplementsKeyword && lookAhead(nextTokenIsIdentifierOrKeyword);
 }

-function parseHeritageClauses(isClassHeritageClause: boolean): NodeArray<HeritageClause> {
+function parseHeritageClauses(): NodeArray<HeritageClause> {
 // ClassTail[Yield,Await] : (Modified) See 14.5
 // ClassHeritage[?Yield,?Await]opt { ClassBody[?Yield,?Await]opt }

@@ -5337,7 +5337,7 @@ namespace ts {
 parseExpected(SyntaxKind.InterfaceKeyword);
 node.name = parseIdentifier();
 node.typeParameters = parseTypeParameters();
-node.heritageClauses = parseHeritageClauses(/*isClassHeritageClause*/ false);
+node.heritageClauses = parseHeritageClauses();
 node.members = parseObjectTypeMembers();
 return addJSDocComment(finishNode(node));
 }

@@ -5817,7 +5817,7 @@ namespace ts {
 }

 export function parseJSDocTypeExpressionForTests(content: string, start: number, length: number) {
-initializeState("file.js", content, ScriptTarget.Latest, /*_syntaxCursor:*/ undefined, ScriptKind.JS);
+initializeState(content, ScriptTarget.Latest, /*_syntaxCursor:*/ undefined, ScriptKind.JS);
 sourceFile = createSourceFile("file.js", ScriptTarget.Latest, ScriptKind.JS);
 scanner.setText(content, start, length);
 currentToken = scanner.scan();

@@ -6134,7 +6134,7 @@ namespace ts {
 }

 export function parseIsolatedJSDocComment(content: string, start: number, length: number) {
-initializeState("file.js", content, ScriptTarget.Latest, /*_syntaxCursor:*/ undefined, ScriptKind.JS);
+initializeState(content, ScriptTarget.Latest, /*_syntaxCursor:*/ undefined, ScriptKind.JS);
 sourceFile = <SourceFile>{ languageVariant: LanguageVariant.Standard, text: content };
 const jsDoc = parseJSDocCommentWorker(start, length);
 const diagnostics = parseDiagnostics;

@@ -12,7 +12,7 @@ namespace ts.performance {

 const profilerEvent = typeof onProfilerEvent === "function" && onProfilerEvent.profiler === true
 ? onProfilerEvent
-: (markName: string) => { };
+: (_markName: string) => { };

 let enabled = false;
 let profilerStart = 0;
@@ -724,7 +724,7 @@ namespace ts {
 }
 }

-function getSyntacticDiagnosticsForFile(sourceFile: SourceFile, cancellationToken: CancellationToken): Diagnostic[] {
+function getSyntacticDiagnosticsForFile(sourceFile: SourceFile): Diagnostic[] {
 return sourceFile.parseDiagnostics;
 }

@@ -761,7 +761,7 @@ namespace ts {
 // Instead, we just report errors for using TypeScript-only constructs from within a
 // JavaScript file.
 const checkDiagnostics = isSourceFileJavaScript(sourceFile) ?
-getJavaScriptSemanticDiagnosticsForFile(sourceFile, cancellationToken) :
+getJavaScriptSemanticDiagnosticsForFile(sourceFile) :
 typeChecker.getDiagnostics(sourceFile, cancellationToken);
 const fileProcessingDiagnosticsInFile = fileProcessingDiagnostics.getDiagnostics(sourceFile.fileName);
 const programDiagnosticsInFile = programDiagnostics.getDiagnostics(sourceFile.fileName);

@@ -770,7 +770,7 @@ namespace ts {
 });
 }

-function getJavaScriptSemanticDiagnosticsForFile(sourceFile: SourceFile, cancellationToken: CancellationToken): Diagnostic[] {
+function getJavaScriptSemanticDiagnosticsForFile(sourceFile: SourceFile): Diagnostic[] {
 return runWithCancellationToken(() => {
 const diagnostics: Diagnostic[] = [];
 walk(sourceFile);

@@ -977,7 +977,7 @@ namespace ts {
 }

 function processRootFile(fileName: string, isDefaultLib: boolean) {
-processSourceFile(normalizePath(fileName), isDefaultLib, /*isReference*/ true);
+processSourceFile(normalizePath(fileName), isDefaultLib);
 }

 function fileReferenceIsEqualTo(a: FileReference, b: FileReference): boolean {

@@ -1088,7 +1088,7 @@ namespace ts {
 /**
 * 'isReference' indicates whether the file was brought in via a reference directive (rather than an import declaration)
 */
-function processSourceFile(fileName: string, isDefaultLib: boolean, isReference: boolean, refFile?: SourceFile, refPos?: number, refEnd?: number) {
+function processSourceFile(fileName: string, isDefaultLib: boolean, refFile?: SourceFile, refPos?: number, refEnd?: number) {
 let diagnosticArgument: string[];
 let diagnostic: DiagnosticMessage;
 if (hasExtension(fileName)) {

@@ -1096,7 +1096,7 @@ namespace ts {
 diagnostic = Diagnostics.File_0_has_unsupported_extension_The_only_supported_extensions_are_1;
 diagnosticArgument = [fileName, "'" + supportedExtensions.join("', '") + "'"];
 }
-else if (!findSourceFile(fileName, toPath(fileName, currentDirectory, getCanonicalFileName), isDefaultLib, isReference, refFile, refPos, refEnd)) {
+else if (!findSourceFile(fileName, toPath(fileName, currentDirectory, getCanonicalFileName), isDefaultLib, refFile, refPos, refEnd)) {
 diagnostic = Diagnostics.File_0_not_found;
 diagnosticArgument = [fileName];
 }

@@ -1106,13 +1106,13 @@ namespace ts {
 }
 }
 else {
-const nonTsFile: SourceFile = options.allowNonTsExtensions && findSourceFile(fileName, toPath(fileName, currentDirectory, getCanonicalFileName), isDefaultLib, isReference, refFile, refPos, refEnd);
+const nonTsFile: SourceFile = options.allowNonTsExtensions && findSourceFile(fileName, toPath(fileName, currentDirectory, getCanonicalFileName), isDefaultLib, refFile, refPos, refEnd);
 if (!nonTsFile) {
 if (options.allowNonTsExtensions) {
 diagnostic = Diagnostics.File_0_not_found;
 diagnosticArgument = [fileName];
 }
-else if (!forEach(supportedExtensions, extension => findSourceFile(fileName + extension, toPath(fileName + extension, currentDirectory, getCanonicalFileName), isDefaultLib, isReference, refFile, refPos, refEnd))) {
+else if (!forEach(supportedExtensions, extension => findSourceFile(fileName + extension, toPath(fileName + extension, currentDirectory, getCanonicalFileName), isDefaultLib, refFile, refPos, refEnd))) {
 diagnostic = Diagnostics.File_0_not_found;
 fileName += ".ts";
 diagnosticArgument = [fileName];
@@ -1141,7 +1141,7 @@ namespace ts {
 }

 // Get source file from normalized fileName
-function findSourceFile(fileName: string, path: Path, isDefaultLib: boolean, isReference: boolean, refFile?: SourceFile, refPos?: number, refEnd?: number): SourceFile {
+function findSourceFile(fileName: string, path: Path, isDefaultLib: boolean, refFile?: SourceFile, refPos?: number, refEnd?: number): SourceFile {
 if (filesByName.contains(path)) {
 const file = filesByName.get(path);
 // try to check if we've already seen this file but with a different casing in path

@@ -1155,18 +1155,18 @@ namespace ts {
 if (file && sourceFilesFoundSearchingNodeModules[file.path] && currentNodeModulesDepth == 0) {
 sourceFilesFoundSearchingNodeModules[file.path] = false;
 if (!options.noResolve) {
-processReferencedFiles(file, getDirectoryPath(fileName), isDefaultLib);
+processReferencedFiles(file, isDefaultLib);
 processTypeReferenceDirectives(file);
 }

 modulesWithElidedImports[file.path] = false;
-processImportedModules(file, getDirectoryPath(fileName));
+processImportedModules(file);
 }
 // See if we need to reprocess the imports due to prior skipped imports
 else if (file && modulesWithElidedImports[file.path]) {
 if (currentNodeModulesDepth < maxNodeModulesJsDepth) {
 modulesWithElidedImports[file.path] = false;
-processImportedModules(file, getDirectoryPath(fileName));
+processImportedModules(file);
 }
 }

@@ -1202,14 +1202,13 @@ namespace ts {

 skipDefaultLib = skipDefaultLib || file.hasNoDefaultLib;

-const basePath = getDirectoryPath(fileName);
 if (!options.noResolve) {
-processReferencedFiles(file, basePath, isDefaultLib);
+processReferencedFiles(file, isDefaultLib);
 processTypeReferenceDirectives(file);
 }

 // always process imported modules to record module name resolutions
-processImportedModules(file, basePath);
+processImportedModules(file);

 if (isDefaultLib) {
 files.unshift(file);

@@ -1222,10 +1221,10 @@ namespace ts {
 return file;
 }

-function processReferencedFiles(file: SourceFile, basePath: string, isDefaultLib: boolean) {
+function processReferencedFiles(file: SourceFile, isDefaultLib: boolean) {
 forEach(file.referencedFiles, ref => {
 const referencedFileName = resolveTripleslashReference(ref.fileName, file.fileName);
-processSourceFile(referencedFileName, isDefaultLib, /*isReference*/ true, file, ref.pos, ref.end);
+processSourceFile(referencedFileName, isDefaultLib, file, ref.pos, ref.end);
 });
 }
@@ -1256,7 +1255,7 @@ namespace ts {
 if (resolvedTypeReferenceDirective) {
 if (resolvedTypeReferenceDirective.primary) {
 // resolved from the primary path
-processSourceFile(resolvedTypeReferenceDirective.resolvedFileName, /*isDefaultLib*/ false, /*isReference*/ true, refFile, refPos, refEnd);
+processSourceFile(resolvedTypeReferenceDirective.resolvedFileName, /*isDefaultLib*/ false, refFile, refPos, refEnd);
 }
 else {
 // If we already resolved to this file, it must have been a secondary reference. Check file contents

@@ -1276,7 +1275,7 @@ namespace ts {
 }
 else {
 // First resolution of this library
-processSourceFile(resolvedTypeReferenceDirective.resolvedFileName, /*isDefaultLib*/ false, /*isReference*/ true, refFile, refPos, refEnd);
+processSourceFile(resolvedTypeReferenceDirective.resolvedFileName, /*isDefaultLib*/ false, refFile, refPos, refEnd);
 }
 }
 }

@@ -1302,7 +1301,7 @@ namespace ts {
 return host.getCanonicalFileName(fileName);
 }

-function processImportedModules(file: SourceFile, basePath: string) {
+function processImportedModules(file: SourceFile) {
 collectExternalModuleReferences(file);
 if (file.imports.length || file.moduleAugmentations.length) {
 file.resolvedModules = createMap<ResolvedModule>();

@@ -1333,7 +1332,7 @@ namespace ts {
 else if (shouldAddFile) {
 findSourceFile(resolution.resolvedFileName,
 toPath(resolution.resolvedFileName, currentDirectory, getCanonicalFileName),
-/*isDefaultLib*/ false, /*isReference*/ false,
+/*isDefaultLib*/ false,
 file,
 skipTrivia(file.text, file.imports[i].pos),
 file.imports[i].end);

@@ -1541,7 +1540,7 @@ namespace ts {
 if (!options.noEmit && !options.suppressOutputPathCheck) {
 const emitHost = getEmitHost();
 const emitFilesSeen = createFileMap<boolean>(!host.useCaseSensitiveFileNames() ? key => key.toLocaleLowerCase() : undefined);
-forEachExpectedEmitFile(emitHost, (emitFileNames, sourceFiles, isBundledEmit) => {
+forEachExpectedEmitFile(emitHost, (emitFileNames) => {
 verifyEmitFilePath(emitFileNames.jsFilePath, emitFilesSeen);
 verifyEmitFilePath(emitFileNames.declarationFilePath, emitFilesSeen);
 });

@@ -1553,13 +1552,13 @@ namespace ts {
 const emitFilePath = toPath(emitFileName, currentDirectory, getCanonicalFileName);
 // Report error if the output overwrites input file
 if (filesByName.contains(emitFilePath)) {
-createEmitBlockingDiagnostics(emitFileName, emitFilePath, Diagnostics.Cannot_write_file_0_because_it_would_overwrite_input_file);
+createEmitBlockingDiagnostics(emitFileName, Diagnostics.Cannot_write_file_0_because_it_would_overwrite_input_file);
 }

 // Report error if multiple files write into same file
 if (emitFilesSeen.contains(emitFilePath)) {
 // Already seen the same emit file - report error
-createEmitBlockingDiagnostics(emitFileName, emitFilePath, Diagnostics.Cannot_write_file_0_because_it_would_be_overwritten_by_multiple_input_files);
+createEmitBlockingDiagnostics(emitFileName, Diagnostics.Cannot_write_file_0_because_it_would_be_overwritten_by_multiple_input_files);
 }
 else {
 emitFilesSeen.set(emitFilePath, true);

@@ -1568,7 +1567,7 @@ namespace ts {
 }
 }

-function createEmitBlockingDiagnostics(emitFileName: string, emitFilePath: Path, message: DiagnosticMessage) {
+function createEmitBlockingDiagnostics(emitFileName: string, message: DiagnosticMessage) {
 hasEmitBlockingDiagnostics.set(toPath(emitFileName, currentDirectory, getCanonicalFileName), true);
 programDiagnostics.add(createCompilerDiagnostic(message, emitFileName));
 }
@@ -723,7 +723,7 @@ namespace ts {
 return iterateCommentRanges(/*reduce*/ true, text, pos, /*trailing*/ true, cb, state, initial);
 }

-function appendCommentRange(pos: number, end: number, kind: SyntaxKind, hasTrailingNewLine: boolean, state: any, comments: CommentRange[]) {
+function appendCommentRange(pos: number, end: number, kind: SyntaxKind, hasTrailingNewLine: boolean, _state: any, comments: CommentRange[]) {
 if (!comments) {
 comments = [];
 }

@@ -46,13 +46,9 @@ namespace ts {
 }

 declare var require: any;
 declare var module: any;
 declare var process: any;
 declare var global: any;
 declare var __filename: string;
 declare var Buffer: {
 new (str: string, encoding?: string): any;
 };

 declare class Enumerator {
 public atEnd(): boolean;

@@ -324,7 +320,7 @@ namespace ts {
 const platform: string = _os.platform();
 const useCaseSensitiveFileNames = isFileSystemCaseSensitive();

-function readFile(fileName: string, encoding?: string): string {
+function readFile(fileName: string, _encoding?: string): string {
 if (!fileExists(fileName)) {
 return undefined;
 }

@@ -576,7 +572,7 @@ namespace ts {
 args: ChakraHost.args,
 useCaseSensitiveFileNames: !!ChakraHost.useCaseSensitiveFileNames,
 write: ChakraHost.echo,
-readFile(path: string, encoding?: string) {
+readFile(path: string, _encoding?: string) {
 // encoding is automatically handled by the implementation in ChakraHost
 return ChakraHost.readFile(path);
 },

@@ -595,9 +591,9 @@ namespace ts {
 getExecutingFilePath: () => ChakraHost.executingFile,
 getCurrentDirectory: () => ChakraHost.currentDirectory,
 getDirectories: ChakraHost.getDirectories,
-getEnvironmentVariable: ChakraHost.getEnvironmentVariable || ((name: string) => ""),
+getEnvironmentVariable: ChakraHost.getEnvironmentVariable || (() => ""),
 readDirectory: (path: string, extensions?: string[], excludes?: string[], includes?: string[]) => {
-const pattern = getFileMatcherPatterns(path, extensions, excludes, includes, !!ChakraHost.useCaseSensitiveFileNames, ChakraHost.currentDirectory);
+const pattern = getFileMatcherPatterns(path, excludes, includes, !!ChakraHost.useCaseSensitiveFileNames, ChakraHost.currentDirectory);
 return ChakraHost.readDirectory(path, extensions, pattern.basePaths, pattern.excludePattern, pattern.includeFilePattern, pattern.includeDirectoryPattern);
 },
 exit: ChakraHost.quit,

@@ -165,7 +165,7 @@ namespace ts {
 hoistFunctionDeclaration,
 startLexicalEnvironment,
 endLexicalEnvironment,
-onSubstituteNode: (emitContext, node) => node,
+onSubstituteNode: (_emitContext, node) => node,
 enableSubstitution,
 isSubstitutionEnabled,
 onEmitNode: (node, emitContext, emitCallback) => emitCallback(node, emitContext),
@@ -51,7 +51,7 @@ namespace ts {
 location = value;
 }

-flattenDestructuring(context, node, value, location, emitAssignment, emitTempVariableAssignment, visitor);
+flattenDestructuring(node, value, location, emitAssignment, emitTempVariableAssignment, visitor);

 if (needsValue) {
 expressions.push(value);

@@ -87,13 +87,12 @@ namespace ts {
 * @param visitor An optional visitor to use to visit expressions.
 */
 export function flattenParameterDestructuring(
-context: TransformationContext,
 node: ParameterDeclaration,
 value: Expression,
 visitor?: (node: Node) => VisitResult<Node>) {
 const declarations: VariableDeclaration[] = [];

-flattenDestructuring(context, node, value, node, emitAssignment, emitTempVariableAssignment, visitor);
+flattenDestructuring(node, value, node, emitAssignment, emitTempVariableAssignment, visitor);

 return declarations;

@@ -123,7 +122,6 @@ namespace ts {
 * @param visitor An optional visitor to use to visit expressions.
 */
 export function flattenVariableDestructuring(
-context: TransformationContext,
 node: VariableDeclaration,
 value?: Expression,
 visitor?: (node: Node) => VisitResult<Node>,

@@ -131,7 +129,7 @@ namespace ts {
 const declarations: VariableDeclaration[] = [];

 let pendingAssignments: Expression[];
-flattenDestructuring(context, node, value, node, emitAssignment, emitTempVariableAssignment, visitor);
+flattenDestructuring(node, value, node, emitAssignment, emitTempVariableAssignment, visitor);

 return declarations;

@@ -180,7 +178,6 @@ namespace ts {
 * @param visitor An optional visitor to use to visit expressions.
 */
 export function flattenVariableDestructuringToExpression(
-context: TransformationContext,
 node: VariableDeclaration,
 recordTempVariable: (name: Identifier) => void,
 nameSubstitution?: (name: Identifier) => Expression,

@@ -188,7 +185,7 @@ namespace ts {

 const pendingAssignments: Expression[] = [];

-flattenDestructuring(context, node, /*value*/ undefined, node, emitAssignment, emitTempVariableAssignment, visitor);
+flattenDestructuring(node, /*value*/ undefined, node, emitAssignment, emitTempVariableAssignment, visitor);

 const expression = inlineExpressions(pendingAssignments);
 aggregateTransformFlags(expression);

@@ -219,7 +216,6 @@ namespace ts {
 }

 function flattenDestructuring(
-context: TransformationContext,
 root: VariableDeclaration | ParameterDeclaration | BindingElement | BinaryExpression,
 value: Expression,
 location: TextRange,
@@ -396,7 +396,7 @@ namespace ts {
 return visitYieldExpression(<YieldExpression>node);

 case SyntaxKind.SuperKeyword:
-return visitSuperKeyword(<PrimaryExpression>node);
+return visitSuperKeyword();

 case SyntaxKind.YieldExpression:
 // `yield` will be handled by a generators transform.

@@ -1118,7 +1118,7 @@ namespace ts {
 createVariableStatement(
 /*modifiers*/ undefined,
 createVariableDeclarationList(
-flattenParameterDestructuring(context, parameter, temp, visitor)
+flattenParameterDestructuring(parameter, temp, visitor)
 )
 ),
 EmitFlags.CustomPrologue

@@ -1690,7 +1690,7 @@ namespace ts {
 if (decl.initializer) {
 let assignment: Expression;
 if (isBindingPattern(decl.name)) {
-assignment = flattenVariableDestructuringToExpression(context, decl, hoistVariableDeclaration, /*nameSubstitution*/ undefined, visitor);
+assignment = flattenVariableDestructuringToExpression(decl, hoistVariableDeclaration, /*nameSubstitution*/ undefined, visitor);
 }
 else {
 assignment = createBinary(<Identifier>decl.name, SyntaxKind.EqualsToken, visitNode(decl.initializer, visitor, isExpression));

@@ -1843,7 +1843,7 @@ namespace ts {
 if (isBindingPattern(node.name)) {
 const recordTempVariablesInLine = !enclosingVariableStatement
 || !hasModifier(enclosingVariableStatement, ModifierFlags.Export);
-return flattenVariableDestructuring(context, node, /*value*/ undefined, visitor,
+return flattenVariableDestructuring(node, /*value*/ undefined, visitor,
 recordTempVariablesInLine ? undefined : hoistVariableDeclaration);
 }

@@ -1946,7 +1946,6 @@ namespace ts {
 // This works whether the declaration is a var, let, or const.
 // It will use rhsIterationValue _a[_i] as the initializer.
 const declarations = flattenVariableDestructuring(
-context,
 firstOriginalDeclaration,
 createElementAccess(rhsReference, counter),
 visitor

@@ -2539,15 +2538,15 @@ namespace ts {
 break;

 case SyntaxKind.PropertyAssignment:
-expressions.push(transformPropertyAssignmentToExpression(node, <PropertyAssignment>property, receiver, node.multiLine));
+expressions.push(transformPropertyAssignmentToExpression(<PropertyAssignment>property, receiver, node.multiLine));
 break;

 case SyntaxKind.ShorthandPropertyAssignment:
-expressions.push(transformShorthandPropertyAssignmentToExpression(node, <ShorthandPropertyAssignment>property, receiver, node.multiLine));
+expressions.push(transformShorthandPropertyAssignmentToExpression(<ShorthandPropertyAssignment>property, receiver, node.multiLine));
 break;

 case SyntaxKind.MethodDeclaration:
-expressions.push(transformObjectLiteralMethodDeclarationToExpression(node, <MethodDeclaration>property, receiver, node.multiLine));
+expressions.push(transformObjectLiteralMethodDeclarationToExpression(<MethodDeclaration>property, receiver, node.multiLine));
 break;

 default:

@@ -2564,7 +2563,7 @@ namespace ts {
 * @param property The PropertyAssignment node.
 * @param receiver The receiver for the assignment.
 */
-function transformPropertyAssignmentToExpression(node: ObjectLiteralExpression, property: PropertyAssignment, receiver: Expression, startsOnNewLine: boolean) {
+function transformPropertyAssignmentToExpression(property: PropertyAssignment, receiver: Expression, startsOnNewLine: boolean) {
 const expression = createAssignment(
 createMemberAccessForPropertyName(
 receiver,

@@ -2586,7 +2585,7 @@ namespace ts {
 * @param property The ShorthandPropertyAssignment node.
 * @param receiver The receiver for the assignment.
 */
-function transformShorthandPropertyAssignmentToExpression(node: ObjectLiteralExpression, property: ShorthandPropertyAssignment, receiver: Expression, startsOnNewLine: boolean) {
+function transformShorthandPropertyAssignmentToExpression(property: ShorthandPropertyAssignment, receiver: Expression, startsOnNewLine: boolean) {
 const expression = createAssignment(
 createMemberAccessForPropertyName(
 receiver,

@@ -2608,7 +2607,7 @@ namespace ts {
 * @param method The MethodDeclaration node.
 * @param receiver The receiver for the assignment.
 */
-function transformObjectLiteralMethodDeclarationToExpression(node: ObjectLiteralExpression, method: MethodDeclaration, receiver: Expression, startsOnNewLine: boolean) {
+function transformObjectLiteralMethodDeclarationToExpression(method: MethodDeclaration, receiver: Expression, startsOnNewLine: boolean) {
 const expression = createAssignment(
 createMemberAccessForPropertyName(
 receiver,
@@ -2798,7 +2797,7 @@ namespace ts {
 // expressions into an array literal.
 const numElements = elements.length;
 const segments = flatten(
-spanMap(elements, partitionSpreadElement, (partition, visitPartition, start, end) =>
+spanMap(elements, partitionSpreadElement, (partition, visitPartition, _start, end) =>
 visitPartition(partition, multiLine, hasTrailingComma && end === numElements)
 )
 );

@@ -2820,7 +2819,7 @@ namespace ts {
 : visitSpanOfNonSpreadElements;
 }

-function visitSpanOfSpreadElements(chunk: Expression[], multiLine: boolean, hasTrailingComma: boolean): VisitResult<Expression> {
+function visitSpanOfSpreadElements(chunk: Expression[]): VisitResult<Expression> {
 return map(chunk, visitExpressionOfSpreadElement);
 }

@@ -3009,7 +3008,7 @@ namespace ts {
 /**
 * Visits the `super` keyword
 */
-function visitSuperKeyword(node: PrimaryExpression): LeftHandSideExpression {
+function visitSuperKeyword(): LeftHandSideExpression {
 return enclosingNonAsyncFunctionBody
 && isClassElement(enclosingNonAsyncFunctionBody)
 && !hasModifier(enclosingNonAsyncFunctionBody, ModifierFlags.Static)

@@ -956,7 +956,7 @@ namespace ts {
 * @param elements The elements to visit.
 * @param multiLine Whether array literals created should be emitted on multiple lines.
 */
-function visitElements(elements: NodeArray<Expression>, multiLine: boolean) {
+function visitElements(elements: NodeArray<Expression>, _multiLine?: boolean) {
 // [source]
 // ar = [1, yield, 2];
 //

@@ -1102,7 +1102,7 @@ namespace ts {
 createFunctionApply(
 cacheExpression(visitNode(target, visitor, isLeftHandSideExpression)),
 thisArg,
-visitElements(node.arguments, /*multiLine*/ false),
+visitElements(node.arguments),
 /*location*/ node
 ),
 node

@@ -1131,7 +1131,7 @@ namespace ts {
 createFunctionApply(
 cacheExpression(visitNode(target, visitor, isExpression)),
 thisArg,
-visitElements(node.arguments, /*multiLine*/ false)
+visitElements(node.arguments)
 ),
 /*typeArguments*/ undefined,
 [],

@@ -22,7 +22,8 @@ namespace ts {
 function visitor(node: Node): VisitResult<Node> {
 switch (node.kind) {
 case SyntaxKind.ImportEqualsDeclaration:
-return visitImportEqualsDeclaration(<ImportEqualsDeclaration>node);
+// Elide `import=` as it is not legal with --module ES6
+return undefined;
 case SyntaxKind.ExportAssignment:
 return visitExportAssignment(<ExportAssignment>node);
 }

@@ -30,11 +31,6 @@ namespace ts {
 return node;
 }

-function visitImportEqualsDeclaration(node: ImportEqualsDeclaration): VisitResult<ImportEqualsDeclaration> {
-// Elide `import=` as it is not legal with --module ES6
-return undefined;
-}
-
 function visitExportAssignment(node: ExportAssignment): VisitResult<ExportAssignment> {
 // Elide `export=` as it is not legal with --module ES6
 return node.isExportEquals ? undefined : node;
|
||||
@ -680,7 +680,6 @@ namespace ts {
const name = node.name;
if (isBindingPattern(name)) {
return flattenVariableDestructuringToExpression(
context,
node,
hoistVariableDeclaration,
getModuleMemberName,

@ -646,7 +646,7 @@ namespace ts {
}
else {
// If the variable has a BindingPattern, flatten the variable into multiple assignment expressions.
return flattenVariableDestructuringToExpression(context, node, hoistVariableDeclaration);
return flattenVariableDestructuringToExpression(node, hoistVariableDeclaration);
}
}

@ -795,7 +795,7 @@ namespace ts {
const name = firstDeclaration.name;
return isIdentifier(name)
? name
: flattenVariableDestructuringToExpression(context, firstDeclaration, hoistVariableDeclaration);
: flattenVariableDestructuringToExpression(firstDeclaration, hoistVariableDeclaration);
}

/**

@ -580,7 +580,7 @@ namespace ts {
// HasLexicalDeclaration (N) : Determines if the argument identifier has a binding in this environment record that was created using
// a lexical declaration such as a LexicalDeclaration or a ClassDeclaration.
if (staticProperties.length) {
addInitializedPropertyStatements(statements, node, staticProperties, getLocalName(node, /*noSourceMaps*/ true));
addInitializedPropertyStatements(statements, staticProperties, getLocalName(node, /*noSourceMaps*/ true));
}

// Write any decorators of the node.
@ -822,7 +822,7 @@ namespace ts {
// the body of a class with static initializers.
setEmitFlags(classExpression, EmitFlags.Indented | getEmitFlags(classExpression));
expressions.push(startOnNewLine(createAssignment(temp, classExpression)));
addRange(expressions, generateInitializedPropertyExpressions(node, staticProperties, temp));
addRange(expressions, generateInitializedPropertyExpressions(staticProperties, temp));
expressions.push(startOnNewLine(temp));
return inlineExpressions(expressions);
}
@ -868,7 +868,7 @@ namespace ts {
}

const parameters = transformConstructorParameters(constructor);
const body = transformConstructorBody(node, constructor, hasExtendsClause, parameters);
const body = transformConstructorBody(node, constructor, hasExtendsClause);

// constructor(${parameters}) {
// ${body}
@ -922,9 +922,8 @@ namespace ts {
* @param node The current class.
* @param constructor The current class constructor.
* @param hasExtendsClause A value indicating whether the class has an extends clause.
* @param parameters The transformed parameters for the constructor.
*/
function transformConstructorBody(node: ClassExpression | ClassDeclaration, constructor: ConstructorDeclaration, hasExtendsClause: boolean, parameters: ParameterDeclaration[]) {
function transformConstructorBody(node: ClassExpression | ClassDeclaration, constructor: ConstructorDeclaration, hasExtendsClause: boolean) {
const statements: Statement[] = [];
let indexOfFirstStatement = 0;

@ -976,7 +975,7 @@ namespace ts {
// }
//
const properties = getInitializedProperties(node, /*isStatic*/ false);
addInitializedPropertyStatements(statements, node, properties, createThis());
addInitializedPropertyStatements(statements, properties, createThis());

if (constructor) {
// The class already had a constructor, so we should add the existing statements, skipping the initial super call.
@ -1116,13 +1115,12 @@ namespace ts {
/**
* Generates assignment statements for property initializers.
*
* @param node The class node.
* @param properties An array of property declarations to transform.
* @param receiver The receiver on which each property should be assigned.
*/
function addInitializedPropertyStatements(statements: Statement[], node: ClassExpression | ClassDeclaration, properties: PropertyDeclaration[], receiver: LeftHandSideExpression) {
function addInitializedPropertyStatements(statements: Statement[], properties: PropertyDeclaration[], receiver: LeftHandSideExpression) {
for (const property of properties) {
const statement = createStatement(transformInitializedProperty(node, property, receiver));
const statement = createStatement(transformInitializedProperty(property, receiver));
setSourceMapRange(statement, moveRangePastModifiers(property));
setCommentRange(statement, property);
statements.push(statement);
@ -1132,14 +1130,13 @@ namespace ts {
/**
* Generates assignment expressions for property initializers.
*
* @param node The class node.
* @param properties An array of property declarations to transform.
* @param receiver The receiver on which each property should be assigned.
*/
function generateInitializedPropertyExpressions(node: ClassExpression | ClassDeclaration, properties: PropertyDeclaration[], receiver: LeftHandSideExpression) {
function generateInitializedPropertyExpressions(properties: PropertyDeclaration[], receiver: LeftHandSideExpression) {
const expressions: Expression[] = [];
for (const property of properties) {
const expression = transformInitializedProperty(node, property, receiver);
const expression = transformInitializedProperty(property, receiver);
expression.startsOnNewLine = true;
setSourceMapRange(expression, moveRangePastModifiers(property));
setCommentRange(expression, property);
@ -1152,11 +1149,10 @@ namespace ts {
/**
* Transforms a property initializer into an assignment statement.
*
* @param node The class containing the property.
* @param property The property declaration.
* @param receiver The object receiving the property assignment.
*/
function transformInitializedProperty(node: ClassExpression | ClassDeclaration, property: PropertyDeclaration, receiver: LeftHandSideExpression) {
function transformInitializedProperty(property: PropertyDeclaration, receiver: LeftHandSideExpression) {
const propertyName = visitPropertyNameOfClassElement(property);
const initializer = visitNode(property.initializer, visitor, isExpression);
const memberAccess = createMemberAccessForPropertyName(receiver, propertyName, /*location*/ propertyName);
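
The hunks above all apply the same mechanical change: a parameter the function body never reads is dropped from the signature, its `@param` tag is deleted, and every call site loses the matching argument. A minimal sketch of that pattern, using illustrative names that do not appear in the compiler:

    // Before: `node` is accepted but never read, so --noUnusedParameters reports it.
    function emitAssignment(node: object, name: string, receiver: string): string {
        return `${receiver}.${name} = undefined;`;
    }

    // After: the parameter, its @param tag, and the argument at each call site are removed.
    function emitAssignmentTrimmed(name: string, receiver: string): string {
        return `${receiver}.${name} = undefined;`;
    }
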
@ -2423,7 +2419,6 @@ namespace ts {
const name = node.name;
if (isBindingPattern(name)) {
return flattenVariableDestructuringToExpression(
context,
node,
hoistVariableDeclaration,
getNamespaceMemberNameWithSourceMapsAndWithoutComments,

@ -29,7 +29,7 @@ namespace ts {
}
}

function reportEmittedFiles(files: string[], host: CompilerHost): void {
function reportEmittedFiles(files: string[]): void {
if (!files || files.length == 0) {
return;
}
@ -111,7 +111,7 @@ namespace ts {
return count;
}

function getDiagnosticText(message: DiagnosticMessage, ...args: any[]): string {
function getDiagnosticText(_message: DiagnosticMessage, ..._args: any[]): string {
const diagnostic = createCompilerDiagnostic.apply(undefined, arguments);
return <string>diagnostic.messageText;
}
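
Here the parameters are kept but renamed with a leading underscore: --noUnusedParameters does not report parameters whose names begin with "_", which suits functions like this one that read their inputs only through `arguments` while keeping a typed signature for callers. A minimal sketch of the same convention, with an illustrative function name:

    // The "_" prefix exempts the parameters from --noUnusedParameters even though
    // the body only touches them indirectly via `arguments`.
    function joinArgs(_first: string, ..._rest: string[]): string {
        return Array.prototype.slice.call(arguments).join(" ");
    }
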
@ -456,7 +456,7 @@ namespace ts {
const sourceFile = hostGetSourceFile(fileName, languageVersion, onError);
if (sourceFile && isWatchSet(compilerOptions) && sys.watchFile) {
// Attach a file watcher
sourceFile.fileWatcher = sys.watchFile(sourceFile.fileName, (fileName: string, removed?: boolean) => sourceFileChanged(sourceFile, removed));
sourceFile.fileWatcher = sys.watchFile(sourceFile.fileName, (_fileName: string, removed?: boolean) => sourceFileChanged(sourceFile, removed));
}
return sourceFile;
}
@ -617,7 +617,7 @@ namespace ts {

reportDiagnostics(sortAndDeduplicateDiagnostics(diagnostics), compilerHost);

reportEmittedFiles(emitOutput.emittedFiles, compilerHost);
reportEmittedFiles(emitOutput.emittedFiles);

if (emitOutput.emitSkipped && diagnostics.length > 0) {
// If the emitter didn't emit anything, then pass that value along.

@ -9,7 +9,9 @@
"sourceMap": true,
"declaration": true,
"stripInternal": true,
"target": "es5"
"target": "es5",
"noUnusedLocals": true,
"noUnusedParameters": true
},
"files": [
"core.ts",
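
With the two options added above, the compiler's own build now rejects dead declarations in its sources. A minimal sketch of code the new settings would flag, not taken from this diff:

    function describe(kind: string, unusedFlag: boolean): string {   // `unusedFlag` flagged by --noUnusedParameters
        const leftover = kind.length;                                // `leftover` flagged by --noUnusedLocals
        return kind;
    }
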
@ -2566,7 +2566,7 @@ namespace ts {
const options = host.getCompilerOptions();
// Emit on each source file
if (options.outFile || options.out) {
onBundledEmit(host, sourceFiles);
onBundledEmit(sourceFiles);
}
else {
for (const sourceFile of sourceFiles) {
@ -2599,7 +2599,7 @@ namespace ts {
action(jsFilePath, sourceMapFilePath, declarationFilePath, [sourceFile], /*isBundledEmit*/ false);
}

function onBundledEmit(host: EmitHost, sourceFiles: SourceFile[]) {
function onBundledEmit(sourceFiles: SourceFile[]) {
if (sourceFiles.length) {
const jsFilePath = options.outFile || options.out;
const sourceMapFilePath = getSourceMapFilePath(jsFilePath, options);

@ -1331,18 +1331,18 @@ namespace ts {
export namespace Debug {
export const failNotOptional = shouldAssert(AssertionLevel.Normal)
? (message?: string) => assert(false, message || "Node not optional.")
: (message?: string) => {};
: () => {};

export const failBadSyntaxKind = shouldAssert(AssertionLevel.Normal)
? (node: Node, message?: string) => assert(false, message || "Unexpected node.", () => `Node ${formatSyntaxKind(node.kind)} was unexpected.`)
: (node: Node, message?: string) => {};
: () => {};

export const assertNode = shouldAssert(AssertionLevel.Normal)
? (node: Node, test: (node: Node) => boolean, message?: string) => assert(
test === undefined || test(node),
message || "Unexpected node.",
() => `Node ${formatSyntaxKind(node.kind)} did not pass test '${getFunctionName(test)}'.`)
: (node: Node, test: (node: Node) => boolean, message?: string) => {};
: () => {};

function getFunctionName(func: Function) {
if (typeof func !== "function") {
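
The no-op branches above can drop their parameters entirely because a function with fewer parameters is assignable where one with more is expected, so each conditional still type-checks. A minimal sketch of that rule, with illustrative names:

    type NodeAssertion = (value: string, message?: string) => void;
    // A zero-argument arrow function satisfies NodeAssertion, so the disabled
    // branch does not need to declare (and then never use) its parameters.
    const disabledAssertion: NodeAssertion = () => {};
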
@ -444,7 +444,7 @@ namespace ts {
}

public readDirectory(path: string, extensions?: string[], exclude?: string[], include?: string[], depth?: number): string[] {
const pattern = getFileMatcherPatterns(path, extensions, exclude, include,
const pattern = getFileMatcherPatterns(path, exclude, include,
this.shimHost.useCaseSensitiveFileNames(), this.shimHost.getCurrentDirectory());
return JSON.parse(this.shimHost.readDirectory(
path,
@ -509,7 +509,7 @@ namespace ts {
// Wrap the API changes for 2.0 release. This try/catch
// should be removed once TypeScript 2.0 has shipped.
try {
const pattern = getFileMatcherPatterns(rootDir, extensions, exclude, include,
const pattern = getFileMatcherPatterns(rootDir, exclude, include,
this.shimHost.useCaseSensitiveFileNames(), this.shimHost.getCurrentDirectory());
return JSON.parse(this.shimHost.readDirectory(
rootDir,