Mirror of https://github.com/microsoft/TypeScript.git
Trace key operations
Produce output in Chrome's event tracing format so that it can be viewed in Chrome/Edge.
parent 9569198df6
commit 5d60972ef4
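For context, here is a minimal sketch of the trace file this change produces, with illustrative timestamps rather than values from a real run: startTracing writes the opening bracket, each tracing.begin/tracing.end pair writes a "B"/"E" event, and stopTracing appends a final instant event that also keeps the JSON array free of a trailing comma.

[
{"pid":1,"tid":1,"ph":"B","cat":"program","ts":1200,"name":"createProgram","args":{ "ts": {} }},
{"pid":1,"tid":1,"ph":"E","ts":98000},
{"pid":1,"tid":1,"ph":"i","ts":98500,"name":"done","s":"g"}
]

The field layout matches what src/compiler/tracing.ts (added below) actually writes; only the numbers are invented.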

@@ -174,12 +174,14 @@ namespace ts {
const binder = createBinder();
export function bindSourceFile(file: SourceFile, options: CompilerOptions) {
tracing.begin(tracing.Phase.Bind, "bindSourceFile", { path: file.path });
performance.mark("beforeBind");
perfLogger.logStartBindFile("" + file.fileName);
binder(file, options);
perfLogger.logStopBindFile();
performance.mark("afterBind");
performance.measure("Bind", "beforeBind", "afterBind");
tracing.end();
}
function createBinder(): (file: SourceFile, options: CompilerOptions) => void {

@@ -317,6 +317,8 @@ namespace ts {
let constraintDepth = 0;
let currentNode: Node | undefined;
const typeCatalog: Type[] = []; // NB: id is index + 1
const emptySymbols = createSymbolTable();
const arrayVariances = [VarianceFlags.Covariant];

@@ -360,6 +362,7 @@ namespace ts {
getNodeCount: () => sum(host.getSourceFiles(), "nodeCount"),
getIdentifierCount: () => sum(host.getSourceFiles(), "identifierCount"),
getSymbolCount: () => sum(host.getSourceFiles(), "symbolCount") + symbolCount,
getTypeCatalog: () => typeCatalog,
getTypeCount: () => typeCount,
getInstantiationCount: () => totalInstantiationCount,
getRelationCacheSizes: () => ({

@@ -3661,6 +3664,7 @@ namespace ts {
const result = new Type(checker, flags);
typeCount++;
result.id = typeCount;
typeCatalog.push(result);
return result;
}

@@ -16031,6 +16035,7 @@ namespace ts {
containingMessageChain?: () => DiagnosticMessageChain | undefined,
errorOutputContainer?: { errors?: Diagnostic[], skipLogging?: boolean },
): boolean {
let errorInfo: DiagnosticMessageChain | undefined;
let relatedInfo: [DiagnosticRelatedInformation, ...DiagnosticRelatedInformation[]] | undefined;
let maybeKeys: string[];

@@ -16093,6 +16098,8 @@ namespace ts {
if (errorNode && errorOutputContainer && errorOutputContainer.skipLogging && result === Ternary.False) {
Debug.assert(!!errorOutputContainer.errors, "missed opportunity to interact with error.");
}
return result !== Ternary.False;
function resetErrorInfo(saved: ReturnType<typeof captureErrorCalculationState>) {

@@ -16811,6 +16818,13 @@ namespace ts {
// equal and infinitely expanding. Fourth, if we have reached a depth of 100 nested comparisons, assume we have runaway recursion
// and issue an error. Otherwise, actually compare the structure of the two types.
function recursiveTypeRelatedTo(source: Type, target: Type, reportErrors: boolean, intersectionState: IntersectionState): Ternary {
tracing.begin(tracing.Phase.Check, "recursiveTypeRelatedTo", { sourceId: source.id, targetId: target.id });
const result = recursiveTypeRelatedToWorker(source, target, reportErrors, intersectionState);
tracing.end();
return result;
}
function recursiveTypeRelatedToWorker(source: Type, target: Type, reportErrors: boolean, intersectionState: IntersectionState): Ternary {
if (overflow) {
return Ternary.False;
}

@@ -18082,6 +18096,7 @@ namespace ts {
function getVariancesWorker<TCache extends { variances?: VarianceFlags[] }>(typeParameters: readonly TypeParameter[] = emptyArray, cache: TCache, createMarkerType: (input: TCache, param: TypeParameter, marker: Type) => Type): VarianceFlags[] {
let variances = cache.variances;
if (!variances) {
tracing.begin(tracing.Phase.Check, "getVariancesWorker", { arity: typeParameters.length, id: (cache as any).id ?? (cache as any).declaredType?.id ?? -1 });
// The emptyArray singleton is used to signal a recursive invocation.
cache.variances = emptyArray;
variances = [];

@@ -18116,6 +18131,7 @@ namespace ts {
variances.push(variance);
}
cache.variances = variances;
tracing.end();
}
return variances;
}

@@ -19369,6 +19385,7 @@ namespace ts {
inferFromTypes(originalSource, originalTarget);
function inferFromTypes(source: Type, target: Type): void {
if (!couldContainTypeVariables(target)) {
return;
}

@@ -30257,6 +30274,7 @@ namespace ts {
}
function checkExpression(node: Expression | QualifiedName, checkMode?: CheckMode, forceTuple?: boolean): Type {
tracing.begin(tracing.Phase.Check, "checkExpression", { kind: node.kind, pos: node.pos, end: node.end });
const saveCurrentNode = currentNode;
currentNode = node;
instantiationCount = 0;

@@ -30266,6 +30284,7 @@ namespace ts {
checkConstEnumAccess(node, type);
}
currentNode = saveCurrentNode;
tracing.end();
return type;
}

@@ -33032,8 +33051,10 @@ namespace ts {
}
function checkVariableDeclaration(node: VariableDeclaration) {
tracing.begin(tracing.Phase.Check, "checkVariableDeclaration", { kind: node.kind, pos: node.pos, end: node.end });
checkGrammarVariableDeclaration(node);
return checkVariableLikeDeclaration(node);
checkVariableLikeDeclaration(node);
tracing.end();
}
function checkBindingElement(node: BindingElement) {

@@ -36069,10 +36090,12 @@ namespace ts {
}
function checkSourceFile(node: SourceFile) {
tracing.begin(tracing.Phase.Check, "checkSourceFile", { path: node.path });
performance.mark("beforeCheck");
checkSourceFileWorker(node);
performance.mark("afterCheck");
performance.measure("Check", "beforeCheck", "afterCheck");
tracing.end();
}
function unusedIsError(kind: UnusedKind, isAmbient: boolean): boolean {

@@ -197,6 +197,14 @@ namespace ts {
category: Diagnostics.Advanced_Options,
description: Diagnostics.Generates_a_CPU_profile
},
{
name: "generateTrace",
type: "string",
isFilePath: true,
paramType: Diagnostics.FILE_OR_DIRECTORY,
category: Diagnostics.Advanced_Options,
description: Diagnostics.Generates_an_event_trace_and_a_list_of_types
},
{
name: "incremental",
shortName: "i",

@@ -4486,6 +4486,10 @@
"category": "Error",
"code": 6236
},
"Generates an event trace and a list of types.": {
"category": "Message",
"code": 6237
},
"Projects to reference": {
"category": "Message",

@@ -300,9 +300,17 @@ namespace ts {
sourceFiles: sourceFileOrBundle.sourceFiles.map(file => relativeToBuildInfo(getNormalizedAbsolutePath(file.fileName, host.getCurrentDirectory())))
};
}
tracing.begin(tracing.Phase.Emit, "emitJsFileOrBundle", { jsFilePath });
emitJsFileOrBundle(sourceFileOrBundle, jsFilePath, sourceMapFilePath, relativeToBuildInfo);
tracing.end();
tracing.begin(tracing.Phase.Emit, "emitDeclarationFileOrBundle", { declarationFilePath });
emitDeclarationFileOrBundle(sourceFileOrBundle, declarationFilePath, declarationMapPath, relativeToBuildInfo);
tracing.end();
tracing.begin(tracing.Phase.Emit, "emitBuildInfo", { buildInfoPath });
emitBuildInfo(bundleBuildInfo, buildInfoPath);
tracing.end();
if (!emitSkipped && emittedFilesList) {
if (!emitOnlyDtsFiles) {

@@ -604,6 +604,7 @@ namespace ts {
}
export function createSourceFile(fileName: string, sourceText: string, languageVersion: ScriptTarget, setParentNodes = false, scriptKind?: ScriptKind): SourceFile {
tracing.begin(tracing.Phase.Parse, "createSourceFile", { path: fileName });
performance.mark("beforeParse");
let result: SourceFile;

@@ -618,6 +619,7 @@ namespace ts {
performance.mark("afterParse");
performance.measure("Parse", "beforeParse", "afterParse");
tracing.end();
return result;
}

@@ -734,6 +734,7 @@ namespace ts {
// Track source files that are source files found by searching under node_modules, as these shouldn't be compiled.
const sourceFilesFoundSearchingNodeModules = new Map<string, boolean>();
tracing.begin(tracing.Phase.Program, "createProgram", {});
performance.mark("beforeProgram");
const host = createProgramOptions.host || createCompilerHost(options);

@@ -948,6 +949,7 @@ namespace ts {
getNodeCount: () => getDiagnosticsProducingTypeChecker().getNodeCount(),
getIdentifierCount: () => getDiagnosticsProducingTypeChecker().getIdentifierCount(),
getSymbolCount: () => getDiagnosticsProducingTypeChecker().getSymbolCount(),
getTypeCatalog: () => getDiagnosticsProducingTypeChecker().getTypeCatalog(),
getTypeCount: () => getDiagnosticsProducingTypeChecker().getTypeCount(),
getInstantiationCount: () => getDiagnosticsProducingTypeChecker().getInstantiationCount(),
getRelationCacheSizes: () => getDiagnosticsProducingTypeChecker().getRelationCacheSizes(),

@@ -982,6 +984,7 @@ namespace ts {
verifyCompilerOptions();
performance.mark("afterProgram");
performance.measure("Program", "beforeProgram", "afterProgram");
tracing.end();
return program;

@@ -1505,6 +1508,7 @@ namespace ts {
function emitBuildInfo(writeFileCallback?: WriteFileCallback): EmitResult {
Debug.assert(!outFile(options));
tracing.begin(tracing.Phase.Emit, "emitBuildInfo", {});
performance.mark("beforeEmit");
const emitResult = emitFiles(
notImplementedResolver,

@@ -1517,6 +1521,7 @@ namespace ts {
performance.mark("afterEmit");
performance.measure("Emit", "beforeEmit", "afterEmit");
tracing.end();
return emitResult;
}

@@ -1577,7 +1582,10 @@ namespace ts {
}
function emit(sourceFile?: SourceFile, writeFileCallback?: WriteFileCallback, cancellationToken?: CancellationToken, emitOnlyDtsFiles?: boolean, transformers?: CustomTransformers, forceDtsEmit?: boolean): EmitResult {
return runWithCancellationToken(() => emitWorker(program, sourceFile, writeFileCallback, cancellationToken, emitOnlyDtsFiles, transformers, forceDtsEmit));
tracing.begin(tracing.Phase.Emit, "emit", { path: sourceFile?.path });
const result = runWithCancellationToken(() => emitWorker(program, sourceFile, writeFileCallback, cancellationToken, emitOnlyDtsFiles, transformers, forceDtsEmit));
tracing.end();
return result;
}
function isEmitBlocked(emitFileName: string): boolean {

@@ -1047,11 +1047,13 @@ namespace ts {
args: string[];
newLine: string;
useCaseSensitiveFileNames: boolean;
write(s: string): void;
write(s: string, fd?: number): void;
writeOutputIsTTY?(): boolean;
readFile(path: string, encoding?: string): string | undefined;
getFileSize?(path: string): number;
writeFile(path: string, data: string, writeByteOrderMark?: boolean): void;
openFile(path: string, mode: "w"): number | undefined;
closeFile(fd: number): void;
/**
* @pollingInterval - this parameter is used in polling-based watchers and ignored in watchers that

@@ -1183,12 +1185,33 @@ namespace ts {
args: process.argv.slice(2),
newLine: _os.EOL,
useCaseSensitiveFileNames,
write(s: string): void {
process.stdout.write(s);
write(s: string, fd?: number): void {
if (fd) {
_fs.writeSync(fd, s);
}
else {
process.stdout.write(s);
}
},
writeOutputIsTTY() {
return process.stdout.isTTY;
},
openFile: (path, mode) => {
try {
return _fs.openSync(path, mode);
}
catch {
return undefined;
}
},
closeFile: (fd) => {
try {
_fs.closeSync(fd);
}
catch {
// ignore
}
},
readFile,
writeFile,
watchFile,

src/compiler/tracing.ts (new file, 140 lines)

@@ -0,0 +1,140 @@
/*@internal*/
/** Tracing events for the compiler. */
namespace ts.tracing {
type WriteFn = (data: string) => void;

let write: WriteFn | undefined;

/** Enables (and resets) tracing events for the compiler. */
export function startTracing(w: WriteFn) {
write = w;
write(`[\n`);
}

/** Disables tracing events for the compiler. */
export function stopTracing() {
// This both indicates that the trace is untruncated and conveniently
// ensures that the last array element won't have a trailing comma.
write?.(`{"pid":1,"tid":1,"ph":"i","ts":${1000 * timestamp()},"name":"done","s":"g"}\n`);
write?.(`]\n`);
write = undefined;
}

export function isTracing() {
return !!write;
}

export const enum Phase {
Parse = "parse",
Program = "program",
Bind = "bind",
Check = "check",
Emit = "emit",
}

export function begin(phase: Phase, name: string, args: object) {
performance.mark("beginTracing");
write?.(`{"pid":1,"tid":1,"ph":"B","cat":"${phase}","ts":${1000 * timestamp()},"name":"${name}","args":{ "ts": ${JSON.stringify(args)} }},\n`);
performance.mark("endTracing");
performance.measure("Tracing", "beginTracing", "endTracing");
}

export function end() {
performance.mark("beginTracing");
write?.(`{"pid":1,"tid":1,"ph":"E","ts":${1000 * timestamp()}},\n`);
performance.mark("endTracing");
performance.measure("Tracing", "beginTracing", "endTracing");
}

function indexFromOne(lc: LineAndCharacter): LineAndCharacter {
return {
line: lc.line + 1,
character: lc.character + 1,
};
}

export function dumpTypes(types: readonly Type[], write: WriteFn) {
performance.mark("beginDumpTypes");

const numTypes = types.length;

// Cleverness: no line break here so that the type ID will match the line number
write("[");
for (let i = 0; i < numTypes; i++) {
const type = types[i];
const objectFlags = (type as any).objectFlags;
const symbol = type.aliasSymbol ?? type.symbol;
const firstDeclaration = symbol?.declarations?.[0];
const firstFile = firstDeclaration && getSourceFileOfNode(firstDeclaration);

// It's slow to compute the display text, so skip it unless it's really valuable (or cheap)
let display: string | undefined;
if ((objectFlags & ObjectFlags.Anonymous) | (type.flags & TypeFlags.Literal)) {
try {
display = type.checker?.typeToString(type);
}
catch {
display = undefined;
}
}

let indexedAccessProperties: object = {};
if (type.flags & TypeFlags.IndexedAccess) {
const indexedAccessType = type as IndexedAccessType;
indexedAccessProperties = {
indexedAccessObjectType: indexedAccessType.objectType?.id,
indexedAccessIndexType: indexedAccessType.indexType?.id,
};
}

let referenceProperties: object = {};
if (objectFlags & ObjectFlags.Reference) {
const referenceType = type as TypeReference;
referenceProperties = {
instantiatedType: referenceType.target?.id,
typeArguments: referenceType.resolvedTypeArguments?.map(t => t.id),
};
}

let conditionalProperties: object = {};
if (type.flags & TypeFlags.Conditional) {
const conditionalType = type as ConditionalType;
conditionalProperties = {
conditionalCheckType: conditionalType.checkType?.id,
conditionalExtendsType: conditionalType.extendsType?.id,
conditionalTrueType: conditionalType.resolvedTrueType?.id ?? -1,
conditionalFalseType: conditionalType.resolvedFalseType?.id ?? -1,
};
}

const descriptor = {
id: type.id,
intrinsicName: (type as any).intrinsicName,
symbolName: symbol?.escapedName && unescapeLeadingUnderscores(symbol.escapedName),
unionTypes: (type.flags & TypeFlags.Union) ? (type as UnionType).types?.map(t => t.id) : undefined,
intersectionTypes: (type.flags & TypeFlags.Intersection) ? (type as IntersectionType).types.map(t => t.id) : undefined,
aliasTypeArguments: type.aliasTypeArguments?.map(t => t.id),
keyofType: (type.flags & TypeFlags.Index) ? (type as IndexType).type?.id : undefined,
...indexedAccessProperties,
...referenceProperties,
...conditionalProperties,
firstDeclaration: firstDeclaration && {
path: firstFile.path,
start: indexFromOne(getLineAndCharacterOfPosition(firstFile, firstDeclaration.pos)),
end: indexFromOne(getLineAndCharacterOfPosition(getSourceFileOfNode(firstDeclaration), firstDeclaration.end)),
},
flags: Debug.formatTypeFlags(type.flags).split("|"),
display,
};

write(JSON.stringify(descriptor));
if (i < numTypes - 1) {
write(",\n");
}
}
write("]\n");

performance.mark("endDumpTypes");
performance.measure("Dump types", "beginDumpTypes", "endDumpTypes");
}
}

@@ -222,7 +222,12 @@ namespace ts {
state = TransformationState.Initialized;
// Transform each node.
const transformed = map(nodes, allowDtsFiles ? transformation : transformRoot);
const transformed: T[] = [];
for (const node of nodes) {
tracing.begin(tracing.Phase.Emit, "transformNodes", node.kind === SyntaxKind.SourceFile ? { path: (node as any as SourceFile).path } : { kind: node.kind, pos: node.pos, end: node.end });
transformed.push((allowDtsFiles ? transformation : transformRoot)(node));
tracing.end();
}
// prevent modification of the lexical environment.
state = TransformationState.Completed;

@@ -23,6 +23,7 @@ namespace ts {
/* @internal */ extendedDiagnostics?: boolean;
/* @internal */ locale?: string;
/* @internal */ generateCpuProfile?: string;
/* @internal */ generateTrace?: string;
[option: string]: CompilerOptionsValue | undefined;
}

@@ -17,6 +17,7 @@
"performance.ts",
"perfLogger.ts",
"semver.ts",
"tracing.ts",
"types.ts",
"sys.ts",

@@ -3729,6 +3729,8 @@ namespace ts {
/* @internal */ getClassifiableNames(): Set<__String>;
getTypeCatalog(): readonly Type[];
getNodeCount(): number;
getIdentifierCount(): number;
getSymbolCount(): number;

@@ -4063,6 +4065,8 @@ namespace ts {
/* @internal */ getGlobalDiagnostics(): Diagnostic[];
/* @internal */ getEmitResolver(sourceFile?: SourceFile, cancellationToken?: CancellationToken): EmitResolver;
/* @internal */ getTypeCatalog(): readonly Type[];
/* @internal */ getNodeCount(): number;
/* @internal */ getIdentifierCount(): number;
/* @internal */ getSymbolCount(): number;

@@ -5675,6 +5679,7 @@ namespace ts {
experimentalDecorators?: boolean;
forceConsistentCasingInFileNames?: boolean;
/*@internal*/generateCpuProfile?: string;
/*@internal*/generateTrace?: string;
/*@internal*/help?: boolean;
importHelpers?: boolean;
importsNotUsedAsValues?: ImportsNotUsedAsValues;

@@ -5520,7 +5520,7 @@ namespace ts {
function Type(this: Type, checker: TypeChecker, flags: TypeFlags) {
this.flags = flags;
if (Debug.isDebugging) {
if (Debug.isDebugging || tracing.isTracing()) {
this.checker = checker;
}
}

@@ -476,7 +476,7 @@ namespace ts {
const currentDirectory = host.getCurrentDirectory();
const getCanonicalFileName = createGetCanonicalFileName(host.useCaseSensitiveFileNames());
changeCompilerHostLikeToUseCache(host, fileName => toPath(fileName, currentDirectory, getCanonicalFileName));
enableStatistics(sys, options);
enableStatisticsAndTracing(sys, options);
const programOptions: CreateProgramOptions = {
rootNames: fileNames,

@@ -504,7 +504,7 @@ namespace ts {
config: ParsedCommandLine
) {
const { options, fileNames, projectReferences } = config;
enableStatistics(sys, options);
enableStatisticsAndTracing(sys, options);
const host = createIncrementalCompilerHost(options, sys);
const exitStatus = ts.performIncrementalCompilation({
host,

@@ -541,7 +541,7 @@ namespace ts {
host.createProgram = (rootNames, options, host, oldProgram, configFileParsingDiagnostics, projectReferences) => {
Debug.assert(rootNames !== undefined || (options === undefined && !!oldProgram));
if (options !== undefined) {
enableStatistics(sys, options);
enableStatisticsAndTracing(sys, options);
}
return compileUsingBuilder(rootNames, options, host, oldProgram, configFileParsingDiagnostics, projectReferences);
};

@@ -610,15 +610,44 @@ namespace ts {
return system === sys && (compilerOptions.diagnostics || compilerOptions.extendedDiagnostics);
}
function enableStatistics(sys: System, compilerOptions: CompilerOptions) {
if (canReportDiagnostics(sys, compilerOptions)) {
let traceCount = 0;
let tracingFd: number | undefined;
function enableStatisticsAndTracing(system: System, compilerOptions: CompilerOptions) {
if (canReportDiagnostics(system, compilerOptions)) {
performance.enable();
}
Debug.assert(!tracingFd, "Tracing already started");
if (system === sys) {
const tracePath = compilerOptions.generateTrace;
if (tracePath) {
const extension = getAnyExtensionFromPath(tracePath);
tracingFd = sys.openFile(changeAnyExtension(tracePath, `${++traceCount}${extension}`), "w");
if (tracingFd) {
tracing.startTracing(event => sys.write(event, tracingFd));
}
}
}
}
function reportStatistics(sys: System, program: Program) {
let statistics: Statistic[];
const compilerOptions = program.getCompilerOptions();
if (tracingFd) {
tracing.stopTracing();
sys.closeFile(tracingFd);
tracingFd = undefined;
const typesPath = changeAnyExtension(compilerOptions.generateTrace!, `${traceCount}.types.json`);
const typesFd = sys.openFile(typesPath, "w");
if (typesFd) {
tracing.dumpTypes(program.getTypeCatalog(), type => sys.write(type, typesFd));
sys.closeFile(typesFd);
}
}
let statistics: Statistic[];
if (canReportDiagnostics(sys, compilerOptions)) {
statistics = [];
const memoryUsed = sys.getMemoryUsage ? sys.getMemoryUsage() : -1;

@@ -37,7 +37,8 @@ namespace fakes {
return true;
}
public write(message: string) {
public write(message: string, fd?: number) {
assert.isUndefined(fd);
this.output.push(message);
}

@@ -60,6 +61,15 @@ namespace fakes {
this.vfs.unlinkSync(path);
}
public openFile(_path: string, _mode: "w"): number | undefined {
assert.fail("NYI");
return undefined;
}
public closeFile(_fd: number): void {
assert.fail("NYI");
}
public fileExists(path: string) {
const stats = this._getStats(path);
return stats ? stats.isFile() : false;

@@ -724,7 +724,8 @@ namespace Harness.LanguageService {
onMessage = ts.noop;
writeMessage = ts.noop; // overridden
write(message: string): void {
write(message: string, fd?: number): void {
assert.isUndefined(fd);
this.writeMessage(message);
}

@@ -744,6 +745,9 @@ namespace Harness.LanguageService {
writeFile = ts.noop;
openFile = ts.returnUndefined;
closeFile = ts.noop;
resolvePath(path: string): string {
return path;
}

@@ -1011,11 +1011,21 @@ interface Array<T> { length: number; [n: number]: T; }`
}
}
openFile(_path: string, _mode: "w"): number | undefined {
assert.fail("NYI");
return undefined;
}
closeFile(_fd: number): void {
assert.fail("NYI");
}
appendFile(path: string, content: string, options?: Partial<ReloadWatchInvokeOptions>): void {
this.modifyFile(path, this.readFile(path) + content, options);
}
write(message: string) {
write(message: string, fd?: number) {
assert.isUndefined(fd);
this.output.push(message);
}

@@ -7,9 +7,11 @@ namespace ts.server {
args: [],
newLine: "\n",
useCaseSensitiveFileNames: true,
write(s): void { lastWrittenToHost = s; },
write(s, _fd: number): void { lastWrittenToHost = s; },
readFile: returnUndefined,
writeFile: noop,
openFile: returnUndefined,
closeFile: noop,
resolvePath(): string { return undefined!; }, // TODO: GH#18217
fileExists: () => false,
directoryExists: () => false,

@@ -2053,6 +2053,7 @@ declare namespace ts {
* Gets a type checker that can be used to semantically analyze source files in the program.
*/
getTypeChecker(): TypeChecker;
getTypeCatalog(): readonly Type[];
getNodeCount(): number;
getIdentifierCount(): number;
getSymbolCount(): number;

@@ -3840,11 +3841,13 @@ declare namespace ts {
args: string[];
newLine: string;
useCaseSensitiveFileNames: boolean;
write(s: string): void;
write(s: string, fd?: number): void;
writeOutputIsTTY?(): boolean;
readFile(path: string, encoding?: string): string | undefined;
getFileSize?(path: string): number;
writeFile(path: string, data: string, writeByteOrderMark?: boolean): void;
openFile(path: string, mode: "w"): number | undefined;
closeFile(fd: number): void;
/**
* @pollingInterval - this parameter is used in polling-based watchers and ignored in watchers that
* use native OS file watching

@@ -2053,6 +2053,7 @@ declare namespace ts {
* Gets a type checker that can be used to semantically analyze source files in the program.
*/
getTypeChecker(): TypeChecker;
getTypeCatalog(): readonly Type[];
getNodeCount(): number;
getIdentifierCount(): number;
getSymbolCount(): number;

@@ -3840,11 +3841,13 @@ declare namespace ts {
args: string[];
newLine: string;
useCaseSensitiveFileNames: boolean;
write(s: string): void;
write(s: string, fd?: number): void;
writeOutputIsTTY?(): boolean;
readFile(path: string, encoding?: string): string | undefined;
getFileSize?(path: string): number;
writeFile(path: string, data: string, writeByteOrderMark?: boolean): void;
openFile(path: string, mode: "w"): number | undefined;
closeFile(fd: number): void;
/**
* @pollingInterval - this parameter is used in polling-based watchers and ignored in watchers that
* use native OS file watching

@@ -0,0 +1,5 @@
{
    "compilerOptions": {
        "generateTrace": "./someString"
    }
}
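As a hedged usage sketch (the exact command-line shape is an assumption; only the option name and the tsconfig form above come from this diff), tracing would be enabled roughly like this:

tsc -p ./myProject --generateTrace trace.json

Based on enableStatisticsAndTracing and reportStatistics above, the compiler appears to open a numbered variant of the given path for the event trace (e.g. trace.1.json) and, once the trace is closed, dump the type catalog to a companion file (e.g. trace.1.types.json). The event file is in Chrome's trace event format, so it can be loaded in the tracing viewer of Chrome or Edge, as the commit message notes.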