diff --git a/src/compiler/binder.ts b/src/compiler/binder.ts
index 1003b23b455ec..0451ab38f9124 100644
--- a/src/compiler/binder.ts
+++ b/src/compiler/binder.ts
@@ -174,12 +174,14 @@ namespace ts {
     const binder = createBinder();
 
     export function bindSourceFile(file: SourceFile, options: CompilerOptions) {
+        tracing.begin(tracing.Phase.Bind, "bindSourceFile", { path: file.path });
         performance.mark("beforeBind");
         perfLogger.logStartBindFile("" + file.fileName);
         binder(file, options);
         perfLogger.logStopBindFile();
         performance.mark("afterBind");
         performance.measure("Bind", "beforeBind", "afterBind");
+        tracing.end();
     }
 
     function createBinder(): (file: SourceFile, options: CompilerOptions) => void {
diff --git a/src/compiler/checker.ts b/src/compiler/checker.ts
index a85281375e6b9..65b6eea3f71f8 100644
--- a/src/compiler/checker.ts
+++ b/src/compiler/checker.ts
@@ -317,6 +317,8 @@ namespace ts {
         let constraintDepth = 0;
         let currentNode: Node | undefined;
 
+        const typeCatalog: Type[] = []; // NB: id is index + 1
+
         const emptySymbols = createSymbolTable();
 
         const arrayVariances = [VarianceFlags.Covariant];
@@ -360,6 +362,7 @@ namespace ts {
            getNodeCount: () => sum(host.getSourceFiles(), "nodeCount"),
            getIdentifierCount: () => sum(host.getSourceFiles(), "identifierCount"),
            getSymbolCount: () => sum(host.getSourceFiles(), "symbolCount") + symbolCount,
+           getTypeCatalog: () => typeCatalog,
            getTypeCount: () => typeCount,
            getInstantiationCount: () => totalInstantiationCount,
            getRelationCacheSizes: () => ({
@@ -374,6 +377,7 @@ namespace ts {
            getMergedSymbol,
            getDiagnostics,
            getGlobalDiagnostics,
+           getRecursionIdentity,
            getTypeOfSymbolAtLocation: (symbol, locationIn) => {
                const location = getParseTreeNode(locationIn);
                return location ? getTypeOfSymbolAtLocation(symbol, location) : errorType;
            },
@@ -3661,6 +3665,7 @@ namespace ts {
            const result = new Type(checker, flags);
            typeCount++;
            result.id = typeCount;
+           typeCatalog.push(result);
            return result;
        }
@@ -10799,6 +10804,7 @@ namespace ts {
                // very high likelihood we're dealing with an infinite generic type that perpetually generates
                // new type identities as we descend into it. We stop the recursion here and mark this type
                // and the outer types as having circular constraints.
+               tracing.instant(tracing.Phase.Check, "getImmediateBaseConstraint_DepthLimit", { typeId: t.id, originalTypeId: type.id, depth: constraintDepth });
                error(currentNode, Diagnostics.Type_instantiation_is_excessively_deep_and_possibly_infinite);
                nonTerminating = true;
                return t.immediateBaseConstraint = noConstraintType;
@@ -12873,6 +12879,7 @@ namespace ts {
                // caps union types at 5000 unique literal types and 1000 unique object types.
                const estimatedCount = (count / (len - i)) * len;
                if (estimatedCount > (primitivesOnly ? 25000000 : 1000000)) {
+                   tracing.instant(tracing.Phase.Check, "removeSubtypes_DepthLimit", { typeIds: types.map(t => t.id) });
                    error(currentNode, Diagnostics.Expression_produces_a_union_type_that_is_too_complex_to_represent);
                    return false;
                }
@@ -14935,6 +14942,7 @@ namespace ts {
                // We have reached 50 recursive type instantiations and there is a very high likelyhood we're dealing
                // with a combination of infinite generic types that perpetually generate new type identities. We stop
                // the recursion here by yielding the error type.
+               tracing.instant(tracing.Phase.Check, "instantiateType_DepthLimit", { typeId: type.id, instantiationDepth, instantiationCount });
                error(currentNode, Diagnostics.Type_instantiation_is_excessively_deep_and_possibly_infinite);
                return errorType;
            }
@@ -16031,6 +16039,7 @@ namespace ts {
            containingMessageChain?: () => DiagnosticMessageChain | undefined,
            errorOutputContainer?: { errors?: Diagnostic[], skipLogging?: boolean },
        ): boolean {
+
            let errorInfo: DiagnosticMessageChain | undefined;
            let relatedInfo: [DiagnosticRelatedInformation, ...DiagnosticRelatedInformation[]] | undefined;
            let maybeKeys: string[];
@@ -16052,6 +16061,7 @@ namespace ts {
                reportIncompatibleStack();
            }
            if (overflow) {
+               tracing.instant(tracing.Phase.Check, "checkTypeRelatedTo_DepthLimit", { sourceId: source.id, targetId: target.id, depth });
                const diag = error(errorNode || currentNode, Diagnostics.Excessive_stack_depth_comparing_types_0_and_1, typeToString(source), typeToString(target));
                if (errorOutputContainer) {
                    (errorOutputContainer.errors || (errorOutputContainer.errors = [])).push(diag);
@@ -16093,6 +16103,8 @@ namespace ts {
            if (errorNode && errorOutputContainer && errorOutputContainer.skipLogging && result === Ternary.False) {
                Debug.assert(!!errorOutputContainer.errors, "missed opportunity to interact with error.");
            }
+
+           return result !== Ternary.False;
 
            function resetErrorInfo(saved: ReturnType) {
@@ -16870,6 +16882,17 @@ namespace ts {
                        return originalHandler!(onlyUnreliable);
                    };
                }
+
+               if (expandingFlags === ExpandingFlags.Both) {
+                   tracing.instant(tracing.Phase.Check, "recursiveTypeRelatedTo_DepthLimit", {
+                       sourceId: source.id,
+                       sourceIdStack: sourceStack.map(t => t.id),
+                       targetId: target.id,
+                       targetIdStack: targetStack.map(t => t.id),
+                       depth,
+                   });
+               }
+
                const result = expandingFlags !== ExpandingFlags.Both ? structuredTypeRelatedTo(source, target, reportErrors, intersectionState) : Ternary.Maybe;
                if (outofbandVarianceMarkerHandler) {
                    outofbandVarianceMarkerHandler = originalHandler;
@@ -16895,6 +16918,13 @@ namespace ts {
            }
 
            function structuredTypeRelatedTo(source: Type, target: Type, reportErrors: boolean, intersectionState: IntersectionState): Ternary {
+               tracing.begin(tracing.Phase.Check, "structuredTypeRelatedTo", { sourceId: source.id, targetId: target.id });
+               const result = structuredTypeRelatedToWorker(source, target, reportErrors, intersectionState);
+               tracing.end();
+               return result;
+           }
+
+           function structuredTypeRelatedToWorker(source: Type, target: Type, reportErrors: boolean, intersectionState: IntersectionState): Ternary {
                if (intersectionState & IntersectionState.PropertyCheck) {
                    return propertiesRelatedTo(source, target, reportErrors, /*excludedProperties*/ undefined, IntersectionState.None);
                }
@@ -17343,6 +17373,7 @@ namespace ts {
                        numCombinations *= countTypes(getTypeOfSymbol(sourceProperty));
                        if (numCombinations > 25) {
                            // We've reached the complexity limit.
+                           tracing.instant(tracing.Phase.Check, "typeRelatedToDiscriminatedType_DepthLimit", { sourceId: source.id, targetId: target.id, numCombinations });
                            return Ternary.False;
                        }
                    }
@@ -18082,6 +18113,7 @@ namespace ts {
        function getVariancesWorker(typeParameters: readonly TypeParameter[] = emptyArray, cache: TCache, createMarkerType: (input: TCache, param: TypeParameter, marker: Type) => Type): VarianceFlags[] {
            let variances = cache.variances;
            if (!variances) {
+               tracing.begin(tracing.Phase.Check, "getVariancesWorker", { arity: typeParameters.length, id: (cache as any).id ?? (cache as any).declaredType?.id ?? -1 });
                // The emptyArray singleton is used to signal a recursive invocation.
                cache.variances = emptyArray;
                variances = [];
@@ -18116,6 +18148,7 @@ namespace ts {
                    variances.push(variance);
                }
                cache.variances = variances;
+               tracing.end();
            }
            return variances;
        }
@@ -18264,7 +18297,9 @@ namespace ts {
            for (let i = 0; i < depth; i++) {
                if (getRecursionIdentity(stack[i]) === identity) {
                    count++;
-                   if (count >= 5) return true;
+                   if (count >= 5) {
+                       return true;
+                   }
                }
            }
        }
@@ -19369,6 +19404,7 @@ namespace ts {
        inferFromTypes(originalSource, originalTarget);
 
        function inferFromTypes(source: Type, target: Type): void {
+
            if (!couldContainTypeVariables(target)) {
                return;
            }
@@ -21071,6 +21107,7 @@ namespace ts {
            if (flowDepth === 2000) {
                // We have made 2000 recursive invocations. To avoid overflowing the call stack we report an error
                // and disable further control flow analysis in the containing function or module body.
+               tracing.instant(tracing.Phase.Check, "getTypeAtFlowNode_DepthLimit", { flowId: flow.id });
                flowAnalysisDisabled = true;
                reportFlowControlError(reference);
                return errorType;
@@ -30257,6 +30294,7 @@ namespace ts {
        }
 
        function checkExpression(node: Expression | QualifiedName, checkMode?: CheckMode, forceTuple?: boolean): Type {
+           tracing.begin(tracing.Phase.Check, "checkExpression", { kind: node.kind, pos: node.pos, end: node.end });
            const saveCurrentNode = currentNode;
            currentNode = node;
            instantiationCount = 0;
@@ -30266,6 +30304,7 @@ namespace ts {
                checkConstEnumAccess(node, type);
            }
            currentNode = saveCurrentNode;
+           tracing.end();
            return type;
        }
@@ -33032,8 +33071,10 @@ namespace ts {
        }
 
        function checkVariableDeclaration(node: VariableDeclaration) {
+           tracing.begin(tracing.Phase.Check, "checkVariableDeclaration", { kind: node.kind, pos: node.pos, end: node.end });
            checkGrammarVariableDeclaration(node);
-           return checkVariableLikeDeclaration(node);
+           checkVariableLikeDeclaration(node);
+           tracing.end();
        }
 
        function checkBindingElement(node: BindingElement) {
@@ -36069,10 +36110,12 @@ namespace ts {
        }
 
        function checkSourceFile(node: SourceFile) {
+           tracing.begin(tracing.Phase.Check, "checkSourceFile", { path: node.path });
            performance.mark("beforeCheck");
            checkSourceFileWorker(node);
            performance.mark("afterCheck");
            performance.measure("Check", "beforeCheck", "afterCheck");
+           tracing.end();
        }
 
        function unusedIsError(kind: UnusedKind, isAmbient: boolean): boolean {
diff --git a/src/compiler/commandLineParser.ts b/src/compiler/commandLineParser.ts
index 3095c9d7dc82e..fd0c64fb515f9 100644
--- a/src/compiler/commandLineParser.ts
+++ b/src/compiler/commandLineParser.ts
@@ -197,6 +197,15 @@ namespace ts {
            category: Diagnostics.Advanced_Options,
            description: Diagnostics.Generates_a_CPU_profile
        },
+       {
+           name: "generateTrace",
+           type: "string",
+           isFilePath: true,
+           isCommandLineOnly: true,
+           paramType: Diagnostics.DIRECTORY,
+           category: Diagnostics.Advanced_Options,
+           description: Diagnostics.Generates_an_event_trace_and_a_list_of_types
+       },
        {
            name: "incremental",
            shortName: "i",
diff --git a/src/compiler/diagnosticMessages.json b/src/compiler/diagnosticMessages.json
index a5a7c2b3d3d12..a134d9afbc314 100644
--- a/src/compiler/diagnosticMessages.json
+++ b/src/compiler/diagnosticMessages.json
@@ -4486,6 +4486,10 @@
        "category": "Error",
        "code": 6236
    },
+   "Generates an event trace and a list of types.": {
+       "category": "Message",
+       "code": 6237
+   },
 
    "Projects to reference": {
        "category": "Message",
diff --git a/src/compiler/emitter.ts b/src/compiler/emitter.ts
index e273a40511967..ce2f940acbb91 100644
--- a/src/compiler/emitter.ts
+++ b/src/compiler/emitter.ts
@@ -300,9 +300,17 @@ namespace ts {
                sourceFiles: sourceFileOrBundle.sourceFiles.map(file => relativeToBuildInfo(getNormalizedAbsolutePath(file.fileName, host.getCurrentDirectory())))
            };
        }
+       tracing.begin(tracing.Phase.Emit, "emitJsFileOrBundle", { jsFilePath });
        emitJsFileOrBundle(sourceFileOrBundle, jsFilePath, sourceMapFilePath, relativeToBuildInfo);
+       tracing.end();
+
+       tracing.begin(tracing.Phase.Emit, "emitDeclarationFileOrBundle", { declarationFilePath });
        emitDeclarationFileOrBundle(sourceFileOrBundle, declarationFilePath, declarationMapPath, relativeToBuildInfo);
+       tracing.end();
+
+       tracing.begin(tracing.Phase.Emit, "emitBuildInfo", { buildInfoPath });
        emitBuildInfo(bundleBuildInfo, buildInfoPath);
+       tracing.end();
 
        if (!emitSkipped && emittedFilesList) {
            if (!emitOnlyDtsFiles) {
diff --git a/src/compiler/parser.ts b/src/compiler/parser.ts
index d7655812e9b87..f5c8cfeceeb90 100644
--- a/src/compiler/parser.ts
+++ b/src/compiler/parser.ts
@@ -604,6 +604,7 @@ namespace ts {
    }
 
    export function createSourceFile(fileName: string, sourceText: string, languageVersion: ScriptTarget, setParentNodes = false, scriptKind?: ScriptKind): SourceFile {
+       tracing.begin(tracing.Phase.Parse, "createSourceFile", { path: fileName });
        performance.mark("beforeParse");
        let result: SourceFile;
 
@@ -618,6 +619,7 @@ namespace ts {
 
        performance.mark("afterParse");
        performance.measure("Parse", "beforeParse", "afterParse");
+       tracing.end();
        return result;
    }
diff --git a/src/compiler/program.ts b/src/compiler/program.ts
index 5703181e90a35..a8fe79d12c702 100644
--- a/src/compiler/program.ts
+++ b/src/compiler/program.ts
@@ -734,6 +734,7 @@ namespace ts {
        // Track source files that are source files found by searching under node_modules, as these shouldn't be compiled.
        const sourceFilesFoundSearchingNodeModules = new Map();
 
+       tracing.begin(tracing.Phase.Program, "createProgram", {});
        performance.mark("beforeProgram");
 
        const host = createProgramOptions.host || createCompilerHost(options);
@@ -948,6 +949,7 @@ namespace ts {
            getNodeCount: () => getDiagnosticsProducingTypeChecker().getNodeCount(),
            getIdentifierCount: () => getDiagnosticsProducingTypeChecker().getIdentifierCount(),
            getSymbolCount: () => getDiagnosticsProducingTypeChecker().getSymbolCount(),
+           getTypeCatalog: () => getDiagnosticsProducingTypeChecker().getTypeCatalog(),
            getTypeCount: () => getDiagnosticsProducingTypeChecker().getTypeCount(),
            getInstantiationCount: () => getDiagnosticsProducingTypeChecker().getInstantiationCount(),
            getRelationCacheSizes: () => getDiagnosticsProducingTypeChecker().getRelationCacheSizes(),
@@ -982,6 +984,7 @@ namespace ts {
        verifyCompilerOptions();
        performance.mark("afterProgram");
        performance.measure("Program", "beforeProgram", "afterProgram");
+       tracing.end();
 
        return program;
@@ -1505,6 +1508,7 @@ namespace ts {
        function emitBuildInfo(writeFileCallback?: WriteFileCallback): EmitResult {
            Debug.assert(!outFile(options));
+           tracing.begin(tracing.Phase.Emit, "emitBuildInfo", {});
            performance.mark("beforeEmit");
            const emitResult = emitFiles(
                notImplementedResolver,
@@ -1517,6 +1521,7 @@ namespace ts {
 
            performance.mark("afterEmit");
            performance.measure("Emit", "beforeEmit", "afterEmit");
+           tracing.end();
            return emitResult;
        }
@@ -1577,7 +1582,10 @@ namespace ts {
        }
 
        function emit(sourceFile?: SourceFile, writeFileCallback?: WriteFileCallback, cancellationToken?: CancellationToken, emitOnlyDtsFiles?: boolean, transformers?: CustomTransformers, forceDtsEmit?: boolean): EmitResult {
-           return runWithCancellationToken(() => emitWorker(program, sourceFile, writeFileCallback, cancellationToken, emitOnlyDtsFiles, transformers, forceDtsEmit));
+           tracing.begin(tracing.Phase.Emit, "emit", { path: sourceFile?.path });
+           const result = runWithCancellationToken(() => emitWorker(program, sourceFile, writeFileCallback, cancellationToken, emitOnlyDtsFiles, transformers, forceDtsEmit));
+           tracing.end();
+           return result;
        }
 
        function isEmitBlocked(emitFileName: string): boolean {
diff --git a/src/compiler/tracing.ts b/src/compiler/tracing.ts
new file mode 100644
index 0000000000000..30b90a2b0d1a2
--- /dev/null
+++ b/src/compiler/tracing.ts
@@ -0,0 +1,256 @@
+/*@internal*/
+/** Tracing events for the compiler. */
+namespace ts.tracing {
+    let fs: typeof import("fs") | false | undefined;
+
+    let traceCount = 0;
+    let traceFd: number | undefined;
+
+    let legendPath: string | undefined;
+    const legend: TraceRecord[] = [];
+
+    /** Starts tracing for the given project (unless the `fs` module is unavailable). */
+    export function startTracing(configFilePath: string | undefined, traceDir: string, isBuildMode: boolean) {
+        Debug.assert(!traceFd, "Tracing already started");
+
+        if (fs === undefined) {
+            try {
+                fs = require("fs");
+            }
+            catch {
+                fs = false;
+            }
+        }
+
+        if (!fs) {
+            return;
+        }
+
+        if (legendPath === undefined) {
+            legendPath = combinePaths(traceDir, "legend.json");
+        }
+
+        // Note that writing will fail later on if it exists and is not a directory
+        if (!fs.existsSync(traceDir)) {
+            fs.mkdirSync(traceDir, { recursive: true });
+        }
+
+        const countPart = isBuildMode ? `.${++traceCount}` : ``;
+        const tracePath = combinePaths(traceDir, `trace${countPart}.json`);
+        const typesPath = combinePaths(traceDir, `types${countPart}.json`);
+
+        legend.push({
+            configFilePath,
+            tracePath,
+            typesPath,
+        });
+
+        traceFd = fs.openSync(tracePath, "w");
+        fs.writeSync(traceFd, `[\n`);
+    }
+
+    /** Stops tracing for the in-progress project and dumps the type catalog (unless the `fs` module is unavailable). */
+    export function stopTracing(typeCatalog: readonly Type[]) {
+        if (!traceFd) {
+            Debug.assert(!fs, "Tracing is not in progress");
+            return;
+        }
+
+        Debug.assert(fs);
+
+        // This both indicates that the trace is untruncated and conveniently
+        // ensures that the last array element won't have a trailing comma.
+        fs.writeSync(traceFd, `{"pid":1,"tid":1,"ph":"i","ts":${1000 * timestamp()},"name":"done","s":"g"}\n`);
+        fs.writeSync(traceFd, `]\n`);
+
+        fs.closeSync(traceFd);
+        traceFd = undefined;
+
+        if (typeCatalog) {
+            dumpTypes(typeCatalog);
+        }
+        else {
+            // We pre-computed this path for convenience, but clear it
+            // now that the file won't be created.
+            legend[legend.length - 1].typesPath = undefined;
+        }
+    }
+
+    export function isTracing() {
+        return !!traceFd;
+    }
+
+    export const enum Phase {
+        Parse = "parse",
+        Program = "program",
+        Bind = "bind",
+        Check = "check",
+        Emit = "emit",
+    }
+
+    export function begin(phase: Phase, name: string, args: object) {
+        if (!traceFd) {
+            return;
+        }
+        Debug.assert(fs);
+
+        performance.mark("beginTracing");
+        fs.writeSync(traceFd, `{"pid":1,"tid":1,"ph":"B","cat":"${phase}","ts":${1000 * timestamp()},"name":"${name}","args":{ "ts": ${JSON.stringify(args)} }},\n`);
+        performance.mark("endTracing");
+        performance.measure("Tracing", "beginTracing", "endTracing");
+    }
+
+    export function end() {
+        if (!traceFd) {
+            return;
+        }
+        Debug.assert(fs);
+
+        performance.mark("beginTracing");
+        fs.writeSync(traceFd, `{"pid":1,"tid":1,"ph":"E","ts":${1000 * timestamp()}},\n`);
+        performance.mark("endTracing");
+        performance.measure("Tracing", "beginTracing", "endTracing");
+    }
+
+    export function instant(phase: Phase, name: string, args: object) {
+        if (!traceFd) {
+            return;
+        }
+        Debug.assert(fs);
+
+        performance.mark("beginTracing");
+        fs.writeSync(traceFd, `{"pid":1,"tid":1,"ph":"i","cat":"${phase}","ts":${1000 * timestamp()},"name":"${name}","s":"g","args":{ "ts": ${JSON.stringify(args)} }},\n`);
+        performance.mark("endTracing");
+        performance.measure("Tracing", "beginTracing", "endTracing");
+    }
+
+    function indexFromOne(lc: LineAndCharacter): LineAndCharacter {
+        return {
+            line: lc.line + 1,
+            character: lc.character + 1,
+        };
+    }
+
+    function dumpTypes(types: readonly Type[]) {
+        Debug.assert(fs);
+
+        performance.mark("beginDumpTypes");
+
+        const typesPath = legend[legend.length - 1].typesPath!;
+        const typesFd = fs.openSync(typesPath, "w");
+
+        const recursionIdentityMap = new Map();
+
+        // Cleverness: no line break here so that the type ID will match the line number
+        fs.writeSync(typesFd, "[");
+
+        const numTypes = types.length;
+        for (let i = 0; i < numTypes; i++) {
+            const type = types[i];
+            const objectFlags = (type as any).objectFlags;
+            const symbol = type.aliasSymbol ?? type.symbol;
+            const firstDeclaration = symbol?.declarations?.[0];
+            const firstFile = firstDeclaration && getSourceFileOfNode(firstDeclaration);
+
+            // It's slow to compute the display text, so skip it unless it's really valuable (or cheap)
+            let display: string | undefined;
+            if ((objectFlags & ObjectFlags.Anonymous) | (type.flags & TypeFlags.Literal)) {
+                try {
+                    display = type.checker?.typeToString(type);
+                }
+                catch {
+                    display = undefined;
+                }
+            }
+
+            let indexedAccessProperties: object = {};
+            if (type.flags & TypeFlags.IndexedAccess) {
+                const indexedAccessType = type as IndexedAccessType;
+                indexedAccessProperties = {
+                    indexedAccessObjectType: indexedAccessType.objectType?.id,
+                    indexedAccessIndexType: indexedAccessType.indexType?.id,
+                };
+            }
+
+            let referenceProperties: object = {};
+            if (objectFlags & ObjectFlags.Reference) {
+                const referenceType = type as TypeReference;
+                referenceProperties = {
+                    instantiatedType: referenceType.target?.id,
+                    typeArguments: referenceType.resolvedTypeArguments?.map(t => t.id),
+                };
+            }
+
+            let conditionalProperties: object = {};
+            if (type.flags & TypeFlags.Conditional) {
+                const conditionalType = type as ConditionalType;
+                conditionalProperties = {
+                    conditionalCheckType: conditionalType.checkType?.id,
+                    conditionalExtendsType: conditionalType.extendsType?.id,
+                    conditionalTrueType: conditionalType.resolvedTrueType?.id ?? -1,
+                    conditionalFalseType: conditionalType.resolvedFalseType?.id ?? -1,
+                };
+            }
+
+            // We can't print out an arbitrary object, so just assign each one a unique number.
+            // Don't call it an "id" so people don't treat it as a type id.
+            let recursionToken: number | undefined;
+            const recursionIdentity = type.checker.getRecursionIdentity(type);
+            if (recursionIdentity) {
+                recursionToken = recursionIdentityMap.get(recursionIdentity);
+                if (!recursionToken) {
+                    recursionToken = recursionIdentityMap.size;
+                    recursionIdentityMap.set(recursionIdentity, recursionToken);
+                }
+            }
+
+            const descriptor = {
+                id: type.id,
+                intrinsicName: (type as any).intrinsicName,
+                symbolName: symbol?.escapedName && unescapeLeadingUnderscores(symbol.escapedName),
+                recursionId: recursionToken,
+                unionTypes: (type.flags & TypeFlags.Union) ? (type as UnionType).types?.map(t => t.id) : undefined,
+                intersectionTypes: (type.flags & TypeFlags.Intersection) ? (type as IntersectionType).types.map(t => t.id) : undefined,
+                aliasTypeArguments: type.aliasTypeArguments?.map(t => t.id),
+                keyofType: (type.flags & TypeFlags.Index) ? (type as IndexType).type?.id : undefined,
+                ...indexedAccessProperties,
+                ...referenceProperties,
+                ...conditionalProperties,
+                firstDeclaration: firstDeclaration && {
+                    path: firstFile.path,
+                    start: indexFromOne(getLineAndCharacterOfPosition(firstFile, firstDeclaration.pos)),
+                    end: indexFromOne(getLineAndCharacterOfPosition(getSourceFileOfNode(firstDeclaration), firstDeclaration.end)),
+                },
+                flags: Debug.formatTypeFlags(type.flags).split("|"),
+                display,
+            };
+
+            fs.writeSync(typesFd, JSON.stringify(descriptor));
+            if (i < numTypes - 1) {
+                fs.writeSync(typesFd, ",\n");
+            }
+        }
+
+        fs.writeSync(typesFd, "]\n");
+
+        fs.closeSync(typesFd);
+
+        performance.mark("endDumpTypes");
+        performance.measure("Dump types", "beginDumpTypes", "endDumpTypes");
+    }
+
+    export function dumpLegend() {
+        if (!legendPath) {
+            return;
+        }
+        Debug.assert(fs);
+
+        fs.writeFileSync(legendPath, JSON.stringify(legend));
+    }
+
+    interface TraceRecord {
+        configFilePath?: string;
+        tracePath: string;
+        typesPath?: string;
+    }
+}
diff --git a/src/compiler/transformer.ts b/src/compiler/transformer.ts
index 2ed0e6ad93162..2172d152412df 100644
--- a/src/compiler/transformer.ts
+++ b/src/compiler/transformer.ts
@@ -222,7 +222,12 @@ namespace ts {
        state = TransformationState.Initialized;
 
        // Transform each node.
-       const transformed = map(nodes, allowDtsFiles ? transformation : transformRoot);
+       const transformed: T[] = [];
+       for (const node of nodes) {
+           tracing.begin(tracing.Phase.Emit, "transformNodes", node.kind === SyntaxKind.SourceFile ? { path: (node as any as SourceFile).path } : { kind: node.kind, pos: node.pos, end: node.end });
+           transformed.push((allowDtsFiles ? transformation : transformRoot)(node));
+           tracing.end();
+       }
 
        // prevent modification of the lexical environment.
        state = TransformationState.Completed;
diff --git a/src/compiler/tsbuildPublic.ts b/src/compiler/tsbuildPublic.ts
index a9e2c9a4a95e1..8d1c59fb3fd80 100644
--- a/src/compiler/tsbuildPublic.ts
+++ b/src/compiler/tsbuildPublic.ts
@@ -23,6 +23,7 @@ namespace ts {
        /* @internal */ extendedDiagnostics?: boolean;
        /* @internal */ locale?: string;
        /* @internal */ generateCpuProfile?: string;
+       /* @internal */ generateTrace?: string;
 
        [option: string]: CompilerOptionsValue | undefined;
    }
diff --git a/src/compiler/tsconfig.json b/src/compiler/tsconfig.json
index 8213e019b340e..1edb2bbd63da3 100644
--- a/src/compiler/tsconfig.json
+++ b/src/compiler/tsconfig.json
@@ -17,6 +17,7 @@
        "performance.ts",
        "perfLogger.ts",
        "semver.ts",
+       "tracing.ts",
        "types.ts",
        "sys.ts",
diff --git a/src/compiler/types.ts b/src/compiler/types.ts
index 40f43b3ad6bbc..2bfc8e5a5cf1d 100644
--- a/src/compiler/types.ts
+++ b/src/compiler/types.ts
@@ -3729,6 +3729,8 @@ namespace ts {
        /* @internal */ getClassifiableNames(): Set<__String>;
 
+       getTypeCatalog(): readonly Type[];
+
        getNodeCount(): number;
        getIdentifierCount(): number;
        getSymbolCount(): number;
@@ -4063,12 +4065,15 @@ namespace ts {
        /* @internal */ getGlobalDiagnostics(): Diagnostic[];
        /* @internal */ getEmitResolver(sourceFile?: SourceFile, cancellationToken?: CancellationToken): EmitResolver;
 
+       /* @internal */ getTypeCatalog(): readonly Type[];
+
        /* @internal */ getNodeCount(): number;
        /* @internal */ getIdentifierCount(): number;
        /* @internal */ getSymbolCount(): number;
        /* @internal */ getTypeCount(): number;
        /* @internal */ getInstantiationCount(): number;
        /* @internal */ getRelationCacheSizes(): { assignable: number, identity: number, subtype: number, strictSubtype: number };
+       /* @internal */ getRecursionIdentity(type: Type): object | undefined;
 
        /* @internal */ isArrayType(type: Type): boolean;
        /* @internal */ isTupleType(type: Type): boolean;
@@ -5675,6 +5680,7 @@ namespace ts {
        experimentalDecorators?: boolean;
        forceConsistentCasingInFileNames?: boolean;
        /*@internal*/generateCpuProfile?: string;
+       /*@internal*/generateTrace?: string;
        /*@internal*/help?: boolean;
        importHelpers?: boolean;
        importsNotUsedAsValues?: ImportsNotUsedAsValues;
diff --git a/src/compiler/utilities.ts b/src/compiler/utilities.ts
index 6b58a9aafd6d3..17b45554cff4f 100644
--- a/src/compiler/utilities.ts
+++ b/src/compiler/utilities.ts
@@ -5520,7 +5520,7 @@ namespace ts {
    function Type(this: Type, checker: TypeChecker, flags: TypeFlags) {
        this.flags = flags;
-       if (Debug.isDebugging) {
+       if (Debug.isDebugging || tracing.isTracing()) {
            this.checker = checker;
        }
    }
diff --git a/src/executeCommandLine/executeCommandLine.ts b/src/executeCommandLine/executeCommandLine.ts
index 9d1ecc7c66621..ce469755daf06 100644
--- a/src/executeCommandLine/executeCommandLine.ts
+++ b/src/executeCommandLine/executeCommandLine.ts
@@ -456,6 +456,7 @@ namespace ts {
        updateSolutionBuilderHost(sys, cb, buildHost);
        const builder = createSolutionBuilder(buildHost, projects, buildOptions);
        const exitStatus = buildOptions.clean ? builder.clean() : builder.build();
+       tracing.dumpLegend();
        return sys.exit(exitStatus);
    }
@@ -476,7 +477,7 @@ namespace ts {
        const currentDirectory = host.getCurrentDirectory();
        const getCanonicalFileName = createGetCanonicalFileName(host.useCaseSensitiveFileNames());
        changeCompilerHostLikeToUseCache(host, fileName => toPath(fileName, currentDirectory, getCanonicalFileName));
-       enableStatistics(sys, options);
+       enableStatisticsAndTracing(sys, options, /*isBuildMode*/ false);
 
        const programOptions: CreateProgramOptions = {
            rootNames: fileNames,
@@ -504,7 +505,7 @@ namespace ts {
        config: ParsedCommandLine
    ) {
        const { options, fileNames, projectReferences } = config;
-       enableStatistics(sys, options);
+       enableStatisticsAndTracing(sys, options, /*isBuildMode*/ false);
        const host = createIncrementalCompilerHost(options, sys);
        const exitStatus = ts.performIncrementalCompilation({
            host,
@@ -541,7 +542,7 @@ namespace ts {
        host.createProgram = (rootNames, options, host, oldProgram, configFileParsingDiagnostics, projectReferences) => {
            Debug.assert(rootNames !== undefined || (options === undefined && !!oldProgram));
            if (options !== undefined) {
-               enableStatistics(sys, options);
+               enableStatisticsAndTracing(sys, options, /*isBuildMode*/ true);
            }
            return compileUsingBuilder(rootNames, options, host, oldProgram, configFileParsingDiagnostics, projectReferences);
        };
@@ -610,15 +611,28 @@ namespace ts {
        return system === sys && (compilerOptions.diagnostics || compilerOptions.extendedDiagnostics);
    }
 
-   function enableStatistics(sys: System, compilerOptions: CompilerOptions) {
-       if (canReportDiagnostics(sys, compilerOptions)) {
+   function canTrace(system: System, compilerOptions: CompilerOptions) {
+       return system === sys && compilerOptions.generateTrace;
+   }
+
+   function enableStatisticsAndTracing(system: System, compilerOptions: CompilerOptions, isBuildMode: boolean) {
+       if (canReportDiagnostics(system, compilerOptions)) {
            performance.enable();
        }
+
+       if (canTrace(system, compilerOptions)) {
+           tracing.startTracing(compilerOptions.configFilePath, compilerOptions.generateTrace!, isBuildMode);
+       }
    }
 
    function reportStatistics(sys: System, program: Program) {
-       let statistics: Statistic[];
        const compilerOptions = program.getCompilerOptions();
+
+       if (canTrace(sys, compilerOptions)) {
+           tracing.stopTracing(program.getTypeCatalog());
+       }
+
+       let statistics: Statistic[];
        if (canReportDiagnostics(sys, compilerOptions)) {
            statistics = [];
            const memoryUsed = sys.getMemoryUsage ? sys.getMemoryUsage() : -1;
diff --git a/tests/baselines/reference/api/tsserverlibrary.d.ts b/tests/baselines/reference/api/tsserverlibrary.d.ts
index 142dc3a9e7d29..0bca465cc6064 100644
--- a/tests/baselines/reference/api/tsserverlibrary.d.ts
+++ b/tests/baselines/reference/api/tsserverlibrary.d.ts
@@ -2053,6 +2053,7 @@ declare namespace ts {
         * Gets a type checker that can be used to semantically analyze source files in the program.
         */
        getTypeChecker(): TypeChecker;
+       getTypeCatalog(): readonly Type[];
        getNodeCount(): number;
        getIdentifierCount(): number;
        getSymbolCount(): number;
diff --git a/tests/baselines/reference/api/typescript.d.ts b/tests/baselines/reference/api/typescript.d.ts
index 86432ff0efad4..ecbbcd23c770b 100644
--- a/tests/baselines/reference/api/typescript.d.ts
+++ b/tests/baselines/reference/api/typescript.d.ts
@@ -2053,6 +2053,7 @@ declare namespace ts {
         * Gets a type checker that can be used to semantically analyze source files in the program.
         */
        getTypeChecker(): TypeChecker;
+       getTypeCatalog(): readonly Type[];
        getNodeCount(): number;
        getIdentifierCount(): number;
        getSymbolCount(): number;
diff --git a/tests/baselines/reference/showConfig/Shows tsconfig for single option/generateTrace/tsconfig.json b/tests/baselines/reference/showConfig/Shows tsconfig for single option/generateTrace/tsconfig.json
new file mode 100644
index 0000000000000..d02fdafbf84db
--- /dev/null
+++ b/tests/baselines/reference/showConfig/Shows tsconfig for single option/generateTrace/tsconfig.json
@@ -0,0 +1,5 @@
+{
+  "compilerOptions": {
+    "generateTrace": "./someString"
+  }
+}
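
With this change, passing the command-line-only `--generateTrace <directory>` option makes the compiler write a `trace.json` and `types.json` (suffixed with a per-project counter under `tsc -b`) plus a `legend.json` into that directory. The trace records are in the Chrome trace-event style emitted by `tracing.begin`/`end`/`instant` above: `"B"`, `"E"`, and `"i"` entries on a single pid/tid with microsecond `ts` values. The following is a rough post-processing sketch, not part of the patch; it assumes Node.js, a hypothetical `traceDir/trace.json` path, and a trace that was closed normally (so the file is valid JSON). Because `end()` writes no event name, begin/end records are paired with a stack.

```ts
// summarize-trace.ts — sum inclusive durations per event name from a generated trace.
import * as fs from "fs";

interface TraceEvent {
    ph: "B" | "E" | "i";   // begin / end / instant, as written by tracing.ts
    cat?: string;
    name?: string;
    ts: number;             // microseconds (1000 * timestamp())
    args?: { ts?: object };
}

const events: TraceEvent[] = JSON.parse(fs.readFileSync("traceDir/trace.json", "utf8"));

const stack: TraceEvent[] = [];
const inclusiveMicros = new Map<string, number>();
for (const event of events) {
    if (event.ph === "B") {
        stack.push(event);
    }
    else if (event.ph === "E") {
        // "E" records carry no name, so match them to the most recent unmatched "B".
        const begin = stack.pop();
        if (begin?.name) {
            inclusiveMicros.set(begin.name, (inclusiveMicros.get(begin.name) ?? 0) + (event.ts - begin.ts));
        }
    }
}

// Print the ten most expensive event names, in milliseconds.
for (const [name, us] of [...inclusiveMicros.entries()].sort((a, b) => b[1] - a[1]).slice(0, 10)) {
    console.log(`${name}\t${(us / 1000).toFixed(1)} ms`);
}
```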
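
The type ids that appear in event `args` (for example in the `*_DepthLimit` instants) can be resolved against `types.json`. Per the `NB: id is index + 1` note in checker.ts and the deliberately omitted line break after `[` in `dumpTypes`, the descriptor for type id N sits at array index N − 1 and on line N of the file. A small lookup sketch, not part of the patch; the file path and the id `42` are placeholders.

```ts
// resolve-types.ts — map a type id from the trace back to its descriptor.
import * as fs from "fs";

interface TypeDescriptor {
    id: number;
    intrinsicName?: string;
    symbolName?: string;
    firstDeclaration?: { path: string; start: { line: number; character: number } };
    flags: string[];
    display?: string;
}

const types: TypeDescriptor[] = JSON.parse(fs.readFileSync("traceDir/types.json", "utf8"));

// Descriptors are ordered by id, so id N lives at index N - 1.
function lookupType(id: number): TypeDescriptor | undefined {
    return types[id - 1];
}

const t = lookupType(42); // hypothetical id copied from a *_DepthLimit event's args
if (t) {
    console.log(t.symbolName ?? t.intrinsicName ?? t.display, t.firstDeclaration?.path);
}
```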
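
The `ts.tracing` namespace is marked `/*@internal*/`, so outside the compiler itself it is only reachable from a build that exposes internal APIs. Under that assumption, this is a sketch, not a supported entry point, of driving tracing directly around `createProgram`, mirroring what `enableStatisticsAndTracing` and `reportStatistics` do above; the root file name and output directory are hypothetical.

```ts
// drive-tracing.ts — assumes an internal build where ts.tracing is visible.
declare const ts: any; // hedge: typed as any because tracing is not in the public .d.ts

ts.tracing.startTracing(/*configFilePath*/ undefined, /*traceDir*/ "traceDir", /*isBuildMode*/ false);

const program = ts.createProgram({ rootNames: ["app.ts"], options: { strict: true } });
program.getSemanticDiagnostics();                  // run the checker so check-phase events are emitted

ts.tracing.stopTracing(program.getTypeCatalog());  // closes trace.json and writes types.json
ts.tracing.dumpLegend();                           // writes legend.json mapping projects to trace files
```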