Merge pull request #29161 from Microsoft/incrementalBuild
Supports incremental build in tsc --b --w mode
sheetalkamat authored Jan 17, 2019
2 parents 31e8f02 + 900d6f7 commit 9bd2365
Showing 20 changed files with 1,016 additions and 513 deletions.
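Based on the commit title, the new behavior is exercised through build mode combined with watch. A typical invocation might look like the following; the project path is illustrative, and a project-references setup with composite projects (the configuration --build operates on) is assumed:

tsc --b --w
tsc --build --watch path/to/tsconfig.json

The short flags --b and --w are the same --build and --watch options spelled out in the second form.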
281 changes: 218 additions & 63 deletions in src/compiler/builder.ts

Large diffs are not rendered by default.

41 changes: 34 additions & 7 deletions in src/compiler/builderState.ts
@@ -50,11 +50,15 @@ namespace ts {
/**
* Cache of all files excluding default library file for the current program
*/
allFilesExcludingDefaultLibraryFile: ReadonlyArray<SourceFile> | undefined;
allFilesExcludingDefaultLibraryFile?: ReadonlyArray<SourceFile>;
/**
* Cache of all the file names
*/
allFileNames: ReadonlyArray<string> | undefined;
allFileNames?: ReadonlyArray<string>;
}

export function cloneMapOrUndefined<T>(map: ReadonlyMap<T> | undefined) {
return map ? cloneMap(map) : undefined;
}
}

@@ -230,9 +234,32 @@ namespace ts.BuilderState {
fileInfos,
referencedMap,
exportedModulesMap,
hasCalledUpdateShapeSignature,
allFilesExcludingDefaultLibraryFile: undefined,
allFileNames: undefined
hasCalledUpdateShapeSignature
};
}

/**
* Releases needed properties
*/
export function releaseCache(state: BuilderState) {
state.allFilesExcludingDefaultLibraryFile = undefined;
state.allFileNames = undefined;
}

/**
* Creates a clone of the state
*/
export function clone(state: Readonly<BuilderState>): BuilderState {
const fileInfos = createMap<FileInfo>();
state.fileInfos.forEach((value, key) => {
fileInfos.set(key, { ...value });
});
// Don't need to back up allFiles info since it's a cache anyway
return {
fileInfos,
referencedMap: cloneMapOrUndefined(state.referencedMap),
exportedModulesMap: cloneMapOrUndefined(state.exportedModulesMap),
hasCalledUpdateShapeSignature: cloneMap(state.hasCalledUpdateShapeSignature),
};
}

@@ -505,14 +532,14 @@ namespace ts.BuilderState {

// Start with the paths this file was referenced by
seenFileNamesMap.set(sourceFileWithUpdatedShape.path, sourceFileWithUpdatedShape);
const queue = getReferencedByPaths(state, sourceFileWithUpdatedShape.path);
const queue = getReferencedByPaths(state, sourceFileWithUpdatedShape.resolvedPath);
while (queue.length > 0) {
const currentPath = queue.pop()!;
if (!seenFileNamesMap.has(currentPath)) {
const currentSourceFile = programOfThisState.getSourceFileByPath(currentPath)!;
seenFileNamesMap.set(currentPath, currentSourceFile);
if (currentSourceFile && updateShapeSignature(state, programOfThisState, currentSourceFile, cacheToUpdateSignature, cancellationToken, computeHash!, exportedModulesMapCache)) { // TODO: GH#18217
queue.push(...getReferencedByPaths(state, currentPath));
queue.push(...getReferencedByPaths(state, currentSourceFile.resolvedPath));
}
}
}
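For illustration only, here is a minimal standalone sketch of the cloning pattern the new releaseCache/clone helpers implement: shallow-copy each per-file record with spread, and clone optional maps only when they are present. The FileInfoLike shape and the names below are assumptions for the sketch, not the compiler's internal types, and a plain Map stands in for the compiler's map type.

interface FileInfoLike {
    version: string;
    signature: string | undefined;
}

// Mirrors cloneMapOrUndefined: clone a map when it exists, otherwise keep undefined.
function cloneMapOrUndefinedSketch<K, V>(map: ReadonlyMap<K, V> | undefined): Map<K, V> | undefined {
    return map ? new Map(map) : undefined;
}

// Mirrors BuilderState.clone: copy each record so later signature updates on the
// clone do not mutate the original state.
function cloneStateSketch(
    fileInfos: ReadonlyMap<string, FileInfoLike>,
    referencedMap: ReadonlyMap<string, string[]> | undefined
) {
    const copiedFileInfos = new Map<string, FileInfoLike>();
    fileInfos.forEach((value, key) => copiedFileInfos.set(key, { ...value }));
    return {
        fileInfos: copiedFileInfos,
        referencedMap: cloneMapOrUndefinedSketch(referencedMap),
    };
}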
12 changes: 12 additions & 0 deletions in src/compiler/core.ts
@@ -1387,6 +1387,18 @@ namespace ts {
return result;
}

export function copyProperties<T1 extends T2, T2>(first: T1, second: T2) {
for (const id in second) {
if (hasOwnProperty.call(second, id)) {
(first as any)[id] = second[id];
}
}
}

export function maybeBind<T, A extends any[], R>(obj: T, fn: ((this: T, ...args: A) => R) | undefined): ((...args: A) => R) | undefined {
return fn ? fn.bind(obj) : undefined;
}

export interface MultiMap<T> extends Map<T[]> {
/**
* Adds the value to an array of values associated with the key, and returns the array.
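As a quick illustration, the two new helpers can be exercised standalone as below. These are local copies of the functions added above (with the compiler's hasOwnProperty swapped for a direct Object.prototype reference), followed by a small usage example whose names are made up for the sketch.

const hasOwn = Object.prototype.hasOwnProperty;

function copyProperties<T1 extends T2, T2>(first: T1, second: T2) {
    for (const id in second) {
        if (hasOwn.call(second, id)) {
            (first as any)[id] = second[id];
        }
    }
}

function maybeBind<T, A extends any[], R>(obj: T, fn: ((this: T, ...args: A) => R) | undefined): ((...args: A) => R) | undefined {
    return fn ? fn.bind(obj) : undefined;
}

// copyProperties overlays every own property of the second object onto the first.
const host = { fileExists: (_f: string) => false, currentDirectory: "/" };
copyProperties(host, { fileExists: (_f: string) => true });

// maybeBind returns a bound function, or undefined when no callback was supplied.
const logger = { prefix: "[build] ", log(this: { prefix: string }, msg: string) { return this.prefix + msg; } };
const boundLog = maybeBind(logger, logger.log); // (msg: string) => string
const noLog = maybeBind(logger, undefined);     // undefined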
4 changes: 4 additions & 0 deletions in src/compiler/diagnosticMessages.json
@@ -3957,6 +3957,10 @@
"category": "Error",
"code": 6370
},
"Updating unchanged output timestamps of project '{0}'...": {
"category": "Message",
"code": 6371
},

"The expected type comes from property '{0}' which is declared here on type '{1}'": {
"category": "Message",
95 changes: 56 additions & 39 deletions in src/compiler/program.ts
@@ -69,6 +69,7 @@ namespace ts {
export function createCompilerHost(options: CompilerOptions, setParentNodes?: boolean): CompilerHost {
return createCompilerHostWorker(options, setParentNodes);
}

/*@internal*/
// TODO(shkamat): update this after reworking ts build API
export function createCompilerHostWorker(options: CompilerOptions, setParentNodes?: boolean, system = sys): CompilerHost {
@@ -93,7 +94,6 @@ namespace ts {
}
text = "";
}

return text !== undefined ? createSourceFile(fileName, text, languageVersion, setParentNodes) : undefined;
}

@@ -203,18 +203,25 @@ namespace ts {
return compilerHost;
}

interface CompilerHostLikeForCache {
fileExists(fileName: string): boolean;
readFile(fileName: string, encoding?: string): string | undefined;
directoryExists?(directory: string): boolean;
createDirectory?(directory: string): void;
writeFile?: WriteFileCallback;
}

/*@internal*/
export function changeCompilerHostToUseCache(
host: CompilerHost,
export function changeCompilerHostLikeToUseCache(
host: CompilerHostLikeForCache,
toPath: (fileName: string) => Path,
useCacheForSourceFile: boolean
getSourceFile?: CompilerHost["getSourceFile"]
) {
const originalReadFile = host.readFile;
const originalFileExists = host.fileExists;
const originalDirectoryExists = host.directoryExists;
const originalCreateDirectory = host.createDirectory;
const originalWriteFile = host.writeFile;
const originalGetSourceFile = host.getSourceFile;
const readFileCache = createMap<string | false>();
const fileExistsCache = createMap<boolean>();
const directoryExistsCache = createMap<boolean>();
@@ -242,19 +249,17 @@ namespace ts {
return setReadFileCache(key, fileName);
};

if (useCacheForSourceFile) {
host.getSourceFile = (fileName, languageVersion, onError, shouldCreateNewSourceFile) => {
const key = toPath(fileName);
const value = sourceFileCache.get(key);
if (value) return value;
const getSourceFileWithCache: CompilerHost["getSourceFile"] | undefined = getSourceFile ? (fileName, languageVersion, onError, shouldCreateNewSourceFile) => {
const key = toPath(fileName);
const value = sourceFileCache.get(key);
if (value) return value;

const sourceFile = originalGetSourceFile.call(host, fileName, languageVersion, onError, shouldCreateNewSourceFile);
if (sourceFile && (isDeclarationFileName(fileName) || fileExtensionIs(fileName, Extension.Json))) {
sourceFileCache.set(key, sourceFile);
}
return sourceFile;
};
}
const sourceFile = getSourceFile(fileName, languageVersion, onError, shouldCreateNewSourceFile);
if (sourceFile && (isDeclarationFileName(fileName) || fileExtensionIs(fileName, Extension.Json))) {
sourceFileCache.set(key, sourceFile);
}
return sourceFile;
} : undefined;

// fileExists for any kind of extension
host.fileExists = fileName => {
@@ -265,23 +270,25 @@
fileExistsCache.set(key, !!newValue);
return newValue;
};
host.writeFile = (fileName, data, writeByteOrderMark, onError, sourceFiles) => {
const key = toPath(fileName);
fileExistsCache.delete(key);
if (originalWriteFile) {
host.writeFile = (fileName, data, writeByteOrderMark, onError, sourceFiles) => {
const key = toPath(fileName);
fileExistsCache.delete(key);

const value = readFileCache.get(key);
if (value && value !== data) {
readFileCache.delete(key);
sourceFileCache.delete(key);
}
else if (useCacheForSourceFile) {
const sourceFile = sourceFileCache.get(key);
if (sourceFile && sourceFile.text !== data) {
const value = readFileCache.get(key);
if (value && value !== data) {
readFileCache.delete(key);
sourceFileCache.delete(key);
}
}
originalWriteFile.call(host, fileName, data, writeByteOrderMark, onError, sourceFiles);
};
else if (getSourceFileWithCache) {
const sourceFile = sourceFileCache.get(key);
if (sourceFile && sourceFile.text !== data) {
sourceFileCache.delete(key);
}
}
originalWriteFile.call(host, fileName, data, writeByteOrderMark, onError, sourceFiles);
};
}

// directoryExists
if (originalDirectoryExists && originalCreateDirectory) {
Expand All @@ -306,7 +313,7 @@ namespace ts {
originalDirectoryExists,
originalCreateDirectory,
originalWriteFile,
originalGetSourceFile,
getSourceFileWithCache,
readFileWithCache
};
}
@@ -735,7 +742,7 @@ namespace ts {
performance.mark("beforeProgram");

const host = createProgramOptions.host || createCompilerHost(options);
const configParsingHost = parseConfigHostFromCompilerHost(host);
const configParsingHost = parseConfigHostFromCompilerHostLike(host);

let skipDefaultLib = options.noLib;
const getDefaultLibraryFileName = memoize(() => host.getDefaultLibFileName(options));
@@ -3104,18 +3111,28 @@ namespace ts {
}
}

interface CompilerHostLike {
useCaseSensitiveFileNames(): boolean;
getCurrentDirectory(): string;
fileExists(fileName: string): boolean;
readFile(fileName: string): string | undefined;
readDirectory?(rootDir: string, extensions: ReadonlyArray<string>, excludes: ReadonlyArray<string> | undefined, includes: ReadonlyArray<string>, depth?: number): string[];
trace?(s: string): void;
onUnRecoverableConfigFileDiagnostic?: DiagnosticReporter;
}

/* @internal */
export function parseConfigHostFromCompilerHost(host: CompilerHost): ParseConfigFileHost {
export function parseConfigHostFromCompilerHostLike(host: CompilerHostLike, directoryStructureHost: DirectoryStructureHost = host): ParseConfigFileHost {
return {
fileExists: f => host.fileExists(f),
fileExists: f => directoryStructureHost.fileExists(f),
readDirectory(root, extensions, excludes, includes, depth) {
Debug.assertDefined(host.readDirectory, "'CompilerHost.readDirectory' must be implemented to correctly process 'projectReferences'");
return host.readDirectory!(root, extensions, excludes, includes, depth);
Debug.assertDefined(directoryStructureHost.readDirectory, "'CompilerHost.readDirectory' must be implemented to correctly process 'projectReferences'");
return directoryStructureHost.readDirectory!(root, extensions, excludes, includes, depth);
},
readFile: f => host.readFile(f),
readFile: f => directoryStructureHost.readFile(f),
useCaseSensitiveFileNames: host.useCaseSensitiveFileNames(),
getCurrentDirectory: () => host.getCurrentDirectory(),
onUnRecoverableConfigFileDiagnostic: () => undefined,
onUnRecoverableConfigFileDiagnostic: host.onUnRecoverableConfigFileDiagnostic || (() => undefined),
trace: host.trace ? (s) => host.trace!(s) : undefined
};
}
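To make the caching idea concrete, here is a minimal standalone sketch of the pattern changeCompilerHostLikeToUseCache applies: memoize fileExists and readFile results per canonical path, and drop stale cache entries when the same path is written. The HostLike shape, withFileCache, and toPath below are illustrative names for the sketch, not compiler API.

interface HostLike {
    fileExists(fileName: string): boolean;
    readFile(fileName: string): string | undefined;
    writeFile?(fileName: string, data: string): void;
}

function withFileCache(host: HostLike, toPath: (fileName: string) => string): HostLike {
    const fileExistsCache = new Map<string, boolean>();
    const readFileCache = new Map<string, string | false>(); // false marks "file could not be read"
    const originalWriteFile = host.writeFile;
    return {
        fileExists: fileName => {
            const key = toPath(fileName);
            const cached = fileExistsCache.get(key);
            if (cached !== undefined) return cached;
            const result = host.fileExists(fileName);
            fileExistsCache.set(key, result);
            return result;
        },
        readFile: fileName => {
            const key = toPath(fileName);
            const cached = readFileCache.get(key);
            if (cached !== undefined) return cached !== false ? cached : undefined;
            const text = host.readFile(fileName);
            readFileCache.set(key, text !== undefined ? text : false);
            return text;
        },
        // Writes go through, but they invalidate any cached result for the written path.
        writeFile: originalWriteFile
            ? (fileName, data) => {
                const key = toPath(fileName);
                fileExistsCache.delete(key);
                if (readFileCache.get(key) !== data) readFileCache.delete(key);
                originalWriteFile(fileName, data);
            }
            : undefined,
    };
}

The actual helper in the diff additionally caches parsed source files when a getSourceFile callback is supplied and returns the original host methods so the caching can be undone.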
19 changes: 10 additions & 9 deletions in src/compiler/sys.ts
@@ -2,6 +2,16 @@ declare function setTimeout(handler: (...args: any[]) => void, timeout: number):
declare function clearTimeout(handle: any): void;

namespace ts {
/**
* djb2 hashing algorithm
* http://www.cse.yorku.ca/~oz/hash.html
*/
/* @internal */
export function generateDjb2Hash(data: string): string {
const chars = data.split("").map(str => str.charCodeAt(0));
return `${chars.reduce((prev, curr) => ((prev << 5) + prev) + curr, 5381)}`;
}

/**
* Set a high stack trace limit to provide more information in case of an error.
* Called for command-line and server use cases.
@@ -1115,15 +1125,6 @@ namespace ts {
}
}

/**
* djb2 hashing algorithm
* http://www.cse.yorku.ca/~oz/hash.html
*/
function generateDjb2Hash(data: string): string {
const chars = data.split("").map(str => str.charCodeAt(0));
return `${chars.reduce((prev, curr) => ((prev << 5) + prev) + curr, 5381)}`;
}

function createMD5HashUsingNativeCrypto(data: string): string {
const hash = _crypto!.createHash("md5");
hash.update(data);
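For a quick worked example of the hash that is being moved and exported here (a standalone copy, since generateDjb2Hash is internal): start from 5381 and fold each character code in with hash * 33 + code. The << 5 form runs in 32-bit integer arithmetic, so very long inputs wrap, which is acceptable for a change-detection hash.

function djb2(data: string): string {
    const chars = data.split("").map(str => str.charCodeAt(0));
    return `${chars.reduce((prev, curr) => ((prev << 5) + prev) + curr, 5381)}`;
}

// "ab": 5381 -> 5381 * 33 + 97 = 177670 -> 177670 * 33 + 98 = 5863208
console.log(djb2("ab")); // "5863208"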
(The remaining 14 changed files are not rendered here.)
