Switch to ES Map/Set internally (#33771)
* Add full implemention of Map and Set to shims * Update default Map interface * Remove WeakMap/WeakSet * Add tests for set shim * Update most usages of Map<K, true> to Set * PR Feedback * Fix lint issues * Change key in fsWatchCallback * Simpler shim, more tests * Fix typo in collection shim
This commit is contained in:
parent
95690c0aca
commit
eb2f4e2337
|
@ -15,7 +15,7 @@ namespace ts {
|
|||
referenced: boolean;
|
||||
}
|
||||
|
||||
export function getModuleInstanceState(node: ModuleDeclaration, visited?: Map<ModuleInstanceState | undefined>): ModuleInstanceState {
|
||||
export function getModuleInstanceState(node: ModuleDeclaration, visited?: Map<number, ModuleInstanceState | undefined>): ModuleInstanceState {
|
||||
if (node.body && !node.body.parent) {
|
||||
// getModuleInstanceStateForAliasTarget needs to walk up the parent chain, so parent pointers must be set on this tree already
|
||||
setParent(node.body, node);
|
||||
|
@ -24,8 +24,8 @@ namespace ts {
|
|||
return node.body ? getModuleInstanceStateCached(node.body, visited) : ModuleInstanceState.Instantiated;
|
||||
}
|
||||
|
||||
function getModuleInstanceStateCached(node: Node, visited = createMap<ModuleInstanceState | undefined>()) {
|
||||
const nodeId = "" + getNodeId(node);
|
||||
function getModuleInstanceStateCached(node: Node, visited = new Map<number, ModuleInstanceState | undefined>()) {
|
||||
const nodeId = getNodeId(node);
|
||||
if (visited.has(nodeId)) {
|
||||
return visited.get(nodeId) || ModuleInstanceState.NonInstantiated;
|
||||
}
|
||||
|
@ -35,7 +35,7 @@ namespace ts {
|
|||
return result;
|
||||
}
|
||||
|
||||
function getModuleInstanceStateWorker(node: Node, visited: Map<ModuleInstanceState | undefined>): ModuleInstanceState {
|
||||
function getModuleInstanceStateWorker(node: Node, visited: Map<number, ModuleInstanceState | undefined>): ModuleInstanceState {
|
||||
// A module is uninstantiated if it contains only
|
||||
switch (node.kind) {
|
||||
// 1. interface declarations, type alias declarations
|
||||
|
@ -107,7 +107,7 @@ namespace ts {
|
|||
return ModuleInstanceState.Instantiated;
|
||||
}
|
||||
|
||||
function getModuleInstanceStateForAliasTarget(specifier: ExportSpecifier, visited: Map<ModuleInstanceState | undefined>) {
|
||||
function getModuleInstanceStateForAliasTarget(specifier: ExportSpecifier, visited: Map<number, ModuleInstanceState | undefined>) {
|
||||
const name = specifier.propertyName || specifier.name;
|
||||
let p: Node | undefined = specifier.parent;
|
||||
while (p) {
|
||||
|
@ -2884,8 +2884,7 @@ namespace ts {
|
|||
|
||||
function addLateBoundAssignmentDeclarationToSymbol(node: BinaryExpression | DynamicNamedDeclaration, symbol: Symbol | undefined) {
|
||||
if (symbol) {
|
||||
const members = symbol.assignmentDeclarationMembers || (symbol.assignmentDeclarationMembers = createMap());
|
||||
members.set("" + getNodeId(node), node);
|
||||
(symbol.assignmentDeclarationMembers || (symbol.assignmentDeclarationMembers = new Map())).set(getNodeId(node), node);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -24,11 +24,11 @@ namespace ts {
|
|||
/**
|
||||
* Cache of bind and check diagnostics for files with their Path being the key
|
||||
*/
|
||||
semanticDiagnosticsPerFile?: ReadonlyMap<readonly ReusableDiagnostic[] | readonly Diagnostic[]> | undefined;
|
||||
semanticDiagnosticsPerFile?: ReadonlyMap<Path, readonly ReusableDiagnostic[] | readonly Diagnostic[]> | undefined;
|
||||
/**
|
||||
* The map has key by source file's path that has been changed
|
||||
*/
|
||||
changedFilesSet?: ReadonlyMap<true>;
|
||||
changedFilesSet?: ReadonlySet<Path>;
|
||||
/**
|
||||
* Set of affected files being iterated
|
||||
*/
|
||||
|
@ -41,7 +41,7 @@ namespace ts {
|
|||
* Map of file signatures, with key being file path, calculated while getting current changed file's affected files
|
||||
* These will be committed whenever the iteration through affected files of current changed file is complete
|
||||
*/
|
||||
currentAffectedFilesSignatures?: ReadonlyMap<string> | undefined;
|
||||
currentAffectedFilesSignatures?: ReadonlyMap<Path, string> | undefined;
|
||||
/**
|
||||
* Newly computed visible to outside referencedSet
|
||||
*/
|
||||
|
@ -49,7 +49,7 @@ namespace ts {
|
|||
/**
|
||||
* True if the semantic diagnostics were copied from the old state
|
||||
*/
|
||||
semanticDiagnosticsFromOldState?: Map<true>;
|
||||
semanticDiagnosticsFromOldState?: Set<Path>;
|
||||
/**
|
||||
* program corresponding to this state
|
||||
*/
|
||||
|
@ -65,7 +65,7 @@ namespace ts {
|
|||
/**
|
||||
* Files pending to be emitted kind.
|
||||
*/
|
||||
affectedFilesPendingEmitKind?: ReadonlyMap<BuilderFileEmit> | undefined;
|
||||
affectedFilesPendingEmitKind?: ReadonlyMap<Path, BuilderFileEmit> | undefined;
|
||||
/**
|
||||
* Current index to retrieve pending affected file
|
||||
*/
|
||||
|
@ -89,11 +89,11 @@ namespace ts {
|
|||
/**
|
||||
* Cache of bind and check diagnostics for files with their Path being the key
|
||||
*/
|
||||
semanticDiagnosticsPerFile: Map<readonly Diagnostic[]> | undefined;
|
||||
semanticDiagnosticsPerFile: Map<Path, readonly Diagnostic[]> | undefined;
|
||||
/**
|
||||
* The map has key by source file's path that has been changed
|
||||
*/
|
||||
changedFilesSet: Map<true>;
|
||||
changedFilesSet: Set<Path>;
|
||||
/**
|
||||
* Set of affected files being iterated
|
||||
*/
|
||||
|
@ -110,7 +110,7 @@ namespace ts {
|
|||
* Map of file signatures, with key being file path, calculated while getting current changed file's affected files
|
||||
* These will be committed whenever the iteration through affected files of current changed file is complete
|
||||
*/
|
||||
currentAffectedFilesSignatures: Map<string> | undefined;
|
||||
currentAffectedFilesSignatures: Map<Path, string> | undefined;
|
||||
/**
|
||||
* Newly computed visible to outside referencedSet
|
||||
*/
|
||||
|
@ -118,7 +118,7 @@ namespace ts {
|
|||
/**
|
||||
* Already seen affected files
|
||||
*/
|
||||
seenAffectedFiles: Map<true> | undefined;
|
||||
seenAffectedFiles: Set<Path> | undefined;
|
||||
/**
|
||||
* whether this program has cleaned semantic diagnostics cache for lib files
|
||||
*/
|
||||
|
@ -126,7 +126,7 @@ namespace ts {
|
|||
/**
|
||||
* True if the semantic diagnostics were copied from the old state
|
||||
*/
|
||||
semanticDiagnosticsFromOldState?: Map<true>;
|
||||
semanticDiagnosticsFromOldState?: Set<Path>;
|
||||
/**
|
||||
* program corresponding to this state
|
||||
*/
|
||||
|
@ -142,7 +142,7 @@ namespace ts {
|
|||
/**
|
||||
* Files pending to be emitted kind.
|
||||
*/
|
||||
affectedFilesPendingEmitKind: Map<BuilderFileEmit> | undefined;
|
||||
affectedFilesPendingEmitKind: Map<Path, BuilderFileEmit> | undefined;
|
||||
/**
|
||||
* Current index to retrieve pending affected file
|
||||
*/
|
||||
|
@ -154,16 +154,16 @@ namespace ts {
|
|||
/**
|
||||
* Already seen emitted files
|
||||
*/
|
||||
seenEmittedFiles: Map<BuilderFileEmit> | undefined;
|
||||
seenEmittedFiles: Map<Path, BuilderFileEmit> | undefined;
|
||||
/**
|
||||
* true if program has been emitted
|
||||
*/
|
||||
programEmitComplete?: true;
|
||||
}
|
||||
|
||||
function hasSameKeys<T, U>(map1: ReadonlyMap<T> | undefined, map2: ReadonlyMap<U> | undefined): boolean {
|
||||
function hasSameKeys(map1: ReadonlyCollection<string> | undefined, map2: ReadonlyCollection<string> | undefined): boolean {
|
||||
// Has same size and every key is present in both maps
|
||||
return map1 as ReadonlyMap<T | U> === map2 || map1 !== undefined && map2 !== undefined && map1.size === map2.size && !forEachKey(map1, key => !map2.has(key));
|
||||
return map1 === map2 || map1 !== undefined && map2 !== undefined && map1.size === map2.size && !forEachKey(map1, key => !map2.has(key));
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -176,9 +176,9 @@ namespace ts {
|
|||
state.compilerOptions = compilerOptions;
|
||||
// With --out or --outFile, any change affects all semantic diagnostics so no need to cache them
|
||||
if (!outFile(compilerOptions)) {
|
||||
state.semanticDiagnosticsPerFile = createMap<readonly Diagnostic[]>();
|
||||
state.semanticDiagnosticsPerFile = new Map();
|
||||
}
|
||||
state.changedFilesSet = createMap<true>();
|
||||
state.changedFilesSet = new Set();
|
||||
|
||||
const useOldState = BuilderState.canReuseOldState(state.referencedMap, oldState);
|
||||
const oldCompilerOptions = useOldState ? oldState!.compilerOptions : undefined;
|
||||
|
@ -196,14 +196,12 @@ namespace ts {
|
|||
}
|
||||
|
||||
// Copy old state's changed files set
|
||||
if (changedFilesSet) {
|
||||
copyEntries(changedFilesSet, state.changedFilesSet);
|
||||
}
|
||||
changedFilesSet?.forEach(value => state.changedFilesSet.add(value));
|
||||
if (!outFile(compilerOptions) && oldState!.affectedFilesPendingEmit) {
|
||||
state.affectedFilesPendingEmit = oldState!.affectedFilesPendingEmit.slice();
|
||||
state.affectedFilesPendingEmitKind = cloneMapOrUndefined(oldState!.affectedFilesPendingEmitKind);
|
||||
state.affectedFilesPendingEmitKind = oldState!.affectedFilesPendingEmitKind && new Map(oldState!.affectedFilesPendingEmitKind);
|
||||
state.affectedFilesPendingEmitIndex = oldState!.affectedFilesPendingEmitIndex;
|
||||
state.seenAffectedFiles = createMap();
|
||||
state.seenAffectedFiles = new Set();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -227,10 +225,10 @@ namespace ts {
|
|||
// Referenced file was deleted in the new program
|
||||
newReferences && forEachKey(newReferences, path => !state.fileInfos.has(path) && oldState!.fileInfos.has(path))) {
|
||||
// Register file as changed file and do not copy semantic diagnostics, since all changed files need to be re-evaluated
|
||||
state.changedFilesSet.set(sourceFilePath, true);
|
||||
state.changedFilesSet.add(sourceFilePath);
|
||||
}
|
||||
else if (canCopySemanticDiagnostics) {
|
||||
const sourceFile = newProgram.getSourceFileByPath(sourceFilePath as Path)!;
|
||||
const sourceFile = newProgram.getSourceFileByPath(sourceFilePath)!;
|
||||
|
||||
if (sourceFile.isDeclarationFile && !copyDeclarationFileDiagnostics) { return; }
|
||||
if (sourceFile.hasNoDefaultLib && !copyLibFileDiagnostics) { return; }
|
||||
|
@ -240,9 +238,9 @@ namespace ts {
|
|||
if (diagnostics) {
|
||||
state.semanticDiagnosticsPerFile!.set(sourceFilePath, oldState!.hasReusableDiagnostic ? convertToDiagnostics(diagnostics as readonly ReusableDiagnostic[], newProgram, getCanonicalFileName) : diagnostics as readonly Diagnostic[]);
|
||||
if (!state.semanticDiagnosticsFromOldState) {
|
||||
state.semanticDiagnosticsFromOldState = createMap<true>();
|
||||
state.semanticDiagnosticsFromOldState = new Set();
|
||||
}
|
||||
state.semanticDiagnosticsFromOldState.set(sourceFilePath, true);
|
||||
state.semanticDiagnosticsFromOldState.add(sourceFilePath);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
@ -250,13 +248,13 @@ namespace ts {
|
|||
// If the global file is removed, add all files as changed
|
||||
if (useOldState && forEachEntry(oldState!.fileInfos, (info, sourceFilePath) => info.affectsGlobalScope && !state.fileInfos.has(sourceFilePath))) {
|
||||
BuilderState.getAllFilesExcludingDefaultLibraryFile(state, newProgram, /*firstSourceFile*/ undefined)
|
||||
.forEach(file => state.changedFilesSet.set(file.resolvedPath, true));
|
||||
.forEach(file => state.changedFilesSet.add(file.resolvedPath));
|
||||
}
|
||||
else if (oldCompilerOptions && !outFile(compilerOptions) && compilerOptionsAffectEmit(compilerOptions, oldCompilerOptions)) {
|
||||
// Add all files to affectedFilesPendingEmit since emit changed
|
||||
newProgram.getSourceFiles().forEach(f => addToAffectedFilesPendingEmit(state, f.resolvedPath, BuilderFileEmit.Full));
|
||||
Debug.assert(!state.seenAffectedFiles || !state.seenAffectedFiles.size);
|
||||
state.seenAffectedFiles = state.seenAffectedFiles || createMap<true>();
|
||||
state.seenAffectedFiles = state.seenAffectedFiles || new Set();
|
||||
}
|
||||
|
||||
state.buildInfoEmitPending = !!state.changedFilesSet.size;
|
||||
|
@ -307,22 +305,22 @@ namespace ts {
|
|||
*/
|
||||
function cloneBuilderProgramState(state: Readonly<BuilderProgramState>): BuilderProgramState {
|
||||
const newState = BuilderState.clone(state) as BuilderProgramState;
|
||||
newState.semanticDiagnosticsPerFile = cloneMapOrUndefined(state.semanticDiagnosticsPerFile);
|
||||
newState.changedFilesSet = cloneMap(state.changedFilesSet);
|
||||
newState.semanticDiagnosticsPerFile = state.semanticDiagnosticsPerFile && new Map(state.semanticDiagnosticsPerFile);
|
||||
newState.changedFilesSet = new Set(state.changedFilesSet);
|
||||
newState.affectedFiles = state.affectedFiles;
|
||||
newState.affectedFilesIndex = state.affectedFilesIndex;
|
||||
newState.currentChangedFilePath = state.currentChangedFilePath;
|
||||
newState.currentAffectedFilesSignatures = cloneMapOrUndefined(state.currentAffectedFilesSignatures);
|
||||
newState.currentAffectedFilesExportedModulesMap = cloneMapOrUndefined(state.currentAffectedFilesExportedModulesMap);
|
||||
newState.seenAffectedFiles = cloneMapOrUndefined(state.seenAffectedFiles);
|
||||
newState.currentAffectedFilesSignatures = state.currentAffectedFilesSignatures && new Map(state.currentAffectedFilesSignatures);
|
||||
newState.currentAffectedFilesExportedModulesMap = state.currentAffectedFilesExportedModulesMap && new Map(state.currentAffectedFilesExportedModulesMap);
|
||||
newState.seenAffectedFiles = state.seenAffectedFiles && new Set(state.seenAffectedFiles);
|
||||
newState.cleanedDiagnosticsOfLibFiles = state.cleanedDiagnosticsOfLibFiles;
|
||||
newState.semanticDiagnosticsFromOldState = cloneMapOrUndefined(state.semanticDiagnosticsFromOldState);
|
||||
newState.semanticDiagnosticsFromOldState = state.semanticDiagnosticsFromOldState && new Set(state.semanticDiagnosticsFromOldState);
|
||||
newState.program = state.program;
|
||||
newState.compilerOptions = state.compilerOptions;
|
||||
newState.affectedFilesPendingEmit = state.affectedFilesPendingEmit && state.affectedFilesPendingEmit.slice();
|
||||
newState.affectedFilesPendingEmitKind = cloneMapOrUndefined(state.affectedFilesPendingEmitKind);
|
||||
newState.affectedFilesPendingEmitKind = state.affectedFilesPendingEmitKind && new Map(state.affectedFilesPendingEmitKind);
|
||||
newState.affectedFilesPendingEmitIndex = state.affectedFilesPendingEmitIndex;
|
||||
newState.seenEmittedFiles = cloneMapOrUndefined(state.seenEmittedFiles);
|
||||
newState.seenEmittedFiles = state.seenEmittedFiles && new Map(state.seenEmittedFiles);
|
||||
newState.programEmitComplete = state.programEmitComplete;
|
||||
return newState;
|
||||
}
|
||||
|
@ -384,14 +382,14 @@ namespace ts {
|
|||
}
|
||||
|
||||
// Get next batch of affected files
|
||||
state.currentAffectedFilesSignatures = state.currentAffectedFilesSignatures || createMap();
|
||||
if (!state.currentAffectedFilesSignatures) state.currentAffectedFilesSignatures = new Map();
|
||||
if (state.exportedModulesMap) {
|
||||
state.currentAffectedFilesExportedModulesMap = state.currentAffectedFilesExportedModulesMap || createMap<BuilderState.ReferencedSet | false>();
|
||||
if (!state.currentAffectedFilesExportedModulesMap) state.currentAffectedFilesExportedModulesMap = new Map();
|
||||
}
|
||||
state.affectedFiles = BuilderState.getFilesAffectedBy(state, program, nextKey.value as Path, cancellationToken, computeHash, state.currentAffectedFilesSignatures, state.currentAffectedFilesExportedModulesMap);
|
||||
state.currentChangedFilePath = nextKey.value as Path;
|
||||
state.affectedFiles = BuilderState.getFilesAffectedBy(state, program, nextKey.value, cancellationToken, computeHash, state.currentAffectedFilesSignatures, state.currentAffectedFilesExportedModulesMap);
|
||||
state.currentChangedFilePath = nextKey.value;
|
||||
state.affectedFilesIndex = 0;
|
||||
state.seenAffectedFiles = state.seenAffectedFiles || createMap<true>();
|
||||
if (!state.seenAffectedFiles) state.seenAffectedFiles = new Set();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -401,7 +399,7 @@ namespace ts {
|
|||
function getNextAffectedFilePendingEmit(state: BuilderProgramState) {
|
||||
const { affectedFilesPendingEmit } = state;
|
||||
if (affectedFilesPendingEmit) {
|
||||
const seenEmittedFiles = state.seenEmittedFiles || (state.seenEmittedFiles = createMap());
|
||||
const seenEmittedFiles = (state.seenEmittedFiles || (state.seenEmittedFiles = new Map()));
|
||||
for (let i = state.affectedFilesPendingEmitIndex!; i < affectedFilesPendingEmit.length; i++) {
|
||||
const affectedFile = Debug.checkDefined(state.program).getSourceFileByPath(affectedFilesPendingEmit[i]);
|
||||
if (affectedFile) {
|
||||
|
@ -516,7 +514,7 @@ namespace ts {
|
|||
// Since isolated modules dont change js files, files affected by change in signature is itself
|
||||
// But we need to cleanup semantic diagnostics and queue dts emit for affected files
|
||||
if (state.compilerOptions.isolatedModules) {
|
||||
const seenFileNamesMap = createMap<true>();
|
||||
const seenFileNamesMap = new Map<Path, true>();
|
||||
seenFileNamesMap.set(affectedFile.resolvedPath, true);
|
||||
const queue = BuilderState.getReferencedByPaths(state, affectedFile.resolvedPath);
|
||||
while (queue.length > 0) {
|
||||
|
@ -533,13 +531,13 @@ namespace ts {
|
|||
}
|
||||
|
||||
Debug.assert(!!state.currentAffectedFilesExportedModulesMap);
|
||||
const seenFileAndExportsOfFile = createMap<true>();
|
||||
const seenFileAndExportsOfFile = new Set<string>();
|
||||
// Go through exported modules from cache first
|
||||
// If exported modules has path, all files referencing file exported from are affected
|
||||
if (forEachEntry(state.currentAffectedFilesExportedModulesMap, (exportedModules, exportedFromPath) =>
|
||||
exportedModules &&
|
||||
exportedModules.has(affectedFile.resolvedPath) &&
|
||||
forEachFilesReferencingPath(state, exportedFromPath as Path, seenFileAndExportsOfFile, fn)
|
||||
forEachFilesReferencingPath(state, exportedFromPath, seenFileAndExportsOfFile, fn)
|
||||
)) {
|
||||
return;
|
||||
}
|
||||
|
@ -548,24 +546,24 @@ namespace ts {
|
|||
forEachEntry(state.exportedModulesMap, (exportedModules, exportedFromPath) =>
|
||||
!state.currentAffectedFilesExportedModulesMap!.has(exportedFromPath) && // If we already iterated this through cache, ignore it
|
||||
exportedModules.has(affectedFile.resolvedPath) &&
|
||||
forEachFilesReferencingPath(state, exportedFromPath as Path, seenFileAndExportsOfFile, fn)
|
||||
forEachFilesReferencingPath(state, exportedFromPath, seenFileAndExportsOfFile, fn)
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Iterate on files referencing referencedPath
|
||||
*/
|
||||
function forEachFilesReferencingPath(state: BuilderProgramState, referencedPath: Path, seenFileAndExportsOfFile: Map<true>, fn: (state: BuilderProgramState, filePath: Path) => boolean) {
|
||||
function forEachFilesReferencingPath(state: BuilderProgramState, referencedPath: Path, seenFileAndExportsOfFile: Set<string>, fn: (state: BuilderProgramState, filePath: Path) => boolean) {
|
||||
return forEachEntry(state.referencedMap!, (referencesInFile, filePath) =>
|
||||
referencesInFile.has(referencedPath) && forEachFileAndExportsOfFile(state, filePath as Path, seenFileAndExportsOfFile, fn)
|
||||
referencesInFile.has(referencedPath) && forEachFileAndExportsOfFile(state, filePath, seenFileAndExportsOfFile, fn)
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* fn on file and iterate on anything that exports this file
|
||||
*/
|
||||
function forEachFileAndExportsOfFile(state: BuilderProgramState, filePath: Path, seenFileAndExportsOfFile: Map<true>, fn: (state: BuilderProgramState, filePath: Path) => boolean): boolean {
|
||||
if (!addToSeen(seenFileAndExportsOfFile, filePath)) {
|
||||
function forEachFileAndExportsOfFile(state: BuilderProgramState, filePath: Path, seenFileAndExportsOfFile: Set<string>, fn: (state: BuilderProgramState, filePath: Path) => boolean): boolean {
|
||||
if (!tryAddToSet(seenFileAndExportsOfFile, filePath)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@ -580,7 +578,7 @@ namespace ts {
|
|||
if (forEachEntry(state.currentAffectedFilesExportedModulesMap, (exportedModules, exportedFromPath) =>
|
||||
exportedModules &&
|
||||
exportedModules.has(filePath) &&
|
||||
forEachFileAndExportsOfFile(state, exportedFromPath as Path, seenFileAndExportsOfFile, fn)
|
||||
forEachFileAndExportsOfFile(state, exportedFromPath, seenFileAndExportsOfFile, fn)
|
||||
)) {
|
||||
return true;
|
||||
}
|
||||
|
@ -589,7 +587,7 @@ namespace ts {
|
|||
if (forEachEntry(state.exportedModulesMap!, (exportedModules, exportedFromPath) =>
|
||||
!state.currentAffectedFilesExportedModulesMap!.has(exportedFromPath) && // If we already iterated this through cache, ignore it
|
||||
exportedModules.has(filePath) &&
|
||||
forEachFileAndExportsOfFile(state, exportedFromPath as Path, seenFileAndExportsOfFile, fn)
|
||||
forEachFileAndExportsOfFile(state, exportedFromPath, seenFileAndExportsOfFile, fn)
|
||||
)) {
|
||||
return true;
|
||||
}
|
||||
|
@ -598,7 +596,7 @@ namespace ts {
|
|||
return !!forEachEntry(state.referencedMap!, (referencesInFile, referencingFilePath) =>
|
||||
referencesInFile.has(filePath) &&
|
||||
!seenFileAndExportsOfFile.has(referencingFilePath) && // Not already removed diagnostic file
|
||||
fn(state, referencingFilePath as Path) // Dont add to seen since this is not yet done with the export removal
|
||||
fn(state, referencingFilePath) // Dont add to seen since this is not yet done with the export removal
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -622,9 +620,9 @@ namespace ts {
|
|||
state.programEmitComplete = true;
|
||||
}
|
||||
else {
|
||||
state.seenAffectedFiles!.set((affected as SourceFile).resolvedPath, true);
|
||||
state.seenAffectedFiles!.add((affected as SourceFile).resolvedPath);
|
||||
if (emitKind !== undefined) {
|
||||
(state.seenEmittedFiles || (state.seenEmittedFiles = createMap())).set((affected as SourceFile).resolvedPath, emitKind);
|
||||
(state.seenEmittedFiles || (state.seenEmittedFiles = new Map())).set((affected as SourceFile).resolvedPath, emitKind);
|
||||
}
|
||||
if (isPendingEmit) {
|
||||
state.affectedFilesPendingEmitIndex!++;
|
||||
|
@ -760,9 +758,9 @@ namespace ts {
|
|||
|
||||
if (state.affectedFilesPendingEmit) {
|
||||
const affectedFilesPendingEmit: ProgramBuilderInfoFilePendingEmit[] = [];
|
||||
const seenFiles = createMap<true>();
|
||||
const seenFiles = new Set<Path>();
|
||||
for (const path of state.affectedFilesPendingEmit.slice(state.affectedFilesPendingEmitIndex).sort(compareStringsCaseSensitive)) {
|
||||
if (addToSeen(seenFiles, path)) {
|
||||
if (tryAddToSet(seenFiles, path)) {
|
||||
affectedFilesPendingEmit.push([relativeToBuildInfo(path), state.affectedFilesPendingEmitKind!.get(path)!]);
|
||||
}
|
||||
}
|
||||
|
@ -1127,7 +1125,7 @@ namespace ts {
|
|||
|
||||
function addToAffectedFilesPendingEmit(state: BuilderProgramState, affectedFilePendingEmit: Path, kind: BuilderFileEmit) {
|
||||
if (!state.affectedFilesPendingEmit) state.affectedFilesPendingEmit = [];
|
||||
if (!state.affectedFilesPendingEmitKind) state.affectedFilesPendingEmitKind = createMap();
|
||||
if (!state.affectedFilesPendingEmitKind) state.affectedFilesPendingEmitKind = new Map();
|
||||
|
||||
const existingKind = state.affectedFilesPendingEmitKind.get(affectedFilePendingEmit);
|
||||
state.affectedFilesPendingEmit.push(affectedFilePendingEmit);
|
||||
|
@ -1142,14 +1140,14 @@ namespace ts {
|
|||
}
|
||||
}
|
||||
|
||||
function getMapOfReferencedSet(mapLike: MapLike<readonly string[]> | undefined, toPath: (path: string) => Path): ReadonlyMap<BuilderState.ReferencedSet> | undefined {
|
||||
function getMapOfReferencedSet(mapLike: MapLike<readonly string[]> | undefined, toPath: (path: string) => Path): ReadonlyMap<Path, BuilderState.ReferencedSet> | undefined {
|
||||
if (!mapLike) return undefined;
|
||||
const map = createMap<BuilderState.ReferencedSet>();
|
||||
const map = new Map<Path, BuilderState.ReferencedSet>();
|
||||
// Copies keys/values from template. Note that for..in will not throw if
|
||||
// template is undefined, and instead will just exit the loop.
|
||||
for (const key in mapLike) {
|
||||
if (hasProperty(mapLike, key)) {
|
||||
map.set(toPath(key), arrayToSet(mapLike[key], toPath));
|
||||
map.set(toPath(key), new Set(mapLike[key].map(toPath)));
|
||||
}
|
||||
}
|
||||
return map;
|
||||
|
@ -1159,7 +1157,7 @@ namespace ts {
|
|||
const buildInfoDirectory = getDirectoryPath(getNormalizedAbsolutePath(buildInfoPath, host.getCurrentDirectory()));
|
||||
const getCanonicalFileName = createGetCanonicalFileName(host.useCaseSensitiveFileNames());
|
||||
|
||||
const fileInfos = createMap<BuilderState.FileInfo>();
|
||||
const fileInfos = new Map<Path, BuilderState.FileInfo>();
|
||||
for (const key in program.fileInfos) {
|
||||
if (hasProperty(program.fileInfos, key)) {
|
||||
fileInfos.set(toPath(key), program.fileInfos[key]);
|
||||
|
|
|
@ -15,42 +15,42 @@ namespace ts {
|
|||
/**
|
||||
* Information of the file eg. its version, signature etc
|
||||
*/
|
||||
fileInfos: ReadonlyMap<BuilderState.FileInfo>;
|
||||
fileInfos: ReadonlyMap<Path, BuilderState.FileInfo>;
|
||||
/**
|
||||
* Contains the map of ReferencedSet=Referenced files of the file if module emit is enabled
|
||||
* Otherwise undefined
|
||||
* Thus non undefined value indicates, module emit
|
||||
*/
|
||||
readonly referencedMap?: ReadonlyMap<BuilderState.ReferencedSet> | undefined;
|
||||
readonly referencedMap?: ReadonlyMap<Path, BuilderState.ReferencedSet> | undefined;
|
||||
/**
|
||||
* Contains the map of exported modules ReferencedSet=exported module files from the file if module emit is enabled
|
||||
* Otherwise undefined
|
||||
*/
|
||||
readonly exportedModulesMap?: ReadonlyMap<BuilderState.ReferencedSet> | undefined;
|
||||
readonly exportedModulesMap?: ReadonlyMap<Path, BuilderState.ReferencedSet> | undefined;
|
||||
}
|
||||
|
||||
export interface BuilderState {
|
||||
/**
|
||||
* Information of the file eg. its version, signature etc
|
||||
*/
|
||||
fileInfos: Map<BuilderState.FileInfo>;
|
||||
fileInfos: Map<Path, BuilderState.FileInfo>;
|
||||
/**
|
||||
* Contains the map of ReferencedSet=Referenced files of the file if module emit is enabled
|
||||
* Otherwise undefined
|
||||
* Thus non undefined value indicates, module emit
|
||||
*/
|
||||
readonly referencedMap: ReadonlyMap<BuilderState.ReferencedSet> | undefined;
|
||||
readonly referencedMap: ReadonlyMap<Path, BuilderState.ReferencedSet> | undefined;
|
||||
/**
|
||||
* Contains the map of exported modules ReferencedSet=exported module files from the file if module emit is enabled
|
||||
* Otherwise undefined
|
||||
*/
|
||||
readonly exportedModulesMap: Map<BuilderState.ReferencedSet> | undefined;
|
||||
readonly exportedModulesMap: Map<Path, BuilderState.ReferencedSet> | undefined;
|
||||
/**
|
||||
* Map of files that have already called update signature.
|
||||
* That means hence forth these files are assumed to have
|
||||
* no change in their signature for this version of the program
|
||||
*/
|
||||
hasCalledUpdateShapeSignature: Map<true>;
|
||||
hasCalledUpdateShapeSignature: Set<Path>;
|
||||
/**
|
||||
* Cache of all files excluding default library file for the current program
|
||||
*/
|
||||
|
@ -73,7 +73,7 @@ namespace ts {
|
|||
/**
|
||||
* Referenced files with values for the keys as referenced file's path to be true
|
||||
*/
|
||||
export type ReferencedSet = ReadonlyMap<true>;
|
||||
export type ReferencedSet = ReadonlySet<Path>;
|
||||
/**
|
||||
* Compute the hash to store the shape of the file
|
||||
*/
|
||||
|
@ -83,7 +83,7 @@ namespace ts {
|
|||
* Exported modules to from declaration emit being computed.
|
||||
* This can contain false in the affected file path to specify that there are no exported module(types from other modules) for this file
|
||||
*/
|
||||
export type ComputingExportedModulesMap = Map<ReferencedSet | false>;
|
||||
export type ComputingExportedModulesMap = Map<Path, ReferencedSet | false>;
|
||||
|
||||
/**
|
||||
* Get the referencedFile from the imported module symbol
|
||||
|
@ -113,8 +113,8 @@ namespace ts {
|
|||
/**
|
||||
* Gets the referenced files for a file from the program with values for the keys as referenced file's path to be true
|
||||
*/
|
||||
function getReferencedFiles(program: Program, sourceFile: SourceFile, getCanonicalFileName: GetCanonicalFileName): Map<true> | undefined {
|
||||
let referencedFiles: Map<true> | undefined;
|
||||
function getReferencedFiles(program: Program, sourceFile: SourceFile, getCanonicalFileName: GetCanonicalFileName): Set<Path> | undefined {
|
||||
let referencedFiles: Set<Path> | undefined;
|
||||
|
||||
// We need to use a set here since the code can contain the same import twice,
|
||||
// but that will only be one dependency.
|
||||
|
@ -185,17 +185,14 @@ namespace ts {
|
|||
}
|
||||
|
||||
function addReferencedFile(referencedPath: Path) {
|
||||
if (!referencedFiles) {
|
||||
referencedFiles = createMap<true>();
|
||||
}
|
||||
referencedFiles.set(referencedPath, true);
|
||||
(referencedFiles || (referencedFiles = new Set())).add(referencedPath);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if oldState is reusable, that is the emitKind = module/non module has not changed
|
||||
*/
|
||||
export function canReuseOldState(newReferencedMap: ReadonlyMap<ReferencedSet> | undefined, oldState: Readonly<ReusableBuilderState> | undefined) {
|
||||
export function canReuseOldState(newReferencedMap: ReadonlyMap<Path, ReferencedSet> | undefined, oldState: Readonly<ReusableBuilderState> | undefined) {
|
||||
return oldState && !oldState.referencedMap === !newReferencedMap;
|
||||
}
|
||||
|
||||
|
@ -203,10 +200,10 @@ namespace ts {
|
|||
* Creates the state of file references and signature for the new program from oldState if it is safe
|
||||
*/
|
||||
export function create(newProgram: Program, getCanonicalFileName: GetCanonicalFileName, oldState?: Readonly<ReusableBuilderState>): BuilderState {
|
||||
const fileInfos = createMap<FileInfo>();
|
||||
const referencedMap = newProgram.getCompilerOptions().module !== ModuleKind.None ? createMap<ReferencedSet>() : undefined;
|
||||
const exportedModulesMap = referencedMap ? createMap<ReferencedSet>() : undefined;
|
||||
const hasCalledUpdateShapeSignature = createMap<true>();
|
||||
const fileInfos = new Map<Path, FileInfo>();
|
||||
const referencedMap = newProgram.getCompilerOptions().module !== ModuleKind.None ? new Map<Path, ReferencedSet>() : undefined;
|
||||
const exportedModulesMap = referencedMap ? new Map<Path, ReferencedSet>() : undefined;
|
||||
const hasCalledUpdateShapeSignature = new Set<Path>();
|
||||
const useOldState = canReuseOldState(referencedMap, oldState);
|
||||
|
||||
// Create the reference map, and set the file infos
|
||||
|
@ -249,28 +246,24 @@ namespace ts {
|
|||
* Creates a clone of the state
|
||||
*/
|
||||
export function clone(state: Readonly<BuilderState>): BuilderState {
|
||||
const fileInfos = createMap<FileInfo>();
|
||||
state.fileInfos.forEach((value, key) => {
|
||||
fileInfos.set(key, { ...value });
|
||||
});
|
||||
// Dont need to backup allFiles info since its cache anyway
|
||||
return {
|
||||
fileInfos,
|
||||
referencedMap: cloneMapOrUndefined(state.referencedMap),
|
||||
exportedModulesMap: cloneMapOrUndefined(state.exportedModulesMap),
|
||||
hasCalledUpdateShapeSignature: cloneMap(state.hasCalledUpdateShapeSignature),
|
||||
fileInfos: new Map(state.fileInfos),
|
||||
referencedMap: state.referencedMap && new Map(state.referencedMap),
|
||||
exportedModulesMap: state.exportedModulesMap && new Map(state.exportedModulesMap),
|
||||
hasCalledUpdateShapeSignature: new Set(state.hasCalledUpdateShapeSignature),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the files affected by the path from the program
|
||||
*/
|
||||
export function getFilesAffectedBy(state: BuilderState, programOfThisState: Program, path: Path, cancellationToken: CancellationToken | undefined, computeHash: ComputeHash, cacheToUpdateSignature?: Map<string>, exportedModulesMapCache?: ComputingExportedModulesMap): readonly SourceFile[] {
|
||||
export function getFilesAffectedBy(state: BuilderState, programOfThisState: Program, path: Path, cancellationToken: CancellationToken | undefined, computeHash: ComputeHash, cacheToUpdateSignature?: Map<Path, string>, exportedModulesMapCache?: ComputingExportedModulesMap): readonly SourceFile[] {
|
||||
// Since the operation could be cancelled, the signatures are always stored in the cache
|
||||
// They will be committed once it is safe to use them
|
||||
// eg when calling this api from tsserver, if there is no cancellation of the operation
|
||||
// In the other cases the affected files signatures are committed only after the iteration through the result is complete
|
||||
const signatureCache = cacheToUpdateSignature || createMap();
|
||||
const signatureCache = cacheToUpdateSignature || new Map();
|
||||
const sourceFile = programOfThisState.getSourceFileByPath(path);
|
||||
if (!sourceFile) {
|
||||
return emptyArray;
|
||||
|
@ -292,19 +285,19 @@ namespace ts {
|
|||
* Updates the signatures from the cache into state's fileinfo signatures
|
||||
* This should be called whenever it is safe to commit the state of the builder
|
||||
*/
|
||||
export function updateSignaturesFromCache(state: BuilderState, signatureCache: Map<string>) {
|
||||
signatureCache.forEach((signature, path) => updateSignatureOfFile(state, signature, path as Path));
|
||||
export function updateSignaturesFromCache(state: BuilderState, signatureCache: Map<Path, string>) {
|
||||
signatureCache.forEach((signature, path) => updateSignatureOfFile(state, signature, path));
|
||||
}
|
||||
|
||||
export function updateSignatureOfFile(state: BuilderState, signature: string | undefined, path: Path) {
|
||||
state.fileInfos.get(path)!.signature = signature;
|
||||
state.hasCalledUpdateShapeSignature.set(path, true);
|
||||
state.hasCalledUpdateShapeSignature.add(path);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns if the shape of the signature has changed since last emit
|
||||
*/
|
||||
export function updateShapeSignature(state: Readonly<BuilderState>, programOfThisState: Program, sourceFile: SourceFile, cacheToUpdateSignature: Map<string>, cancellationToken: CancellationToken | undefined, computeHash: ComputeHash, exportedModulesMapCache?: ComputingExportedModulesMap) {
|
||||
export function updateShapeSignature(state: Readonly<BuilderState>, programOfThisState: Program, sourceFile: SourceFile, cacheToUpdateSignature: Map<Path, string>, cancellationToken: CancellationToken | undefined, computeHash: ComputeHash, exportedModulesMapCache?: ComputingExportedModulesMap) {
|
||||
Debug.assert(!!sourceFile);
|
||||
Debug.assert(!exportedModulesMapCache || !!state.exportedModulesMap, "Compute visible to outside map only if visibleToOutsideReferencedMap present in the state");
|
||||
|
||||
|
@ -365,16 +358,16 @@ namespace ts {
|
|||
return;
|
||||
}
|
||||
|
||||
let exportedModules: Map<true> | undefined;
|
||||
let exportedModules: Set<Path> | undefined;
|
||||
exportedModulesFromDeclarationEmit.forEach(symbol => addExportedModule(getReferencedFileFromImportedModuleSymbol(symbol)));
|
||||
exportedModulesMapCache.set(sourceFile.resolvedPath, exportedModules || false);
|
||||
|
||||
function addExportedModule(exportedModulePath: Path | undefined) {
|
||||
if (exportedModulePath) {
|
||||
if (!exportedModules) {
|
||||
exportedModules = createMap<true>();
|
||||
exportedModules = new Set();
|
||||
}
|
||||
exportedModules.set(exportedModulePath, true);
|
||||
exportedModules.add(exportedModulePath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -413,26 +406,23 @@ namespace ts {
|
|||
}
|
||||
|
||||
// Get the references, traversing deep from the referenceMap
|
||||
const seenMap = createMap<true>();
|
||||
const seenMap = new Set<Path>();
|
||||
const queue = [sourceFile.resolvedPath];
|
||||
while (queue.length) {
|
||||
const path = queue.pop()!;
|
||||
if (!seenMap.has(path)) {
|
||||
seenMap.set(path, true);
|
||||
seenMap.add(path);
|
||||
const references = state.referencedMap.get(path);
|
||||
if (references) {
|
||||
const iterator = references.keys();
|
||||
for (let iterResult = iterator.next(); !iterResult.done; iterResult = iterator.next()) {
|
||||
queue.push(iterResult.value as Path);
|
||||
queue.push(iterResult.value);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return arrayFrom(mapDefinedIterator(seenMap.keys(), path => {
|
||||
const file = programOfThisState.getSourceFileByPath(path as Path);
|
||||
return file ? file.fileName : path;
|
||||
}));
|
||||
return arrayFrom(mapDefinedIterator(seenMap.keys(), path => programOfThisState.getSourceFileByPath(path)?.fileName ?? path));
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -451,7 +441,7 @@ namespace ts {
|
|||
*/
|
||||
export function getReferencedByPaths(state: Readonly<BuilderState>, referencedFilePath: Path) {
|
||||
return arrayFrom(mapDefinedIterator(state.referencedMap!.entries(), ([filePath, referencesInFile]) =>
|
||||
referencesInFile.has(referencedFilePath) ? filePath as Path : undefined
|
||||
referencesInFile.has(referencedFilePath) ? filePath : undefined
|
||||
));
|
||||
}
|
||||
|
||||
|
@ -528,7 +518,7 @@ namespace ts {
|
|||
/**
|
||||
* When program emits modular code, gets the files affected by the sourceFile whose shape has changed
|
||||
*/
|
||||
function getFilesAffectedByUpdatedShapeWhenModuleEmit(state: BuilderState, programOfThisState: Program, sourceFileWithUpdatedShape: SourceFile, cacheToUpdateSignature: Map<string>, cancellationToken: CancellationToken | undefined, computeHash: ComputeHash | undefined, exportedModulesMapCache: ComputingExportedModulesMap | undefined) {
|
||||
function getFilesAffectedByUpdatedShapeWhenModuleEmit(state: BuilderState, programOfThisState: Program, sourceFileWithUpdatedShape: SourceFile, cacheToUpdateSignature: Map<Path, string>, cancellationToken: CancellationToken | undefined, computeHash: ComputeHash | undefined, exportedModulesMapCache: ComputingExportedModulesMap | undefined) {
|
||||
if (isFileAffectingGlobalScope(sourceFileWithUpdatedShape)) {
|
||||
return getAllFilesExcludingDefaultLibraryFile(state, programOfThisState, sourceFileWithUpdatedShape);
|
||||
}
|
||||
|
@ -541,7 +531,7 @@ namespace ts {
|
|||
// Now we need to if each file in the referencedBy list has a shape change as well.
|
||||
// Because if so, its own referencedBy files need to be saved as well to make the
|
||||
// emitting result consistent with files on disk.
|
||||
const seenFileNamesMap = createMap<SourceFile>();
|
||||
const seenFileNamesMap = new Map<Path, SourceFile>();
|
||||
|
||||
// Start with the paths this file was referenced by
|
||||
seenFileNamesMap.set(sourceFileWithUpdatedShape.resolvedPath, sourceFileWithUpdatedShape);
|
||||
|
@ -562,8 +552,4 @@ namespace ts {
|
|||
return arrayFrom(mapDefinedIterator(seenFileNamesMap.values(), value => value));
|
||||
}
|
||||
}
|
||||
|
||||
export function cloneMapOrUndefined<T>(map: ReadonlyMap<T> | undefined) {
|
||||
return map ? cloneMap(map) : undefined;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -134,7 +134,7 @@ namespace ts {
|
|||
EmptyObjectFacts = All,
|
||||
}
|
||||
|
||||
const typeofEQFacts: ReadonlyMap<TypeFacts> = createMapFromTemplate({
|
||||
const typeofEQFacts: ReadonlyMap<string, TypeFacts> = createMapFromTemplate({
|
||||
string: TypeFacts.TypeofEQString,
|
||||
number: TypeFacts.TypeofEQNumber,
|
||||
bigint: TypeFacts.TypeofEQBigInt,
|
||||
|
@ -145,7 +145,7 @@ namespace ts {
|
|||
function: TypeFacts.TypeofEQFunction
|
||||
});
|
||||
|
||||
const typeofNEFacts: ReadonlyMap<TypeFacts> = createMapFromTemplate({
|
||||
const typeofNEFacts: ReadonlyMap<string, TypeFacts> = createMapFromTemplate({
|
||||
string: TypeFacts.TypeofNEString,
|
||||
number: TypeFacts.TypeofNENumber,
|
||||
bigint: TypeFacts.TypeofNEBigInt,
|
||||
|
@ -277,13 +277,13 @@ namespace ts {
|
|||
}
|
||||
|
||||
export function createTypeChecker(host: TypeCheckerHost, produceDiagnostics: boolean): TypeChecker {
|
||||
const getPackagesSet: () => Map<true> = memoize(() => {
|
||||
const set = createMap<true>();
|
||||
const getPackagesSet = memoize(() => {
|
||||
const set = new Set<string>();
|
||||
host.getSourceFiles().forEach(sf => {
|
||||
if (!sf.resolvedModules) return;
|
||||
|
||||
forEachEntry(sf.resolvedModules, r => {
|
||||
if (r && r.packageId) set.set(r.packageId.name, true);
|
||||
if (r && r.packageId) set.add(r.packageId.name);
|
||||
});
|
||||
});
|
||||
return set;
|
||||
|
@ -826,10 +826,10 @@ namespace ts {
|
|||
readonly firstFile: SourceFile;
|
||||
readonly secondFile: SourceFile;
|
||||
/** Key is symbol name. */
|
||||
readonly conflictingSymbols: Map<DuplicateInfoForSymbol>;
|
||||
readonly conflictingSymbols: Map<string, DuplicateInfoForSymbol>;
|
||||
}
|
||||
/** Key is "/path/to/a.ts|/path/to/b.ts". */
|
||||
let amalgamatedDuplicates: Map<DuplicateInfoForFiles> | undefined;
|
||||
let amalgamatedDuplicates: Map<string, DuplicateInfoForFiles> | undefined;
|
||||
const reverseMappedCache = createMap<Type | undefined>();
|
||||
let inInferTypeForHomomorphicMappedType = false;
|
||||
let ambientModulesCache: Symbol[] | undefined;
|
||||
|
@ -839,7 +839,7 @@ namespace ts {
|
|||
* This is only used if there is no exact match.
|
||||
*/
|
||||
let patternAmbientModules: PatternAmbientModule[];
|
||||
let patternAmbientModuleAugmentations: Map<Symbol> | undefined;
|
||||
let patternAmbientModuleAugmentations: Map<string, Symbol> | undefined;
|
||||
|
||||
let globalObjectType: ObjectType;
|
||||
let globalFunctionType: ObjectType;
|
||||
|
@ -907,7 +907,7 @@ namespace ts {
|
|||
const mergedSymbols: Symbol[] = [];
|
||||
const symbolLinks: SymbolLinks[] = [];
|
||||
const nodeLinks: NodeLinks[] = [];
|
||||
const flowLoopCaches: Map<Type>[] = [];
|
||||
const flowLoopCaches: Map<string, Type>[] = [];
|
||||
const flowLoopNodes: FlowNode[] = [];
|
||||
const flowLoopKeys: string[] = [];
|
||||
const flowLoopTypes: Type[][] = [];
|
||||
|
@ -923,7 +923,7 @@ namespace ts {
|
|||
const diagnostics = createDiagnosticCollection();
|
||||
const suggestionDiagnostics = createDiagnosticCollection();
|
||||
|
||||
const typeofTypesByName: ReadonlyMap<Type> = createMapFromTemplate<Type>({
|
||||
const typeofTypesByName: ReadonlyMap<string, Type> = createMapFromTemplate<Type>({
|
||||
string: stringType,
|
||||
number: numberType,
|
||||
bigint: bigintType,
|
||||
|
@ -3753,7 +3753,7 @@ namespace ts {
|
|||
return rightMeaning === SymbolFlags.Value ? SymbolFlags.Value : SymbolFlags.Namespace;
|
||||
}
|
||||
|
||||
function getAccessibleSymbolChain(symbol: Symbol | undefined, enclosingDeclaration: Node | undefined, meaning: SymbolFlags, useOnlyExternalAliasing: boolean, visitedSymbolTablesMap: Map<SymbolTable[]> = createMap()): Symbol[] | undefined {
|
||||
function getAccessibleSymbolChain(symbol: Symbol | undefined, enclosingDeclaration: Node | undefined, meaning: SymbolFlags, useOnlyExternalAliasing: boolean, visitedSymbolTablesMap: Map<string, SymbolTable[]> = createMap()): Symbol[] | undefined {
|
||||
if (!(symbol && !isPropertyOrMethodDeclarationSymbol(symbol))) {
|
||||
return undefined;
|
||||
}
|
||||
|
@ -4458,7 +4458,7 @@ namespace ts {
|
|||
|
||||
function typeToTypeNodeOrCircularityElision(type: Type) {
|
||||
if (type.flags & TypeFlags.Union) {
|
||||
if (context.visitedTypes && context.visitedTypes.has("" + getTypeId(type))) {
|
||||
if (context.visitedTypes?.has(getTypeId(type))) {
|
||||
if (!(context.flags & NodeBuilderFlags.AllowAnonymousIdentifier)) {
|
||||
context.encounteredError = true;
|
||||
context.tracker?.reportCyclicStructureError?.();
|
||||
|
@ -4491,7 +4491,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
function createAnonymousTypeNode(type: ObjectType): TypeNode {
|
||||
const typeId = "" + type.id;
|
||||
const typeId = type.id;
|
||||
const symbol = type.symbol;
|
||||
if (symbol) {
|
||||
if (isJSConstructor(symbol.valueDeclaration)) {
|
||||
|
@ -4505,7 +4505,7 @@ namespace ts {
|
|||
shouldWriteTypeOfFunctionSymbol()) {
|
||||
return symbolToTypeNode(symbol, context, SymbolFlags.Value);
|
||||
}
|
||||
else if (context.visitedTypes && context.visitedTypes.has(typeId)) {
|
||||
else if (context.visitedTypes?.has(typeId)) {
|
||||
// If type is an anonymous type literal in a type alias declaration, use type alias name
|
||||
const typeAlias = getTypeAliasForTypeLiteral(type);
|
||||
if (typeAlias) {
|
||||
|
@ -4533,14 +4533,14 @@ namespace ts {
|
|||
declaration.parent.kind === SyntaxKind.SourceFile || declaration.parent.kind === SyntaxKind.ModuleBlock));
|
||||
if (isStaticMethodSymbol || isNonLocalFunctionSymbol) {
|
||||
// typeof is allowed only for static/non local functions
|
||||
return (!!(context.flags & NodeBuilderFlags.UseTypeOfFunction) || (context.visitedTypes && context.visitedTypes.has(typeId))) && // it is type of the symbol uses itself recursively
|
||||
return (!!(context.flags & NodeBuilderFlags.UseTypeOfFunction) || (context.visitedTypes?.has(typeId))) && // it is type of the symbol uses itself recursively
|
||||
(!(context.flags & NodeBuilderFlags.UseStructuralFallback) || isValueSymbolAccessible(symbol, context.enclosingDeclaration)); // And the build is going to succeed without visibility error or there is no structural fallback allowed
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function visitAndTransformType<T>(type: Type, transform: (type: Type) => T) {
|
||||
const typeId = "" + type.id;
|
||||
const typeId = type.id;
|
||||
const isConstructorObject = getObjectFlags(type) & ObjectFlags.Anonymous && type.symbol && type.symbol.flags & SymbolFlags.Class;
|
||||
const id = getObjectFlags(type) & ObjectFlags.Reference && (<TypeReference>type).node ? "N" + getNodeId((<TypeReference>type).node!) :
|
||||
type.symbol ? (isConstructorObject ? "+" : "") + getSymbolId(type.symbol) :
|
||||
|
@ -4548,7 +4548,7 @@ namespace ts {
|
|||
// Since instantiations of the same anonymous type have the same symbol, tracking symbols instead
|
||||
// of types allows us to catch circular references to instantiations of the same anonymous type
|
||||
if (!context.visitedTypes) {
|
||||
context.visitedTypes = createMap<true>();
|
||||
context.visitedTypes = new Set();
|
||||
}
|
||||
if (id && !context.symbolDepth) {
|
||||
context.symbolDepth = createMap<number>();
|
||||
|
@ -4562,7 +4562,7 @@ namespace ts {
|
|||
}
|
||||
context.symbolDepth!.set(id, depth + 1);
|
||||
}
|
||||
context.visitedTypes.set(typeId, true);
|
||||
context.visitedTypes.add(typeId);
|
||||
const result = transform(type);
|
||||
context.visitedTypes.delete(typeId);
|
||||
if (id) {
|
||||
|
@ -5251,11 +5251,11 @@ namespace ts {
|
|||
function lookupTypeParameterNodes(chain: Symbol[], index: number, context: NodeBuilderContext) {
|
||||
Debug.assert(chain && 0 <= index && index < chain.length);
|
||||
const symbol = chain[index];
|
||||
const symbolId = "" + getSymbolId(symbol);
|
||||
if (context.typeParameterSymbolList && context.typeParameterSymbolList.get(symbolId)) {
|
||||
const symbolId = getSymbolId(symbol);
|
||||
if (context.typeParameterSymbolList?.has(symbolId)) {
|
||||
return undefined;
|
||||
}
|
||||
(context.typeParameterSymbolList || (context.typeParameterSymbolList = createMap())).set(symbolId, true);
|
||||
(context.typeParameterSymbolList || (context.typeParameterSymbolList = new Set())).add(symbolId);
|
||||
let typeParameterNodes: readonly TypeNode[] | readonly TypeParameterDeclaration[] | undefined;
|
||||
if (context.flags & NodeBuilderFlags.WriteTypeParametersInQualifiedName && index < (chain.length - 1)) {
|
||||
const parentSymbol = symbol;
|
||||
|
@ -5468,7 +5468,7 @@ namespace ts {
|
|||
const rawtext = result.escapedText as string;
|
||||
let i = 0;
|
||||
let text = rawtext;
|
||||
while ((context.typeParameterNamesByText && context.typeParameterNamesByText.get(text)) || typeParameterShadowsNameInScope(text as __String, context, type)) {
|
||||
while (context.typeParameterNamesByText?.has(text) || typeParameterShadowsNameInScope(text as __String, context, type)) {
|
||||
i++;
|
||||
text = `${rawtext}_${i}`;
|
||||
}
|
||||
|
@ -5476,7 +5476,7 @@ namespace ts {
|
|||
result = factory.createIdentifier(text, result.typeArguments);
|
||||
}
|
||||
(context.typeParameterNames || (context.typeParameterNames = createMap())).set("" + getTypeId(type), result);
|
||||
(context.typeParameterNamesByText || (context.typeParameterNamesByText = createMap())).set(result.escapedText as string, true);
|
||||
(context.typeParameterNamesByText || (context.typeParameterNamesByText = new Set())).add(result.escapedText as string);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
@ -5635,10 +5635,10 @@ namespace ts {
|
|||
initial.typeParameterNames = cloneMap(initial.typeParameterNames);
|
||||
}
|
||||
if (initial.typeParameterNamesByText) {
|
||||
initial.typeParameterNamesByText = cloneMap(initial.typeParameterNamesByText);
|
||||
initial.typeParameterNamesByText = new Set(initial.typeParameterNamesByText);
|
||||
}
|
||||
if (initial.typeParameterSymbolList) {
|
||||
initial.typeParameterSymbolList = cloneMap(initial.typeParameterSymbolList);
|
||||
initial.typeParameterSymbolList = new Set(initial.typeParameterSymbolList);
|
||||
}
|
||||
return initial;
|
||||
}
|
||||
|
@ -5877,12 +5877,12 @@ namespace ts {
|
|||
// we're trying to emit from later on)
|
||||
const enclosingDeclaration = context.enclosingDeclaration!;
|
||||
let results: Statement[] = [];
|
||||
const visitedSymbols: Map<true> = createMap();
|
||||
let deferredPrivates: Map<Symbol> | undefined;
|
||||
const visitedSymbols = new Set<number>();
|
||||
let deferredPrivates: Map<string, Symbol> | undefined;
|
||||
const oldcontext = context;
|
||||
context = {
|
||||
...oldcontext,
|
||||
usedSymbolNames: createMap(),
|
||||
usedSymbolNames: new Set(oldcontext.usedSymbolNames),
|
||||
remappedSymbolNames: createMap(),
|
||||
tracker: {
|
||||
...oldcontext.tracker,
|
||||
|
@ -5901,11 +5901,6 @@ namespace ts {
|
|||
}
|
||||
}
|
||||
};
|
||||
if (oldcontext.usedSymbolNames) {
|
||||
oldcontext.usedSymbolNames.forEach((_, name) => {
|
||||
context.usedSymbolNames!.set(name, true);
|
||||
});
|
||||
}
|
||||
forEachEntry(symbolTable, (symbol, name) => {
|
||||
const baseName = unescapeLeadingUnderscores(name);
|
||||
void getInternalSymbolName(symbol, baseName); // Called to cache values into `usedSymbolNames` and `remappedSymbolNames`
|
||||
|
@ -6116,10 +6111,10 @@ namespace ts {
|
|||
// cache visited list based on merged symbol, since we want to use the unmerged top-level symbol, but
|
||||
// still skip reserializing it if we encounter the merged product later on
|
||||
const visitedSym = getMergedSymbol(symbol);
|
||||
if (visitedSymbols.has("" + getSymbolId(visitedSym))) {
|
||||
if (visitedSymbols.has(getSymbolId(visitedSym))) {
|
||||
return; // Already printed
|
||||
}
|
||||
visitedSymbols.set("" + getSymbolId(visitedSym), true);
|
||||
visitedSymbols.add(getSymbolId(visitedSym));
|
||||
// Only actually serialize symbols within the correct enclosing declaration, otherwise do nothing with the out-of-context symbol
|
||||
const skipMembershipCheck = !isPrivate; // We only call this on exported symbols when we know they're in the correct scope
|
||||
if (skipMembershipCheck || (!!length(symbol.declarations) && some(symbol.declarations, d => !!findAncestor(d, n => n === enclosingDeclaration)))) {
|
||||
|
@ -7074,11 +7069,11 @@ namespace ts {
|
|||
}
|
||||
let i = 0;
|
||||
const original = input;
|
||||
while (context.usedSymbolNames!.has(input)) {
|
||||
while (context.usedSymbolNames?.has(input)) {
|
||||
i++;
|
||||
input = `${original}_${i}`;
|
||||
}
|
||||
context.usedSymbolNames!.set(input, true);
|
||||
context.usedSymbolNames?.add(input);
|
||||
if (symbol) {
|
||||
context.remappedSymbolNames!.set("" + getSymbolId(symbol), input);
|
||||
}
|
||||
|
@ -7190,16 +7185,16 @@ namespace ts {
|
|||
|
||||
// State
|
||||
encounteredError: boolean;
|
||||
visitedTypes: Map<true> | undefined;
|
||||
symbolDepth: Map<number> | undefined;
|
||||
visitedTypes: Set<number> | undefined;
|
||||
symbolDepth: Map<string, number> | undefined;
|
||||
inferTypeParameters: TypeParameter[] | undefined;
|
||||
approximateLength: number;
|
||||
truncating?: boolean;
|
||||
typeParameterSymbolList?: Map<true>;
|
||||
typeParameterNames?: Map<Identifier>;
|
||||
typeParameterNamesByText?: Map<true>;
|
||||
usedSymbolNames?: Map<true>;
|
||||
remappedSymbolNames?: Map<string>;
|
||||
typeParameterSymbolList?: Set<number>;
|
||||
typeParameterNames?: Map<string, Identifier>;
|
||||
typeParameterNamesByText?: Set<string>;
|
||||
usedSymbolNames?: Set<string>;
|
||||
remappedSymbolNames?: Map<string, string>;
|
||||
}
|
||||
|
||||
function isDefaultBindingContext(location: Node) {
|
||||
|
@ -7395,10 +7390,10 @@ namespace ts {
|
|||
exportSymbol = getTargetOfExportSpecifier(<ExportSpecifier>node.parent, SymbolFlags.Value | SymbolFlags.Type | SymbolFlags.Namespace | SymbolFlags.Alias);
|
||||
}
|
||||
let result: Node[] | undefined;
|
||||
let visited: Map<true> | undefined;
|
||||
let visited: Set<number> | undefined;
|
||||
if (exportSymbol) {
|
||||
visited = createMap();
|
||||
visited.set("" + getSymbolId(exportSymbol), true);
|
||||
visited = new Set();
|
||||
visited.add(getSymbolId(exportSymbol));
|
||||
buildVisibleNodeList(exportSymbol.declarations);
|
||||
}
|
||||
return result;
|
||||
|
@ -7420,10 +7415,10 @@ namespace ts {
|
|||
const firstIdentifier = getFirstIdentifier(internalModuleReference);
|
||||
const importSymbol = resolveName(declaration, firstIdentifier.escapedText, SymbolFlags.Value | SymbolFlags.Type | SymbolFlags.Namespace,
|
||||
undefined, undefined, /*isUse*/ false);
|
||||
const id = importSymbol && "" + getSymbolId(importSymbol);
|
||||
if (importSymbol && !visited!.has(id!)) {
|
||||
visited!.set(id!, true);
|
||||
buildVisibleNodeList(importSymbol.declarations);
|
||||
if (importSymbol && visited) {
|
||||
if (tryAddToSet(visited, getSymbolId(importSymbol))) {
|
||||
buildVisibleNodeList(importSymbol.declarations);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
@ -10824,7 +10819,7 @@ namespace ts {
|
|||
|
||||
function createUnionOrIntersectionProperty(containingType: UnionOrIntersectionType, name: __String): Symbol | undefined {
|
||||
let singleProp: Symbol | undefined;
|
||||
let propSet: Map<Symbol> | undefined;
|
||||
let propSet: Map<string, Symbol> | undefined;
|
||||
let indexTypes: Type[] | undefined;
|
||||
const isUnion = containingType.flags & TypeFlags.Union;
|
||||
// Flags we want to propagate to the result if they exist in all source symbols
|
||||
|
@ -12885,7 +12880,7 @@ namespace ts {
|
|||
return links.resolvedType;
|
||||
}
|
||||
|
||||
function addTypeToIntersection(typeSet: Map<Type>, includes: TypeFlags, type: Type) {
|
||||
function addTypeToIntersection(typeSet: Map<string, Type>, includes: TypeFlags, type: Type) {
|
||||
const flags = type.flags;
|
||||
if (flags & TypeFlags.Intersection) {
|
||||
return addTypesToIntersection(typeSet, includes, (<IntersectionType>type).types);
|
||||
|
@ -12915,7 +12910,7 @@ namespace ts {
|
|||
|
||||
// Add the given types to the given type set. Order is preserved, freshness is removed from literal
|
||||
// types, duplicates are removed, and nested types of the given kind are flattened into the set.
|
||||
function addTypesToIntersection(typeSet: Map<Type>, includes: TypeFlags, types: readonly Type[]) {
|
||||
function addTypesToIntersection(typeSet: Map<string, Type>, includes: TypeFlags, types: readonly Type[]) {
|
||||
for (const type of types) {
|
||||
includes = addTypeToIntersection(typeSet, includes, getRegularTypeOfLiteralType(type));
|
||||
}
|
||||
|
@ -13032,7 +13027,7 @@ namespace ts {
|
|||
// Also, unlike union types, the order of the constituent types is preserved in order that overload resolution
|
||||
// for intersections of types with signatures can be deterministic.
|
||||
function getIntersectionType(types: readonly Type[], aliasSymbol?: Symbol, aliasTypeArguments?: readonly Type[]): Type {
|
||||
const typeMembershipMap: Map<Type> = createMap();
|
||||
const typeMembershipMap: Map<string, Type> = createMap();
|
||||
const includes = addTypesToIntersection(typeMembershipMap, 0, types);
|
||||
const typeSet: Type[] = arrayFrom(typeMembershipMap.values());
|
||||
// An intersection type is considered empty if it contains
|
||||
|
@ -15072,7 +15067,7 @@ namespace ts {
|
|||
function checkTypeRelatedToAndOptionallyElaborate(
|
||||
source: Type,
|
||||
target: Type,
|
||||
relation: Map<RelationComparisonResult>,
|
||||
relation: Map<string, RelationComparisonResult>,
|
||||
errorNode: Node | undefined,
|
||||
expr: Expression | undefined,
|
||||
headMessage: DiagnosticMessage | undefined,
|
||||
|
@ -15094,7 +15089,7 @@ namespace ts {
|
|||
node: Expression | undefined,
|
||||
source: Type,
|
||||
target: Type,
|
||||
relation: Map<RelationComparisonResult>,
|
||||
relation: Map<string, RelationComparisonResult>,
|
||||
headMessage: DiagnosticMessage | undefined,
|
||||
containingMessageChain: (() => DiagnosticMessageChain | undefined) | undefined,
|
||||
errorOutputContainer: { errors?: Diagnostic[], skipLogging?: boolean } | undefined
|
||||
|
@ -15131,7 +15126,7 @@ namespace ts {
|
|||
node: Expression,
|
||||
source: Type,
|
||||
target: Type,
|
||||
relation: Map<RelationComparisonResult>,
|
||||
relation: Map<string, RelationComparisonResult>,
|
||||
headMessage: DiagnosticMessage | undefined,
|
||||
containingMessageChain: (() => DiagnosticMessageChain | undefined) | undefined,
|
||||
errorOutputContainer: { errors?: Diagnostic[], skipLogging?: boolean } | undefined
|
||||
|
@ -15160,7 +15155,7 @@ namespace ts {
|
|||
node: ArrowFunction,
|
||||
source: Type,
|
||||
target: Type,
|
||||
relation: Map<RelationComparisonResult>,
|
||||
relation: Map<string, RelationComparisonResult>,
|
||||
containingMessageChain: (() => DiagnosticMessageChain | undefined) | undefined,
|
||||
errorOutputContainer: { errors?: Diagnostic[], skipLogging?: boolean } | undefined
|
||||
): boolean {
|
||||
|
@ -15247,7 +15242,7 @@ namespace ts {
|
|||
iterator: ElaborationIterator,
|
||||
source: Type,
|
||||
target: Type,
|
||||
relation: Map<RelationComparisonResult>,
|
||||
relation: Map<string, RelationComparisonResult>,
|
||||
containingMessageChain: (() => DiagnosticMessageChain | undefined) | undefined,
|
||||
errorOutputContainer: { errors?: Diagnostic[], skipLogging?: boolean } | undefined
|
||||
) {
|
||||
|
@ -15361,7 +15356,7 @@ namespace ts {
|
|||
node: JsxAttributes,
|
||||
source: Type,
|
||||
target: Type,
|
||||
relation: Map<RelationComparisonResult>,
|
||||
relation: Map<string, RelationComparisonResult>,
|
||||
containingMessageChain: (() => DiagnosticMessageChain | undefined) | undefined,
|
||||
errorOutputContainer: { errors?: Diagnostic[], skipLogging?: boolean } | undefined
|
||||
) {
|
||||
|
@ -15462,7 +15457,7 @@ namespace ts {
|
|||
node: ArrayLiteralExpression,
|
||||
source: Type,
|
||||
target: Type,
|
||||
relation: Map<RelationComparisonResult>,
|
||||
relation: Map<string, RelationComparisonResult>,
|
||||
containingMessageChain: (() => DiagnosticMessageChain | undefined) | undefined,
|
||||
errorOutputContainer: { errors?: Diagnostic[], skipLogging?: boolean } | undefined
|
||||
) {
|
||||
|
@ -15515,7 +15510,7 @@ namespace ts {
|
|||
node: ObjectLiteralExpression,
|
||||
source: Type,
|
||||
target: Type,
|
||||
relation: Map<RelationComparisonResult>,
|
||||
relation: Map<string, RelationComparisonResult>,
|
||||
containingMessageChain: (() => DiagnosticMessageChain | undefined) | undefined,
|
||||
errorOutputContainer: { errors?: Diagnostic[], skipLogging?: boolean } | undefined
|
||||
) {
|
||||
|
@ -15806,7 +15801,7 @@ namespace ts {
|
|||
return true;
|
||||
}
|
||||
|
||||
function isSimpleTypeRelatedTo(source: Type, target: Type, relation: Map<RelationComparisonResult>, errorReporter?: ErrorReporter) {
|
||||
function isSimpleTypeRelatedTo(source: Type, target: Type, relation: Map<string, RelationComparisonResult>, errorReporter?: ErrorReporter) {
|
||||
const s = source.flags;
|
||||
const t = target.flags;
|
||||
if (t & TypeFlags.AnyOrUnknown || s & TypeFlags.Never || source === wildcardType) return true;
|
||||
|
@ -15843,7 +15838,7 @@ namespace ts {
|
|||
return false;
|
||||
}
|
||||
|
||||
function isTypeRelatedTo(source: Type, target: Type, relation: Map<RelationComparisonResult>) {
|
||||
function isTypeRelatedTo(source: Type, target: Type, relation: Map<string, RelationComparisonResult>) {
|
||||
if (isFreshLiteralType(source)) {
|
||||
source = (<FreshableType>source).regularType;
|
||||
}
|
||||
|
@ -15906,7 +15901,7 @@ namespace ts {
|
|||
function checkTypeRelatedTo(
|
||||
source: Type,
|
||||
target: Type,
|
||||
relation: Map<RelationComparisonResult>,
|
||||
relation: Map<string, RelationComparisonResult>,
|
||||
errorNode: Node | undefined,
|
||||
headMessage?: DiagnosticMessage,
|
||||
containingMessageChain?: () => DiagnosticMessageChain | undefined,
|
||||
|
@ -18028,7 +18023,7 @@ namespace ts {
|
|||
* To improve caching, the relation key for two generic types uses the target's id plus ids of the type parameters.
|
||||
* For other cases, the types ids are used.
|
||||
*/
|
||||
function getRelationKey(source: Type, target: Type, intersectionState: IntersectionState, relation: Map<RelationComparisonResult>) {
|
||||
function getRelationKey(source: Type, target: Type, intersectionState: IntersectionState, relation: Map<string, RelationComparisonResult>) {
|
||||
if (relation === identityRelation && source.id > target.id) {
|
||||
const temp = source;
|
||||
source = target;
|
||||
|
@ -19212,7 +19207,7 @@ namespace ts {
|
|||
|
||||
function inferTypes(inferences: InferenceInfo[], originalSource: Type, originalTarget: Type, priority: InferencePriority = 0, contravariant = false) {
|
||||
let symbolOrTypeStack: (Symbol | Type)[];
|
||||
let visited: Map<number>;
|
||||
let visited: Map<string, number>;
|
||||
let bivariant = false;
|
||||
let propagationType: Type;
|
||||
let inferencePriority = InferencePriority.MaxValue;
|
||||
|
@ -25904,7 +25899,7 @@ namespace ts {
|
|||
function checkApplicableSignatureForJsxOpeningLikeElement(
|
||||
node: JsxOpeningLikeElement,
|
||||
signature: Signature,
|
||||
relation: Map<RelationComparisonResult>,
|
||||
relation: Map<string, RelationComparisonResult>,
|
||||
checkMode: CheckMode,
|
||||
reportErrors: boolean,
|
||||
containingMessageChain: (() => DiagnosticMessageChain | undefined) | undefined,
|
||||
|
@ -26004,7 +25999,7 @@ namespace ts {
|
|||
node: CallLikeExpression,
|
||||
args: readonly Expression[],
|
||||
signature: Signature,
|
||||
relation: Map<RelationComparisonResult>,
|
||||
relation: Map<string, RelationComparisonResult>,
|
||||
checkMode: CheckMode,
|
||||
reportErrors: boolean,
|
||||
containingMessageChain: (() => DiagnosticMessageChain | undefined) | undefined,
|
||||
|
@ -26528,7 +26523,7 @@ namespace ts {
|
|||
|
||||
return getCandidateForOverloadFailure(node, candidates, args, !!candidatesOutArray);
|
||||
|
||||
function chooseOverload(candidates: Signature[], relation: Map<RelationComparisonResult>, signatureHelpTrailingComma = false) {
|
||||
function chooseOverload(candidates: Signature[], relation: Map<string, RelationComparisonResult>, signatureHelpTrailingComma = false) {
|
||||
candidatesForArgumentError = undefined;
|
||||
candidateForArgumentArityError = undefined;
|
||||
candidateForTypeArgumentError = undefined;
|
||||
|
@ -32268,7 +32263,7 @@ namespace ts {
|
|||
return !(getMergedSymbol(typeParameter.symbol).isReferenced! & SymbolFlags.TypeParameter) && !isIdentifierThatStartsWithUnderscore(typeParameter.name);
|
||||
}
|
||||
|
||||
function addToGroup<K, V>(map: Map<[K, V[]]>, key: K, value: V, getKey: (key: K) => number | string): void {
|
||||
function addToGroup<K, V>(map: Map<string, [K, V[]]>, key: K, value: V, getKey: (key: K) => number | string): void {
|
||||
const keyString = String(getKey(key));
|
||||
const group = map.get(keyString);
|
||||
if (group) {
|
||||
|
@ -37159,7 +37154,7 @@ namespace ts {
|
|||
// this variable and functions that use it are deliberately moved here from the outer scope
|
||||
// to avoid scope pollution
|
||||
const resolvedTypeReferenceDirectives = host.getResolvedTypeReferenceDirectives();
|
||||
let fileToDirective: Map<string>;
|
||||
let fileToDirective: Map<string, string>;
|
||||
if (resolvedTypeReferenceDirectives) {
|
||||
// populate reverse mapping: file path -> type reference directive that was resolved to this file
|
||||
fileToDirective = createMap<string>();
|
||||
|
|
|
@ -76,7 +76,7 @@ namespace ts {
|
|||
* option as well as for resolving lib reference directives.
|
||||
*/
|
||||
/* @internal */
|
||||
export const libMap = createMapFromEntries(libEntries);
|
||||
export const libMap = new Map(libEntries);
|
||||
|
||||
// Watch related options
|
||||
/* @internal */
|
||||
|
@ -1090,8 +1090,8 @@ namespace ts {
|
|||
|
||||
/* @internal */
|
||||
export interface OptionsNameMap {
|
||||
optionsNameMap: Map<CommandLineOption>;
|
||||
shortOptionNames: Map<string>;
|
||||
optionsNameMap: Map<string, CommandLineOption>;
|
||||
shortOptionNames: Map<string, string>;
|
||||
}
|
||||
|
||||
/*@internal*/
|
||||
|
@ -1469,7 +1469,7 @@ namespace ts {
|
|||
configFileName: string,
|
||||
optionsToExtend: CompilerOptions,
|
||||
host: ParseConfigFileHost,
|
||||
extendedConfigCache?: Map<ExtendedConfigCacheEntry>,
|
||||
extendedConfigCache?: Map<string, ExtendedConfigCacheEntry>,
|
||||
watchOptionsToExtend?: WatchOptions,
|
||||
extraFileExtensions?: readonly FileExtensionInfo[],
|
||||
): ParsedCommandLine | undefined {
|
||||
|
@ -1562,15 +1562,15 @@ namespace ts {
|
|||
optionTypeMismatchDiagnostic: Diagnostics.Watch_option_0_requires_a_value_of_type_1
|
||||
};
|
||||
|
||||
let commandLineCompilerOptionsMapCache: Map<CommandLineOption>;
|
||||
let commandLineCompilerOptionsMapCache: Map<string, CommandLineOption>;
|
||||
function getCommandLineCompilerOptionsMap() {
|
||||
return commandLineCompilerOptionsMapCache || (commandLineCompilerOptionsMapCache = commandLineOptionsToMap(optionDeclarations));
|
||||
}
|
||||
let commandLineWatchOptionsMapCache: Map<CommandLineOption>;
|
||||
let commandLineWatchOptionsMapCache: Map<string, CommandLineOption>;
|
||||
function getCommandLineWatchOptionsMap() {
|
||||
return commandLineWatchOptionsMapCache || (commandLineWatchOptionsMapCache = commandLineOptionsToMap(optionsForWatch));
|
||||
}
|
||||
let commandLineTypeAcquisitionMapCache: Map<CommandLineOption>;
|
||||
let commandLineTypeAcquisitionMapCache: Map<string, CommandLineOption>;
|
||||
function getCommandLineTypeAcquisitionMap() {
|
||||
return commandLineTypeAcquisitionMapCache || (commandLineTypeAcquisitionMapCache = commandLineOptionsToMap(typeAcquisitionDeclarations));
|
||||
}
|
||||
|
@ -1703,13 +1703,13 @@ namespace ts {
|
|||
|
||||
return convertPropertyValueToJson(sourceFile.statements[0].expression, knownRootOptions);
|
||||
|
||||
function isRootOptionMap(knownOptions: Map<CommandLineOption> | undefined) {
|
||||
function isRootOptionMap(knownOptions: Map<string, CommandLineOption> | undefined) {
|
||||
return knownRootOptions && (knownRootOptions as TsConfigOnlyOption).elementOptions === knownOptions;
|
||||
}
|
||||
|
||||
function convertObjectLiteralExpressionToJson(
|
||||
node: ObjectLiteralExpression,
|
||||
knownOptions: Map<CommandLineOption> | undefined,
|
||||
knownOptions: Map<string, CommandLineOption> | undefined,
|
||||
extraKeyDiagnostics: DidYouMeanOptionsDiagnostics | undefined,
|
||||
parentOption: string | undefined
|
||||
): any {
|
||||
|
@ -1964,7 +1964,7 @@ namespace ts {
|
|||
return config;
|
||||
}
|
||||
|
||||
function optionMapToObject(optionMap: Map<CompilerOptionsValue>): object {
|
||||
function optionMapToObject(optionMap: Map<string, CompilerOptionsValue>): object {
|
||||
return {
|
||||
...arrayFrom(optionMap.entries()).reduce((prev, cur) => ({ ...prev, [cur[0]]: cur[1] }), {}),
|
||||
};
|
||||
|
@ -1994,7 +1994,7 @@ namespace ts {
|
|||
return _ => true;
|
||||
}
|
||||
|
||||
function getCustomTypeMapOfCommandLineOption(optionDefinition: CommandLineOption): Map<string | number> | undefined {
|
||||
function getCustomTypeMapOfCommandLineOption(optionDefinition: CommandLineOption): Map<string, string | number> | undefined {
|
||||
if (optionDefinition.type === "string" || optionDefinition.type === "number" || optionDefinition.type === "boolean" || optionDefinition.type === "object") {
|
||||
// this is of a type CommandLineOptionOfPrimitiveType
|
||||
return undefined;
|
||||
|
@ -2007,7 +2007,7 @@ namespace ts {
|
|||
}
|
||||
}
|
||||
|
||||
function getNameOfCompilerOptionValue(value: CompilerOptionsValue, customTypeMap: Map<string | number>): string | undefined {
|
||||
function getNameOfCompilerOptionValue(value: CompilerOptionsValue, customTypeMap: Map<string, string | number>): string | undefined {
|
||||
// There is a typeMap associated with this command-line option so use it to map value back to its name
|
||||
return forEachEntry(customTypeMap, (mapValue, key) => {
|
||||
if (mapValue === value) {
|
||||
|
@ -2019,7 +2019,7 @@ namespace ts {
|
|||
function serializeCompilerOptions(
|
||||
options: CompilerOptions,
|
||||
pathOptions?: { configFilePath: string, useCaseSensitiveFileNames: boolean }
|
||||
): Map<CompilerOptionsValue> {
|
||||
): Map<string, CompilerOptionsValue> {
|
||||
return serializeOptionBaseObject(options, getOptionsNameMap(), pathOptions);
|
||||
}
|
||||
|
||||
|
@ -2031,7 +2031,7 @@ namespace ts {
|
|||
options: OptionsBase,
|
||||
{ optionsNameMap }: OptionsNameMap,
|
||||
pathOptions?: { configFilePath: string, useCaseSensitiveFileNames: boolean }
|
||||
): Map<CompilerOptionsValue> {
|
||||
): Map<string, CompilerOptionsValue> {
|
||||
const result = createMap<CompilerOptionsValue>();
|
||||
const getCanonicalFileName = pathOptions && createGetCanonicalFileName(pathOptions.useCaseSensitiveFileNames);
|
||||
|
||||
|
@ -2220,7 +2220,7 @@ namespace ts {
|
|||
* @param basePath A root directory to resolve relative path entries in the config
|
||||
* file to. e.g. outDir
|
||||
*/
|
||||
export function parseJsonConfigFileContent(json: any, host: ParseConfigHost, basePath: string, existingOptions?: CompilerOptions, configFileName?: string, resolutionStack?: Path[], extraFileExtensions?: readonly FileExtensionInfo[], extendedConfigCache?: Map<ExtendedConfigCacheEntry>, existingWatchOptions?: WatchOptions): ParsedCommandLine {
|
||||
export function parseJsonConfigFileContent(json: any, host: ParseConfigHost, basePath: string, existingOptions?: CompilerOptions, configFileName?: string, resolutionStack?: Path[], extraFileExtensions?: readonly FileExtensionInfo[], extendedConfigCache?: Map<string, ExtendedConfigCacheEntry>, existingWatchOptions?: WatchOptions): ParsedCommandLine {
|
||||
return parseJsonConfigFileContentWorker(json, /*sourceFile*/ undefined, host, basePath, existingOptions, existingWatchOptions, configFileName, resolutionStack, extraFileExtensions, extendedConfigCache);
|
||||
}
|
||||
|
||||
|
@ -2231,7 +2231,7 @@ namespace ts {
|
|||
* @param basePath A root directory to resolve relative path entries in the config
|
||||
* file to. e.g. outDir
|
||||
*/
|
||||
export function parseJsonSourceFileConfigFileContent(sourceFile: TsConfigSourceFile, host: ParseConfigHost, basePath: string, existingOptions?: CompilerOptions, configFileName?: string, resolutionStack?: Path[], extraFileExtensions?: readonly FileExtensionInfo[], extendedConfigCache?: Map<ExtendedConfigCacheEntry>, existingWatchOptions?: WatchOptions): ParsedCommandLine {
|
||||
export function parseJsonSourceFileConfigFileContent(sourceFile: TsConfigSourceFile, host: ParseConfigHost, basePath: string, existingOptions?: CompilerOptions, configFileName?: string, resolutionStack?: Path[], extraFileExtensions?: readonly FileExtensionInfo[], extendedConfigCache?: Map<string, ExtendedConfigCacheEntry>, existingWatchOptions?: WatchOptions): ParsedCommandLine {
|
||||
return parseJsonConfigFileContentWorker(/*json*/ undefined, sourceFile, host, basePath, existingOptions, existingWatchOptions, configFileName, resolutionStack, extraFileExtensions, extendedConfigCache);
|
||||
}
|
||||
|
||||
|
@ -2271,7 +2271,7 @@ namespace ts {
|
|||
configFileName?: string,
|
||||
resolutionStack: Path[] = [],
|
||||
extraFileExtensions: readonly FileExtensionInfo[] = [],
|
||||
extendedConfigCache?: Map<ExtendedConfigCacheEntry>
|
||||
extendedConfigCache?: Map<string, ExtendedConfigCacheEntry>
|
||||
): ParsedCommandLine {
|
||||
Debug.assert((json === undefined && sourceFile !== undefined) || (json !== undefined && sourceFile === undefined));
|
||||
const errors: Diagnostic[] = [];
|
||||
|
@ -2456,7 +2456,7 @@ namespace ts {
|
|||
configFileName: string | undefined,
|
||||
resolutionStack: string[],
|
||||
errors: Push<Diagnostic>,
|
||||
extendedConfigCache?: Map<ExtendedConfigCacheEntry>
|
||||
extendedConfigCache?: Map<string, ExtendedConfigCacheEntry>
|
||||
): ParsedTsconfig {
|
||||
basePath = normalizeSlashes(basePath);
|
||||
const resolvedPath = getNormalizedAbsolutePath(configFileName || "", basePath);
|
||||
|
@ -2645,7 +2645,7 @@ namespace ts {
|
|||
basePath: string,
|
||||
resolutionStack: string[],
|
||||
errors: Push<Diagnostic>,
|
||||
extendedConfigCache?: Map<ExtendedConfigCacheEntry>
|
||||
extendedConfigCache?: Map<string, ExtendedConfigCacheEntry>
|
||||
): ParsedTsconfig | undefined {
|
||||
const path = host.useCaseSensitiveFileNames ? extendedConfigPath : toFileNameLowerCase(extendedConfigPath);
|
||||
let value: ExtendedConfigCacheEntry | undefined;
|
||||
|
@ -2750,11 +2750,11 @@ namespace ts {
|
|||
return convertOptionsFromJson(getCommandLineWatchOptionsMap(), jsonOptions, basePath, /*defaultOptions*/ undefined, watchOptionsDidYouMeanDiagnostics, errors);
|
||||
}
|
||||
|
||||
function convertOptionsFromJson(optionsNameMap: Map<CommandLineOption>, jsonOptions: any, basePath: string,
|
||||
function convertOptionsFromJson(optionsNameMap: Map<string, CommandLineOption>, jsonOptions: any, basePath: string,
|
||||
defaultOptions: undefined, diagnostics: DidYouMeanOptionsDiagnostics, errors: Push<Diagnostic>): WatchOptions | undefined;
|
||||
function convertOptionsFromJson(optionsNameMap: Map<CommandLineOption>, jsonOptions: any, basePath: string,
|
||||
function convertOptionsFromJson(optionsNameMap: Map<string, CommandLineOption>, jsonOptions: any, basePath: string,
|
||||
defaultOptions: CompilerOptions | TypeAcquisition, diagnostics: DidYouMeanOptionsDiagnostics, errors: Push<Diagnostic>): CompilerOptions | TypeAcquisition;
|
||||
function convertOptionsFromJson(optionsNameMap: Map<CommandLineOption>, jsonOptions: any, basePath: string,
|
||||
function convertOptionsFromJson(optionsNameMap: Map<string, CommandLineOption>, jsonOptions: any, basePath: string,
|
||||
defaultOptions: CompilerOptions | TypeAcquisition | WatchOptions | undefined, diagnostics: DidYouMeanOptionsDiagnostics, errors: Push<Diagnostic>) {
|
||||
|
||||
if (!jsonOptions) {
|
||||
|
@ -3174,7 +3174,7 @@ namespace ts {
|
|||
* @param extensionPriority The priority of the extension.
|
||||
* @param context The expansion context.
|
||||
*/
|
||||
function hasFileWithHigherPriorityExtension(file: string, literalFiles: Map<string>, wildcardFiles: Map<string>, extensions: readonly string[], keyMapper: (value: string) => string) {
|
||||
function hasFileWithHigherPriorityExtension(file: string, literalFiles: Map<string, string>, wildcardFiles: Map<string, string>, extensions: readonly string[], keyMapper: (value: string) => string) {
|
||||
const extensionPriority = getExtensionPriority(file, extensions);
|
||||
const adjustedExtensionPriority = adjustExtensionPriority(extensionPriority, extensions);
|
||||
for (let i = ExtensionPriority.Highest; i < adjustedExtensionPriority; i++) {
|
||||
|
@ -3196,7 +3196,7 @@ namespace ts {
|
|||
* @param extensionPriority The priority of the extension.
|
||||
* @param context The expansion context.
|
||||
*/
|
||||
function removeWildcardFilesWithLowerPriorityExtension(file: string, wildcardFiles: Map<string>, extensions: readonly string[], keyMapper: (value: string) => string) {
|
||||
function removeWildcardFilesWithLowerPriorityExtension(file: string, wildcardFiles: Map<string, string>, extensions: readonly string[], keyMapper: (value: string) => string) {
|
||||
const extensionPriority = getExtensionPriority(file, extensions);
|
||||
const nextExtensionPriority = getNextLowestExtensionPriority(extensionPriority, extensions);
|
||||
for (let i = nextExtensionPriority; i < extensions.length; i++) {
|
||||
|
|
|
@ -1,26 +1,56 @@
|
|||
|
||||
/* @internal */
|
||||
namespace ts {
|
||||
type GetIteratorCallback = <I extends readonly any[] | ReadonlySet<any> | ReadonlyMap<any, any> | undefined>(iterable: I) => Iterator<
|
||||
I extends ReadonlyMap<infer K, infer V> ? [K, V] :
|
||||
I extends ReadonlySet<infer T> ? T :
|
||||
I extends readonly (infer T)[] ? T :
|
||||
I extends undefined ? undefined :
|
||||
never>;
|
||||
|
||||
function getCollectionImplementation<
|
||||
K1 extends MatchingKeys<typeof NativeCollections, () => any>,
|
||||
K2 extends MatchingKeys<typeof ShimCollections, (getIterator?: GetIteratorCallback) => ReturnType<(typeof NativeCollections)[K1]>>
|
||||
>(name: string, nativeFactory: K1, shimFactory: K2): NonNullable<ReturnType<(typeof NativeCollections)[K1]>> {
|
||||
// NOTE: ts.ShimCollections will be defined for typescriptServices.js but not for tsc.js, so we must test for it.
|
||||
const constructor = NativeCollections[nativeFactory]() ?? ShimCollections?.[shimFactory](getIterator);
|
||||
if (constructor) return constructor as NonNullable<ReturnType<(typeof NativeCollections)[K1]>>;
|
||||
throw new Error(`TypeScript requires an environment that provides a compatible native ${name} implementation.`);
|
||||
}
|
||||
|
||||
export const Map = getCollectionImplementation("Map", "tryGetNativeMap", "createMapShim");
|
||||
export const Set = getCollectionImplementation("Set", "tryGetNativeSet", "createSetShim");
|
||||
|
||||
export function getIterator<I extends readonly any[] | ReadonlySet<any> | ReadonlyMap<any, any> | undefined>(iterable: I): Iterator<
|
||||
I extends ReadonlyMap<infer K, infer V> ? [K, V] :
|
||||
I extends ReadonlySet<infer T> ? T :
|
||||
I extends readonly (infer T)[] ? T :
|
||||
I extends undefined ? undefined :
|
||||
never>;
|
||||
export function getIterator<K, V>(iterable: ReadonlyMap<K, V>): Iterator<[K, V]>;
|
||||
export function getIterator<K, V>(iterable: ReadonlyMap<K, V> | undefined): Iterator<[K, V]> | undefined;
|
||||
export function getIterator<T>(iterable: readonly T[] | ReadonlySet<T>): Iterator<T>;
|
||||
export function getIterator<T>(iterable: readonly T[] | ReadonlySet<T> | undefined): Iterator<T> | undefined;
|
||||
export function getIterator(iterable: readonly any[] | ReadonlySet<any> | ReadonlyMap<any, any> | undefined): Iterator<any> | undefined {
|
||||
if (iterable) {
|
||||
if (isArray(iterable)) return arrayIterator(iterable);
|
||||
if (iterable instanceof Map) return iterable.entries();
|
||||
if (iterable instanceof Set) return iterable.values();
|
||||
throw new Error("Iteration not supported.");
|
||||
}
|
||||
}
|
||||
|
||||
export const emptyArray: never[] = [] as never[];
|
||||
|
||||
/** Create a new map. */
|
||||
export function createMap<T>(): Map<T> {
|
||||
return new Map<T>();
|
||||
}
|
||||
|
||||
/** Create a new map from an array of entries. */
|
||||
export function createMapFromEntries<T>(entries: [string, T][]): Map<T> {
|
||||
const map = createMap<T>();
|
||||
for (const [key, value] of entries) {
|
||||
map.set(key, value);
|
||||
}
|
||||
return map;
|
||||
export function createMap<K, V>(): Map<K, V>;
|
||||
export function createMap<T>(): Map<string, T>;
|
||||
export function createMap<K, V>(): Map<K, V> {
|
||||
return new Map<K, V>();
|
||||
}
|
||||
|
||||
/** Create a new map from a template object is provided, the map will copy entries from it. */
|
||||
export function createMapFromTemplate<T>(template: MapLike<T>): Map<T> {
|
||||
const map: Map<T> = new Map<T>();
|
||||
export function createMapFromTemplate<T>(template: MapLike<T>): Map<string, T> {
|
||||
const map: Map<string, T> = new Map<string, T>();
|
||||
|
||||
// Copies keys/values from template. Note that for..in will not throw if
|
||||
// template is undefined, and instead will just exit the loop.
|
||||
|
@ -97,6 +127,16 @@ namespace ts {
|
|||
}
|
||||
}
|
||||
|
||||
export function reduceLeftIterator<T, U>(iterator: Iterator<T> | undefined, f: (memo: U, value: T, i: number) => U, initial: U): U {
|
||||
let result = initial;
|
||||
if (iterator) {
|
||||
for (let step = iterator.next(), pos = 0; !step.done; step = iterator.next(), pos++) {
|
||||
result = f(result, step.value, pos);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
export function zipWith<T, U, V>(arrayA: readonly T[], arrayB: readonly U[], callback: (a: T, b: U, index: number) => V): V[] {
|
||||
const result: V[] = [];
|
||||
Debug.assertEqual(arrayA.length, arrayB.length);
|
||||
|
@ -120,9 +160,9 @@ namespace ts {
|
|||
};
|
||||
}
|
||||
|
||||
export function zipToMap<T>(keys: readonly string[], values: readonly T[]): Map<T> {
|
||||
export function zipToMap<K, V>(keys: readonly K[], values: readonly V[]): Map<K, V> {
|
||||
Debug.assert(keys.length === values.length);
|
||||
const map = createMap<T>();
|
||||
const map = new Map<K, V>();
|
||||
for (let i = 0; i < keys.length; ++i) {
|
||||
map.set(keys[i], values[i]);
|
||||
}
|
||||
|
@ -514,17 +554,50 @@ namespace ts {
|
|||
};
|
||||
}
|
||||
|
||||
export function mapDefinedMap<T, U>(map: ReadonlyMap<T>, mapValue: (value: T, key: string) => U | undefined, mapKey: (key: string) => string = identity): Map<U> {
|
||||
const result = createMap<U>();
|
||||
export function mapDefinedEntries<K1, V1, K2, V2>(map: ReadonlyMap<K1, V1>, f: (key: K1, value: V1) => readonly [K2, V2] | undefined): Map<K2, V2>;
|
||||
export function mapDefinedEntries<K1, V1, K2, V2>(map: ReadonlyMap<K1, V1> | undefined, f: (key: K1, value: V1) => readonly [K2 | undefined, V2 | undefined] | undefined): Map<K2, V2> | undefined;
|
||||
export function mapDefinedEntries<K1, V1, K2, V2>(map: ReadonlyMap<K1, V1> | undefined, f: (key: K1, value: V1) => readonly [K2 | undefined, V2 | undefined] | undefined): Map<K2, V2> | undefined {
|
||||
if (!map) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const result = new Map<K2, V2>();
|
||||
map.forEach((value, key) => {
|
||||
const mapped = mapValue(value, key);
|
||||
if (mapped !== undefined) {
|
||||
result.set(mapKey(key), mapped);
|
||||
const entry = f(key, value);
|
||||
if (entry !== undefined) {
|
||||
const [newKey, newValue] = entry;
|
||||
if (newKey !== undefined && newValue !== undefined) {
|
||||
result.set(newKey, newValue);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
export function mapDefinedValues<V1, V2>(set: ReadonlySet<V1>, f: (value: V1) => V2 | undefined): Set<V2>;
|
||||
export function mapDefinedValues<V1, V2>(set: ReadonlySet<V1> | undefined, f: (value: V1) => V2 | undefined): Set<V2> | undefined;
|
||||
export function mapDefinedValues<V1, V2>(set: ReadonlySet<V1> | undefined, f: (value: V1) => V2 | undefined): Set<V2> | undefined {
|
||||
if (set) {
|
||||
const result = new Set<V2>();
|
||||
set.forEach(value => {
|
||||
const newValue = f(value);
|
||||
if (newValue !== undefined) {
|
||||
result.add(newValue);
|
||||
}
|
||||
});
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
export function tryAddToSet<T>(set: Set<T>, value: T) {
|
||||
if (!set.has(value)) {
|
||||
set.add(value);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
export const emptyIterator: Iterator<never> = { next: () => ({ value: undefined as never, done: true }) };
|
||||
|
||||
export function singleIterator<T>(value: T): Iterator<T> {
|
||||
|
@ -587,20 +660,21 @@ namespace ts {
|
|||
return result;
|
||||
}
|
||||
|
||||
export function mapEntries<T, U>(map: ReadonlyMap<T>, f: (key: string, value: T) => [string, U]): Map<U>;
|
||||
export function mapEntries<T, U>(map: ReadonlyMap<T> | undefined, f: (key: string, value: T) => [string, U]): Map<U> | undefined;
|
||||
export function mapEntries<T, U>(map: ReadonlyMap<T> | undefined, f: (key: string, value: T) => [string, U]): Map<U> | undefined {
|
||||
export function mapEntries<K1, V1, K2, V2>(map: ReadonlyMap<K1, V1>, f: (key: K1, value: V1) => readonly [K2, V2]): Map<K2, V2>;
|
||||
export function mapEntries<K1, V1, K2, V2>(map: ReadonlyMap<K1, V1> | undefined, f: (key: K1, value: V1) => readonly [K2, V2]): Map<K2, V2> | undefined;
|
||||
export function mapEntries<K1, V1, K2, V2>(map: ReadonlyMap<K1, V1> | undefined, f: (key: K1, value: V1) => readonly [K2, V2]): Map<K2, V2> | undefined {
|
||||
if (!map) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const result = createMap<U>();
|
||||
const result = new Map<K2, V2>();
|
||||
map.forEach((value, key) => {
|
||||
const [newKey, newValue] = f(key, value);
|
||||
result.set(newKey, newValue);
|
||||
});
|
||||
return result;
|
||||
}
|
||||
|
||||
export function some<T>(array: readonly T[] | undefined): array is readonly T[];
|
||||
export function some<T>(array: readonly T[] | undefined, predicate: (value: T) => boolean): boolean;
|
||||
export function some<T>(array: readonly T[] | undefined, predicate?: (value: T) => boolean): boolean {
|
||||
|
@ -1268,10 +1342,12 @@ namespace ts {
|
|||
* the same key with the given 'makeKey' function, then the element with the higher
|
||||
* index in the array will be the one associated with the produced key.
|
||||
*/
|
||||
export function arrayToMap<T>(array: readonly T[], makeKey: (value: T) => string | undefined): Map<T>;
|
||||
export function arrayToMap<T, U>(array: readonly T[], makeKey: (value: T) => string | undefined, makeValue: (value: T) => U): Map<U>;
|
||||
export function arrayToMap<T, U>(array: readonly T[], makeKey: (value: T) => string | undefined, makeValue: (value: T) => T | U = identity): Map<T | U> {
|
||||
const result = createMap<T | U>();
|
||||
export function arrayToMap<K, V>(array: readonly V[], makeKey: (value: V) => K | undefined): Map<K, V>;
|
||||
export function arrayToMap<K, V1, V2>(array: readonly V1[], makeKey: (value: V1) => K | undefined, makeValue: (value: V1) => V2): Map<K, V2>;
|
||||
export function arrayToMap<T>(array: readonly T[], makeKey: (value: T) => string | undefined): Map<string, T>;
|
||||
export function arrayToMap<T, U>(array: readonly T[], makeKey: (value: T) => string | undefined, makeValue: (value: T) => U): Map<string, U>;
|
||||
export function arrayToMap<K, V1, V2>(array: readonly V1[], makeKey: (value: V1) => K | undefined, makeValue: (value: V1) => V1 | V2 = identity): Map<K, V1 | V2> {
|
||||
const result = new Map<K, V1 | V2>();
|
||||
for (const value of array) {
|
||||
const key = makeKey(value);
|
||||
if (key !== undefined) result.set(key, makeValue(value));
|
||||
|
@ -1289,10 +1365,10 @@ namespace ts {
|
|||
return result;
|
||||
}
|
||||
|
||||
export function arrayToMultiMap<T>(values: readonly T[], makeKey: (value: T) => string): MultiMap<T>;
|
||||
export function arrayToMultiMap<T, U>(values: readonly T[], makeKey: (value: T) => string, makeValue: (value: T) => U): MultiMap<U>;
|
||||
export function arrayToMultiMap<T, U>(values: readonly T[], makeKey: (value: T) => string, makeValue: (value: T) => T | U = identity): MultiMap<T | U> {
|
||||
const result = createMultiMap<T | U>();
|
||||
export function arrayToMultiMap<K, V>(values: readonly V[], makeKey: (value: V) => K): MultiMap<K, V>;
|
||||
export function arrayToMultiMap<K, V, U>(values: readonly V[], makeKey: (value: V) => K, makeValue: (value: V) => U): MultiMap<K, U>;
|
||||
export function arrayToMultiMap<K, V, U>(values: readonly V[], makeKey: (value: V) => K, makeValue: (value: V) => V | U = identity): MultiMap<K, V | U> {
|
||||
const result = createMultiMap<K, V | U>();
|
||||
for (const value of values) {
|
||||
result.add(makeKey(value), makeValue(value));
|
||||
}
|
||||
|
@ -1349,35 +1425,29 @@ namespace ts {
|
|||
return fn ? fn.bind(obj) : undefined;
|
||||
}
|
||||
|
||||
export function mapMap<T, U>(map: Map<T>, f: (t: T, key: string) => [string, U]): Map<U>;
|
||||
export function mapMap<T, U>(map: UnderscoreEscapedMap<T>, f: (t: T, key: __String) => [string, U]): Map<U>;
|
||||
export function mapMap<T, U>(map: Map<T> | UnderscoreEscapedMap<T>, f: ((t: T, key: string) => [string, U]) | ((t: T, key: __String) => [string, U])): Map<U> {
|
||||
const result = createMap<U>();
|
||||
map.forEach((t: T, key: string & __String) => result.set(...(f(t, key))));
|
||||
return result;
|
||||
}
|
||||
|
||||
export interface MultiMap<T> extends Map<T[]> {
|
||||
export interface MultiMap<K, V> extends Map<K, V[]> {
|
||||
/**
|
||||
* Adds the value to an array of values associated with the key, and returns the array.
|
||||
* Creates the array if it does not already exist.
|
||||
*/
|
||||
add(key: string, value: T): T[];
|
||||
add(key: K, value: V): V[];
|
||||
/**
|
||||
* Removes a value from an array of values associated with the key.
|
||||
* Does not preserve the order of those values.
|
||||
* Does nothing if `key` is not in `map`, or `value` is not in `map[key]`.
|
||||
*/
|
||||
remove(key: string, value: T): void;
|
||||
remove(key: K, value: V): void;
|
||||
}
|
||||
|
||||
export function createMultiMap<T>(): MultiMap<T> {
|
||||
const map = createMap<T[]>() as MultiMap<T>;
|
||||
export function createMultiMap<K, V>(): MultiMap<K, V>;
|
||||
export function createMultiMap<V>(): MultiMap<string, V>;
|
||||
export function createMultiMap<K, V>(): MultiMap<K, V> {
|
||||
const map = new Map<K, V[]>() as MultiMap<K, V>;
|
||||
map.add = multiMapAdd;
|
||||
map.remove = multiMapRemove;
|
||||
return map;
|
||||
}
|
||||
function multiMapAdd<T>(this: MultiMap<T>, key: string, value: T) {
|
||||
function multiMapAdd<K, V>(this: MultiMap<K, V>, key: K, value: V) {
|
||||
let values = this.get(key);
|
||||
if (values) {
|
||||
values.push(value);
|
||||
|
@ -1387,7 +1457,7 @@ namespace ts {
|
|||
}
|
||||
return values;
|
||||
}
|
||||
function multiMapRemove<T>(this: MultiMap<T>, key: string, value: T) {
|
||||
function multiMapRemove<K, V>(this: MultiMap<K, V>, key: K, value: V) {
|
||||
const values = this.get(key);
|
||||
if (values) {
|
||||
unorderedRemoveItem(values, value);
|
||||
|
@ -1412,7 +1482,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
export function createUnderscoreEscapedMultiMap<T>(): UnderscoreEscapedMultiMap<T> {
|
||||
return createMultiMap<T>() as UnderscoreEscapedMultiMap<T>;
|
||||
return createMultiMap() as UnderscoreEscapedMultiMap<T>;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -1653,7 +1723,7 @@ namespace ts {
|
|||
* Case-insensitive comparisons compare both strings one code-point at a time using the integer
|
||||
* value of each code-point after applying `toUpperCase` to each string. We always map both
|
||||
* strings to their upper-case form as some unicode characters do not properly round-trip to
|
||||
* lowercase (such as `ẞ` (German sharp capital s)).
|
||||
* lowercase (such as `ẞ` (German sharp capital s)).
|
||||
*/
|
||||
export function compareStringsCaseInsensitive(a: string, b: string) {
|
||||
if (a === b) return Comparison.EqualTo;
|
||||
|
@ -1725,7 +1795,7 @@ namespace ts {
|
|||
//
|
||||
// For case insensitive comparisons we always map both strings to their
|
||||
// upper-case form as some unicode characters do not properly round-trip to
|
||||
// lowercase (such as `ẞ` (German sharp capital s)).
|
||||
// lowercase (such as `ẞ` (German sharp capital s)).
|
||||
return (a, b) => compareWithCallback(a, b, compareDictionaryOrder);
|
||||
|
||||
function compareDictionaryOrder(a: string, b: string) {
|
||||
|
|
|
@ -22,54 +22,57 @@ namespace ts {
|
|||
" __sortedArrayBrand": any;
|
||||
}
|
||||
|
||||
/** ES6 Map interface, only read methods included. */
|
||||
export interface ReadonlyMap<T> {
|
||||
get(key: string): T | undefined;
|
||||
has(key: string): boolean;
|
||||
forEach(action: (value: T, key: string) => void): void;
|
||||
/** Common read methods for ES6 Map/Set. */
|
||||
export interface ReadonlyCollection<K> {
|
||||
readonly size: number;
|
||||
keys(): Iterator<string>;
|
||||
values(): Iterator<T>;
|
||||
entries(): Iterator<[string, T]>;
|
||||
has(key: K): boolean;
|
||||
keys(): Iterator<K>;
|
||||
}
|
||||
|
||||
/** Common write methods for ES6 Map/Set. */
|
||||
export interface Collection<K> extends ReadonlyCollection<K> {
|
||||
delete(key: K): boolean;
|
||||
clear(): void;
|
||||
}
|
||||
|
||||
/** ES6 Map interface, only read methods included. */
|
||||
export interface ReadonlyMap<K, V> extends ReadonlyCollection<K> {
|
||||
get(key: K): V | undefined;
|
||||
values(): Iterator<V>;
|
||||
entries(): Iterator<[K, V]>;
|
||||
forEach(action: (value: V, key: K) => void): void;
|
||||
}
|
||||
|
||||
/** ES6 Map interface. */
|
||||
export interface Map<T> extends ReadonlyMap<T> {
|
||||
set(key: string, value: T): this;
|
||||
delete(key: string): boolean;
|
||||
clear(): void;
|
||||
export interface Map<K, V> extends ReadonlyMap<K, V>, Collection<K> {
|
||||
set(key: K, value: V): this;
|
||||
}
|
||||
|
||||
/* @internal */
|
||||
export interface MapConstructor {
|
||||
// eslint-disable-next-line @typescript-eslint/prefer-function-type
|
||||
new <T>(): Map<T>;
|
||||
new <K, V>(iterable?: readonly (readonly [K, V])[] | ReadonlyMap<K, V>): Map<K, V>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the native Map implementation if it is available and compatible (i.e. supports iteration).
|
||||
*/
|
||||
/* @internal */
|
||||
export function tryGetNativeMap(): MapConstructor | undefined {
|
||||
// Internet Explorer's Map doesn't support iteration, so don't use it.
|
||||
// Natives
|
||||
// NOTE: TS doesn't strictly allow in-line declares, but if we suppress the error, the declaration
|
||||
// is still used for typechecking _and_ correctly elided, which is out goal, as this prevents us from
|
||||
// needing to pollute an outer scope with a declaration of `Map` just to satisfy the checks in this function
|
||||
//@ts-ignore
|
||||
declare const Map: (new <T>() => Map<T>) | undefined;
|
||||
// eslint-disable-next-line no-in-operator
|
||||
return typeof Map !== "undefined" && "entries" in Map.prototype ? Map : undefined;
|
||||
/** ES6 Set interface, only read methods included. */
|
||||
export interface ReadonlySet<T> extends ReadonlyCollection<T> {
|
||||
has(value: T): boolean;
|
||||
values(): Iterator<T>;
|
||||
entries(): Iterator<[T, T]>;
|
||||
forEach(action: (value: T, key: T) => void): void;
|
||||
}
|
||||
|
||||
/** ES6 Set interface. */
|
||||
export interface Set<T> extends ReadonlySet<T>, Collection<T> {
|
||||
add(value: T): this;
|
||||
delete(value: T): boolean;
|
||||
}
|
||||
|
||||
/* @internal */
|
||||
export const Map: MapConstructor = tryGetNativeMap() || (() => {
|
||||
// NOTE: createMapShim will be defined for typescriptServices.js but not for tsc.js, so we must test for it.
|
||||
if (typeof createMapShim === "function") {
|
||||
return createMapShim();
|
||||
}
|
||||
throw new Error("TypeScript requires an environment that provides a compatible native Map implementation.");
|
||||
})();
|
||||
export interface SetConstructor {
|
||||
// eslint-disable-next-line @typescript-eslint/prefer-function-type
|
||||
new <T>(iterable?: readonly T[] | ReadonlySet<T>): Set<T>;
|
||||
}
|
||||
|
||||
/** ES6 Iterator type. */
|
||||
export interface Iterator<T> {
|
||||
|
@ -94,4 +97,28 @@ namespace ts {
|
|||
EqualTo = 0,
|
||||
GreaterThan = 1
|
||||
}
|
||||
|
||||
/* @internal */
|
||||
export namespace NativeCollections {
|
||||
declare const Map: MapConstructor | undefined;
|
||||
declare const Set: SetConstructor | undefined;
|
||||
|
||||
/**
|
||||
* Returns the native Map implementation if it is available and compatible (i.e. supports iteration).
|
||||
*/
|
||||
export function tryGetNativeMap(): MapConstructor | undefined {
|
||||
// Internet Explorer's Map doesn't support iteration, so don't use it.
|
||||
// eslint-disable-next-line no-in-operator
|
||||
return typeof Map !== "undefined" && "entries" in Map.prototype && new Map([[0, 0]]).size === 1 ? Map : undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the native Set implementation if it is available and compatible (i.e. supports iteration).
|
||||
*/
|
||||
export function tryGetNativeSet(): SetConstructor | undefined {
|
||||
// Internet Explorer's Set doesn't support iteration, so don't use it.
|
||||
// eslint-disable-next-line no-in-operator
|
||||
return typeof Set !== "undefined" && "entries" in Set.prototype && new Set([0]).size === 1 ? Set : undefined;
|
||||
}
|
||||
}
|
||||
}
|
|
@ -839,11 +839,11 @@ namespace ts {
|
|||
let currentSourceFile: SourceFile | undefined;
|
||||
let nodeIdToGeneratedName: string[]; // Map of generated names for specific nodes.
|
||||
let autoGeneratedIdToGeneratedName: string[]; // Map of generated names for temp and loop variables.
|
||||
let generatedNames: Map<true>; // Set of names generated by the NameGenerator.
|
||||
let generatedNames: Set<string>; // Set of names generated by the NameGenerator.
|
||||
let tempFlagsStack: TempFlags[]; // Stack of enclosing name generation scopes.
|
||||
let tempFlags: TempFlags; // TempFlags for the current name generation scope.
|
||||
let reservedNamesStack: Map<true>[]; // Stack of TempFlags reserved in enclosing name generation scopes.
|
||||
let reservedNames: Map<true>; // TempFlags to reserve in nested name generation scopes.
|
||||
let reservedNamesStack: Set<string>[]; // Stack of TempFlags reserved in enclosing name generation scopes.
|
||||
let reservedNames: Set<string>; // TempFlags to reserve in nested name generation scopes.
|
||||
let preserveSourceNewlines = printerOptions.preserveSourceNewlines; // Can be overridden inside nodes with the `IgnoreSourceNewlines` emit flag.
|
||||
|
||||
let writer: EmitTextWriter;
|
||||
|
@ -1119,7 +1119,7 @@ namespace ts {
|
|||
function reset() {
|
||||
nodeIdToGeneratedName = [];
|
||||
autoGeneratedIdToGeneratedName = [];
|
||||
generatedNames = createMap<true>();
|
||||
generatedNames = new Set();
|
||||
tempFlagsStack = [];
|
||||
tempFlags = TempFlags.Auto;
|
||||
reservedNamesStack = [];
|
||||
|
@ -3718,7 +3718,7 @@ namespace ts {
|
|||
* Emits any prologue directives at the start of a Statement list, returning the
|
||||
* number of prologue directives written to the output.
|
||||
*/
|
||||
function emitPrologueDirectives(statements: readonly Node[], sourceFile?: SourceFile, seenPrologueDirectives?: Map<true>, recordBundleFileSection?: true): number {
|
||||
function emitPrologueDirectives(statements: readonly Node[], sourceFile?: SourceFile, seenPrologueDirectives?: Set<string>, recordBundleFileSection?: true): number {
|
||||
let needsToSetSourceFile = !!sourceFile;
|
||||
for (let i = 0; i < statements.length; i++) {
|
||||
const statement = statements[i];
|
||||
|
@ -3734,7 +3734,7 @@ namespace ts {
|
|||
emit(statement);
|
||||
if (recordBundleFileSection && bundleFileInfo) bundleFileInfo.sections.push({ pos, end: writer.getTextPos(), kind: BundleFileSectionKind.Prologue, data: statement.expression.text });
|
||||
if (seenPrologueDirectives) {
|
||||
seenPrologueDirectives.set(statement.expression.text, true);
|
||||
seenPrologueDirectives.add(statement.expression.text);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -3747,7 +3747,7 @@ namespace ts {
|
|||
return statements.length;
|
||||
}
|
||||
|
||||
function emitUnparsedPrologues(prologues: readonly UnparsedPrologue[], seenPrologueDirectives: Map<true>) {
|
||||
function emitUnparsedPrologues(prologues: readonly UnparsedPrologue[], seenPrologueDirectives: Set<string>) {
|
||||
for (const prologue of prologues) {
|
||||
if (!seenPrologueDirectives.has(prologue.data)) {
|
||||
writeLine();
|
||||
|
@ -3755,7 +3755,7 @@ namespace ts {
|
|||
emit(prologue);
|
||||
if (bundleFileInfo) bundleFileInfo.sections.push({ pos, end: writer.getTextPos(), kind: BundleFileSectionKind.Prologue, data: prologue.data });
|
||||
if (seenPrologueDirectives) {
|
||||
seenPrologueDirectives.set(prologue.data, true);
|
||||
seenPrologueDirectives.add(prologue.data);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -3766,7 +3766,7 @@ namespace ts {
|
|||
emitPrologueDirectives(sourceFileOrBundle.statements, sourceFileOrBundle);
|
||||
}
|
||||
else {
|
||||
const seenPrologueDirectives = createMap<true>();
|
||||
const seenPrologueDirectives = new Set<string>();
|
||||
for (const prepend of sourceFileOrBundle.prepends) {
|
||||
emitUnparsedPrologues((prepend as UnparsedSource).prologues, seenPrologueDirectives);
|
||||
}
|
||||
|
@ -3778,7 +3778,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
function getPrologueDirectivesFromBundledSourceFiles(bundle: Bundle): SourceFilePrologueInfo[] | undefined {
|
||||
const seenPrologueDirectives = createMap<true>();
|
||||
const seenPrologueDirectives = new Set<string>();
|
||||
let prologues: SourceFilePrologueInfo[] | undefined;
|
||||
for (let index = 0; index < bundle.sourceFiles.length; index++) {
|
||||
const sourceFile = bundle.sourceFiles[index];
|
||||
|
@ -3787,7 +3787,7 @@ namespace ts {
|
|||
for (const statement of sourceFile.statements) {
|
||||
if (!isPrologueDirective(statement)) break;
|
||||
if (seenPrologueDirectives.has(statement.expression.text)) continue;
|
||||
seenPrologueDirectives.set(statement.expression.text, true);
|
||||
seenPrologueDirectives.add(statement.expression.text);
|
||||
(directives || (directives = [])).push({
|
||||
pos: statement.pos,
|
||||
end: statement.end,
|
||||
|
@ -4539,9 +4539,9 @@ namespace ts {
|
|||
|
||||
function reserveNameInNestedScopes(name: string) {
|
||||
if (!reservedNames || reservedNames === lastOrUndefined(reservedNamesStack)) {
|
||||
reservedNames = createMap<true>();
|
||||
reservedNames = new Set();
|
||||
}
|
||||
reservedNames.set(name, true);
|
||||
reservedNames.add(name);
|
||||
}
|
||||
|
||||
function generateNames(node: Node | undefined) {
|
||||
|
@ -4758,7 +4758,7 @@ namespace ts {
|
|||
reserveNameInNestedScopes(baseName);
|
||||
}
|
||||
else {
|
||||
generatedNames.set(baseName, true);
|
||||
generatedNames.add(baseName);
|
||||
}
|
||||
return baseName;
|
||||
}
|
||||
|
@ -4775,7 +4775,7 @@ namespace ts {
|
|||
reserveNameInNestedScopes(generatedName);
|
||||
}
|
||||
else {
|
||||
generatedNames.set(generatedName, true);
|
||||
generatedNames.add(generatedName);
|
||||
}
|
||||
return generatedName;
|
||||
}
|
||||
|
|
|
@ -841,7 +841,7 @@ namespace ts {
|
|||
};`
|
||||
};
|
||||
|
||||
let allUnscopedEmitHelpers: ReadonlyMap<UnscopedEmitHelper> | undefined;
|
||||
let allUnscopedEmitHelpers: ReadonlyMap<string, UnscopedEmitHelper> | undefined;
|
||||
|
||||
export function getAllUnscopedEmitHelpers() {
|
||||
return allUnscopedEmitHelpers || (allUnscopedEmitHelpers = arrayToMap([
|
||||
|
|
|
@ -442,8 +442,8 @@ namespace ts {
|
|||
* This assumes that any module id will have the same resolution for sibling files located in the same folder.
|
||||
*/
|
||||
export interface ModuleResolutionCache extends NonRelativeModuleNameResolutionCache {
|
||||
getOrCreateCacheForDirectory(directoryName: string, redirectedReference?: ResolvedProjectReference): Map<ResolvedModuleWithFailedLookupLocations>;
|
||||
/*@internal*/ directoryToModuleNameMap: CacheWithRedirects<Map<ResolvedModuleWithFailedLookupLocations>>;
|
||||
getOrCreateCacheForDirectory(directoryName: string, redirectedReference?: ResolvedProjectReference): Map<string, ResolvedModuleWithFailedLookupLocations>;
|
||||
/*@internal*/ directoryToModuleNameMap: CacheWithRedirects<Map<string, ResolvedModuleWithFailedLookupLocations>>;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -472,18 +472,18 @@ namespace ts {
|
|||
|
||||
/*@internal*/
|
||||
export interface CacheWithRedirects<T> {
|
||||
ownMap: Map<T>;
|
||||
redirectsMap: Map<Map<T>>;
|
||||
getOrCreateMapOfCacheRedirects(redirectedReference: ResolvedProjectReference | undefined): Map<T>;
|
||||
ownMap: Map<string, T>;
|
||||
redirectsMap: Map<Path, Map<string, T>>;
|
||||
getOrCreateMapOfCacheRedirects(redirectedReference: ResolvedProjectReference | undefined): Map<string, T>;
|
||||
clear(): void;
|
||||
setOwnOptions(newOptions: CompilerOptions): void;
|
||||
setOwnMap(newOwnMap: Map<T>): void;
|
||||
setOwnMap(newOwnMap: Map<string, T>): void;
|
||||
}
|
||||
|
||||
/*@internal*/
|
||||
export function createCacheWithRedirects<T>(options?: CompilerOptions): CacheWithRedirects<T> {
|
||||
let ownMap: Map<T> = createMap();
|
||||
const redirectsMap: Map<Map<T>> = createMap();
|
||||
let ownMap: Map<string, T> = createMap();
|
||||
const redirectsMap = new Map<Path, Map<string, T>>();
|
||||
return {
|
||||
ownMap,
|
||||
redirectsMap,
|
||||
|
@ -497,7 +497,7 @@ namespace ts {
|
|||
options = newOptions;
|
||||
}
|
||||
|
||||
function setOwnMap(newOwnMap: Map<T>) {
|
||||
function setOwnMap(newOwnMap: Map<string, T>) {
|
||||
ownMap = newOwnMap;
|
||||
}
|
||||
|
||||
|
@ -523,7 +523,7 @@ namespace ts {
|
|||
|
||||
/*@internal*/
|
||||
export function createModuleResolutionCacheWithMaps(
|
||||
directoryToModuleNameMap: CacheWithRedirects<Map<ResolvedModuleWithFailedLookupLocations>>,
|
||||
directoryToModuleNameMap: CacheWithRedirects<Map<string, ResolvedModuleWithFailedLookupLocations>>,
|
||||
moduleNameToDirectoryMap: CacheWithRedirects<PerModuleNameCache>,
|
||||
currentDirectory: string,
|
||||
getCanonicalFileName: GetCanonicalFileName): ModuleResolutionCache {
|
||||
|
@ -532,7 +532,7 @@ namespace ts {
|
|||
|
||||
function getOrCreateCacheForDirectory(directoryName: string, redirectedReference?: ResolvedProjectReference) {
|
||||
const path = toPath(directoryName, currentDirectory, getCanonicalFileName);
|
||||
return getOrCreateCache<Map<ResolvedModuleWithFailedLookupLocations>>(directoryToModuleNameMap, redirectedReference, path, createMap);
|
||||
return getOrCreateCache<Map<string, ResolvedModuleWithFailedLookupLocations>>(directoryToModuleNameMap, redirectedReference, path, createMap);
|
||||
}
|
||||
|
||||
function getOrCreateCacheForModuleName(nonRelativeModuleName: string, redirectedReference?: ResolvedProjectReference): PerModuleNameCache {
|
||||
|
|
|
@ -722,13 +722,13 @@ namespace ts {
|
|||
|
||||
let currentToken: SyntaxKind;
|
||||
let nodeCount: number;
|
||||
let identifiers: Map<string>;
|
||||
let privateIdentifiers: Map<string>;
|
||||
let identifiers: Map<string, string>;
|
||||
let privateIdentifiers: Map<string, string>;
|
||||
let identifierCount: number;
|
||||
|
||||
let parsingContext: ParsingContext;
|
||||
|
||||
let notParenthesizedArrow: Map<true> | undefined;
|
||||
let notParenthesizedArrow: Set<number> | undefined;
|
||||
|
||||
// Flags that dictate what parsing context we're in. For example:
|
||||
// Whether or not we are in strict parsing mode. All that changes in strict parsing mode is
|
||||
|
@ -4121,13 +4121,13 @@ namespace ts {
|
|||
|
||||
function parsePossibleParenthesizedArrowFunctionExpression(): ArrowFunction | undefined {
|
||||
const tokenPos = scanner.getTokenPos();
|
||||
if (notParenthesizedArrow && notParenthesizedArrow.has(tokenPos.toString())) {
|
||||
if (notParenthesizedArrow?.has(tokenPos)) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const result = parseParenthesizedArrowFunctionExpression(/*allowAmbiguity*/ false);
|
||||
if (!result) {
|
||||
(notParenthesizedArrow || (notParenthesizedArrow = createMap())).set(tokenPos.toString(), true);
|
||||
(notParenthesizedArrow || (notParenthesizedArrow = new Set())).add(tokenPos);
|
||||
}
|
||||
|
||||
return result;
|
||||
|
|
|
@ -8,9 +8,9 @@ namespace ts.performance {
|
|||
|
||||
let enabled = false;
|
||||
let profilerStart = 0;
|
||||
let counts: Map<number>;
|
||||
let marks: Map<number>;
|
||||
let measures: Map<number>;
|
||||
let counts: Map<string, number>;
|
||||
let marks: Map<string, number>;
|
||||
let measures: Map<string, number>;
|
||||
|
||||
export interface Timer {
|
||||
enter(): void;
|
||||
|
|
|
@ -126,7 +126,7 @@ namespace ts {
|
|||
}
|
||||
}
|
||||
|
||||
let outputFingerprints: Map<OutputFingerprint>;
|
||||
let outputFingerprints: Map<string, OutputFingerprint>;
|
||||
function writeFileWorker(fileName: string, data: string, writeByteOrderMark: boolean) {
|
||||
if (!isWatchSet(options) || !system.createHash || !system.getModifiedTime) {
|
||||
system.writeFile(fileName, data, writeByteOrderMark);
|
||||
|
@ -533,7 +533,7 @@ namespace ts {
|
|||
export const inferredTypesContainingFile = "__inferred type names__.ts";
|
||||
|
||||
interface DiagnosticCache<T extends Diagnostic> {
|
||||
perFile?: Map<readonly T[]>;
|
||||
perFile?: Map<Path, readonly T[]>;
|
||||
allDiagnostics?: readonly T[];
|
||||
}
|
||||
|
||||
|
@ -702,14 +702,14 @@ namespace ts {
|
|||
let processingDefaultLibFiles: SourceFile[] | undefined;
|
||||
let processingOtherFiles: SourceFile[] | undefined;
|
||||
let files: SourceFile[];
|
||||
let symlinks: ReadonlyMap<string> | undefined;
|
||||
let symlinks: ReadonlyMap<string, string> | undefined;
|
||||
let commonSourceDirectory: string;
|
||||
let diagnosticsProducingTypeChecker: TypeChecker;
|
||||
let noDiagnosticsTypeChecker: TypeChecker;
|
||||
let classifiableNames: UnderscoreEscapedMap<true>;
|
||||
const ambientModuleNameToUnmodifiedFileName = createMap<string>();
|
||||
// Todo:: Use this to report why file was included in --extendedDiagnostics
|
||||
let refFileMap: MultiMap<ts.RefFile> | undefined;
|
||||
let refFileMap: MultiMap<Path, ts.RefFile> | undefined;
|
||||
|
||||
const cachedBindAndCheckDiagnosticsForFile: DiagnosticCache<Diagnostic> = {};
|
||||
const cachedDeclarationDiagnosticsForFile: DiagnosticCache<DiagnosticWithLocation> = {};
|
||||
|
@ -803,9 +803,9 @@ namespace ts {
|
|||
|
||||
// A parallel array to projectReferences storing the results of reading in the referenced tsconfig files
|
||||
let resolvedProjectReferences: readonly (ResolvedProjectReference | undefined)[] | undefined;
|
||||
let projectReferenceRedirects: Map<ResolvedProjectReference | false> | undefined;
|
||||
let mapFromFileToProjectReferenceRedirects: Map<Path> | undefined;
|
||||
let mapFromToProjectReferenceRedirectSource: Map<SourceOfProjectReferenceRedirect> | undefined;
|
||||
let projectReferenceRedirects: Map<Path, ResolvedProjectReference | false> | undefined;
|
||||
let mapFromFileToProjectReferenceRedirects: Map<Path, Path> | undefined;
|
||||
let mapFromToProjectReferenceRedirectSource: Map<Path, SourceOfProjectReferenceRedirect> | undefined;
|
||||
|
||||
const useSourceOfProjectReferenceRedirect = !!host.useSourceOfProjectReferenceRedirect?.() &&
|
||||
!options.disableSourceOfProjectReferenceRedirect;
|
||||
|
@ -2028,7 +2028,7 @@ namespace ts {
|
|||
): readonly U[] {
|
||||
|
||||
const cachedResult = sourceFile
|
||||
? cache.perFile && cache.perFile.get(sourceFile.path)
|
||||
? cache.perFile?.get(sourceFile.path)
|
||||
: cache.allDiagnostics;
|
||||
|
||||
if (cachedResult) {
|
||||
|
@ -2036,10 +2036,7 @@ namespace ts {
|
|||
}
|
||||
const result = getDiagnostics(sourceFile, cancellationToken);
|
||||
if (sourceFile) {
|
||||
if (!cache.perFile) {
|
||||
cache.perFile = createMap();
|
||||
}
|
||||
cache.perFile.set(sourceFile.path, result);
|
||||
(cache.perFile || (cache.perFile = new Map())).set(sourceFile.path, result);
|
||||
}
|
||||
else {
|
||||
cache.allDiagnostics = result;
|
||||
|
@ -2546,7 +2543,7 @@ namespace ts {
|
|||
*/
|
||||
function getResolvedProjectReferenceToRedirect(fileName: string) {
|
||||
if (mapFromFileToProjectReferenceRedirects === undefined) {
|
||||
mapFromFileToProjectReferenceRedirects = createMap();
|
||||
mapFromFileToProjectReferenceRedirects = new Map();
|
||||
forEachResolvedProjectReference((referencedProject, referenceProjectPath) => {
|
||||
// not input file from the referenced project, ignore
|
||||
if (referencedProject &&
|
||||
|
@ -2892,13 +2889,13 @@ namespace ts {
|
|||
function checkSourceFilesBelongToPath(sourceFiles: readonly SourceFile[], rootDirectory: string): boolean {
|
||||
let allFilesBelongToPath = true;
|
||||
const absoluteRootDirectoryPath = host.getCanonicalFileName(getNormalizedAbsolutePath(rootDirectory, currentDirectory));
|
||||
let rootPaths: Map<true> | undefined;
|
||||
let rootPaths: Set<Path> | undefined;
|
||||
|
||||
for (const sourceFile of sourceFiles) {
|
||||
if (!sourceFile.isDeclarationFile) {
|
||||
const absoluteSourceFilePath = host.getCanonicalFileName(getNormalizedAbsolutePath(sourceFile.fileName, currentDirectory));
|
||||
if (absoluteSourceFilePath.indexOf(absoluteRootDirectoryPath) !== 0) {
|
||||
if (!rootPaths) rootPaths = arrayToSet(rootNames, toPath);
|
||||
if (!rootPaths) rootPaths = new Set(rootNames.map(toPath));
|
||||
addProgramDiagnosticAtRefPath(
|
||||
sourceFile,
|
||||
rootPaths,
|
||||
|
@ -2916,7 +2913,7 @@ namespace ts {
|
|||
|
||||
function parseProjectReferenceConfigFile(ref: ProjectReference): ResolvedProjectReference | undefined {
|
||||
if (!projectReferenceRedirects) {
|
||||
projectReferenceRedirects = createMap<ResolvedProjectReference | false>();
|
||||
projectReferenceRedirects = new Map();
|
||||
}
|
||||
|
||||
// The actual filename (i.e. add "/tsconfig.json" if necessary)
|
||||
|
@ -3015,7 +3012,7 @@ namespace ts {
|
|||
|
||||
// List of collected files is complete; validate exhautiveness if this is a project with a file list
|
||||
if (options.composite) {
|
||||
const rootPaths = arrayToSet(rootNames, toPath);
|
||||
const rootPaths = new Set(rootNames.map(toPath));
|
||||
for (const file of files) {
|
||||
// Ignore file that is not emitted
|
||||
if (sourceFileMayBeEmitted(file, program) && !rootPaths.has(file.path)) {
|
||||
|
@ -3204,7 +3201,7 @@ namespace ts {
|
|||
// If the emit is enabled make sure that every output file is unique and not overwriting any of the input files
|
||||
if (!options.noEmit && !options.suppressOutputPathCheck) {
|
||||
const emitHost = getEmitHost();
|
||||
const emitFilesSeen = createMap<true>();
|
||||
const emitFilesSeen = new Set<string>();
|
||||
forEachEmittedFile(emitHost, (emitFileNames) => {
|
||||
if (!options.emitDeclarationOnly) {
|
||||
verifyEmitFilePath(emitFileNames.jsFilePath, emitFilesSeen);
|
||||
|
@ -3214,7 +3211,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
// Verify that all the emit files are unique and don't overwrite input files
|
||||
function verifyEmitFilePath(emitFileName: string | undefined, emitFilesSeen: Map<true>) {
|
||||
function verifyEmitFilePath(emitFileName: string | undefined, emitFilesSeen: Set<string>) {
|
||||
if (emitFileName) {
|
||||
const emitFilePath = toPath(emitFileName);
|
||||
// Report error if the output overwrites input file
|
||||
|
@ -3235,7 +3232,7 @@ namespace ts {
|
|||
blockEmittingOfFile(emitFileName, createCompilerDiagnostic(Diagnostics.Cannot_write_file_0_because_it_would_be_overwritten_by_multiple_input_files, emitFileName));
|
||||
}
|
||||
else {
|
||||
emitFilesSeen.set(emitFileKey, true);
|
||||
emitFilesSeen.add(emitFileKey);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -3262,8 +3259,8 @@ namespace ts {
|
|||
return createFileDiagnostic(refFile, pos, end - pos, message, ...args);
|
||||
}
|
||||
|
||||
function addProgramDiagnosticAtRefPath(file: SourceFile, rootPaths: Map<true>, message: DiagnosticMessage, ...args: (string | number | undefined)[]) {
|
||||
const refPaths = refFileMap && refFileMap.get(file.path);
|
||||
function addProgramDiagnosticAtRefPath(file: SourceFile, rootPaths: Set<Path>, message: DiagnosticMessage, ...args: (string | number | undefined)[]) {
|
||||
const refPaths = refFileMap?.get(file.path);
|
||||
const refPathToReportErrorOn = forEach(refPaths, refPath => rootPaths.has(refPath.file) ? refPath : undefined) ||
|
||||
elementAt(refPaths, 0);
|
||||
programDiagnostics.add(
|
||||
|
@ -3455,7 +3452,7 @@ namespace ts {
|
|||
return comparePaths(file1, file2, currentDirectory, !host.useCaseSensitiveFileNames()) === Comparison.EqualTo;
|
||||
}
|
||||
|
||||
function getProbableSymlinks(): ReadonlyMap<string> {
|
||||
function getProbableSymlinks(): ReadonlyMap<string, string> {
|
||||
if (host.getSymlinks) {
|
||||
return host.getSymlinks();
|
||||
}
|
||||
|
@ -3481,9 +3478,9 @@ namespace ts {
|
|||
}
|
||||
|
||||
function updateHostForUseSourceOfProjectReferenceRedirect(host: HostForUseSourceOfProjectReferenceRedirect) {
|
||||
let mapOfDeclarationDirectories: Map<true> | undefined;
|
||||
let symlinkedDirectories: Map<SymlinkedDirectory | false> | undefined;
|
||||
let symlinkedFiles: Map<string> | undefined;
|
||||
let setOfDeclarationDirectories: Set<Path> | undefined;
|
||||
let symlinkedDirectories: Map<Path, SymlinkedDirectory | false> | undefined;
|
||||
let symlinkedFiles: Map<Path, string> | undefined;
|
||||
|
||||
const originalFileExists = host.compilerHost.fileExists;
|
||||
const originalDirectoryExists = host.compilerHost.directoryExists;
|
||||
|
@ -3506,19 +3503,19 @@ namespace ts {
|
|||
|
||||
if (!host.getResolvedProjectReferences()) return false;
|
||||
|
||||
if (!mapOfDeclarationDirectories) {
|
||||
mapOfDeclarationDirectories = createMap();
|
||||
if (!setOfDeclarationDirectories) {
|
||||
setOfDeclarationDirectories = new Set();
|
||||
host.forEachResolvedProjectReference(ref => {
|
||||
if (!ref) return;
|
||||
const out = outFile(ref.commandLine.options);
|
||||
if (out) {
|
||||
mapOfDeclarationDirectories!.set(getDirectoryPath(host.toPath(out)), true);
|
||||
setOfDeclarationDirectories!.add(getDirectoryPath(host.toPath(out)));
|
||||
}
|
||||
else {
|
||||
// Record declaration directories only when they differ from the source location; if declarations sit next to their sources, the set of directories present doesn't change
|
||||
const declarationDir = ref.commandLine.options.declarationDir || ref.commandLine.options.outDir;
|
||||
if (declarationDir) {
|
||||
mapOfDeclarationDirectories!.set(host.toPath(declarationDir), true);
|
||||
setOfDeclarationDirectories!.add(host.toPath(declarationDir));
|
||||
}
|
||||
}
|
||||
});
|
||||
|
@ -3576,7 +3573,7 @@ namespace ts {
|
|||
const dirPath = host.toPath(dir);
|
||||
const dirPathWithTrailingDirectorySeparator = `${dirPath}${directorySeparator}`;
|
||||
return forEachKey(
|
||||
mapOfDeclarationDirectories!,
|
||||
setOfDeclarationDirectories!,
|
||||
declDirPath => dirPath === declDirPath ||
|
||||
// Any parent directory of declaration dir
|
||||
startsWith(declDirPath, dirPathWithTrailingDirectorySeparator) ||
|
||||
|
@ -3590,7 +3587,7 @@ namespace ts {
|
|||
|
||||
// Because we already watch node_modules, handle symlinks in there
|
||||
if (!originalRealpath || !stringContains(directory, nodeModulesPathPart)) return;
|
||||
if (!symlinkedDirectories) symlinkedDirectories = createMap();
|
||||
if (!symlinkedDirectories) symlinkedDirectories = new Map();
|
||||
const directoryPath = ensureTrailingDirectorySeparator(host.toPath(directory));
|
||||
if (symlinkedDirectories.has(directoryPath)) return;
|
||||
|
||||
|
@ -3629,7 +3626,7 @@ namespace ts {
|
|||
if (!symlinkedDirectory || !startsWith(fileOrDirectoryPath, directoryPath)) return undefined;
|
||||
const result = fileOrDirectoryExistsUsingSource(fileOrDirectoryPath.replace(directoryPath, symlinkedDirectory.realPath));
|
||||
if (isFile && result) {
|
||||
if (!symlinkedFiles) symlinkedFiles = createMap();
|
||||
if (!symlinkedFiles) symlinkedFiles = new Map();
|
||||
// Store the real path for the file
|
||||
const absolutePath = getNormalizedAbsolutePath(fileOrDirectory, host.compilerHost.getCurrentDirectory());
|
||||
symlinkedFiles.set(
|
||||
|
|
|
@ -13,7 +13,7 @@ namespace ts {
|
|||
invalidateResolutionOfFile(filePath: Path): void;
|
||||
removeResolutionsOfFile(filePath: Path): void;
|
||||
removeResolutionsFromProjectReferenceRedirects(filePath: Path): void;
|
||||
setFilesWithInvalidatedNonRelativeUnresolvedImports(filesWithUnresolvedImports: Map<readonly string[]>): void;
|
||||
setFilesWithInvalidatedNonRelativeUnresolvedImports(filesWithUnresolvedImports: Map<Path, readonly string[]>): void;
|
||||
createHasInvalidatedResolution(forceAllFilesAsInvalidated?: boolean): HasInvalidatedResolution;
|
||||
hasChangedAutomaticTypeDirectiveNames(): boolean;
|
||||
|
||||
|
@ -143,8 +143,8 @@ namespace ts {
|
|||
|
||||
export function createResolutionCache(resolutionHost: ResolutionCacheHost, rootDirForResolution: string | undefined, logChangesWhenResolvingModule: boolean): ResolutionCache {
|
||||
let filesWithChangedSetOfUnresolvedImports: Path[] | undefined;
|
||||
let filesWithInvalidatedResolutions: Map<true> | undefined;
|
||||
let filesWithInvalidatedNonRelativeUnresolvedImports: ReadonlyMap<readonly string[]> | undefined;
|
||||
let filesWithInvalidatedResolutions: Set<Path> | undefined;
|
||||
let filesWithInvalidatedNonRelativeUnresolvedImports: ReadonlyMap<Path, readonly string[]> | undefined;
|
||||
const nonRelativeExternalModuleResolutions = createMultiMap<ResolutionWithFailedLookupLocations>();
|
||||
|
||||
const resolutionsWithFailedLookups: ResolutionWithFailedLookupLocations[] = [];
|
||||
|
@ -161,8 +161,8 @@ namespace ts {
|
|||
// The resolvedModuleNames and resolvedTypeReferenceDirectives are the cache of resolutions per file.
|
||||
// The key in the map is source file's path.
|
||||
// The values are Map of resolutions with key being name lookedup.
|
||||
const resolvedModuleNames = createMap<Map<CachedResolvedModuleWithFailedLookupLocations>>();
|
||||
const perDirectoryResolvedModuleNames: CacheWithRedirects<Map<CachedResolvedModuleWithFailedLookupLocations>> = createCacheWithRedirects();
|
||||
const resolvedModuleNames = new Map<Path, Map<string, CachedResolvedModuleWithFailedLookupLocations>>();
|
||||
const perDirectoryResolvedModuleNames: CacheWithRedirects<Map<string, CachedResolvedModuleWithFailedLookupLocations>> = createCacheWithRedirects();
|
||||
const nonRelativeModuleNameCache: CacheWithRedirects<PerModuleNameCache> = createCacheWithRedirects();
|
||||
const moduleResolutionCache = createModuleResolutionCacheWithMaps(
|
||||
perDirectoryResolvedModuleNames,
|
||||
|
@ -171,8 +171,8 @@ namespace ts {
|
|||
resolutionHost.getCanonicalFileName
|
||||
);
|
||||
|
||||
const resolvedTypeReferenceDirectives = createMap<Map<CachedResolvedTypeReferenceDirectiveWithFailedLookupLocations>>();
|
||||
const perDirectoryResolvedTypeReferenceDirectives: CacheWithRedirects<Map<CachedResolvedTypeReferenceDirectiveWithFailedLookupLocations>> = createCacheWithRedirects();
|
||||
const resolvedTypeReferenceDirectives = new Map<Path, Map<string, CachedResolvedTypeReferenceDirectiveWithFailedLookupLocations>>();
|
||||
const perDirectoryResolvedTypeReferenceDirectives: CacheWithRedirects<Map<string, CachedResolvedTypeReferenceDirectiveWithFailedLookupLocations>> = createCacheWithRedirects();
|
||||
|
||||
/**
|
||||
* These are the extensions that failed lookup files will have by default,
|
||||
|
@ -334,8 +334,8 @@ namespace ts {
|
|||
names: readonly string[];
|
||||
containingFile: string;
|
||||
redirectedReference: ResolvedProjectReference | undefined;
|
||||
cache: Map<Map<T>>;
|
||||
perDirectoryCacheWithRedirects: CacheWithRedirects<Map<T>>;
|
||||
cache: Map<Path, Map<string, T>>;
|
||||
perDirectoryCacheWithRedirects: CacheWithRedirects<Map<string, T>>;
|
||||
loader: (name: string, containingFile: string, options: CompilerOptions, host: ModuleResolutionHost, redirectedReference?: ResolvedProjectReference) => T;
|
||||
getResolutionWithResolvedFileName: GetResolutionWithResolvedFileName<T, R>;
|
||||
shouldRetryResolution: (t: T) => boolean;
|
||||
|
@ -684,7 +684,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
function removeResolutionsOfFileFromCache<T extends ResolutionWithFailedLookupLocations, R extends ResolutionWithResolvedFileName>(
|
||||
cache: Map<Map<T>>,
|
||||
cache: Map<string, Map<string, T>>,
|
||||
filePath: Path,
|
||||
getResolutionWithResolvedFileName: GetResolutionWithResolvedFileName<T, R>,
|
||||
) {
|
||||
|
@ -722,7 +722,7 @@ namespace ts {
|
|||
if (resolution.isInvalidated || !canInvalidate(resolution)) continue;
|
||||
resolution.isInvalidated = invalidated = true;
|
||||
for (const containingFilePath of Debug.assertDefined(resolution.files)) {
|
||||
(filesWithInvalidatedResolutions || (filesWithInvalidatedResolutions = createMap<true>())).set(containingFilePath, true);
|
||||
(filesWithInvalidatedResolutions || (filesWithInvalidatedResolutions = new Set())).add(containingFilePath);
|
||||
// When its a file with inferred types resolution, invalidate type reference directive resolution
|
||||
hasChangedAutomaticTypeDirectiveNames = hasChangedAutomaticTypeDirectiveNames || containingFilePath.endsWith(inferredTypesContainingFile);
|
||||
}
|
||||
|
@ -741,7 +741,7 @@ namespace ts {
|
|||
}
|
||||
}
|
||||
|
||||
function setFilesWithInvalidatedNonRelativeUnresolvedImports(filesMap: ReadonlyMap<readonly string[]>) {
|
||||
function setFilesWithInvalidatedNonRelativeUnresolvedImports(filesMap: ReadonlyMap<Path, readonly string[]>) {
|
||||
Debug.assert(filesWithInvalidatedNonRelativeUnresolvedImports === filesMap || filesWithInvalidatedNonRelativeUnresolvedImports === undefined);
|
||||
filesWithInvalidatedNonRelativeUnresolvedImports = filesMap;
|
||||
}
|
||||
|
|
|
@ -330,7 +330,7 @@ namespace ts {
|
|||
lookupInUnicodeMap(code, unicodeES3IdentifierPart);
|
||||
}
|
||||
|
||||
function makeReverseMap(source: Map<number>): string[] {
|
||||
function makeReverseMap(source: Map<string, number>): string[] {
|
||||
const result: string[] = [];
|
||||
source.forEach((value, name) => {
|
||||
result[value] = name;
|
||||
|
|
|
@ -16,7 +16,7 @@ namespace ts {
|
|||
let sourcesContent: (string | null)[] | undefined;
|
||||
|
||||
const names: string[] = [];
|
||||
let nameToNameIndexMap: Map<number> | undefined;
|
||||
let nameToNameIndexMap: Map<string, number> | undefined;
|
||||
let mappings = "";
|
||||
|
||||
// Last recorded and encoded mappings
|
||||
|
@ -622,7 +622,7 @@ namespace ts {
|
|||
const generatedAbsoluteFilePath = getNormalizedAbsolutePath(map.file, mapDirectory);
|
||||
const generatedFile = host.getSourceFileLike(generatedAbsoluteFilePath);
|
||||
const sourceFileAbsolutePaths = map.sources.map(source => getNormalizedAbsolutePath(source, sourceRoot));
|
||||
const sourceToSourceIndexMap = createMapFromEntries(sourceFileAbsolutePaths.map((source, i) => [host.getCanonicalFileName(source), i] as [string, number]));
|
||||
const sourceToSourceIndexMap = new Map(sourceFileAbsolutePaths.map((source, i) => [host.getCanonicalFileName(source), i]));
|
||||
let decodedMappings: readonly MappedPosition[] | undefined;
|
||||
let generatedMappings: SortedReadonlyArray<MappedPosition> | undefined;
|
||||
let sourceMappings: readonly SortedReadonlyArray<SourceMappedPosition>[] | undefined;
|
||||
|
|
|
@ -310,7 +310,7 @@ namespace ts {
|
|||
function createUseFsEventsOnParentDirectoryWatchFile(fsWatch: FsWatch, useCaseSensitiveFileNames: boolean): HostWatchFile {
|
||||
// One file can have multiple watchers
|
||||
const fileWatcherCallbacks = createMultiMap<FileWatcherCallback>();
|
||||
const dirWatchers = createMap<DirectoryWatcher>();
|
||||
const dirWatchers = new Map<string, DirectoryWatcher>();
|
||||
const toCanonicalName = createGetCanonicalFileName(useCaseSensitiveFileNames);
|
||||
return nonPollingWatchFile;
|
||||
|
||||
|
@ -370,7 +370,7 @@ namespace ts {
|
|||
watcher: FileWatcher;
|
||||
refCount: number;
|
||||
}
|
||||
const cache = createMap<SingleFileWatcher>();
|
||||
const cache = new Map<string, SingleFileWatcher>();
|
||||
const callbacksCache = createMultiMap<FileWatcherCallback>();
|
||||
const toCanonicalFileName = createGetCanonicalFileName(useCaseSensitiveFileNames);
|
||||
|
||||
|
@ -473,9 +473,9 @@ namespace ts {
|
|||
refCount: number;
|
||||
}
|
||||
|
||||
const cache = createMap<HostDirectoryWatcher>();
|
||||
const callbackCache = createMultiMap<{ dirName: string; callback: DirectoryWatcherCallback; }>();
|
||||
const cacheToUpdateChildWatches = createMap<{ dirName: string; options: WatchOptions | undefined; fileNames: string[]; }>();
|
||||
const cache = new Map<string, HostDirectoryWatcher>();
|
||||
const callbackCache = createMultiMap<Path, { dirName: string; callback: DirectoryWatcherCallback; }>();
|
||||
const cacheToUpdateChildWatches = new Map<Path, { dirName: string; options: WatchOptions | undefined; fileNames: string[]; }>();
|
||||
let timerToUpdateChildWatches: any;
|
||||
|
||||
const filePathComparer = getStringComparer(!host.useCaseSensitiveFileNames);
|
||||
|
@ -538,7 +538,7 @@ namespace ts {
|
|||
};
|
||||
}
|
||||
|
||||
type InvokeMap = Map<string[] | true>;
|
||||
type InvokeMap = Map<Path, string[] | true>;
|
||||
function invokeCallbacks(dirPath: Path, fileName: string): void;
|
||||
function invokeCallbacks(dirPath: Path, invokeMap: InvokeMap, fileNames: string[] | undefined): void;
|
||||
function invokeCallbacks(dirPath: Path, fileNameOrInvokeMap: string | InvokeMap, fileNames?: string[]) {
|
||||
|
@ -608,7 +608,7 @@ namespace ts {
|
|||
timerToUpdateChildWatches = undefined;
|
||||
sysLog(`sysLog:: onTimerToUpdateChildWatches:: ${cacheToUpdateChildWatches.size}`);
|
||||
const start = timestamp();
|
||||
const invokeMap = createMap<string[]>();
|
||||
const invokeMap = new Map<Path, string[]>();
|
||||
|
||||
while (!timerToUpdateChildWatches && cacheToUpdateChildWatches.size) {
|
||||
const { value: [dirPath, { dirName, options, fileNames }], done } = cacheToUpdateChildWatches.entries().next();
|
||||
|
@ -616,8 +616,8 @@ namespace ts {
|
|||
cacheToUpdateChildWatches.delete(dirPath);
|
||||
// Because the child refresh is fresh, we would need to invalidate whole root directory being watched
|
||||
// to ensure that all the changes are reflected at this time
|
||||
const hasChanges = updateChildWatches(dirName, dirPath as Path, options);
|
||||
invokeCallbacks(dirPath as Path, invokeMap, hasChanges ? undefined : fileNames);
|
||||
const hasChanges = updateChildWatches(dirName, dirPath, options);
|
||||
invokeCallbacks(dirPath, invokeMap, hasChanges ? undefined : fileNames);
|
||||
}
|
||||
|
||||
sysLog(`sysLog:: invokingWatchers:: ${timestamp() - start}ms:: ${cacheToUpdateChildWatches.size}`);
|
||||
|
@ -1316,7 +1316,7 @@ namespace ts {
|
|||
*/
|
||||
function cleanupPaths(profile: import("inspector").Profiler.Profile) {
|
||||
let externalFileCounter = 0;
|
||||
const remappedPaths = createMap<string>();
|
||||
const remappedPaths = new Map<string, string>();
|
||||
const normalizedDir = normalizeSlashes(__dirname);
|
||||
// Windows rooted dir names need an extra `/` prepended to be valid file:/// urls
|
||||
const fileUrlRoot = `file://${getRootLength(normalizedDir) === 1 ? "" : "/"}${normalizedDir}`;
|
||||
|
|
|
@ -58,9 +58,9 @@ namespace ts {
|
|||
let needsScopeFixMarker = false;
|
||||
let resultHasScopeMarker = false;
|
||||
let enclosingDeclaration: Node;
|
||||
let necessaryTypeReferences: Map<true> | undefined;
|
||||
let necessaryTypeReferences: Set<string> | undefined;
|
||||
let lateMarkedStatements: LateVisibilityPaintedStatement[] | undefined;
|
||||
let lateStatementReplacementMap: Map<VisitResult<LateVisibilityPaintedStatement | ExportAssignment>>;
|
||||
let lateStatementReplacementMap: Map<string, VisitResult<LateVisibilityPaintedStatement | ExportAssignment>>;
|
||||
let suppressNewDiagnosticContexts: boolean;
|
||||
let exportedModulesFromDeclarationEmit: Symbol[] | undefined;
|
||||
|
||||
|
@ -81,8 +81,8 @@ namespace ts {
|
|||
let errorNameNode: DeclarationName | undefined;
|
||||
|
||||
let currentSourceFile: SourceFile;
|
||||
let refs: Map<SourceFile>;
|
||||
let libs: Map<boolean>;
|
||||
let refs: Map<string, SourceFile>;
|
||||
let libs: Map<string, boolean>;
|
||||
let emittedImports: readonly AnyImportSyntax[] | undefined; // must be declared in container so it can be `undefined` while transformer's first pass
|
||||
const resolver = context.getEmitResolver();
|
||||
const options = context.getCompilerOptions();
|
||||
|
@ -93,9 +93,9 @@ namespace ts {
|
|||
if (!typeReferenceDirectives) {
|
||||
return;
|
||||
}
|
||||
necessaryTypeReferences = necessaryTypeReferences || createMap<true>();
|
||||
necessaryTypeReferences = necessaryTypeReferences || new Set();
|
||||
for (const ref of typeReferenceDirectives) {
|
||||
necessaryTypeReferences.set(ref, true);
|
||||
necessaryTypeReferences.add(ref);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -402,7 +402,7 @@ namespace ts {
|
|||
}
|
||||
}
|
||||
|
||||
function collectReferences(sourceFile: SourceFile | UnparsedSource, ret: Map<SourceFile>) {
|
||||
function collectReferences(sourceFile: SourceFile | UnparsedSource, ret: Map<string, SourceFile>) {
|
||||
if (noResolve || (!isUnparsedSource(sourceFile) && isSourceFileJS(sourceFile))) return ret;
|
||||
forEach(sourceFile.referencedFiles, f => {
|
||||
const elem = host.getSourceFileFromReference(sourceFile, f);
|
||||
|
@ -413,7 +413,7 @@ namespace ts {
|
|||
return ret;
|
||||
}
|
||||
|
||||
function collectLibs(sourceFile: SourceFile | UnparsedSource, ret: Map<boolean>) {
|
||||
function collectLibs(sourceFile: SourceFile | UnparsedSource, ret: Map<string, boolean>) {
|
||||
forEach(sourceFile.libReferenceDirectives, ref => {
|
||||
const lib = host.getLibFileFromReference(ref);
|
||||
if (lib) {
|
||||
|
|
|
@ -72,15 +72,15 @@ namespace ts {
|
|||
* set of labels that occurred inside the converted loop
|
||||
* used to determine if labeled jump can be emitted as is or it should be dispatched to calling code
|
||||
*/
|
||||
labels?: Map<boolean>;
|
||||
labels?: Map<string, boolean>;
|
||||
/*
|
||||
* collection of labeled jumps that transfer control outside the converted loop.
|
||||
* maps store association 'label -> labelMarker' where
|
||||
* - label - value of label as it appear in code
|
||||
* - label marker - return value that should be interpreted by calling code as 'jump to <label>'
|
||||
*/
|
||||
labeledNonLocalBreaks?: Map<string>;
|
||||
labeledNonLocalContinues?: Map<string>;
|
||||
labeledNonLocalBreaks?: Map<string, string>;
|
||||
labeledNonLocalContinues?: Map<string, string>;
|
||||
|
||||
/*
|
||||
* set of non-labeled jumps that transfer control outside the converted loop
|
||||
|
@ -3291,7 +3291,7 @@ namespace ts {
|
|||
}
|
||||
}
|
||||
|
||||
function processLabeledJumps(table: Map<string>, isBreak: boolean, loopResultName: Identifier, outerLoop: ConvertedLoopState | undefined, caseClauses: CaseClause[]): void {
|
||||
function processLabeledJumps(table: Map<string, string>, isBreak: boolean, loopResultName: Identifier, outerLoop: ConvertedLoopState | undefined, caseClauses: CaseClause[]): void {
|
||||
if (!table) {
|
||||
return;
|
||||
}
|
||||
|
|
|
@ -244,7 +244,7 @@ namespace ts {
|
|||
const previousOnSubstituteNode = context.onSubstituteNode;
|
||||
context.onSubstituteNode = onSubstituteNode;
|
||||
|
||||
let renamedCatchVariables: Map<boolean>;
|
||||
let renamedCatchVariables: Map<string, boolean>;
|
||||
let renamedCatchVariableDeclarations: Identifier[];
|
||||
|
||||
let inGeneratorFunctionBody: boolean;
|
||||
|
|
|
@ -13,7 +13,7 @@ namespace ts {
|
|||
context.enableEmitNotification(SyntaxKind.SourceFile);
|
||||
context.enableSubstitution(SyntaxKind.Identifier);
|
||||
|
||||
let helperNameSubstitutions: Map<Identifier> | undefined;
|
||||
let helperNameSubstitutions: Map<string, Identifier> | undefined;
|
||||
return chainBundle(context, transformSourceFile);
|
||||
|
||||
function transformSourceFile(node: SourceFile) {
|
||||
|
|
|
@ -8,7 +8,7 @@ namespace ts {
|
|||
export interface ExternalModuleInfo {
|
||||
externalImports: (ImportDeclaration | ImportEqualsDeclaration | ExportDeclaration)[]; // imports of other external modules
|
||||
externalHelpersImportDeclaration: ImportDeclaration | undefined; // import of external helpers
|
||||
exportSpecifiers: Map<ExportSpecifier[]>; // file-local export specifiers by name (no reexports)
|
||||
exportSpecifiers: Map<string, ExportSpecifier[]>; // file-local export specifiers by name (no reexports)
|
||||
exportedBindings: Identifier[][]; // exported names of local declarations
|
||||
exportedNames: Identifier[] | undefined; // all exported names in the module, both local and reexported
|
||||
exportEquals: ExportAssignment | undefined; // an export= declaration if one was present
|
||||
|
@ -219,7 +219,7 @@ namespace ts {
|
|||
}
|
||||
}
|
||||
|
||||
function collectExportedVariableInfo(decl: VariableDeclaration | BindingElement, uniqueExports: Map<boolean>, exportedNames: Identifier[] | undefined) {
|
||||
function collectExportedVariableInfo(decl: VariableDeclaration | BindingElement, uniqueExports: Map<string, boolean>, exportedNames: Identifier[] | undefined) {
|
||||
if (isBindingPattern(decl.name)) {
|
||||
for (const element of decl.name.elements) {
|
||||
if (!isOmittedExpression(element)) {
|
||||
|
|
|
@ -51,24 +51,8 @@ namespace ts {
|
|||
|
||||
/*@internal*/
|
||||
export type ResolvedConfigFilePath = ResolvedConfigFileName & Path;
|
||||
interface FileMap<T, U extends Path = Path> extends Map<T> {
|
||||
get(key: U): T | undefined;
|
||||
has(key: U): boolean;
|
||||
forEach(action: (value: T, key: U) => void): void;
|
||||
readonly size: number;
|
||||
keys(): Iterator<U>;
|
||||
values(): Iterator<T>;
|
||||
entries(): Iterator<[U, T]>;
|
||||
set(key: U, value: T): this;
|
||||
delete(key: U): boolean;
|
||||
clear(): void;
|
||||
}
|
||||
type ConfigFileMap<T> = FileMap<T, ResolvedConfigFilePath>;
|
||||
function createConfigFileMap<T>(): ConfigFileMap<T> {
|
||||
return createMap() as ConfigFileMap<T>;
|
||||
}
|
||||
|
||||
function getOrCreateValueFromConfigFileMap<T>(configFileMap: ConfigFileMap<T>, resolved: ResolvedConfigFilePath, createT: () => T): T {
|
||||
function getOrCreateValueFromConfigFileMap<T>(configFileMap: Map<ResolvedConfigFilePath, T>, resolved: ResolvedConfigFilePath, createT: () => T): T {
|
||||
const existingValue = configFileMap.get(resolved);
|
||||
let newValue: T | undefined;
|
||||
if (!existingValue) {
|
||||
|
@ -78,8 +62,8 @@ namespace ts {
|
|||
return existingValue || newValue!;
|
||||
}
|
||||
|
||||
function getOrCreateValueMapFromConfigFileMap<T>(configFileMap: ConfigFileMap<Map<T>>, resolved: ResolvedConfigFilePath): Map<T> {
|
||||
return getOrCreateValueFromConfigFileMap<Map<T>>(configFileMap, resolved, createMap);
|
||||
function getOrCreateValueMapFromConfigFileMap<T>(configFileMap: Map<ResolvedConfigFilePath, Map<string, T>>, resolved: ResolvedConfigFilePath): Map<string, T> {
|
||||
return getOrCreateValueFromConfigFileMap<Map<string, T>>(configFileMap, resolved, createMap);
|
||||
}
|
||||
|
||||
function newer(date1: Date, date2: Date): Date {
|
||||
|
@ -238,17 +222,17 @@ namespace ts {
|
|||
readonly rootNames: readonly string[];
|
||||
readonly baseWatchOptions: WatchOptions | undefined;
|
||||
|
||||
readonly resolvedConfigFilePaths: Map<ResolvedConfigFilePath>;
|
||||
readonly configFileCache: ConfigFileMap<ConfigFileCacheEntry>;
|
||||
readonly resolvedConfigFilePaths: Map<string, ResolvedConfigFilePath>;
|
||||
readonly configFileCache: Map<ResolvedConfigFilePath, ConfigFileCacheEntry>;
|
||||
/** Map from config file name to up-to-date status */
|
||||
readonly projectStatus: ConfigFileMap<UpToDateStatus>;
|
||||
readonly buildInfoChecked: ConfigFileMap<true>;
|
||||
readonly extendedConfigCache: Map<ExtendedConfigCacheEntry>;
|
||||
readonly projectStatus: Map<ResolvedConfigFilePath, UpToDateStatus>;
|
||||
readonly buildInfoChecked: Map<ResolvedConfigFilePath, true>;
|
||||
readonly extendedConfigCache: Map<string, ExtendedConfigCacheEntry>;
|
||||
|
||||
readonly builderPrograms: ConfigFileMap<T>;
|
||||
readonly diagnostics: ConfigFileMap<readonly Diagnostic[]>;
|
||||
readonly projectPendingBuild: ConfigFileMap<ConfigFileProgramReloadLevel>;
|
||||
readonly projectErrorsReported: ConfigFileMap<true>;
|
||||
readonly builderPrograms: Map<ResolvedConfigFilePath, T>;
|
||||
readonly diagnostics: Map<ResolvedConfigFilePath, readonly Diagnostic[]>;
|
||||
readonly projectPendingBuild: Map<ResolvedConfigFilePath, ConfigFileProgramReloadLevel>;
|
||||
readonly projectErrorsReported: Map<ResolvedConfigFilePath, true>;
|
||||
|
||||
readonly compilerHost: CompilerHost;
|
||||
readonly moduleResolutionCache: ModuleResolutionCache | undefined;
|
||||
|
@ -265,9 +249,9 @@ namespace ts {
|
|||
|
||||
// Watch state
|
||||
readonly watch: boolean;
|
||||
readonly allWatchedWildcardDirectories: ConfigFileMap<Map<WildcardDirectoryWatcher>>;
|
||||
readonly allWatchedInputFiles: ConfigFileMap<Map<FileWatcher>>;
|
||||
readonly allWatchedConfigFiles: ConfigFileMap<FileWatcher>;
|
||||
readonly allWatchedWildcardDirectories: Map<ResolvedConfigFilePath, Map<string, WildcardDirectoryWatcher>>;
|
||||
readonly allWatchedInputFiles: Map<ResolvedConfigFilePath, Map<Path, FileWatcher>>;
|
||||
readonly allWatchedConfigFiles: Map<ResolvedConfigFilePath, FileWatcher>;
|
||||
|
||||
timerToBuildInvalidatedProject: any;
|
||||
reportFileChangeDetected: boolean;
|
||||
|
@ -313,16 +297,16 @@ namespace ts {
|
|||
rootNames,
|
||||
baseWatchOptions,
|
||||
|
||||
resolvedConfigFilePaths: createMap(),
|
||||
configFileCache: createConfigFileMap(),
|
||||
projectStatus: createConfigFileMap(),
|
||||
buildInfoChecked: createConfigFileMap(),
|
||||
extendedConfigCache: createMap(),
|
||||
resolvedConfigFilePaths: new Map(),
|
||||
configFileCache: new Map(),
|
||||
projectStatus: new Map(),
|
||||
buildInfoChecked: new Map(),
|
||||
extendedConfigCache: new Map(),
|
||||
|
||||
builderPrograms: createConfigFileMap(),
|
||||
diagnostics: createConfigFileMap(),
|
||||
projectPendingBuild: createConfigFileMap(),
|
||||
projectErrorsReported: createConfigFileMap(),
|
||||
builderPrograms: new Map(),
|
||||
diagnostics: new Map(),
|
||||
projectPendingBuild: new Map(),
|
||||
projectErrorsReported: new Map(),
|
||||
|
||||
compilerHost,
|
||||
moduleResolutionCache,
|
||||
|
@ -339,9 +323,9 @@ namespace ts {
|
|||
|
||||
// Watch state
|
||||
watch,
|
||||
allWatchedWildcardDirectories: createConfigFileMap(),
|
||||
allWatchedInputFiles: createConfigFileMap(),
|
||||
allWatchedConfigFiles: createConfigFileMap(),
|
||||
allWatchedWildcardDirectories: new Map(),
|
||||
allWatchedInputFiles: new Map(),
|
||||
allWatchedConfigFiles: new Map(),
|
||||
|
||||
timerToBuildInvalidatedProject: undefined,
|
||||
reportFileChangeDetected: false,
|
||||
|
@ -400,8 +384,8 @@ namespace ts {
|
|||
}
|
||||
|
||||
function createBuildOrder(state: SolutionBuilderState, roots: readonly ResolvedConfigFileName[]): AnyBuildOrder {
|
||||
const temporaryMarks = createMap() as ConfigFileMap<true>;
|
||||
const permanentMarks = createMap() as ConfigFileMap<true>;
|
||||
const temporaryMarks = new Map<ResolvedConfigFilePath, true>();
|
||||
const permanentMarks = new Map<ResolvedConfigFilePath, true>();
|
||||
const circularityReportStack: string[] = [];
|
||||
let buildOrder: ResolvedConfigFileName[] | undefined;
|
||||
let circularDiagnostics: Diagnostic[] | undefined;
|
||||
|
@ -458,7 +442,7 @@ namespace ts {
|
|||
const currentProjects = arrayToSet(
|
||||
getBuildOrderFromAnyBuildOrder(buildOrder),
|
||||
resolved => toResolvedConfigFilePath(state, resolved)
|
||||
) as ConfigFileMap<true>;
|
||||
) as Map<ResolvedConfigFilePath, true>;
|
||||
|
||||
const noopOnDelete = { onDeleteValue: noop };
|
||||
// Config file cache
|
||||
|
@ -940,7 +924,7 @@ namespace ts {
|
|||
let newestDeclarationFileContentChangedTime = minimumDate;
|
||||
let anyDtsChanged = false;
|
||||
const emitterDiagnostics = createDiagnosticCollection();
|
||||
const emittedOutputs = createMap() as FileMap<string>;
|
||||
const emittedOutputs = new Map<Path, string>();
|
||||
outputFiles.forEach(({ name, text, writeByteOrderMark }) => {
|
||||
let priorChangeTime: Date | undefined;
|
||||
if (!anyDtsChanged && isDeclarationFile(name)) {
|
||||
|
@ -992,7 +976,7 @@ namespace ts {
|
|||
|
||||
function finishEmit(
|
||||
emitterDiagnostics: DiagnosticCollection,
|
||||
emittedOutputs: FileMap<string>,
|
||||
emittedOutputs: Map<Path, string>,
|
||||
priorNewestUpdateTime: Date,
|
||||
newestDeclarationFileContentChangedTimeIsMaximumDate: boolean,
|
||||
oldestOutputFileName: string,
|
||||
|
@ -1073,7 +1057,7 @@ namespace ts {
|
|||
// Actual Emit
|
||||
Debug.assert(!!outputFiles.length);
|
||||
const emitterDiagnostics = createDiagnosticCollection();
|
||||
const emittedOutputs = createMap() as FileMap<string>;
|
||||
const emittedOutputs = new Map<Path, string>();
|
||||
outputFiles.forEach(({ name, text, writeByteOrderMark }) => {
|
||||
emittedOutputs.set(toPath(state, name), name);
|
||||
writeFile(writeFileCallback ? { writeFile: writeFileCallback } : compilerHost, emitterDiagnostics, name, text, writeByteOrderMark);
|
||||
|
@ -1560,7 +1544,7 @@ namespace ts {
|
|||
return actual;
|
||||
}
|
||||
|
||||
function updateOutputTimestampsWorker(state: SolutionBuilderState, proj: ParsedCommandLine, priorNewestUpdateTime: Date, verboseMessage: DiagnosticMessage, skipOutputs?: FileMap<string>) {
|
||||
function updateOutputTimestampsWorker(state: SolutionBuilderState, proj: ParsedCommandLine, priorNewestUpdateTime: Date, verboseMessage: DiagnosticMessage, skipOutputs?: Map<Path, string>) {
|
||||
const { host } = state;
|
||||
const outputs = getAllProjectOutputs(proj, !host.useCaseSensitiveFileNames());
|
||||
if (!skipOutputs || outputs.length !== skipOutputs.size) {
|
||||
|
|
|
@ -3397,7 +3397,7 @@ namespace ts {
|
|||
|
||||
// this map is used by transpiler to supply alternative names for dependencies (i.e. in case of bundling)
|
||||
/* @internal */
|
||||
renamedDependencies?: ReadonlyMap<string>;
|
||||
renamedDependencies?: ReadonlyMap<string, string>;
|
||||
|
||||
/**
|
||||
* lib.d.ts should have a reference comment like
|
||||
|
@ -3423,7 +3423,7 @@ namespace ts {
|
|||
// JS identifier-declarations that are intended to merge with globals
|
||||
/* @internal */ jsGlobalAugmentations?: SymbolTable;
|
||||
|
||||
/* @internal */ identifiers: Map<string>; // Map from a string to an interned string
|
||||
/* @internal */ identifiers: Map<string, string>; // Map from a string to an interned string
|
||||
/* @internal */ nodeCount: number;
|
||||
/* @internal */ identifierCount: number;
|
||||
/* @internal */ symbolCount: number;
|
||||
|
@ -3451,8 +3451,8 @@ namespace ts {
|
|||
// Stores a mapping 'external module reference text' -> 'resolved file name' | undefined
|
||||
// It is used to resolve module names in the checker.
|
||||
// Content of this field should never be used directly - use getResolvedModuleFileName/setResolvedModuleFileName functions instead
|
||||
/* @internal */ resolvedModules?: Map<ResolvedModuleFull | undefined>;
|
||||
/* @internal */ resolvedTypeReferenceDirectiveNames: Map<ResolvedTypeReferenceDirective | undefined>;
|
||||
/* @internal */ resolvedModules?: Map<string, ResolvedModuleFull | undefined>;
|
||||
/* @internal */ resolvedTypeReferenceDirectiveNames: Map<string, ResolvedTypeReferenceDirective | undefined>;
|
||||
/* @internal */ imports: readonly StringLiteralLike[];
|
||||
// Identifier only if `declare global`
|
||||
/* @internal */ moduleAugmentations: readonly (StringLiteral | Identifier)[];
|
||||
|
@ -3682,9 +3682,9 @@ namespace ts {
|
|||
/* @internal */
|
||||
getMissingFilePaths(): readonly Path[];
|
||||
/* @internal */
|
||||
getRefFileMap(): MultiMap<RefFile> | undefined;
|
||||
getRefFileMap(): MultiMap<Path, RefFile> | undefined;
|
||||
/* @internal */
|
||||
getFilesByNameMap(): Map<SourceFile | false | undefined>;
|
||||
getFilesByNameMap(): Map<string, SourceFile | false | undefined>;
|
||||
|
||||
/**
|
||||
* Emits the JavaScript and declaration files. If targetSourceFile is not specified, then
|
||||
|
@ -3734,7 +3734,7 @@ namespace ts {
|
|||
getRelationCacheSizes(): { assignable: number, identity: number, subtype: number, strictSubtype: number };
|
||||
|
||||
/* @internal */ getFileProcessingDiagnostics(): DiagnosticCollection;
|
||||
/* @internal */ getResolvedTypeReferenceDirectives(): Map<ResolvedTypeReferenceDirective | undefined>;
|
||||
/* @internal */ getResolvedTypeReferenceDirectives(): Map<string, ResolvedTypeReferenceDirective | undefined>;
|
||||
isSourceFileFromExternalLibrary(file: SourceFile): boolean;
|
||||
isSourceFileDefaultLibrary(file: SourceFile): boolean;
|
||||
|
||||
|
@ -3745,9 +3745,9 @@ namespace ts {
|
|||
/* @internal */ getLibFileFromReference(ref: FileReference): SourceFile | undefined;
|
||||
|
||||
/** Given a source file, get the name of the package it was imported from. */
|
||||
/* @internal */ sourceFileToPackageName: Map<string>;
|
||||
/* @internal */ sourceFileToPackageName: Map<string, string>;
|
||||
/** Set of all source files that some other source file redirects to. */
|
||||
/* @internal */ redirectTargetsMap: MultiMap<string>;
|
||||
/* @internal */ redirectTargetsMap: MultiMap<string, string>;
|
||||
/** Is the file emitted file */
|
||||
/* @internal */ isEmittedFile(file: string): boolean;
|
||||
|
||||
|
@ -3762,7 +3762,7 @@ namespace ts {
|
|||
/*@internal*/ isSourceOfProjectReferenceRedirect(fileName: string): boolean;
|
||||
/*@internal*/ getProgramBuildInfo?(): ProgramBuildInfo | undefined;
|
||||
/*@internal*/ emitBuildInfo(writeFile?: WriteFileCallback, cancellationToken?: CancellationToken): EmitResult;
|
||||
/*@internal*/ getProbableSymlinks(): ReadonlyMap<string>;
|
||||
/*@internal*/ getProbableSymlinks(): ReadonlyMap<string, string>;
|
||||
/**
|
||||
* This implementation handles file exists to be true if file is source of project reference redirect when program is created using useSourceOfProjectReferenceRedirect
|
||||
*/
|
||||
|
@ -3774,7 +3774,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
/* @internal */
|
||||
export type RedirectTargetsMap = ReadonlyMap<readonly string[]>;
|
||||
export type RedirectTargetsMap = ReadonlyMap<string, readonly string[]>;
|
||||
|
||||
export interface ResolvedProjectReference {
|
||||
commandLine: ParsedCommandLine;
|
||||
|
@ -3870,7 +3870,7 @@ namespace ts {
|
|||
|
||||
getSourceFiles(): readonly SourceFile[];
|
||||
getSourceFile(fileName: string): SourceFile | undefined;
|
||||
getResolvedTypeReferenceDirectives(): ReadonlyMap<ResolvedTypeReferenceDirective | undefined>;
|
||||
getResolvedTypeReferenceDirectives(): ReadonlyMap<string, ResolvedTypeReferenceDirective | undefined>;
|
||||
getProjectReferenceRedirect(fileName: string): string | undefined;
|
||||
isSourceOfProjectReferenceRedirect(fileName: string): boolean;
|
||||
|
||||
|
@ -4591,7 +4591,7 @@ namespace ts {
|
|||
/* @internal */ isReferenced?: SymbolFlags; // True if the symbol is referenced elsewhere. Keeps track of the meaning of a reference in case a symbol is both a type parameter and parameter.
|
||||
/* @internal */ isReplaceableByMethod?: boolean; // Can this Javascript class property be replaced by a method symbol?
|
||||
/* @internal */ isAssigned?: boolean; // True if the symbol is a parameter with assignments
|
||||
/* @internal */ assignmentDeclarationMembers?: Map<Declaration>; // detected late-bound assignment declarations associated with the symbol
|
||||
/* @internal */ assignmentDeclarationMembers?: Map<number, Declaration>; // detected late-bound assignment declarations associated with the symbol
|
||||
}
|
||||
|
||||
/* @internal */
|
||||
|
@ -4604,8 +4604,8 @@ namespace ts {
|
|||
declaredType?: Type; // Type of class, interface, enum, type alias, or type parameter
|
||||
typeParameters?: TypeParameter[]; // Type parameters of type alias (undefined if non-generic)
|
||||
outerTypeParameters?: TypeParameter[]; // Outer type parameters of anonymous object type
|
||||
instantiations?: Map<Type>; // Instantiations of generic type alias (undefined if non-generic)
|
||||
inferredClassSymbol?: Map<TransientSymbol>; // Symbol of an inferred ES5 constructor function
|
||||
instantiations?: Map<string, Type>; // Instantiations of generic type alias (undefined if non-generic)
|
||||
inferredClassSymbol?: Map<string, TransientSymbol>; // Symbol of an inferred ES5 constructor function
|
||||
mapper?: TypeMapper; // Type mapper for instantiation alias
|
||||
referenced?: boolean; // True if alias symbol has been referenced as a value that can be emitted
|
||||
constEnumReferenced?: boolean; // True if alias symbol resolves to a const enum and is referenced as a value ('referenced' will be false)
|
||||
|
@ -4624,9 +4624,9 @@ namespace ts {
|
|||
enumKind?: EnumKind; // Enum declaration classification
|
||||
originatingImport?: ImportDeclaration | ImportCall; // Import declaration which produced the symbol, present if the symbol is marked as uncallable but had call signatures in `resolveESModuleSymbol`
|
||||
lateSymbol?: Symbol; // Late-bound symbol for a computed property
|
||||
specifierCache?: Map<string>; // For symbols corresponding to external modules, a cache of incoming path -> module specifier name mappings
|
||||
specifierCache?: Map<string, string>; // For symbols corresponding to external modules, a cache of incoming path -> module specifier name mappings
|
||||
extendedContainers?: Symbol[]; // Containers (other than the parent) which this symbol is aliased in
|
||||
extendedContainersByFile?: Map<Symbol[]>; // Containers (other than the parent) which this symbol is aliased in
|
||||
extendedContainersByFile?: Map<string, Symbol[]>; // Containers (other than the parent) which this symbol is aliased in
|
||||
variances?: VarianceFlags[]; // Alias symbol type argument variance cache
|
||||
deferralConstituents?: Type[]; // Calculated list of constituents for a deferred type
|
||||
deferralParent?: Type; // Source union/intersection of a deferred type
|
||||
|
@ -4718,21 +4718,11 @@ namespace ts {
|
|||
export type __String = (string & { __escapedIdentifier: void }) | (void & { __escapedIdentifier: void }) | InternalSymbolName;
|
||||
|
||||
/** ReadonlyMap where keys are `__String`s. */
|
||||
export interface ReadonlyUnderscoreEscapedMap<T> {
|
||||
get(key: __String): T | undefined;
|
||||
has(key: __String): boolean;
|
||||
forEach(action: (value: T, key: __String) => void): void;
|
||||
readonly size: number;
|
||||
keys(): Iterator<__String>;
|
||||
values(): Iterator<T>;
|
||||
entries(): Iterator<[__String, T]>;
|
||||
export interface ReadonlyUnderscoreEscapedMap<T> extends ReadonlyMap<__String, T> {
|
||||
}
|
||||
|
||||
/** Map where keys are `__String`s. */
|
||||
export interface UnderscoreEscapedMap<T> extends ReadonlyUnderscoreEscapedMap<T> {
|
||||
set(key: __String, value: T): this;
|
||||
delete(key: __String): boolean;
|
||||
clear(): void;
|
||||
export interface UnderscoreEscapedMap<T> extends Map<__String, T>, ReadonlyUnderscoreEscapedMap<T> {
|
||||
}
|
||||
|
||||
/** SymbolTable based on ES6 Map interface. */
|
||||
|
@ -4792,10 +4782,10 @@ namespace ts {
|
|||
switchTypes?: Type[]; // Cached array of switch case expression types
|
||||
jsxNamespace?: Symbol | false; // Resolved jsx namespace symbol for this node
|
||||
contextFreeType?: Type; // Cached context-free type used by the first pass of inference; used when a function's return is partially contextually sensitive
|
||||
deferredNodes?: Map<Node>; // Set of nodes whose checking has been deferred
|
||||
deferredNodes?: Map<string, Node>; // Set of nodes whose checking has been deferred
|
||||
capturedBlockScopeBindings?: Symbol[]; // Block-scoped bindings captured beneath this part of an IterationStatement
|
||||
outerTypeParameters?: TypeParameter[]; // Outer type parameters of anonymous object type
|
||||
instantiations?: Map<Type>; // Instantiations of generic type alias (undefined if non-generic)
|
||||
instantiations?: Map<string, Type>; // Instantiations of generic type alias (undefined if non-generic)
|
||||
isExhaustive?: boolean; // Is node an exhaustive switch statement
|
||||
skipDirectInference?: true; // Flag set by the API `getContextualType` call on a node when `Completions` is passed to force the checker to skip making inferences to a node's type
|
||||
declarationRequiresScopeChange?: boolean; // Set by `useOuterVariableScopeInParameter` in checker when downlevel emit would change the name resolution scope inside of a parameter.
|
||||
|
@ -5095,7 +5085,7 @@ namespace ts {
|
|||
// Generic class and interface types
|
||||
export interface GenericType extends InterfaceType, TypeReference {
|
||||
/* @internal */
|
||||
instantiations: Map<TypeReference>; // Generic instantiation cache
|
||||
instantiations: Map<string, TypeReference>; // Generic instantiation cache
|
||||
/* @internal */
|
||||
variances?: VarianceFlags[]; // Variance of each type parameter
|
||||
}
|
||||
|
@ -5283,7 +5273,7 @@ namespace ts {
|
|||
isDistributive: boolean;
|
||||
inferTypeParameters?: TypeParameter[];
|
||||
outerTypeParameters?: TypeParameter[];
|
||||
instantiations?: Map<Type>;
|
||||
instantiations?: Map<string, Type>;
|
||||
aliasSymbol?: Symbol;
|
||||
aliasTypeArguments?: Type[];
|
||||
}
|
||||
|
@ -5378,7 +5368,7 @@ namespace ts {
|
|||
/* @internal */
|
||||
isolatedSignatureType?: ObjectType; // A manufactured type that just contains the signature for purposes of signature comparison
|
||||
/* @internal */
|
||||
instantiations?: Map<Signature>; // Generic signature instantiation cache
|
||||
instantiations?: Map<string, Signature>; // Generic signature instantiation cache
|
||||
}
|
||||
|
||||
export const enum IndexKind {
|
||||
|
@ -5899,7 +5889,7 @@ namespace ts {
|
|||
/* @internal */
|
||||
export interface CommandLineOptionBase {
|
||||
name: string;
|
||||
type: "string" | "number" | "boolean" | "object" | "list" | Map<number | string>; // a value of a primitive type, or an object literal mapping named values to actual values
|
||||
type: "string" | "number" | "boolean" | "object" | "list" | Map<string, number | string>; // a value of a primitive type, or an object literal mapping named values to actual values
|
||||
isFilePath?: boolean; // True if option value is a path or fileName
|
||||
shortName?: string; // A short mnemonic for convenience - for instance, 'h' can be used in place of 'help'
|
||||
description?: DiagnosticMessage; // The message describing what the command line switch does
|
||||
|
@ -5924,7 +5914,7 @@ namespace ts {
|
|||
|
||||
/* @internal */
|
||||
export interface CommandLineOptionOfCustomType extends CommandLineOptionBase {
|
||||
type: Map<number | string>; // an object literal mapping named values to actual values
|
||||
type: Map<string, number | string>; // an object literal mapping named values to actual values
|
||||
}
|
||||
|
||||
/* @internal */
|
||||
|
@ -5937,7 +5927,7 @@ namespace ts {
|
|||
/* @internal */
|
||||
export interface TsConfigOnlyOption extends CommandLineOptionBase {
|
||||
type: "object";
|
||||
elementOptions?: Map<CommandLineOption>;
|
||||
elementOptions?: Map<string, CommandLineOption>;
|
||||
extraKeyDiagnostics?: DidYouMeanOptionsDiagnostics;
|
||||
}
|
||||
|
||||
|
@ -6226,7 +6216,7 @@ namespace ts {
|
|||
|
||||
// TODO: later handle this in better way in builder host instead once the api for tsbuild finalizes and doesn't use compilerHost as base
|
||||
/*@internal*/createDirectory?(directory: string): void;
|
||||
/*@internal*/getSymlinks?(): ReadonlyMap<string>;
|
||||
/*@internal*/getSymlinks?(): ReadonlyMap<string, string>;
|
||||
}
|
||||
|
||||
/** true if --out otherwise source file name */
|
||||
|
@ -7739,7 +7729,7 @@ namespace ts {
|
|||
fileExists(path: string): boolean;
|
||||
getCurrentDirectory(): string;
|
||||
readFile?(path: string): string | undefined;
|
||||
getProbableSymlinks?(files: readonly SourceFile[]): ReadonlyMap<string>;
|
||||
getProbableSymlinks?(files: readonly SourceFile[]): ReadonlyMap<string, string>;
|
||||
getGlobalTypingsCacheLocation?(): string | undefined;
|
||||
|
||||
getSourceFiles(): readonly SourceFile[];
|
||||
|
@ -8003,7 +7993,7 @@ namespace ts {
|
|||
export type PragmaPseudoMapEntry = {[K in keyof PragmaPseudoMap]: {name: K, args: PragmaPseudoMap[K]}}[keyof PragmaPseudoMap];
|
||||
|
||||
/* @internal */
|
||||
export interface ReadonlyPragmaMap extends ReadonlyMap<PragmaPseudoMap[keyof PragmaPseudoMap] | PragmaPseudoMap[keyof PragmaPseudoMap][]> {
|
||||
export interface ReadonlyPragmaMap extends ReadonlyMap<string, PragmaPseudoMap[keyof PragmaPseudoMap] | PragmaPseudoMap[keyof PragmaPseudoMap][]> {
|
||||
get<TKey extends keyof PragmaPseudoMap>(key: TKey): PragmaPseudoMap[TKey] | PragmaPseudoMap[TKey][];
|
||||
forEach(action: <TKey extends keyof PragmaPseudoMap>(value: PragmaPseudoMap[TKey] | PragmaPseudoMap[TKey][], key: TKey) => void): void;
|
||||
}
|
||||
|
@ -8014,7 +8004,7 @@ namespace ts {
|
|||
* in multiple places
|
||||
*/
|
||||
/* @internal */
|
||||
export interface PragmaMap extends Map<PragmaPseudoMap[keyof PragmaPseudoMap] | PragmaPseudoMap[keyof PragmaPseudoMap][]>, ReadonlyPragmaMap {
|
||||
export interface PragmaMap extends Map<string, PragmaPseudoMap[keyof PragmaPseudoMap] | PragmaPseudoMap[keyof PragmaPseudoMap][]>, ReadonlyPragmaMap {
|
||||
set<TKey extends keyof PragmaPseudoMap>(key: TKey, value: PragmaPseudoMap[TKey] | PragmaPseudoMap[TKey][]): this;
|
||||
get<TKey extends keyof PragmaPseudoMap>(key: TKey): PragmaPseudoMap[TKey] | PragmaPseudoMap[TKey][];
|
||||
forEach(action: <TKey extends keyof PragmaPseudoMap>(value: PragmaPseudoMap[TKey] | PragmaPseudoMap[TKey][], key: TKey) => void): void;
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* @internal */
|
||||
namespace ts {
|
||||
export const resolvingEmptyArray: never[] = [] as never[];
|
||||
export const emptyMap = createMap<never>() as ReadonlyMap<never> & ReadonlyPragmaMap;
|
||||
export const emptyMap = createMap<never, never>() as ReadonlyMap<never, never> & ReadonlyPragmaMap;
|
||||
export const emptyUnderscoreEscapedMap: ReadonlyUnderscoreEscapedMap<never> = emptyMap as ReadonlyUnderscoreEscapedMap<never>;
|
||||
|
||||
export const externalHelpersModuleNameText = "tslib";
|
||||
|
@ -24,7 +24,7 @@ namespace ts {
|
|||
|
||||
/** Create a new escaped identifier map. */
|
||||
export function createUnderscoreEscapedMap<T>(): UnderscoreEscapedMap<T> {
|
||||
return new Map<T>() as UnderscoreEscapedMap<T>;
|
||||
return new Map<string, T>() as UnderscoreEscapedMap<T>;
|
||||
}
|
||||
|
||||
export function hasEntries(map: ReadonlyUnderscoreEscapedMap<any> | undefined): map is ReadonlyUnderscoreEscapedMap<any> {
|
||||
|
@ -132,13 +132,11 @@ namespace ts {
|
|||
* Calls `callback` for each entry in the map, returning the first truthy result.
|
||||
* Use `map.forEach` instead for normal iteration.
|
||||
*/
|
||||
export function forEachEntry<T, U>(map: ReadonlyUnderscoreEscapedMap<T>, callback: (value: T, key: __String) => U | undefined): U | undefined;
|
||||
export function forEachEntry<T, U>(map: ReadonlyMap<T>, callback: (value: T, key: string) => U | undefined): U | undefined;
|
||||
export function forEachEntry<T, U>(map: ReadonlyUnderscoreEscapedMap<T> | ReadonlyMap<T>, callback: (value: T, key: (string & __String)) => U | undefined): U | undefined {
|
||||
export function forEachEntry<K, V, U>(map: ReadonlyMap<K, V>, callback: (value: V, key: K) => U | undefined): U | undefined {
|
||||
const iterator = map.entries();
|
||||
for (let iterResult = iterator.next(); !iterResult.done; iterResult = iterator.next()) {
|
||||
const [key, value] = iterResult.value;
|
||||
const result = callback(value, key as (string & __String));
|
||||
const result = callback(value, key);
|
||||
if (result) {
|
||||
return result;
|
||||
}
|
||||
|
@ -147,12 +145,10 @@ namespace ts {
|
|||
}
|
||||
|
||||
/** `forEachEntry` for just keys. */
|
||||
export function forEachKey<T>(map: ReadonlyUnderscoreEscapedMap<{}>, callback: (key: __String) => T | undefined): T | undefined;
|
||||
export function forEachKey<T>(map: ReadonlyMap<{}>, callback: (key: string) => T | undefined): T | undefined;
|
||||
export function forEachKey<T>(map: ReadonlyUnderscoreEscapedMap<{}> | ReadonlyMap<{}>, callback: (key: string & __String) => T | undefined): T | undefined {
|
||||
export function forEachKey<K, T>(map: ReadonlyCollection<K>, callback: (key: K) => T | undefined): T | undefined {
|
||||
const iterator = map.keys();
|
||||
for (let iterResult = iterator.next(); !iterResult.done; iterResult = iterator.next()) {
|
||||
const result = callback(iterResult.value as string & __String);
|
||||
const result = callback(iterResult.value);
|
||||
if (result) {
|
||||
return result;
|
||||
}
|
||||
|
@ -161,11 +157,9 @@ namespace ts {
|
|||
}
|
||||
|
||||
/** Copy entries from `source` to `target`. */
|
||||
export function copyEntries<T>(source: ReadonlyUnderscoreEscapedMap<T>, target: UnderscoreEscapedMap<T>): void;
|
||||
export function copyEntries<T>(source: ReadonlyMap<T>, target: Map<T>): void;
|
||||
export function copyEntries<T, U extends UnderscoreEscapedMap<T> | Map<T>>(source: U, target: U): void {
|
||||
(source as Map<T>).forEach((value, key) => {
|
||||
(target as Map<T>).set(key, value);
|
||||
export function copyEntries<K, V>(source: ReadonlyMap<K, V>, target: Map<K, V>): void {
|
||||
source.forEach((value, key) => {
|
||||
target.set(key, value);
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -174,19 +168,19 @@ namespace ts {
|
|||
*
|
||||
* @param array the array of input elements.
|
||||
*/
|
||||
export function arrayToSet(array: readonly string[]): Map<true>;
|
||||
export function arrayToSet<T>(array: readonly T[], makeKey: (value: T) => string | undefined): Map<true>;
|
||||
export function arrayToSet(array: readonly string[]): Map<string, true>;
|
||||
export function arrayToSet<T>(array: readonly T[], makeKey: (value: T) => string | undefined): Map<string, true>;
|
||||
export function arrayToSet<T>(array: readonly T[], makeKey: (value: T) => __String | undefined): UnderscoreEscapedMap<true>;
|
||||
export function arrayToSet(array: readonly any[], makeKey?: (value: any) => string | __String | undefined): Map<true> | UnderscoreEscapedMap<true> {
|
||||
export function arrayToSet(array: readonly any[], makeKey?: (value: any) => string | __String | undefined): Map<string, true> | UnderscoreEscapedMap<true> {
|
||||
return arrayToMap<any, true>(array, makeKey || (s => s), returnTrue);
|
||||
}
|
||||
|
||||
export function cloneMap(map: SymbolTable): SymbolTable;
|
||||
export function cloneMap<T>(map: ReadonlyMap<T>): Map<T>;
|
||||
export function cloneMap<T>(map: ReadonlyUnderscoreEscapedMap<T>): UnderscoreEscapedMap<T>;
|
||||
export function cloneMap<T>(map: ReadonlyMap<T> | ReadonlyUnderscoreEscapedMap<T> | SymbolTable): Map<T> | UnderscoreEscapedMap<T> | SymbolTable {
|
||||
const clone = createMap<T>();
|
||||
copyEntries(map as Map<T>, clone);
|
||||
export function cloneMap<K, V>(map: ReadonlyMap<K, V>): Map<K, V>;
|
||||
export function cloneMap<K, V>(map: ReadonlyMap<K, V>): Map<K, V> {
|
||||
const clone = createMap<K, V>();
|
||||
copyEntries(map, clone);
|
||||
return clone;
|
||||
}
|
||||
|
||||
|
@ -256,7 +250,7 @@ namespace ts {
|
|||
export function hasChangesInResolutions<T>(
|
||||
names: readonly string[],
|
||||
newResolutions: readonly T[],
|
||||
oldResolutions: ReadonlyMap<T> | undefined,
|
||||
oldResolutions: ReadonlyMap<string, T> | undefined,
|
||||
comparer: (oldResolution: T, newResolution: T) => boolean): boolean {
|
||||
Debug.assert(names.length === newResolutions.length);
|
||||
|
||||
|
@ -472,7 +466,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
export function createCommentDirectivesMap(sourceFile: SourceFile, commentDirectives: CommentDirective[]): CommentDirectivesMap {
|
||||
const directivesByLine = createMapFromEntries(
|
||||
const directivesByLine = new Map(
|
||||
commentDirectives.map(commentDirective => ([
|
||||
`${getLineAndCharacterOfPosition(sourceFile, commentDirective.range.end).line}`,
|
||||
commentDirective,
|
||||
|
@ -5267,7 +5261,7 @@ namespace ts {
|
|||
/**
|
||||
* clears already present map by calling onDeleteExistingValue callback before deleting that key/value
|
||||
*/
|
||||
export function clearMap<T>(map: { forEach: Map<T>["forEach"]; clear: Map<T>["clear"]; }, onDeleteValue: (valueInMap: T, key: string) => void) {
|
||||
export function clearMap<T>(map: { forEach: Map<string, T>["forEach"]; clear: Map<string, T>["clear"]; }, onDeleteValue: (valueInMap: T, key: string) => void) {
|
||||
// Remove all
|
||||
map.forEach(onDeleteValue);
|
||||
map.clear();
|
||||
|
@ -5289,8 +5283,8 @@ namespace ts {
|
|||
* Mutates the map with newMap such that keys in map will be same as newMap.
|
||||
*/
|
||||
export function mutateMapSkippingNewValues<T, U>(
|
||||
map: Map<T>,
|
||||
newMap: ReadonlyMap<U>,
|
||||
map: Map<string, T>,
|
||||
newMap: ReadonlyMap<string, U>,
|
||||
options: MutateMapSkippingNewValuesOptions<T, U>
|
||||
) {
|
||||
const { onDeleteValue, onExistingValue } = options;
|
||||
|
@ -5316,7 +5310,7 @@ namespace ts {
|
|||
/**
|
||||
* Mutates the map with newMap such that keys in map will be same as newMap.
|
||||
*/
|
||||
export function mutateMap<T, U>(map: Map<T>, newMap: ReadonlyMap<U>, options: MutateMapOptions<T, U>) {
|
||||
export function mutateMap<T, U>(map: Map<string, T>, newMap: ReadonlyMap<string, U>, options: MutateMapOptions<T, U>) {
|
||||
// Needs update
|
||||
mutateMapSkippingNewValues(map, newMap, options);
|
||||
|
||||
|
@ -5386,9 +5380,9 @@ namespace ts {
|
|||
}
|
||||
|
||||
/** Add a value to a set, and return true if it wasn't already present. */
|
||||
export function addToSeen(seen: Map<true>, key: string | number): boolean;
|
||||
export function addToSeen<T>(seen: Map<T>, key: string | number, value: T): boolean;
|
||||
export function addToSeen<T>(seen: Map<T>, key: string | number, value: T = true as any): boolean {
|
||||
export function addToSeen(seen: Map<string, true>, key: string | number): boolean;
|
||||
export function addToSeen<T>(seen: Map<string, T>, key: string | number, value: T): boolean;
|
||||
export function addToSeen<T>(seen: Map<string, T>, key: string | number, value: T = true as any): boolean {
|
||||
key = String(key);
|
||||
if (seen.has(key)) {
|
||||
return false;
|
||||
|
@ -5943,7 +5937,7 @@ namespace ts {
|
|||
return true;
|
||||
}
|
||||
|
||||
export function discoverProbableSymlinks(files: readonly SourceFile[], getCanonicalFileName: GetCanonicalFileName, cwd: string): ReadonlyMap<string> {
|
||||
export function discoverProbableSymlinks(files: readonly SourceFile[], getCanonicalFileName: GetCanonicalFileName, cwd: string): ReadonlyMap<string, string> {
|
||||
const result = createMap<string>();
|
||||
const symlinks = flatten<readonly [string, string]>(mapDefined(files, sf =>
|
||||
sf.resolvedModules && compact(arrayFrom(mapIterator(sf.resolvedModules.values(), res =>
|
||||
|
@ -6654,7 +6648,7 @@ namespace ts {
|
|||
return a === b || typeof a === "object" && a !== null && typeof b === "object" && b !== null && equalOwnProperties(a as MapLike<unknown>, b as MapLike<unknown>, isJsonEqual);
|
||||
}
|
||||
|
||||
export function getOrUpdate<T>(map: Map<T>, key: string, getDefault: () => T): T {
|
||||
export function getOrUpdate<T>(map: Map<string, T>, key: string, getDefault: () => T): T {
|
||||
const got = map.get(key);
|
||||
if (got === undefined) {
|
||||
const value = getDefault();
|
||||
|
|
|
@ -246,8 +246,8 @@ namespace ts {
|
|||
|
||||
let builderProgram: T;
|
||||
let reloadLevel: ConfigFileProgramReloadLevel; // level to indicate if the program needs to be reloaded from config file/just filenames etc
|
||||
let missingFilesMap: Map<FileWatcher>; // Map of file watchers for the missing files
|
||||
let watchedWildcardDirectories: Map<WildcardDirectoryWatcher>; // map of watchers for the wild card directories in the config file
|
||||
let missingFilesMap: Map<Path, FileWatcher>; // Map of file watchers for the missing files
|
||||
let watchedWildcardDirectories: Map<string, WildcardDirectoryWatcher>; // map of watchers for the wild card directories in the config file
|
||||
let timerToUpdateProgram: any; // timer callback to recompile the program
|
||||
let timerToInvalidateFailedLookupResolutions: any; // timer callback to invalidate resolutions for changes in failed lookup locations
|
||||
|
||||
|
@ -419,7 +419,7 @@ namespace ts {
|
|||
resolutionCache.finishCachingPerDirectoryResolution();
|
||||
|
||||
// Update watches
|
||||
updateMissingFilePathsWatch(builderProgram.getProgram(), missingFilesMap || (missingFilesMap = createMap()), watchMissingFilePath);
|
||||
updateMissingFilePathsWatch(builderProgram.getProgram(), missingFilesMap || (missingFilesMap = new Map()), watchMissingFilePath);
|
||||
if (needsUpdateInTypeRootWatch) {
|
||||
resolutionCache.updateTypeRootsWatch();
|
||||
}
|
||||
|
|
|
@ -262,7 +262,7 @@ namespace ts {
|
|||
*/
|
||||
export function updateMissingFilePathsWatch(
|
||||
program: Program,
|
||||
missingFileWatches: Map<FileWatcher>,
|
||||
missingFileWatches: Map<Path, FileWatcher>,
|
||||
createMissingFileWatch: (missingFilePath: Path) => FileWatcher,
|
||||
) {
|
||||
const missingFilePaths = program.getMissingFilePaths();
|
||||
|
@ -293,8 +293,8 @@ namespace ts {
|
|||
* as wildcard directories wont change unless reloading config file
|
||||
*/
|
||||
export function updateWatchingWildcardDirectories(
|
||||
existingWatchedForWildcards: Map<WildcardDirectoryWatcher>,
|
||||
wildcardDirectories: Map<WatchDirectoryFlags>,
|
||||
existingWatchedForWildcards: Map<string, WildcardDirectoryWatcher>,
|
||||
wildcardDirectories: Map<string, WatchDirectoryFlags>,
|
||||
watchDirectory: (directory: string, flags: WatchDirectoryFlags) => FileWatcher
|
||||
) {
|
||||
mutateMap(
|
||||
|
|
|
@ -100,7 +100,7 @@ namespace ts {
|
|||
if (option.name === "lib") {
|
||||
description = getDiagnosticText(option.description);
|
||||
const element = (<CommandLineOptionOfListType>option).element;
|
||||
const typeMap = <Map<number | string>>element.type;
|
||||
const typeMap = <Map<string, number | string>>element.type;
|
||||
optionsDescriptionMap.set(description, arrayFrom(typeMap.keys()).map(key => `'${key}'`));
|
||||
}
|
||||
else {
|
||||
|
|
|
@ -35,7 +35,7 @@ namespace ts.server {
|
|||
|
||||
export class SessionClient implements LanguageService {
|
||||
private sequence = 0;
|
||||
private lineMaps: Map<number[]> = createMap<number[]>();
|
||||
private lineMaps: Map<string, number[]> = createMap<number[]>();
|
||||
private messages: string[] = [];
|
||||
private lastRenameEntry: RenameEntry | undefined;
|
||||
|
||||
|
|
|
@ -29,7 +29,7 @@ namespace FourSlash {
|
|||
symlinks: vfs.FileSet | undefined;
|
||||
|
||||
// A mapping from marker names to name/position pairs
|
||||
markerPositions: ts.Map<Marker>;
|
||||
markerPositions: ts.Map<string, Marker>;
|
||||
|
||||
markers: Marker[];
|
||||
|
||||
|
@ -42,7 +42,7 @@ namespace FourSlash {
|
|||
* is a range with `text in range` "selected".
|
||||
*/
|
||||
ranges: Range[];
|
||||
rangesByText?: ts.MultiMap<Range>;
|
||||
rangesByText?: ts.MultiMap<string, Range>;
|
||||
}
|
||||
|
||||
export interface Marker {
|
||||
|
@ -2340,7 +2340,7 @@ namespace FourSlash {
|
|||
return this.getRanges().filter(r => r.fileName === fileName);
|
||||
}
|
||||
|
||||
public rangesByText(): ts.Map<Range[]> {
|
||||
public rangesByText(): ts.Map<string, Range[]> {
|
||||
if (this.testData.rangesByText) return this.testData.rangesByText;
|
||||
const result = ts.createMultiMap<Range>();
|
||||
this.testData.rangesByText = result;
|
||||
|
@ -3420,7 +3420,7 @@ namespace FourSlash {
|
|||
return text;
|
||||
}
|
||||
|
||||
private formatCallHierarchyItem(file: FourSlashFile, callHierarchyItem: ts.CallHierarchyItem, direction: CallHierarchyItemDirection, seen: ts.Map<boolean>, prefix: string, trailingPrefix: string = prefix) {
|
||||
private formatCallHierarchyItem(file: FourSlashFile, callHierarchyItem: ts.CallHierarchyItem, direction: CallHierarchyItemDirection, seen: ts.Map<string, boolean>, prefix: string, trailingPrefix: string = prefix) {
|
||||
const key = `${callHierarchyItem.file}|${JSON.stringify(callHierarchyItem.span)}|${direction}`;
|
||||
const alreadySeen = seen.has(key);
|
||||
seen.set(key, true);
|
||||
|
@ -3944,7 +3944,7 @@ namespace FourSlash {
|
|||
throw new Error(errorMessage);
|
||||
}
|
||||
|
||||
function recordObjectMarker(fileName: string, location: LocationInformation, text: string, markerMap: ts.Map<Marker>, markers: Marker[]): Marker | undefined {
|
||||
function recordObjectMarker(fileName: string, location: LocationInformation, text: string, markerMap: ts.Map<string, Marker>, markers: Marker[]): Marker | undefined {
|
||||
let markerValue: any;
|
||||
try {
|
||||
// Attempt to parse the marker value as JSON
|
||||
|
@ -3975,7 +3975,7 @@ namespace FourSlash {
|
|||
return marker;
|
||||
}
|
||||
|
||||
function recordMarker(fileName: string, location: LocationInformation, name: string, markerMap: ts.Map<Marker>, markers: Marker[]): Marker | undefined {
|
||||
function recordMarker(fileName: string, location: LocationInformation, name: string, markerMap: ts.Map<string, Marker>, markers: Marker[]): Marker | undefined {
|
||||
const marker: Marker = {
|
||||
fileName,
|
||||
position: location.position
|
||||
|
@ -3994,7 +3994,7 @@ namespace FourSlash {
|
|||
}
|
||||
}
|
||||
|
||||
function parseFileContent(content: string, fileName: string, markerMap: ts.Map<Marker>, markers: Marker[], ranges: Range[]): FourSlashFile {
|
||||
function parseFileContent(content: string, fileName: string, markerMap: ts.Map<string, Marker>, markers: Marker[], ranges: Range[]): FourSlashFile {
|
||||
content = chompLeadingSpace(content);
|
||||
|
||||
// Any slash-star comment with a character not in this string is not a marker.
|
||||
|
|
|
@ -27,7 +27,7 @@ namespace FourSlashInterface {
|
|||
return this.ranges().map(r => ts.createTextSpan(r.pos, r.end - r.pos));
|
||||
}
|
||||
|
||||
public rangesByText(): ts.Map<FourSlash.Range[]> {
|
||||
public rangesByText(): ts.Map<string, FourSlash.Range[]> {
|
||||
return this.state.rangesByText();
|
||||
}
|
||||
|
||||
|
|
|
@ -243,7 +243,7 @@ namespace Harness {
|
|||
export const es2015DefaultLibFileName = "lib.es2015.d.ts";
|
||||
|
||||
// Cache of lib files from "built/local"
|
||||
let libFileNameSourceFileMap: ts.Map<ts.SourceFile> | undefined;
|
||||
let libFileNameSourceFileMap: ts.Map<string, ts.SourceFile> | undefined;
|
||||
|
||||
export function getDefaultLibrarySourceFile(fileName = defaultLibFileName): ts.SourceFile | undefined {
|
||||
if (!isDefaultLibraryFile(fileName)) {
|
||||
|
@ -313,7 +313,7 @@ namespace Harness {
|
|||
{ name: "fullEmitPaths", type: "boolean" }
|
||||
];
|
||||
|
||||
let optionsIndex: ts.Map<ts.CommandLineOption>;
|
||||
let optionsIndex: ts.Map<string, ts.CommandLineOption>;
|
||||
function getCommandLineOption(name: string): ts.CommandLineOption | undefined {
|
||||
if (!optionsIndex) {
|
||||
optionsIndex = ts.createMap<ts.CommandLineOption>();
|
||||
|
@ -958,7 +958,7 @@ namespace Harness {
|
|||
}
|
||||
}
|
||||
|
||||
function checkDuplicatedFileName(resultName: string, dupeCase: ts.Map<number>): string {
|
||||
function checkDuplicatedFileName(resultName: string, dupeCase: ts.Map<string, number>): string {
|
||||
resultName = sanitizeTestFilePath(resultName);
|
||||
if (dupeCase.has(resultName)) {
|
||||
// A different baseline filename should be manufactured if the names differ only in case, for windows compat
|
||||
|
@ -1066,9 +1066,9 @@ namespace Harness {
|
|||
}
|
||||
}
|
||||
|
||||
let booleanVaryByStarSettingValues: ts.Map<string | number> | undefined;
|
||||
let booleanVaryByStarSettingValues: ts.Map<string, string | number> | undefined;
|
||||
|
||||
function getVaryByStarSettingValues(varyBy: string): ts.ReadonlyMap<string | number> | undefined {
|
||||
function getVaryByStarSettingValues(varyBy: string): ts.ReadonlyMap<string, string | number> | undefined {
|
||||
const option = ts.forEach(ts.optionDeclarations, decl => ts.equateStringsCaseInsensitive(decl.name, varyBy) ? decl : undefined);
|
||||
if (option) {
|
||||
if (typeof option.type === "object") {
|
||||
|
|
|
@ -126,7 +126,7 @@ namespace Harness.LanguageService {
|
|||
|
||||
export abstract class LanguageServiceAdapterHost {
|
||||
public readonly sys = new fakes.System(new vfs.FileSystem(/*ignoreCase*/ true, { cwd: virtualFileSystemRoot }));
|
||||
public typesRegistry: ts.Map<void> | undefined;
|
||||
public typesRegistry: ts.Map<string, void> | undefined;
|
||||
private scriptInfos: collections.SortedMap<string, ScriptInfo>;
|
||||
|
||||
constructor(protected cancellationToken = DefaultHostCancellationToken.instance,
|
||||
|
|
|
@ -83,7 +83,7 @@ namespace Playback {
|
|||
|
||||
let recordLog: IoLog | undefined;
|
||||
let replayLog: IoLog | undefined;
|
||||
let replayFilesRead: ts.Map<IoLogFile> | undefined;
|
||||
let replayFilesRead: ts.Map<string, IoLogFile> | undefined;
|
||||
let recordLogFileNameBase = "";
|
||||
|
||||
interface Memoized<T> {
|
||||
|
|
|
@ -36,7 +36,7 @@ interface Array<T> { length: number; [n: number]: T; }`
|
|||
currentDirectory?: string;
|
||||
newLine?: string;
|
||||
windowsStyleRoot?: string;
|
||||
environmentVariables?: Map<string>;
|
||||
environmentVariables?: Map<string, string>;
|
||||
runWithoutRecursiveWatches?: boolean;
|
||||
runWithFallbackPolling?: boolean;
|
||||
}
|
||||
|
@ -122,12 +122,12 @@ interface Array<T> { length: number; [n: number]: T; }`
|
|||
}
|
||||
}
|
||||
|
||||
function createWatcher<T>(map: MultiMap<T>, path: string, callback: T): FileWatcher {
|
||||
function createWatcher<T>(map: MultiMap<Path, T>, path: Path, callback: T): FileWatcher {
|
||||
map.add(path, callback);
|
||||
return { close: () => map.remove(path, callback) };
|
||||
}
|
||||
|
||||
function getDiffInKeys<T>(map: Map<T>, expectedKeys: readonly string[]) {
|
||||
function getDiffInKeys<T>(map: Map<string, T>, expectedKeys: readonly string[]) {
|
||||
if (map.size === expectedKeys.length) {
|
||||
return "";
|
||||
}
|
||||
|
@ -154,19 +154,19 @@ interface Array<T> { length: number; [n: number]: T; }`
|
|||
return `\n\nNotInActual: ${notInActual}\nDuplicates: ${duplicates}\nInActualButNotInExpected: ${inActualNotExpected}`;
|
||||
}
|
||||
|
||||
export function verifyMapSize(caption: string, map: Map<any>, expectedKeys: readonly string[]) {
|
||||
export function verifyMapSize(caption: string, map: Map<string, any>, expectedKeys: readonly string[]) {
|
||||
assert.equal(map.size, expectedKeys.length, `${caption}: incorrect size of map: Actual keys: ${arrayFrom(map.keys())} Expected: ${expectedKeys}${getDiffInKeys(map, expectedKeys)}`);
|
||||
}
|
||||
|
||||
export type MapValueTester<T, U> = [Map<U[]> | undefined, (value: T) => U];
|
||||
export type MapValueTester<T, U> = [Map<string, U[]> | undefined, (value: T) => U];
|
||||
|
||||
export function checkMap<T, U = undefined>(caption: string, actual: MultiMap<T>, expectedKeys: ReadonlyMap<number>, valueTester?: MapValueTester<T,U>): void;
|
||||
export function checkMap<T, U = undefined>(caption: string, actual: MultiMap<T>, expectedKeys: readonly string[], eachKeyCount: number, valueTester?: MapValueTester<T, U>): void;
|
||||
export function checkMap<T>(caption: string, actual: Map<T> | MultiMap<T>, expectedKeys: readonly string[], eachKeyCount: undefined): void;
|
||||
export function checkMap<T, U = undefined>(caption: string, actual: MultiMap<string, T>, expectedKeys: ReadonlyMap<string, number>, valueTester?: MapValueTester<T,U>): void;
|
||||
export function checkMap<T, U = undefined>(caption: string, actual: MultiMap<string, T>, expectedKeys: readonly string[], eachKeyCount: number, valueTester?: MapValueTester<T, U>): void;
|
||||
export function checkMap<T>(caption: string, actual: Map<string, T> | MultiMap<string, T>, expectedKeys: readonly string[], eachKeyCount: undefined): void;
|
||||
export function checkMap<T, U = undefined>(
|
||||
caption: string,
|
||||
actual: Map<T> | MultiMap<T>,
|
||||
expectedKeysMapOrArray: ReadonlyMap<number> | readonly string[],
|
||||
actual: Map<string, T> | MultiMap<string, T>,
|
||||
expectedKeysMapOrArray: ReadonlyMap<string, number> | readonly string[],
|
||||
eachKeyCountOrValueTester?: number | MapValueTester<T, U>,
|
||||
valueTester?: MapValueTester<T, U>) {
|
||||
const expectedKeys = isArray(expectedKeysMapOrArray) ? arrayToMap(expectedKeysMapOrArray, s => s, () => eachKeyCountOrValueTester as number) : expectedKeysMapOrArray;
|
||||
|
@ -179,10 +179,10 @@ interface Array<T> { length: number; [n: number]: T; }`
|
|||
assert.isTrue(actual.has(name), `${caption}: expected to contain ${name}, actual keys: ${arrayFrom(actual.keys())}`);
|
||||
// Check key information only if eachKeyCount is provided
|
||||
if (!isArray(expectedKeysMapOrArray) || eachKeyCountOrValueTester !== undefined) {
|
||||
assert.equal((actual as MultiMap<T>).get(name)!.length, count, `${caption}: Expected to be have ${count} entries for ${name}. Actual entry: ${JSON.stringify(actual.get(name))}`);
|
||||
assert.equal((actual as MultiMap<string, T>).get(name)!.length, count, `${caption}: Expected to be have ${count} entries for ${name}. Actual entry: ${JSON.stringify(actual.get(name))}`);
|
||||
if (expectedValues) {
|
||||
assert.deepEqual(
|
||||
(actual as MultiMap<T>).get(name)!.map(valueMapper),
|
||||
(actual as MultiMap<string, T>).get(name)!.map(valueMapper),
|
||||
expectedValues.get(name),
|
||||
`${caption}:: expected values mismatch for ${name}`
|
||||
);
|
||||
|
@ -203,9 +203,9 @@ interface Array<T> { length: number; [n: number]: T; }`
|
|||
fileName: string;
|
||||
pollingInterval: PollingInterval;
|
||||
}
|
||||
export function checkWatchedFilesDetailed(host: TestServerHost, expectedFiles: ReadonlyMap<number>, expectedDetails?: Map<WatchFileDetails[]>): void;
|
||||
export function checkWatchedFilesDetailed(host: TestServerHost, expectedFiles: readonly string[], eachFileWatchCount: number, expectedDetails?: Map<WatchFileDetails[]>): void;
|
||||
export function checkWatchedFilesDetailed(host: TestServerHost, expectedFiles: ReadonlyMap<number> | readonly string[], eachFileWatchCountOrExpectedDetails?: number | Map<WatchFileDetails[]>, expectedDetails?: Map<WatchFileDetails[]>) {
|
||||
export function checkWatchedFilesDetailed(host: TestServerHost, expectedFiles: ReadonlyMap<string, number>, expectedDetails?: Map<string, WatchFileDetails[]>): void;
|
||||
export function checkWatchedFilesDetailed(host: TestServerHost, expectedFiles: readonly string[], eachFileWatchCount: number, expectedDetails?: Map<string, WatchFileDetails[]>): void;
|
||||
export function checkWatchedFilesDetailed(host: TestServerHost, expectedFiles: ReadonlyMap<string, number> | readonly string[], eachFileWatchCountOrExpectedDetails?: number | Map<string, WatchFileDetails[]>, expectedDetails?: Map<string, WatchFileDetails[]>) {
|
||||
if (!isNumber(eachFileWatchCountOrExpectedDetails)) expectedDetails = eachFileWatchCountOrExpectedDetails;
|
||||
if (isArray(expectedFiles)) {
|
||||
checkMap(
|
||||
|
@ -235,9 +235,9 @@ interface Array<T> { length: number; [n: number]: T; }`
|
|||
fallbackPollingInterval: PollingInterval;
|
||||
fallbackOptions: WatchOptions | undefined;
|
||||
}
|
||||
export function checkWatchedDirectoriesDetailed(host: TestServerHost, expectedDirectories: ReadonlyMap<number>, recursive: boolean, expectedDetails?: Map<WatchDirectoryDetails[]>): void;
|
||||
export function checkWatchedDirectoriesDetailed(host: TestServerHost, expectedDirectories: readonly string[], eachDirectoryWatchCount: number, recursive: boolean, expectedDetails?: Map<WatchDirectoryDetails[]>): void;
|
||||
export function checkWatchedDirectoriesDetailed(host: TestServerHost, expectedDirectories: ReadonlyMap<number> | readonly string[], recursiveOrEachDirectoryWatchCount: boolean | number, recursiveOrExpectedDetails?: boolean | Map<WatchDirectoryDetails[]>, expectedDetails?: Map<WatchDirectoryDetails[]>) {
|
||||
export function checkWatchedDirectoriesDetailed(host: TestServerHost, expectedDirectories: ReadonlyMap<string, number>, recursive: boolean, expectedDetails?: Map<string, WatchDirectoryDetails[]>): void;
|
||||
export function checkWatchedDirectoriesDetailed(host: TestServerHost, expectedDirectories: readonly string[], eachDirectoryWatchCount: number, recursive: boolean, expectedDetails?: Map<string, WatchDirectoryDetails[]>): void;
|
||||
export function checkWatchedDirectoriesDetailed(host: TestServerHost, expectedDirectories: ReadonlyMap<string, number> | readonly string[], recursiveOrEachDirectoryWatchCount: boolean | number, recursiveOrExpectedDetails?: boolean | Map<string, WatchDirectoryDetails[]>, expectedDetails?: Map<string, WatchDirectoryDetails[]>) {
|
||||
if (typeof recursiveOrExpectedDetails !== "boolean") expectedDetails = recursiveOrExpectedDetails;
|
||||
if (isArray(expectedDirectories)) {
|
||||
checkMap(
|
||||
|
@ -368,7 +368,7 @@ interface Array<T> { length: number; [n: number]: T; }`
|
|||
fileOrFolderorSymLinkList: readonly FileOrFolderOrSymLink[];
|
||||
newLine?: string;
|
||||
useWindowsStylePaths?: boolean;
|
||||
environmentVariables?: Map<string>;
|
||||
environmentVariables?: Map<string, string>;
|
||||
}
|
||||
|
||||
export class TestServerHost implements server.ServerHost, FormatDiagnosticsHost, ModuleResolutionHost {
|
||||
|
@ -376,7 +376,7 @@ interface Array<T> { length: number; [n: number]: T; }`
|
|||
|
||||
private readonly output: string[] = [];
|
||||
|
||||
private fs: Map<FSEntry> = createMap<FSEntry>();
|
||||
private fs: Map<Path, FSEntry> = new Map();
|
||||
private time = timeIncrements;
|
||||
getCanonicalFileName: (s: string) => string;
|
||||
private toPath: (f: string) => Path;
|
||||
|
@ -384,14 +384,14 @@ interface Array<T> { length: number; [n: number]: T; }`
|
|||
private immediateCallbacks = new Callbacks();
|
||||
readonly screenClears: number[] = [];
|
||||
|
||||
readonly watchedFiles = createMultiMap<TestFileWatcher>();
|
||||
readonly fsWatches = createMultiMap<TestFsWatcher>();
|
||||
readonly fsWatchesRecursive = createMultiMap<TestFsWatcher>();
|
||||
readonly watchedFiles = createMultiMap<Path, TestFileWatcher>();
|
||||
readonly fsWatches = createMultiMap<Path, TestFsWatcher>();
|
||||
readonly fsWatchesRecursive = createMultiMap<Path, TestFsWatcher>();
|
||||
runWithFallbackPolling: boolean;
|
||||
public readonly useCaseSensitiveFileNames: boolean;
|
||||
public readonly newLine: string;
|
||||
public readonly windowsStyleRoot?: string;
|
||||
private readonly environmentVariables?: Map<string>;
|
||||
private readonly environmentVariables?: Map<string, string>;
|
||||
private readonly executingFilePath: string;
|
||||
private readonly currentDirectory: string;
|
||||
public require: ((initialPath: string, moduleName: string) => RequireResult) | undefined;
|
||||
|
@ -750,7 +750,7 @@ interface Array<T> { length: number; [n: number]: T; }`
|
|||
invokeWatcherCallbacks(this.watchedFiles.get(this.toPath(fileFullPath)), ({ cb, fileName }) => cb(useFileNameInCallback ? fileName : fileFullPath, eventKind));
|
||||
}
|
||||
|
||||
private fsWatchCallback(map: MultiMap<TestFsWatcher>, fullPath: string, eventName: "rename" | "change", entryFullPath?: string) {
|
||||
private fsWatchCallback(map: MultiMap<Path, TestFsWatcher>, fullPath: string, eventName: "rename" | "change", entryFullPath?: string) {
|
||||
invokeWatcherCallbacks(map.get(this.toPath(fullPath)), ({ cb }) => cb(eventName, entryFullPath ? this.getRelativePathToDirectory(fullPath, entryFullPath) : ""));
|
||||
}
|
||||
|
||||
|
@ -1042,8 +1042,8 @@ interface Array<T> { length: number; [n: number]: T; }`
|
|||
this.clearOutput();
|
||||
}
|
||||
|
||||
snap(): Map<FSEntry> {
|
||||
const result = new Map<FSEntry>();
|
||||
snap(): Map<Path, FSEntry> {
|
||||
const result = new Map<Path, FSEntry>();
|
||||
this.fs.forEach((value, key) => {
|
||||
const cloneValue = clone(value);
|
||||
if (isFsFolder(cloneValue)) {
|
||||
|
@ -1055,8 +1055,8 @@ interface Array<T> { length: number; [n: number]: T; }`
|
|||
return result;
|
||||
}
|
||||
|
||||
writtenFiles?: Map<number>;
|
||||
diff(baseline: string[], base: Map<FSEntry> = new Map()) {
|
||||
writtenFiles?: Map<Path, number>;
|
||||
diff(baseline: string[], base: Map<string, FSEntry> = new Map()) {
|
||||
this.fs.forEach(newFsEntry => {
|
||||
diffFsEntry(baseline, base.get(newFsEntry.path), newFsEntry, this.writtenFiles);
|
||||
});
|
||||
|
@ -1115,7 +1115,7 @@ interface Array<T> { length: number; [n: number]: T; }`
|
|||
function diffFsSymLink(baseline: string[], fsEntry: FsSymLink) {
|
||||
baseline.push(`//// [${fsEntry.fullPath}] symlink(${fsEntry.symLink})`);
|
||||
}
|
||||
function diffFsEntry(baseline: string[], oldFsEntry: FSEntry | undefined, newFsEntry: FSEntry | undefined, writtenFiles: Map<any> | undefined): void {
|
||||
function diffFsEntry(baseline: string[], oldFsEntry: FSEntry | undefined, newFsEntry: FSEntry | undefined, writtenFiles: Map<string, any> | undefined): void {
|
||||
const file = newFsEntry && newFsEntry.fullPath;
|
||||
if (isFsFile(oldFsEntry)) {
|
||||
if (isFsFile(newFsEntry)) {
|
||||
|
@ -1192,7 +1192,7 @@ interface Array<T> { length: number; [n: number]: T; }`
|
|||
};
|
||||
}
|
||||
|
||||
function serializeMultiMap<T, U>(baseline: string[], caption: string, multiMap: MultiMap<T>, valueMapper: (value: T) => U) {
|
||||
function serializeMultiMap<T, U>(baseline: string[], caption: string, multiMap: MultiMap<string, T>, valueMapper: (value: T) => U) {
|
||||
baseline.push(`${caption}::`);
|
||||
multiMap.forEach((values, key) => {
|
||||
baseline.push(`${key}:`);
|
||||
|
@ -1208,12 +1208,12 @@ interface Array<T> { length: number; [n: number]: T; }`
|
|||
}
|
||||
}
|
||||
|
||||
export type TestServerHostTrackingWrittenFiles = TestServerHost & { writtenFiles: Map<number>; };
|
||||
export type TestServerHostTrackingWrittenFiles = TestServerHost & { writtenFiles: Map<Path, number>; };
|
||||
|
||||
export function changeToHostTrackingWrittenFiles(inputHost: TestServerHost) {
|
||||
const host = inputHost as TestServerHostTrackingWrittenFiles;
|
||||
const originalWriteFile = host.writeFile;
|
||||
host.writtenFiles = createMap<number>();
|
||||
host.writtenFiles = new Map<Path, number>();
|
||||
host.writeFile = (fileName, content) => {
|
||||
originalWriteFile.call(host, fileName, content);
|
||||
const path = host.toFullPath(fileName);
|
||||
|
|
|
@ -77,7 +77,7 @@ namespace ts.JsTyping {
|
|||
/**
|
||||
* A map of loose file names to library names that we are confident require typings
|
||||
*/
|
||||
export type SafeList = ReadonlyMap<string>;
|
||||
export type SafeList = ReadonlyMap<string, string>;
|
||||
|
||||
export function loadSafeList(host: TypingResolutionHost, safeListPath: Path): SafeList {
|
||||
const result = readConfigFile(safeListPath, path => host.readFile(path));
|
||||
|
@ -107,10 +107,10 @@ namespace ts.JsTyping {
|
|||
fileNames: string[],
|
||||
projectRootPath: Path,
|
||||
safeList: SafeList,
|
||||
packageNameToTypingLocation: ReadonlyMap<CachedTyping>,
|
||||
packageNameToTypingLocation: ReadonlyMap<string, CachedTyping>,
|
||||
typeAcquisition: TypeAcquisition,
|
||||
unresolvedImports: readonly string[],
|
||||
typesRegistry: ReadonlyMap<MapLike<string>>):
|
||||
typesRegistry: ReadonlyMap<string, MapLike<string>>):
|
||||
{ cachedTypingPaths: string[], newTypingNames: string[], filesToWatch: string[] } {
|
||||
|
||||
if (!typeAcquisition || !typeAcquisition.enable) {
|
||||
|
|
|
@ -157,11 +157,11 @@ namespace ts.server {
|
|||
[name: string]: { match: RegExp, exclude?: (string | number)[][], types?: string[] };
|
||||
}
|
||||
|
||||
function prepareConvertersForEnumLikeCompilerOptions(commandLineOptions: CommandLineOption[]): Map<Map<number>> {
|
||||
const map: Map<Map<number>> = createMap<Map<number>>();
|
||||
function prepareConvertersForEnumLikeCompilerOptions(commandLineOptions: CommandLineOption[]): Map<string, Map<string, number>> {
|
||||
const map: Map<string, Map<string, number>> = createMap<Map<string, number>>();
|
||||
for (const option of commandLineOptions) {
|
||||
if (typeof option.type === "object") {
|
||||
const optionMap = <Map<number>>option.type;
|
||||
const optionMap = <Map<string, number>>option.type;
|
||||
// verify that map contains only numbers
|
||||
optionMap.forEach(value => {
|
||||
Debug.assert(typeof value === "number");
|
||||
|
@ -373,7 +373,7 @@ namespace ts.server {
|
|||
* It is false when the open file that would still be impacted by existence of
|
||||
* this config file but it is not the root of inferred project
|
||||
*/
|
||||
openFilesImpactedByConfigFile: Map<boolean>;
|
||||
openFilesImpactedByConfigFile: Map<Path, boolean>;
|
||||
/**
|
||||
* The file watcher watching the config file because there is open script info that is root of
|
||||
* inferred project and will be impacted by change in the status of the config file
|
||||
|
@ -586,11 +586,11 @@ namespace ts.server {
|
|||
* Map to the real path of the infos
|
||||
*/
|
||||
/* @internal */
|
||||
readonly realpathToScriptInfos: MultiMap<ScriptInfo> | undefined;
|
||||
readonly realpathToScriptInfos: MultiMap<Path, ScriptInfo> | undefined;
|
||||
/**
|
||||
* maps external project file name to list of config files that were the part of this project
|
||||
*/
|
||||
private readonly externalProjectToConfiguredProjectMap: Map<NormalizedPath[]> = createMap<NormalizedPath[]>();
|
||||
private readonly externalProjectToConfiguredProjectMap: Map<string, NormalizedPath[]> = createMap<NormalizedPath[]>();
|
||||
|
||||
/**
|
||||
* external projects (configuration and list of root files is not controlled by tsserver)
|
||||
|
@ -607,7 +607,7 @@ namespace ts.server {
|
|||
/**
|
||||
* Open files: with value being project root path, and key being Path of the file that is open
|
||||
*/
|
||||
readonly openFiles = createMap<NormalizedPath | undefined>();
|
||||
readonly openFiles = new Map<Path, NormalizedPath | undefined>();
|
||||
/**
|
||||
* Map of open files that are opened without complete path but have projectRoot as current directory
|
||||
*/
|
||||
|
@ -620,7 +620,7 @@ namespace ts.server {
|
|||
/**
|
||||
* Project size for configured or external projects
|
||||
*/
|
||||
private readonly projectToSizeMap: Map<number> = createMap<number>();
|
||||
private readonly projectToSizeMap: Map<string, number> = createMap<number>();
|
||||
/**
|
||||
* This is a map of config file paths existence that doesnt need query to disk
|
||||
* - The entry can be present because there is inferred project that needs to watch addition of config file to directory
|
||||
|
@ -656,7 +656,7 @@ namespace ts.server {
|
|||
public readonly globalPlugins: readonly string[];
|
||||
public readonly pluginProbeLocations: readonly string[];
|
||||
public readonly allowLocalPluginLoads: boolean;
|
||||
private currentPluginConfigOverrides: Map<any> | undefined;
|
||||
private currentPluginConfigOverrides: Map<string, any> | undefined;
|
||||
|
||||
public readonly typesMapLocation: string | undefined;
|
||||
|
||||
|
@ -671,7 +671,7 @@ namespace ts.server {
|
|||
/*@internal*/
|
||||
readonly packageJsonCache: PackageJsonCache;
|
||||
/*@internal*/
|
||||
private packageJsonFilesMap: Map<FileWatcher> | undefined;
|
||||
private packageJsonFilesMap: Map<Path, FileWatcher> | undefined;
|
||||
|
||||
|
||||
private performanceEventHandler?: PerformanceEventHandler;
|
||||
|
@ -875,7 +875,7 @@ namespace ts.server {
|
|||
const event: ProjectsUpdatedInBackgroundEvent = {
|
||||
eventName: ProjectsUpdatedInBackgroundEvent,
|
||||
data: {
|
||||
openFiles: arrayFrom(this.openFiles.keys(), path => this.getScriptInfoForPath(path as Path)!.fileName)
|
||||
openFiles: arrayFrom(this.openFiles.keys(), path => this.getScriptInfoForPath(path)!.fileName)
|
||||
}
|
||||
};
|
||||
this.eventHandler(event);
|
||||
|
@ -1120,9 +1120,9 @@ namespace ts.server {
|
|||
}
|
||||
}
|
||||
|
||||
private delayUpdateSourceInfoProjects(sourceInfos: Map<true> | undefined) {
|
||||
private delayUpdateSourceInfoProjects(sourceInfos: Set<Path> | undefined) {
|
||||
if (sourceInfos) {
|
||||
sourceInfos.forEach((_value, path) => this.delayUpdateProjectsOfScriptInfoPath(path as Path));
|
||||
sourceInfos.forEach((_value, path) => this.delayUpdateProjectsOfScriptInfoPath(path));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1375,7 +1375,7 @@ namespace ts.server {
|
|||
private assignOrphanScriptInfosToInferredProject() {
|
||||
// collect orphaned files and assign them to inferred project just like we treat open of a file
|
||||
this.openFiles.forEach((projectRootPath, path) => {
|
||||
const info = this.getScriptInfoForPath(path as Path)!;
|
||||
const info = this.getScriptInfoForPath(path)!;
|
||||
// collect all orphaned script infos from open files
|
||||
if (info.isOrphan()) {
|
||||
this.assignOrphanScriptInfoToInferredProject(info, projectRootPath);
|
||||
|
@ -1493,7 +1493,7 @@ namespace ts.server {
|
|||
|
||||
// Cache the host value of file exists and add the info to map of open files impacted by this config file
|
||||
const exists = this.host.fileExists(configFileName);
|
||||
const openFilesImpactedByConfigFile = createMap<boolean>();
|
||||
const openFilesImpactedByConfigFile = new Map<Path, boolean>();
|
||||
if (isOpenScriptInfo(info)) {
|
||||
openFilesImpactedByConfigFile.set(info.path, false);
|
||||
}
|
||||
|
@ -1521,7 +1521,7 @@ namespace ts.server {
|
|||
// Since that route doesnt check if the config file is present or not
|
||||
this.configFileExistenceInfoCache.set(project.canonicalConfigFilePath, {
|
||||
exists: true,
|
||||
openFilesImpactedByConfigFile: createMap<boolean>()
|
||||
openFilesImpactedByConfigFile: new Map<Path, boolean>()
|
||||
});
|
||||
}
|
||||
}
|
||||
|
@ -1559,7 +1559,7 @@ namespace ts.server {
|
|||
const inferredRoots: string[] = [];
|
||||
const otherFiles: string[] = [];
|
||||
configFileExistenceInfo.openFilesImpactedByConfigFile.forEach((isRootOfInferredProject, key) => {
|
||||
const info = this.getScriptInfoForPath(key as Path)!;
|
||||
const info = this.getScriptInfoForPath(key)!;
|
||||
(isRootOfInferredProject ? inferredRoots : otherFiles).push(info.fileName);
|
||||
});
|
||||
|
||||
|
@ -1657,7 +1657,7 @@ namespace ts.server {
|
|||
// Create the cache
|
||||
configFileExistenceInfo = {
|
||||
exists: this.host.fileExists(configFileName),
|
||||
openFilesImpactedByConfigFile: createMap<boolean>()
|
||||
openFilesImpactedByConfigFile: new Map<Path, boolean>()
|
||||
};
|
||||
this.configFileExistenceInfoCache.set(canonicalConfigFilePath, configFileExistenceInfo);
|
||||
}
|
||||
|
@ -1798,7 +1798,7 @@ namespace ts.server {
|
|||
|
||||
this.logger.info("Open files: ");
|
||||
this.openFiles.forEach((projectRootPath, path) => {
|
||||
const info = this.getScriptInfoForPath(path as Path)!;
|
||||
const info = this.getScriptInfoForPath(path)!;
|
||||
this.logger.info(`\tFileName: ${info.fileName} ProjectRootPath: ${projectRootPath}`);
|
||||
this.logger.info(`\t\tProjects: ${info.containingProjects.map(p => p.getProjectName())}`);
|
||||
});
|
||||
|
@ -2303,8 +2303,8 @@ namespace ts.server {
|
|||
* Note that this does not return projects in info.containingProjects
|
||||
*/
|
||||
/*@internal*/
|
||||
getSymlinkedProjects(info: ScriptInfo): MultiMap<Project> | undefined {
|
||||
let projects: MultiMap<Project> | undefined;
|
||||
getSymlinkedProjects(info: ScriptInfo): MultiMap<Path, Project> | undefined {
|
||||
let projects: MultiMap<Path, Project> | undefined;
|
||||
if (this.realpathToScriptInfos) {
|
||||
const realpath = info.getRealpathIfDifferent();
|
||||
if (realpath) {
|
||||
|
@ -2599,11 +2599,11 @@ namespace ts.server {
|
|||
return documentPositionMapper;
|
||||
}
|
||||
|
||||
private addSourceInfoToSourceMap(sourceFileName: string | undefined, project: Project, sourceInfos?: Map<true>) {
|
||||
private addSourceInfoToSourceMap(sourceFileName: string | undefined, project: Project, sourceInfos?: Set<Path>) {
|
||||
if (sourceFileName) {
|
||||
// Attach as source
|
||||
const sourceInfo = this.getOrCreateScriptInfoNotOpenedByClient(sourceFileName, project.currentDirectory, project.directoryStructureHost)!;
|
||||
(sourceInfos || (sourceInfos = createMap())).set(sourceInfo.path, true);
|
||||
(sourceInfos || (sourceInfos = new Set())).add(sourceInfo.path);
|
||||
}
|
||||
return sourceInfos;
|
||||
}
|
||||
|
@ -2645,7 +2645,7 @@ namespace ts.server {
|
|||
if (declarationInfo && isString(declarationInfo.sourceMapFilePath) && info !== declarationInfo) {
|
||||
const sourceMapInfo = this.getScriptInfoForPath(declarationInfo.sourceMapFilePath);
|
||||
if (sourceMapInfo) {
|
||||
(sourceMapInfo.sourceInfos || (sourceMapInfo.sourceInfos = createMap())).set(info.path, true);
|
||||
(sourceMapInfo.sourceInfos || (sourceMapInfo.sourceInfos = new Set())).add(info.path);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2771,7 +2771,7 @@ namespace ts.server {
|
|||
* If the there is no existing project it just opens the configured project for the config file
|
||||
* reloadForInfo provides a way to filter out files to reload configured project for
|
||||
*/
|
||||
private reloadConfiguredProjectForFiles<T>(openFiles: Map<T>, delayReload: boolean, shouldReloadProjectFor: (openFileValue: T) => boolean, reason: string) {
|
||||
private reloadConfiguredProjectForFiles<T>(openFiles: Map<Path, T>, delayReload: boolean, shouldReloadProjectFor: (openFileValue: T) => boolean, reason: string) {
|
||||
const updatedProjects = createMap<true>();
|
||||
// try to reload config file for all open files
|
||||
openFiles.forEach((openFileValue, path) => {
|
||||
|
@ -2780,7 +2780,7 @@ namespace ts.server {
|
|||
return;
|
||||
}
|
||||
|
||||
const info = this.getScriptInfoForPath(path as Path)!; // TODO: GH#18217
|
||||
const info = this.getScriptInfoForPath(path)!; // TODO: GH#18217
|
||||
Debug.assert(info.isScriptOpen());
|
||||
// This tries to search for a tsconfig.json for the given file. If we found it,
|
||||
// we first detect if there is already a configured project created for it: if so,
|
||||
|
@ -2860,7 +2860,7 @@ namespace ts.server {
|
|||
this.printProjects();
|
||||
|
||||
this.openFiles.forEach((projectRootPath, path) => {
|
||||
const info = this.getScriptInfoForPath(path as Path)!;
|
||||
const info = this.getScriptInfoForPath(path)!;
|
||||
// collect all orphaned script infos from open files
|
||||
if (info.isOrphan()) {
|
||||
this.assignOrphanScriptInfoToInferredProject(info, projectRootPath);
|
||||
|
@ -2936,9 +2936,9 @@ namespace ts.server {
|
|||
|
||||
function addOriginalConfiguredProject(originalProject: ConfiguredProject) {
|
||||
if (!project.originalConfiguredProjects) {
|
||||
project.originalConfiguredProjects = createMap<true>();
|
||||
project.originalConfiguredProjects = new Set();
|
||||
}
|
||||
project.originalConfiguredProjects.set(originalProject.canonicalConfigFilePath, true);
|
||||
project.originalConfiguredProjects.add(originalProject.canonicalConfigFilePath);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -3085,13 +3085,13 @@ namespace ts.server {
|
|||
}
|
||||
|
||||
/*@internal*/
|
||||
loadAncestorProjectTree(forProjects?: ReadonlyMap<true>) {
|
||||
forProjects = forProjects || mapDefinedMap(
|
||||
loadAncestorProjectTree(forProjects?: ReadonlyCollection<string>) {
|
||||
forProjects = forProjects || mapDefinedEntries(
|
||||
this.configuredProjects,
|
||||
project => !project.isInitialLoadPending() || undefined
|
||||
(key, project) => !project.isInitialLoadPending() ? [key, true] : undefined
|
||||
);
|
||||
|
||||
const seenProjects = createMap<true>();
|
||||
const seenProjects = new Set<NormalizedPath>();
|
||||
// Work on array copy as we could add more projects as part of callback
|
||||
for (const project of arrayFrom(this.configuredProjects.values())) {
|
||||
// If this project has potential project reference for any of the project we are loading ancestor tree for
|
||||
|
@ -3109,8 +3109,8 @@ namespace ts.server {
|
|||
}
|
||||
}
|
||||
|
||||
private ensureProjectChildren(project: ConfiguredProject, seenProjects: Map<true>) {
|
||||
if (!addToSeen(seenProjects, project.canonicalConfigFilePath)) return;
|
||||
private ensureProjectChildren(project: ConfiguredProject, seenProjects: Set<NormalizedPath>) {
|
||||
if (!tryAddToSet(seenProjects, project.canonicalConfigFilePath)) return;
|
||||
// Update the project
|
||||
updateProjectIfDirty(project);
|
||||
|
||||
|
@ -3214,7 +3214,7 @@ namespace ts.server {
|
|||
if (!info.isScriptOpen() && info.isOrphan() && !info.isContainedByAutoImportProvider()) {
|
||||
// Otherwise if there is any source info that is alive, this alive too
|
||||
if (!info.sourceMapFilePath) return;
|
||||
let sourceInfos: Map<true> | undefined;
|
||||
let sourceInfos: Set<Path> | undefined;
|
||||
if (isString(info.sourceMapFilePath)) {
|
||||
const sourceMapInfo = this.getScriptInfoForPath(info.sourceMapFilePath);
|
||||
sourceInfos = sourceMapInfo && sourceMapInfo.sourceInfos;
|
||||
|
@ -3224,7 +3224,7 @@ namespace ts.server {
|
|||
}
|
||||
if (!sourceInfos) return;
|
||||
if (!forEachKey(sourceInfos, path => {
|
||||
const info = this.getScriptInfoForPath(path as Path);
|
||||
const info = this.getScriptInfoForPath(path);
|
||||
return !!info && (info.isScriptOpen() || !info.isOrphan());
|
||||
})) {
|
||||
return;
|
||||
|
@ -3234,7 +3234,7 @@ namespace ts.server {
|
|||
// Retain this script info
|
||||
toRemoveScriptInfos.delete(info.path);
|
||||
if (info.sourceMapFilePath) {
|
||||
let sourceInfos: Map<true> | undefined;
|
||||
let sourceInfos: Set<Path> | undefined;
|
||||
if (isString(info.sourceMapFilePath)) {
|
||||
// And map file info and source infos
|
||||
toRemoveScriptInfos.delete(info.sourceMapFilePath);
|
||||
|
|
|
@ -114,7 +114,7 @@ namespace ts.server {
|
|||
generatedFilePath: Path;
|
||||
watcher: FileWatcher;
|
||||
}
|
||||
type GeneratedFileWatcherMap = GeneratedFileWatcher | Map<GeneratedFileWatcher>;
|
||||
type GeneratedFileWatcherMap = GeneratedFileWatcher | Map<Path, GeneratedFileWatcher>;
|
||||
function isGeneratedFileWatcher(watch: GeneratedFileWatcherMap): watch is GeneratedFileWatcher {
|
||||
return (watch as GeneratedFileWatcher).generatedFilePath !== undefined;
|
||||
}
|
||||
|
@ -130,7 +130,7 @@ namespace ts.server {
|
|||
private rootFilesMap = createMap<ProjectRootFile>();
|
||||
private program: Program | undefined;
|
||||
private externalFiles: SortedReadonlyArray<string> | undefined;
|
||||
private missingFilesMap: Map<FileWatcher> | undefined;
|
||||
private missingFilesMap: Map<Path, FileWatcher> | undefined;
|
||||
private generatedFilesMap: GeneratedFileWatcherMap | undefined;
|
||||
private plugins: PluginModuleWithName[] = [];
|
||||
|
||||
|
@ -140,7 +140,7 @@ namespace ts.server {
|
|||
* Maop does not contain entries for files that do not have unresolved imports
|
||||
* This helps in containing the set of files to invalidate
|
||||
*/
|
||||
cachedUnresolvedImportsPerFile = createMap<readonly string[]>();
|
||||
cachedUnresolvedImportsPerFile = new Map<Path, readonly string[]>();
|
||||
|
||||
/*@internal*/
|
||||
lastCachedUnresolvedImportsList: SortedReadonlyArray<string> | undefined;
|
||||
|
@ -167,11 +167,11 @@ namespace ts.server {
|
|||
/**
|
||||
* Set of files names that were updated since the last call to getChangesSinceVersion.
|
||||
*/
|
||||
private updatedFileNames: Map<true> | undefined;
|
||||
private updatedFileNames: Set<string> | undefined;
|
||||
/**
|
||||
* Set of files that was returned from the last call to getChangesSinceVersion.
|
||||
*/
|
||||
private lastReportedFileNames: Map<boolean> | undefined;
|
||||
private lastReportedFileNames: Map<string, boolean> | undefined;
|
||||
/**
|
||||
* Last version that was reported.
|
||||
*/
|
||||
|
@ -198,10 +198,10 @@ namespace ts.server {
|
|||
typingFiles: SortedReadonlyArray<string> = emptyArray;
|
||||
|
||||
/*@internal*/
|
||||
originalConfiguredProjects: Map<true> | undefined;
|
||||
originalConfiguredProjects: Set<NormalizedPath> | undefined;
|
||||
|
||||
/*@internal*/
|
||||
packageJsonsForAutoImport: Map<true> | undefined;
|
||||
packageJsonsForAutoImport: Set<string> | undefined;
|
||||
|
||||
/*@internal*/
|
||||
getResolvedProjectReferenceToRedirect(_fileName: string): ResolvedProjectReference | undefined {
|
||||
|
@ -244,9 +244,9 @@ namespace ts.server {
|
|||
/*@internal*/
|
||||
private importSuggestionsCache = Completions.createImportSuggestionsForFileCache();
|
||||
/*@internal*/
|
||||
private dirtyFilesForSuggestions: Map<true> | undefined;
|
||||
private dirtyFilesForSuggestions: Set<Path> | undefined;
|
||||
/*@internal*/
|
||||
private symlinks: ReadonlyMap<string> | undefined;
|
||||
private symlinks: ReadonlyMap<string, string> | undefined;
|
||||
/*@internal*/
|
||||
autoImportProviderHost: AutoImportProviderProject | false | undefined;
|
||||
|
||||
|
@ -323,7 +323,7 @@ namespace ts.server {
|
|||
}
|
||||
|
||||
/*@internal*/
|
||||
getProbableSymlinks(files: readonly SourceFile[]): ReadonlyMap<string> {
|
||||
getProbableSymlinks(files: readonly SourceFile[]): ReadonlyMap<string, string> {
|
||||
return this.symlinks || (this.symlinks = discoverProbableSymlinks(
|
||||
files,
|
||||
this.getCanonicalFileName,
|
||||
|
@ -934,14 +934,14 @@ namespace ts.server {
|
|||
}
|
||||
|
||||
registerFileUpdate(fileName: string) {
|
||||
(this.updatedFileNames || (this.updatedFileNames = createMap<true>())).set(fileName, true);
|
||||
(this.updatedFileNames || (this.updatedFileNames = new Set<string>())).add(fileName);
|
||||
}
|
||||
|
||||
/*@internal*/
|
||||
markFileAsDirty(changedFile: Path) {
|
||||
this.markAsDirty();
|
||||
if (!this.importSuggestionsCache.isEmpty()) {
|
||||
(this.dirtyFilesForSuggestions || (this.dirtyFilesForSuggestions = createMap())).set(changedFile, true);
|
||||
(this.dirtyFilesForSuggestions || (this.dirtyFilesForSuggestions = new Set())).add(changedFile);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1079,7 +1079,7 @@ namespace ts.server {
|
|||
// Update the missing file paths watcher
|
||||
updateMissingFilePathsWatch(
|
||||
this.program,
|
||||
this.missingFilesMap || (this.missingFilesMap = createMap()),
|
||||
this.missingFilesMap || (this.missingFilesMap = new Map()),
|
||||
// Watch the missing files
|
||||
missingFilePath => this.addMissingFileWatcher(missingFilePath)
|
||||
);
|
||||
|
@ -1102,7 +1102,7 @@ namespace ts.server {
|
|||
}
|
||||
else {
|
||||
this.generatedFilesMap.forEach((watcher, source) => {
|
||||
const sourceFile = this.program!.getSourceFileByPath(source as Path);
|
||||
const sourceFile = this.program!.getSourceFileByPath(source);
|
||||
if (!sourceFile ||
|
||||
sourceFile.resolvedPath !== source ||
|
||||
!this.isValidGeneratedFileWatcher(
|
||||
|
@ -1110,7 +1110,7 @@ namespace ts.server {
|
|||
watcher
|
||||
)) {
|
||||
closeFileWatcherOf(watcher);
|
||||
(this.generatedFilesMap as Map<GeneratedFileWatcher>).delete(source);
|
||||
(this.generatedFilesMap as Map<string, GeneratedFileWatcher>).delete(source);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
@ -1386,11 +1386,11 @@ namespace ts.server {
|
|||
getChangesSinceVersion(lastKnownVersion?: number, includeProjectReferenceRedirectInfo?: boolean): ProjectFilesWithTSDiagnostics {
|
||||
const includeProjectReferenceRedirectInfoIfRequested =
|
||||
includeProjectReferenceRedirectInfo
|
||||
? (files: Map<boolean>) => arrayFrom(files.entries(), ([fileName, isSourceOfProjectReferenceRedirect]): protocol.FileWithProjectReferenceRedirectInfo => ({
|
||||
? (files: Map<string, boolean>) => arrayFrom(files.entries(), ([fileName, isSourceOfProjectReferenceRedirect]): protocol.FileWithProjectReferenceRedirectInfo => ({
|
||||
fileName,
|
||||
isSourceOfProjectReferenceRedirect
|
||||
}))
|
||||
: (files: Map<boolean>) => arrayFrom(files.keys());
|
||||
: (files: Map<string, boolean>) => arrayFrom(files.keys());
|
||||
|
||||
// Update the graph only if initial configured project load is not pending
|
||||
if (!this.isInitialLoadPending()) {
|
||||
|
@ -1425,8 +1425,8 @@ namespace ts.server {
|
|||
info => info.isSourceOfProjectReferenceRedirect
|
||||
);
|
||||
|
||||
const added: Map<boolean> = new Map<boolean>();
|
||||
const removed: Map<boolean> = new Map<boolean>();
|
||||
const added: Map<string, boolean> = new Map<string, boolean>();
|
||||
const removed: Map<string, boolean> = new Map<string, boolean>();
|
||||
|
||||
const updated: string[] = updatedFileNames ? arrayFrom(updatedFileNames.keys()) : [];
|
||||
const updatedRedirects: protocol.FileWithProjectReferenceRedirectInfo[] = [];
|
||||
|
@ -1498,7 +1498,7 @@ namespace ts.server {
|
|||
return !!this.program && this.program.isSourceOfProjectReferenceRedirect(fileName);
|
||||
}
|
||||
|
||||
protected enableGlobalPlugins(options: CompilerOptions, pluginConfigOverrides: Map<any> | undefined) {
|
||||
protected enableGlobalPlugins(options: CompilerOptions, pluginConfigOverrides: Map<string, any> | undefined) {
|
||||
const host = this.projectService.host;
|
||||
|
||||
if (!host.require) {
|
||||
|
@ -1530,7 +1530,7 @@ namespace ts.server {
|
|||
}
|
||||
}
|
||||
|
||||
protected enablePlugin(pluginConfigEntry: PluginImport, searchPaths: string[], pluginConfigOverrides: Map<any> | undefined) {
|
||||
protected enablePlugin(pluginConfigEntry: PluginImport, searchPaths: string[], pluginConfigOverrides: Map<string, any> | undefined) {
|
||||
this.projectService.logger.info(`Enabling plugin ${pluginConfigEntry.name} from candidate paths: ${searchPaths.join(",")}`);
|
||||
|
||||
const log = (message: string) => this.projectService.logger.info(message);
|
||||
|
@ -1610,7 +1610,7 @@ namespace ts.server {
|
|||
/*@internal*/
|
||||
getPackageJsonsForAutoImport(rootDir?: string): readonly PackageJsonInfo[] {
|
||||
const packageJsons = this.getPackageJsonsVisibleToFile(combinePaths(this.currentDirectory, inferredTypesContainingFile), rootDir);
|
||||
this.packageJsonsForAutoImport = arrayToSet(packageJsons.map(p => p.fileName));
|
||||
this.packageJsonsForAutoImport = new Set(packageJsons.map(p => p.fileName));
|
||||
return packageJsons;
|
||||
}
|
||||
|
||||
|
@ -1663,12 +1663,12 @@ namespace ts.server {
|
|||
}
|
||||
}
|
||||
|
||||
function getUnresolvedImports(program: Program, cachedUnresolvedImportsPerFile: Map<readonly string[]>): SortedReadonlyArray<string> {
|
||||
function getUnresolvedImports(program: Program, cachedUnresolvedImportsPerFile: Map<Path, readonly string[]>): SortedReadonlyArray<string> {
|
||||
const ambientModules = program.getTypeChecker().getAmbientModules().map(mod => stripQuotes(mod.getName()));
|
||||
return sortAndDeduplicate(flatMap(program.getSourceFiles(), sourceFile =>
|
||||
extractUnresolvedImportsFromSourceFile(sourceFile, ambientModules, cachedUnresolvedImportsPerFile)));
|
||||
}
|
||||
function extractUnresolvedImportsFromSourceFile(file: SourceFile, ambientModules: readonly string[], cachedUnresolvedImportsPerFile: Map<readonly string[]>): readonly string[] {
|
||||
function extractUnresolvedImportsFromSourceFile(file: SourceFile, ambientModules: readonly string[], cachedUnresolvedImportsPerFile: Map<Path, readonly string[]>): readonly string[] {
|
||||
return getOrUpdate(cachedUnresolvedImportsPerFile, file.path, () => {
|
||||
if (!file.resolvedModules) return emptyArray;
|
||||
let unresolvedImports: string[] | undefined;
|
||||
|
@ -1736,7 +1736,7 @@ namespace ts.server {
|
|||
watchOptions: WatchOptions | undefined,
|
||||
projectRootPath: NormalizedPath | undefined,
|
||||
currentDirectory: string | undefined,
|
||||
pluginConfigOverrides: Map<any> | undefined) {
|
||||
pluginConfigOverrides: Map<string, any> | undefined) {
|
||||
super(InferredProject.newName(),
|
||||
ProjectKind.Inferred,
|
||||
projectService,
|
||||
|
@ -1811,7 +1811,7 @@ namespace ts.server {
|
|||
return ts.emptyArray;
|
||||
}
|
||||
|
||||
let dependencyNames: Map<true> | undefined;
|
||||
let dependencyNames: Set<string> | undefined;
|
||||
let rootNames: string[] | undefined;
|
||||
const rootFileName = combinePaths(hostProject.currentDirectory, inferredTypesContainingFile);
|
||||
const packageJsons = hostProject.getPackageJsonsForAutoImport(combinePaths(hostProject.currentDirectory, rootFileName));
|
||||
|
@ -1841,7 +1841,7 @@ namespace ts.server {
|
|||
|
||||
function addDependency(dependency: string) {
|
||||
if (!startsWith(dependency, "@types/")) {
|
||||
(dependencyNames || (dependencyNames = createMap())).set(dependency, true);
|
||||
(dependencyNames || (dependencyNames = new Set())).add(dependency);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1950,7 +1950,7 @@ namespace ts.server {
|
|||
private typeAcquisition!: TypeAcquisition; // TODO: GH#18217
|
||||
/* @internal */
|
||||
configFileWatcher: FileWatcher | undefined;
|
||||
private directoriesWatchedForWildcards: Map<WildcardDirectoryWatcher> | undefined;
|
||||
private directoriesWatchedForWildcards: Map<string, WildcardDirectoryWatcher> | undefined;
|
||||
readonly canonicalConfigFilePath: NormalizedPath;
|
||||
|
||||
/* @internal */
|
||||
|
@ -1976,7 +1976,7 @@ namespace ts.server {
|
|||
|
||||
/** Potential project references before the project is actually loaded (read config file) */
|
||||
/*@internal*/
|
||||
potentialProjectReferences: Map<true> | undefined;
|
||||
potentialProjectReferences: Set<string> | undefined;
|
||||
|
||||
/*@internal*/
|
||||
projectOptions?: ProjectOptions | true;
|
||||
|
@ -2102,7 +2102,7 @@ namespace ts.server {
|
|||
/*@internal*/
|
||||
setPotentialProjectReference(canonicalConfigPath: NormalizedPath) {
|
||||
Debug.assert(this.isInitialLoadPending());
|
||||
(this.potentialProjectReferences || (this.potentialProjectReferences = createMap())).set(canonicalConfigPath, true);
|
||||
(this.potentialProjectReferences || (this.potentialProjectReferences = new Set())).add(canonicalConfigPath);
|
||||
}
|
||||
|
||||
/*@internal*/
|
||||
|
@ -2112,7 +2112,7 @@ namespace ts.server {
|
|||
}
|
||||
|
||||
/*@internal*/
|
||||
enablePluginsWithOptions(options: CompilerOptions, pluginConfigOverrides: Map<any> | undefined) {
|
||||
enablePluginsWithOptions(options: CompilerOptions, pluginConfigOverrides: Map<string, any> | undefined) {
|
||||
const host = this.projectService.host;
|
||||
|
||||
if (!host.require) {
|
||||
|
@ -2167,7 +2167,7 @@ namespace ts.server {
|
|||
}
|
||||
|
||||
/*@internal*/
|
||||
watchWildcards(wildcardDirectories: Map<WatchDirectoryFlags>) {
|
||||
watchWildcards(wildcardDirectories: Map<string, WatchDirectoryFlags>) {
|
||||
updateWatchingWildcardDirectories(
|
||||
this.directoriesWatchedForWildcards || (this.directoriesWatchedForWildcards = createMap()),
|
||||
wildcardDirectories,
|
||||
|
@ -2255,8 +2255,8 @@ namespace ts.server {
|
|||
return forEachEntry(
|
||||
configFileExistenceInfo.openFilesImpactedByConfigFile,
|
||||
(_value, infoPath) => isSolution ?
|
||||
!!this.getDefaultChildProjectFromSolution(this.projectService.getScriptInfoForPath(infoPath as Path)!) :
|
||||
this.containsScriptInfo(this.projectService.getScriptInfoForPath(infoPath as Path)!)
|
||||
!!this.getDefaultChildProjectFromSolution(this.projectService.getScriptInfoForPath(infoPath)!) :
|
||||
this.containsScriptInfo(this.projectService.getScriptInfoForPath(infoPath)!)
|
||||
) || false;
|
||||
}
|
||||
|
||||
|
@ -2290,7 +2290,7 @@ namespace ts.server {
|
|||
lastFileExceededProgramSize: string | undefined,
|
||||
public compileOnSaveEnabled: boolean,
|
||||
projectFilePath?: string,
|
||||
pluginConfigOverrides?: Map<any>,
|
||||
pluginConfigOverrides?: Map<string, any>,
|
||||
watchOptions?: WatchOptions) {
|
||||
super(externalProjectName,
|
||||
ProjectKind.External,
|
||||
|
|
|
@ -286,7 +286,7 @@ namespace ts.server {
|
|||
/*@internal*/
|
||||
export interface SourceMapFileWatcher {
|
||||
watcher: FileWatcher;
|
||||
sourceInfos?: Map<true>;
|
||||
sourceInfos?: Set<Path>;
|
||||
}
|
||||
|
||||
export class ScriptInfo {
|
||||
|
@ -324,7 +324,7 @@ namespace ts.server {
|
|||
/*@internal*/
|
||||
declarationInfoPath?: Path;
|
||||
/*@internal*/
|
||||
sourceInfos?: Map<true>;
|
||||
sourceInfos?: Set<Path>;
|
||||
/*@internal*/
|
||||
documentPositionMapper?: DocumentPositionMapper | false;
|
||||
|
||||
|
|
|
@ -262,7 +262,7 @@ namespace ts.server {
|
|||
|
||||
type Projects = readonly Project[] | {
|
||||
readonly projects: readonly Project[];
|
||||
readonly symLinkedProjects: MultiMap<Project>;
|
||||
readonly symLinkedProjects: MultiMap<Path, Project>;
|
||||
};
|
||||
|
||||
/**
|
||||
|
@ -277,7 +277,7 @@ namespace ts.server {
|
|||
const outputs = flatMapToMutable(isArray(projects) ? projects : projects.projects, project => action(project, defaultValue));
|
||||
if (!isArray(projects) && projects.symLinkedProjects) {
|
||||
projects.symLinkedProjects.forEach((projects, path) => {
|
||||
const value = getValue(path as Path);
|
||||
const value = getValue(path);
|
||||
outputs.push(...flatMap(projects, project => action(project, value)));
|
||||
});
|
||||
}
|
||||
|
@ -423,7 +423,7 @@ namespace ts.server {
|
|||
): void {
|
||||
const projectService = defaultProject.projectService;
|
||||
let toDo: ProjectAndLocation<TLocation>[] | undefined;
|
||||
const seenProjects = createMap<true>();
|
||||
const seenProjects = new Set<string>();
|
||||
forEachProjectInProjects(projects, initialLocation && initialLocation.fileName, (project, path) => {
|
||||
// TLocation should be either `DocumentPosition` or `undefined`. Since `initialLocation` is `TLocation` this cast should be valid.
|
||||
const location = (initialLocation ? { fileName: path, pos: initialLocation.pos } : undefined) as TLocation;
|
||||
|
@ -496,7 +496,7 @@ namespace ts.server {
|
|||
location: TLocation,
|
||||
projectService: ProjectService,
|
||||
toDo: ProjectAndLocation<TLocation>[] | undefined,
|
||||
seenProjects: Map<true>,
|
||||
seenProjects: Set<string>,
|
||||
cb: CombineProjectOutputCallback<TLocation>,
|
||||
): ProjectAndLocation<TLocation>[] | undefined {
|
||||
if (project.getCancellationToken().isCancellationRequested()) return undefined; // Skip rest of toDo if cancelled
|
||||
|
@ -517,7 +517,7 @@ namespace ts.server {
|
|||
if (symlinkedProjectsMap) {
|
||||
symlinkedProjectsMap.forEach((symlinkedProjects, symlinkedPath) => {
|
||||
for (const symlinkedProject of symlinkedProjects) {
|
||||
addToTodo(symlinkedProject, { fileName: symlinkedPath, pos: originalLocation.pos } as TLocation, toDo!, seenProjects);
|
||||
addToTodo(symlinkedProject, { fileName: symlinkedPath as string, pos: originalLocation.pos } as TLocation, toDo!, seenProjects);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
@ -526,12 +526,12 @@ namespace ts.server {
|
|||
return toDo;
|
||||
}
|
||||
|
||||
function addToTodo<TLocation extends DocumentPosition | undefined>(project: Project, location: TLocation, toDo: Push<ProjectAndLocation<TLocation>>, seenProjects: Map<true>): void {
|
||||
function addToTodo<TLocation extends DocumentPosition | undefined>(project: Project, location: TLocation, toDo: Push<ProjectAndLocation<TLocation>>, seenProjects: Set<string>): void {
|
||||
if (addToSeen(seenProjects, project)) toDo.push({ project, location });
|
||||
}
|
||||
|
||||
function addToSeen(seenProjects: Map<true>, project: Project) {
|
||||
return ts.addToSeen(seenProjects, getProjectKey(project));
|
||||
function addToSeen(seenProjects: Set<string>, project: Project) {
|
||||
return tryAddToSet(seenProjects, getProjectKey(project));
|
||||
}
|
||||
|
||||
function getProjectKey(project: Project) {
|
||||
|
@ -1360,7 +1360,7 @@ namespace ts.server {
|
|||
|
||||
private getProjects(args: protocol.FileRequestArgs, getScriptInfoEnsuringProjectsUptoDate?: boolean, ignoreNoProjectError?: boolean): Projects {
|
||||
let projects: readonly Project[] | undefined;
|
||||
let symLinkedProjects: MultiMap<Project> | undefined;
|
||||
let symLinkedProjects: MultiMap<Path, Project> | undefined;
|
||||
if (args.projectFileName) {
|
||||
const project = this.getProject(args.projectFileName);
|
||||
if (project) {
|
||||
|
|
|
@ -41,7 +41,7 @@ namespace ts.server {
|
|||
if ((arr1 || emptyArray).length === 0 && (arr2 || emptyArray).length === 0) {
|
||||
return true;
|
||||
}
|
||||
const set: Map<boolean> = createMap<boolean>();
|
||||
const set: Map<string, boolean> = createMap<boolean>();
|
||||
let unique = 0;
|
||||
|
||||
for (const v of arr1!) {
|
||||
|
@ -83,7 +83,7 @@ namespace ts.server {
|
|||
|
||||
/*@internal*/
|
||||
export class TypingsCache {
|
||||
private readonly perProjectCache: Map<TypingsCacheEntry> = createMap<TypingsCacheEntry>();
|
||||
private readonly perProjectCache: Map<string, TypingsCacheEntry> = createMap<TypingsCacheEntry>();
|
||||
|
||||
constructor(private readonly installer: ITypingsInstaller) {
|
||||
}
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
/* @internal */
|
||||
namespace ts.server {
|
||||
export class ThrottledOperations {
|
||||
private readonly pendingTimeouts: Map<any> = createMap<any>();
|
||||
private readonly pendingTimeouts: Map<string, any> = createMap<any>();
|
||||
private readonly logger?: Logger | undefined;
|
||||
constructor(private readonly host: ServerHost, logger: Logger) {
|
||||
this.logger = logger.hasLevel(LogLevel.verbose) ? logger : undefined;
|
||||
|
|
|
@ -26,7 +26,7 @@ namespace ts.codefix {
|
|||
},
|
||||
getAllCodeActions: context => {
|
||||
const { sourceFile } = context;
|
||||
const fixedDeclarations = createMap<true>();
|
||||
const fixedDeclarations = new Set<number>();
|
||||
return codeFixAll(context, errorCodes, (t, diagnostic) => {
|
||||
const span = diagnostic.relatedInformation && find(diagnostic.relatedInformation, r => r.code === Diagnostics.Did_you_mean_to_mark_this_function_as_async.code) as TextSpan | undefined;
|
||||
const decl = getFixableErrorSpanDeclaration(sourceFile, span);
|
||||
|
@ -40,18 +40,18 @@ namespace ts.codefix {
|
|||
});
|
||||
|
||||
type FixableDeclaration = ArrowFunction | FunctionDeclaration | FunctionExpression | MethodDeclaration;
|
||||
function getFix(context: CodeFixContext | CodeFixAllContext, decl: FixableDeclaration, trackChanges: ContextualTrackChangesFunction, fixedDeclarations?: Map<true>) {
|
||||
function getFix(context: CodeFixContext | CodeFixAllContext, decl: FixableDeclaration, trackChanges: ContextualTrackChangesFunction, fixedDeclarations?: Set<number>) {
|
||||
const changes = trackChanges(t => makeChange(t, context.sourceFile, decl, fixedDeclarations));
|
||||
return createCodeFixAction(fixId, changes, Diagnostics.Add_async_modifier_to_containing_function, fixId, Diagnostics.Add_all_missing_async_modifiers);
|
||||
}
|
||||
|
||||
function makeChange(changeTracker: textChanges.ChangeTracker, sourceFile: SourceFile, insertionSite: FixableDeclaration, fixedDeclarations?: Map<true>) {
|
||||
function makeChange(changeTracker: textChanges.ChangeTracker, sourceFile: SourceFile, insertionSite: FixableDeclaration, fixedDeclarations?: Set<number>) {
|
||||
if (fixedDeclarations) {
|
||||
if (fixedDeclarations.has(getNodeId(insertionSite).toString())) {
|
||||
if (fixedDeclarations.has(getNodeId(insertionSite))) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
fixedDeclarations?.set(getNodeId(insertionSite).toString(), true);
|
||||
fixedDeclarations?.add(getNodeId(insertionSite));
|
||||
const cloneWithModifier = factory.updateModifiers(
|
||||
getSynthesizedDeepClone(insertionSite, /*includeTrivia*/ true),
|
||||
factory.createNodeArray(factory.createModifiersFromModifierFlags(getSyntacticModifierFlags(insertionSite) | ModifierFlags.Async)));
|
||||
|
|
|
@ -45,7 +45,7 @@ namespace ts.codefix {
|
|||
getAllCodeActions: context => {
|
||||
const { sourceFile, program, cancellationToken } = context;
|
||||
const checker = context.program.getTypeChecker();
|
||||
const fixedDeclarations = createMap<true>();
|
||||
const fixedDeclarations = new Set<number>();
|
||||
return codeFixAll(context, errorCodes, (t, diagnostic) => {
|
||||
const expression = getFixableErrorSpanExpression(sourceFile, diagnostic.code, diagnostic, cancellationToken, program);
|
||||
if (!expression) {
|
||||
|
@ -58,7 +58,7 @@ namespace ts.codefix {
|
|||
},
|
||||
});
|
||||
|
||||
function getDeclarationSiteFix(context: CodeFixContext | CodeFixAllContext, expression: Expression, errorCode: number, checker: TypeChecker, trackChanges: ContextualTrackChangesFunction, fixedDeclarations?: Map<true>) {
|
||||
function getDeclarationSiteFix(context: CodeFixContext | CodeFixAllContext, expression: Expression, errorCode: number, checker: TypeChecker, trackChanges: ContextualTrackChangesFunction, fixedDeclarations?: Set<number>) {
|
||||
const { sourceFile, program, cancellationToken } = context;
|
||||
const awaitableInitializers = findAwaitableInitializers(expression, sourceFile, cancellationToken, program, checker);
|
||||
if (awaitableInitializers) {
|
||||
|
@ -79,7 +79,7 @@ namespace ts.codefix {
|
|||
}
|
||||
}
|
||||
|
||||
function getUseSiteFix(context: CodeFixContext | CodeFixAllContext, expression: Expression, errorCode: number, checker: TypeChecker, trackChanges: ContextualTrackChangesFunction, fixedDeclarations?: Map<true>) {
|
||||
function getUseSiteFix(context: CodeFixContext | CodeFixAllContext, expression: Expression, errorCode: number, checker: TypeChecker, trackChanges: ContextualTrackChangesFunction, fixedDeclarations?: Set<number>) {
|
||||
const changes = trackChanges(t => makeChange(t, errorCode, context.sourceFile, checker, expression, fixedDeclarations));
|
||||
return createCodeFixAction(fixId, changes, Diagnostics.Add_await, fixId, Diagnostics.Fix_all_expressions_possibly_missing_await);
|
||||
}
|
||||
|
@ -234,12 +234,12 @@ namespace ts.codefix {
|
|||
ancestor.parent.kind === SyntaxKind.MethodDeclaration));
|
||||
}
|
||||
|
||||
function makeChange(changeTracker: textChanges.ChangeTracker, errorCode: number, sourceFile: SourceFile, checker: TypeChecker, insertionSite: Expression, fixedDeclarations?: Map<true>) {
|
||||
function makeChange(changeTracker: textChanges.ChangeTracker, errorCode: number, sourceFile: SourceFile, checker: TypeChecker, insertionSite: Expression, fixedDeclarations?: Set<number>) {
|
||||
if (isBinaryExpression(insertionSite)) {
|
||||
for (const side of [insertionSite.left, insertionSite.right]) {
|
||||
if (fixedDeclarations && isIdentifier(side)) {
|
||||
const symbol = checker.getSymbolAtLocation(side);
|
||||
if (symbol && fixedDeclarations.has(getSymbolId(symbol).toString())) {
|
||||
if (symbol && fixedDeclarations.has(getSymbolId(symbol))) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
@ -251,7 +251,7 @@ namespace ts.codefix {
|
|||
else if (errorCode === propertyAccessCode && isPropertyAccessExpression(insertionSite.parent)) {
|
||||
if (fixedDeclarations && isIdentifier(insertionSite.parent.expression)) {
|
||||
const symbol = checker.getSymbolAtLocation(insertionSite.parent.expression);
|
||||
if (symbol && fixedDeclarations.has(getSymbolId(symbol).toString())) {
|
||||
if (symbol && fixedDeclarations.has(getSymbolId(symbol))) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
@ -264,7 +264,7 @@ namespace ts.codefix {
|
|||
else if (contains(callableConstructableErrorCodes, errorCode) && isCallOrNewExpression(insertionSite.parent)) {
|
||||
if (fixedDeclarations && isIdentifier(insertionSite)) {
|
||||
const symbol = checker.getSymbolAtLocation(insertionSite);
|
||||
if (symbol && fixedDeclarations.has(getSymbolId(symbol).toString())) {
|
||||
if (symbol && fixedDeclarations.has(getSymbolId(symbol))) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
@ -274,7 +274,7 @@ namespace ts.codefix {
|
|||
else {
|
||||
if (fixedDeclarations && isVariableDeclaration(insertionSite.parent) && isIdentifier(insertionSite.parent.name)) {
|
||||
const symbol = checker.getSymbolAtLocation(insertionSite.parent.name);
|
||||
if (symbol && !addToSeen(fixedDeclarations, getSymbolId(symbol))) {
|
||||
if (symbol && !tryAddToSet(fixedDeclarations, getSymbolId(symbol))) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -38,8 +38,8 @@ namespace ts.codefix {
|
|||
|
||||
interface Transformer {
|
||||
readonly checker: TypeChecker;
|
||||
readonly synthNamesMap: Map<SynthIdentifier>; // keys are the symbol id of the identifier
|
||||
readonly setOfExpressionsToReturn: ReadonlyMap<true>; // keys are the node ids of the expressions
|
||||
readonly synthNamesMap: Map<string, SynthIdentifier>; // keys are the symbol id of the identifier
|
||||
readonly setOfExpressionsToReturn: ReadonlySet<number>; // keys are the node ids of the expressions
|
||||
readonly isInJSFile: boolean;
|
||||
}
|
||||
|
||||
|
@ -61,7 +61,7 @@ namespace ts.codefix {
|
|||
return;
|
||||
}
|
||||
|
||||
const synthNamesMap: Map<SynthIdentifier> = createMap();
|
||||
const synthNamesMap: Map<string, SynthIdentifier> = createMap();
|
||||
const isInJavascript = isInJSFile(functionToConvert);
|
||||
const setOfExpressionsToReturn = getAllPromiseExpressionsToReturn(functionToConvert, checker);
|
||||
const functionToConvertRenamed = renameCollidingVarNames(functionToConvert, checker, synthNamesMap, context.sourceFile);
|
||||
|
@ -99,24 +99,24 @@ namespace ts.codefix {
|
|||
/*
|
||||
Finds all of the expressions of promise type that should not be saved in a variable during the refactor
|
||||
*/
|
||||
function getAllPromiseExpressionsToReturn(func: FunctionLikeDeclaration, checker: TypeChecker): Map<true> {
|
||||
function getAllPromiseExpressionsToReturn(func: FunctionLikeDeclaration, checker: TypeChecker): Set<number> {
|
||||
if (!func.body) {
|
||||
return createMap<true>();
|
||||
return new Set();
|
||||
}
|
||||
|
||||
const setOfExpressionsToReturn: Map<true> = createMap<true>();
|
||||
const setOfExpressionsToReturn = new Set<number>();
|
||||
forEachChild(func.body, function visit(node: Node) {
|
||||
if (isPromiseReturningCallExpression(node, checker, "then")) {
|
||||
setOfExpressionsToReturn.set(getNodeId(node).toString(), true);
|
||||
setOfExpressionsToReturn.add(getNodeId(node));
|
||||
forEach(node.arguments, visit);
|
||||
}
|
||||
else if (isPromiseReturningCallExpression(node, checker, "catch")) {
|
||||
setOfExpressionsToReturn.set(getNodeId(node).toString(), true);
|
||||
setOfExpressionsToReturn.add(getNodeId(node));
|
||||
// if .catch() is the last call in the chain, move leftward in the chain until we hit something else that should be returned
|
||||
forEachChild(node, visit);
|
||||
}
|
||||
else if (isPromiseTypedExpression(node, checker)) {
|
||||
setOfExpressionsToReturn.set(getNodeId(node).toString(), true);
|
||||
setOfExpressionsToReturn.add(getNodeId(node));
|
||||
// don't recurse here, since we won't refactor any children or arguments of the expression
|
||||
}
|
||||
else {
|
||||
|
@ -148,7 +148,7 @@ namespace ts.codefix {
|
|||
This function collects all existing identifier names and names of identifiers that will be created in the refactor.
|
||||
It then checks for any collisions and renames them through getSynthesizedDeepClone
|
||||
*/
|
||||
function renameCollidingVarNames(nodeToRename: FunctionLikeDeclaration, checker: TypeChecker, synthNamesMap: Map<SynthIdentifier>, sourceFile: SourceFile): FunctionLikeDeclaration {
|
||||
function renameCollidingVarNames(nodeToRename: FunctionLikeDeclaration, checker: TypeChecker, synthNamesMap: Map<string, SynthIdentifier>, sourceFile: SourceFile): FunctionLikeDeclaration {
|
||||
const identsToRenameMap = createMap<Identifier>(); // key is the symbol id
|
||||
const collidingSymbolMap = createMultiMap<Symbol>();
|
||||
forEachChild(nodeToRename, function visit(node: Node) {
|
||||
|
@ -202,7 +202,7 @@ namespace ts.codefix {
|
|||
return getSynthesizedDeepCloneWithRenames(nodeToRename, /*includeTrivia*/ true, identsToRenameMap, checker);
|
||||
}
|
||||
|
||||
function getNewNameIfConflict(name: Identifier, originalNames: ReadonlyMap<Symbol[]>): SynthIdentifier {
|
||||
function getNewNameIfConflict(name: Identifier, originalNames: ReadonlyMap<string, Symbol[]>): SynthIdentifier {
|
||||
const numVarsSameName = (originalNames.get(name.text) || emptyArray).length;
|
||||
const identifier = numVarsSameName === 0 ? name : factory.createIdentifier(name.text + "_" + numVarsSameName);
|
||||
return createSynthIdentifier(identifier);
|
||||
|
@ -593,6 +593,6 @@ namespace ts.codefix {
|
|||
}
|
||||
|
||||
function shouldReturn(expression: Expression, transformer: Transformer): boolean {
|
||||
return !!expression.original && transformer.setOfExpressionsToReturn.has(getNodeId(expression.original).toString());
|
||||
return !!expression.original && transformer.setOfExpressionsToReturn.has(getNodeId(expression.original));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -40,7 +40,7 @@ namespace ts.codefix {
|
|||
|
||||
/** @returns Whether we converted a `module.exports =` to a default export. */
|
||||
function convertFileToEs6Module(sourceFile: SourceFile, checker: TypeChecker, changes: textChanges.ChangeTracker, target: ScriptTarget, quotePreference: QuotePreference): ModuleExportsChanged {
|
||||
const identifiers: Identifiers = { original: collectFreeIdentifiers(sourceFile), additional: createMap<true>() };
|
||||
const identifiers: Identifiers = { original: collectFreeIdentifiers(sourceFile), additional: new Set() };
|
||||
const exports = collectExportRenames(sourceFile, checker, identifiers);
|
||||
convertExportsAccesses(sourceFile, exports, changes);
|
||||
let moduleExportsChangedToDefault = false;
|
||||
|
@ -60,7 +60,7 @@ namespace ts.codefix {
|
|||
* export { _x as x };
|
||||
* This conversion also must place if the exported name is not a valid identifier, e.g. `exports.class = 0;`.
|
||||
*/
|
||||
type ExportRenames = ReadonlyMap<string>;
|
||||
type ExportRenames = ReadonlyMap<string, string>;
|
||||
|
||||
function collectExportRenames(sourceFile: SourceFile, checker: TypeChecker, identifiers: Identifiers): ExportRenames {
|
||||
const res = createMap<string>();
|
||||
|
@ -429,7 +429,7 @@ namespace ts.codefix {
|
|||
while (identifiers.original.has(name) || identifiers.additional.has(name)) {
|
||||
name = `_${name}`;
|
||||
}
|
||||
identifiers.additional.set(name, true);
|
||||
identifiers.additional.add(name);
|
||||
return name;
|
||||
}
|
||||
|
||||
|
@ -441,10 +441,10 @@ namespace ts.codefix {
|
|||
interface Identifiers {
|
||||
readonly original: FreeIdentifiers;
|
||||
// Additional identifiers we've added. Mutable!
|
||||
readonly additional: Map<true>;
|
||||
readonly additional: Set<string>;
|
||||
}
|
||||
|
||||
type FreeIdentifiers = ReadonlyMap<readonly Identifier[]>;
|
||||
type FreeIdentifiers = ReadonlyMap<string, readonly Identifier[]>;
|
||||
function collectFreeIdentifiers(file: SourceFile): FreeIdentifiers {
|
||||
const map = createMultiMap<Identifier>();
|
||||
forEachFreeIdentifier(file, id => map.add(id.text, id));
|
||||
|
|
|
@ -37,7 +37,7 @@ namespace ts.codefix {
|
|||
},
|
||||
fixIds: [fixId],
|
||||
getAllCodeActions: context => {
|
||||
const seenLines = createMap<true>();
|
||||
const seenLines = new Set<number>();
|
||||
return codeFixAll(context, errorCodes, (changes, diag) => {
|
||||
if (textChanges.isValidLocationToAddComment(diag.file, diag.start)) {
|
||||
makeChange(changes, diag.file, diag.start, seenLines);
|
||||
|
@ -46,10 +46,10 @@ namespace ts.codefix {
|
|||
},
|
||||
});
|
||||
|
||||
function makeChange(changes: textChanges.ChangeTracker, sourceFile: SourceFile, position: number, seenLines?: Map<true>) {
|
||||
function makeChange(changes: textChanges.ChangeTracker, sourceFile: SourceFile, position: number, seenLines?: Set<number>) {
|
||||
const { line: lineNumber } = getLineAndCharacterOfPosition(sourceFile, position);
|
||||
// Only need to add `// @ts-ignore` for a line once.
|
||||
if (!seenLines || addToSeen(seenLines, lineNumber)) {
|
||||
if (!seenLines || tryAddToSet(seenLines, lineNumber)) {
|
||||
changes.insertCommentBeforeLine(sourceFile, lineNumber, position, " @ts-ignore");
|
||||
}
|
||||
}
|
||||
|
|
|
@ -519,7 +519,7 @@ namespace ts.codefix {
|
|||
program: Program,
|
||||
useAutoImportProvider: boolean,
|
||||
host: LanguageServiceHost
|
||||
): ReadonlyMap<readonly SymbolExportInfo[]> {
|
||||
): ReadonlyMap<string, readonly SymbolExportInfo[]> {
|
||||
// For each original symbol, keep all re-exports of that symbol together so we can call `getCodeActionsForImport` on the whole group at once.
|
||||
// Maps symbol id to info for modules providing that symbol (original export + re-exports).
|
||||
const originalSymbolToExportInfos = createMultiMap<SymbolExportInfo>();
|
||||
|
|
|
@ -2315,7 +2315,7 @@ namespace ts.Completions {
|
|||
return contextualMemberSymbols;
|
||||
}
|
||||
|
||||
const membersDeclaredBySpreadAssignment = createMap<true>();
|
||||
const membersDeclaredBySpreadAssignment = new Set<string>();
|
||||
const existingMemberNames = createUnderscoreEscapedMap<boolean>();
|
||||
for (const m of existingMembers) {
|
||||
// Ignore omitted expressions for missing members
|
||||
|
@ -2362,14 +2362,14 @@ namespace ts.Completions {
|
|||
return filteredSymbols;
|
||||
}
|
||||
|
||||
function setMembersDeclaredBySpreadAssignment(declaration: SpreadAssignment | JsxSpreadAttribute, membersDeclaredBySpreadAssignment: Map<true>) {
|
||||
function setMembersDeclaredBySpreadAssignment(declaration: SpreadAssignment | JsxSpreadAttribute, membersDeclaredBySpreadAssignment: Set<string>) {
|
||||
const expression = declaration.expression;
|
||||
const symbol = typeChecker.getSymbolAtLocation(expression);
|
||||
const type = symbol && typeChecker.getTypeOfSymbolAtLocation(symbol, expression);
|
||||
const properties = type && (<ObjectType>type).properties;
|
||||
if (properties) {
|
||||
properties.forEach(property => {
|
||||
membersDeclaredBySpreadAssignment.set(property.name, true);
|
||||
membersDeclaredBySpreadAssignment.add(property.name);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
@ -2384,7 +2384,7 @@ namespace ts.Completions {
|
|||
}
|
||||
|
||||
// Set SortText to MemberDeclaredBySpreadAssignment if it is fulfilled by spread assignment
|
||||
function setSortTextToMemberDeclaredBySpreadAssignment(membersDeclaredBySpreadAssignment: Map<true>, contextualMemberSymbols: Symbol[]): void {
|
||||
function setSortTextToMemberDeclaredBySpreadAssignment(membersDeclaredBySpreadAssignment: Set<string>, contextualMemberSymbols: Symbol[]): void {
|
||||
if (membersDeclaredBySpreadAssignment.size === 0) {
|
||||
return;
|
||||
}
|
||||
|
@ -2447,7 +2447,7 @@ namespace ts.Completions {
|
|||
*/
|
||||
function filterJsxAttributes(symbols: Symbol[], attributes: NodeArray<JsxAttribute | JsxSpreadAttribute>): Symbol[] {
|
||||
const seenNames = createUnderscoreEscapedMap<boolean>();
|
||||
const membersDeclaredBySpreadAssignment = createMap<true>();
|
||||
const membersDeclaredBySpreadAssignment = new Set<string>();
|
||||
for (const attr of attributes) {
|
||||
// If this is the current item we are editing right now, do not filter it out
|
||||
if (isCurrentlyEditingNode(attr)) {
|
||||
|
|
|
@ -28,7 +28,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
function getSemanticDocumentHighlights(position: number, node: Node, program: Program, cancellationToken: CancellationToken, sourceFilesToSearch: readonly SourceFile[]): DocumentHighlights[] | undefined {
|
||||
const sourceFilesSet = arrayToSet(sourceFilesToSearch, f => f.fileName);
|
||||
const sourceFilesSet = new Set(sourceFilesToSearch.map(f => f.fileName));
|
||||
const referenceEntries = FindAllReferences.getReferenceEntriesForNode(position, node, program, sourceFilesToSearch, cancellationToken, /*options*/ undefined, sourceFilesSet);
|
||||
if (!referenceEntries) return undefined;
|
||||
const map = arrayToMultiMap(referenceEntries.map(FindAllReferences.toHighlightSpan), e => e.fileName, e => e.span);
|
||||
|
|
|
@ -118,7 +118,7 @@ namespace ts {
|
|||
export function createDocumentRegistryInternal(useCaseSensitiveFileNames?: boolean, currentDirectory = "", externalCache?: ExternalDocumentCache): DocumentRegistry {
|
||||
// Maps from compiler setting target (ES3, ES5, etc.) to all the cached documents we have
|
||||
// for those settings.
|
||||
const buckets = createMap<Map<DocumentRegistryEntry>>();
|
||||
const buckets = new Map<string, Map<Path, DocumentRegistryEntry>>();
|
||||
const getCanonicalFileName = createGetCanonicalFileName(!!useCaseSensitiveFileNames);
|
||||
|
||||
function reportStats() {
|
||||
|
@ -170,7 +170,7 @@ namespace ts {
|
|||
acquiring: boolean,
|
||||
scriptKind?: ScriptKind): SourceFile {
|
||||
|
||||
const bucket = getOrUpdate<Map<DocumentRegistryEntry>>(buckets, key, createMap);
|
||||
const bucket = getOrUpdate<Map<Path, DocumentRegistryEntry>>(buckets, key, createMap);
|
||||
let entry = bucket.get(path);
|
||||
const scriptTarget = scriptKind === ScriptKind.JSON ? ScriptTarget.JSON : compilationSettings.target || ScriptTarget.ES5;
|
||||
if (!entry && externalCache) {
|
||||
|
|
|
@ -287,7 +287,7 @@ namespace ts.FindAllReferences {
|
|||
sourceFiles: readonly SourceFile[],
|
||||
cancellationToken: CancellationToken,
|
||||
options: Options = {},
|
||||
sourceFilesSet: ReadonlyMap<true> = arrayToSet(sourceFiles, f => f.fileName),
|
||||
sourceFilesSet: ReadonlySet<string> = new Set(sourceFiles.map(f => f.fileName)),
|
||||
): readonly Entry[] | undefined {
|
||||
return flattenEntries(Core.getReferencedSymbolsForNode(position, node, program, sourceFiles, cancellationToken, options, sourceFilesSet));
|
||||
}
|
||||
|
@ -594,7 +594,7 @@ namespace ts.FindAllReferences {
|
|||
/** Encapsulates the core find-all-references algorithm. */
|
||||
export namespace Core {
|
||||
/** Core find-all-references algorithm. Handles special cases before delegating to `getReferencedSymbolsForSymbol`. */
|
||||
export function getReferencedSymbolsForNode(position: number, node: Node, program: Program, sourceFiles: readonly SourceFile[], cancellationToken: CancellationToken, options: Options = {}, sourceFilesSet: ReadonlyMap<true> = arrayToSet(sourceFiles, f => f.fileName)): readonly SymbolAndEntries[] | undefined {
|
||||
export function getReferencedSymbolsForNode(position: number, node: Node, program: Program, sourceFiles: readonly SourceFile[], cancellationToken: CancellationToken, options: Options = {}, sourceFilesSet: ReadonlySet<string> = new Set(sourceFiles.map(f => f.fileName))): readonly SymbolAndEntries[] | undefined {
|
||||
if (options.use === FindReferencesUse.References) {
|
||||
node = getAdjustedReferenceLocation(node);
|
||||
}
|
||||
|
@ -651,7 +651,7 @@ namespace ts.FindAllReferences {
|
|||
return undefined;
|
||||
}
|
||||
|
||||
function getReferencedSymbolsForModuleIfDeclaredBySourceFile(symbol: Symbol, program: Program, sourceFiles: readonly SourceFile[], cancellationToken: CancellationToken, options: Options, sourceFilesSet: ReadonlyMap<true>) {
|
||||
function getReferencedSymbolsForModuleIfDeclaredBySourceFile(symbol: Symbol, program: Program, sourceFiles: readonly SourceFile[], cancellationToken: CancellationToken, options: Options, sourceFilesSet: ReadonlySet<string>) {
|
||||
const moduleSourceFile = (symbol.flags & SymbolFlags.Module) && symbol.declarations && find(symbol.declarations, isSourceFile);
|
||||
if (!moduleSourceFile) return undefined;
|
||||
const exportEquals = symbol.exports!.get(InternalSymbolName.ExportEquals);
|
||||
|
@ -718,7 +718,7 @@ namespace ts.FindAllReferences {
|
|||
return program.getSourceFiles().indexOf(sourceFile);
|
||||
}
|
||||
|
||||
function getReferencedSymbolsForModule(program: Program, symbol: Symbol, excludeImportTypeOfExportEquals: boolean, sourceFiles: readonly SourceFile[], sourceFilesSet: ReadonlyMap<true>): SymbolAndEntries[] {
|
||||
function getReferencedSymbolsForModule(program: Program, symbol: Symbol, excludeImportTypeOfExportEquals: boolean, sourceFiles: readonly SourceFile[], sourceFilesSet: ReadonlySet<string>): SymbolAndEntries[] {
|
||||
Debug.assert(!!symbol.valueDeclaration);
|
||||
|
||||
const references = mapDefined<ModuleReference, Entry>(findModuleReferences(program, sourceFiles, symbol), reference => {
|
||||
|
@ -830,7 +830,7 @@ namespace ts.FindAllReferences {
|
|||
}
|
||||
|
||||
/** Core find-all-references algorithm for a normal symbol. */
|
||||
function getReferencedSymbolsForSymbol(originalSymbol: Symbol, node: Node | undefined, sourceFiles: readonly SourceFile[], sourceFilesSet: ReadonlyMap<true>, checker: TypeChecker, cancellationToken: CancellationToken, options: Options): SymbolAndEntries[] {
|
||||
function getReferencedSymbolsForSymbol(originalSymbol: Symbol, node: Node | undefined, sourceFiles: readonly SourceFile[], sourceFilesSet: ReadonlySet<string>, checker: TypeChecker, cancellationToken: CancellationToken, options: Options): SymbolAndEntries[] {
|
||||
const symbol = node && skipPastExportOrImportSpecifierOrUnion(originalSymbol, node, checker, /*useLocalSymbolForExportSpecifier*/ !isForRenameWithPrefixAndSuffixText(options)) || originalSymbol;
|
||||
|
||||
// Compute the meaning from the location and the symbol it references
|
||||
|
@ -972,7 +972,7 @@ namespace ts.FindAllReferences {
|
|||
|
||||
constructor(
|
||||
readonly sourceFiles: readonly SourceFile[],
|
||||
readonly sourceFilesSet: ReadonlyMap<true>,
|
||||
readonly sourceFilesSet: ReadonlySet<string>,
|
||||
readonly specialSearchKind: SpecialSearchKind,
|
||||
readonly checker: TypeChecker,
|
||||
readonly cancellationToken: CancellationToken,
|
||||
|
@ -1031,15 +1031,15 @@ namespace ts.FindAllReferences {
|
|||
}
|
||||
|
||||
// Source file ID → symbol ID → Whether the symbol has been searched for in the source file.
|
||||
private readonly sourceFileToSeenSymbols: Map<true>[] = [];
|
||||
private readonly sourceFileToSeenSymbols: Set<number>[] = [];
|
||||
/** Returns `true` the first time we search for a symbol in a file and `false` afterwards. */
|
||||
markSearchedSymbols(sourceFile: SourceFile, symbols: readonly Symbol[]): boolean {
|
||||
const sourceId = getNodeId(sourceFile);
|
||||
const seenSymbols = this.sourceFileToSeenSymbols[sourceId] || (this.sourceFileToSeenSymbols[sourceId] = createMap<true>());
|
||||
const seenSymbols = this.sourceFileToSeenSymbols[sourceId] || (this.sourceFileToSeenSymbols[sourceId] = new Set<number>());
|
||||
|
||||
let anyNewSymbols = false;
|
||||
for (const sym of symbols) {
|
||||
anyNewSymbols = addToSeen(seenSymbols, getSymbolId(sym)) || anyNewSymbols;
|
||||
anyNewSymbols = tryAddToSet(seenSymbols, getSymbolId(sym)) || anyNewSymbols;
|
||||
}
|
||||
return anyNewSymbols;
|
||||
}
|
||||
|
@ -1093,7 +1093,7 @@ namespace ts.FindAllReferences {
|
|||
isDefaultExport: boolean,
|
||||
cb: (ref: Identifier) => void,
|
||||
): void {
|
||||
const importTracker = createImportTracker(sourceFiles, arrayToSet(sourceFiles, f => f.fileName), checker, cancellationToken);
|
||||
const importTracker = createImportTracker(sourceFiles, new Set(sourceFiles.map(f => f.fileName)), checker, cancellationToken);
|
||||
const { importSearches, indirectUsers } = importTracker(exportSymbol, { exportKind: isDefaultExport ? ExportKind.Default : ExportKind.Named, exportingModuleSymbol }, /*isForRename*/ false);
|
||||
for (const [importLocation] of importSearches) {
|
||||
cb(importLocation);
|
||||
|
@ -1754,7 +1754,7 @@ namespace ts.FindAllReferences {
|
|||
* @param parent Another class or interface Symbol
|
||||
* @param cachedResults A map of symbol id pairs (i.e. "child,parent") to booleans indicating previous results
|
||||
*/
|
||||
function explicitlyInheritsFrom(symbol: Symbol, parent: Symbol, cachedResults: Map<boolean>, checker: TypeChecker): boolean {
|
||||
function explicitlyInheritsFrom(symbol: Symbol, parent: Symbol, cachedResults: Map<string, boolean>, checker: TypeChecker): boolean {
|
||||
if (symbol === parent) {
|
||||
return true;
|
||||
}
|
||||
|
|
|
@ -12,7 +12,7 @@ namespace ts.FindAllReferences {
|
|||
export type ImportTracker = (exportSymbol: Symbol, exportInfo: ExportInfo, isForRename: boolean) => ImportsResult;
|
||||
|
||||
/** Creates the imports map and returns an ImportTracker that uses it. Call this lazily to avoid calling `getDirectImportsMap` unnecessarily. */
|
||||
export function createImportTracker(sourceFiles: readonly SourceFile[], sourceFilesSet: ReadonlyMap<true>, checker: TypeChecker, cancellationToken: CancellationToken | undefined): ImportTracker {
|
||||
export function createImportTracker(sourceFiles: readonly SourceFile[], sourceFilesSet: ReadonlySet<string>, checker: TypeChecker, cancellationToken: CancellationToken | undefined): ImportTracker {
|
||||
const allDirectImports = getDirectImportsMap(sourceFiles, checker, cancellationToken);
|
||||
return (exportSymbol, exportInfo, isForRename) => {
|
||||
const { directImports, indirectUsers } = getImportersForExport(sourceFiles, sourceFilesSet, allDirectImports, exportInfo, checker, cancellationToken);
|
||||
|
@ -39,8 +39,8 @@ namespace ts.FindAllReferences {
|
|||
/** Returns import statements that directly reference the exporting module, and a list of files that may access the module through a namespace. */
|
||||
function getImportersForExport(
|
||||
sourceFiles: readonly SourceFile[],
|
||||
sourceFilesSet: ReadonlyMap<true>,
|
||||
allDirectImports: Map<ImporterOrCallExpression[]>,
|
||||
sourceFilesSet: ReadonlySet<string>,
|
||||
allDirectImports: Map<string, ImporterOrCallExpression[]>,
|
||||
{ exportingModuleSymbol, exportKind }: ExportInfo,
|
||||
checker: TypeChecker,
|
||||
cancellationToken: CancellationToken | undefined,
|
||||
|
@ -368,7 +368,7 @@ namespace ts.FindAllReferences {
|
|||
}
|
||||
|
||||
/** Returns a map from a module symbol Id to all import statements that directly reference the module. */
|
||||
function getDirectImportsMap(sourceFiles: readonly SourceFile[], checker: TypeChecker, cancellationToken: CancellationToken | undefined): Map<ImporterOrCallExpression[]> {
|
||||
function getDirectImportsMap(sourceFiles: readonly SourceFile[], checker: TypeChecker, cancellationToken: CancellationToken | undefined): Map<string, ImporterOrCallExpression[]> {
|
||||
const map = createMap<ImporterOrCallExpression[]>();
|
||||
|
||||
for (const sourceFile of sourceFiles) {
|
||||
|
|
|
@ -33,8 +33,8 @@ namespace ts.NavigationBar {
|
|||
let parentsStack: NavigationBarNode[] = [];
|
||||
let parent: NavigationBarNode;
|
||||
|
||||
const trackedEs5ClassesStack: (Map<boolean> | undefined)[] = [];
|
||||
let trackedEs5Classes: Map<boolean> | undefined;
|
||||
const trackedEs5ClassesStack: (Map<string, boolean> | undefined)[] = [];
|
||||
let trackedEs5Classes: Map<string, boolean> | undefined;
|
||||
|
||||
// NavigationBarItem requires an array, but will not mutate it, so just give it this for performance.
|
||||
let emptyChildItemArray: NavigationBarItem[] = [];
|
||||
|
|
|
@ -115,7 +115,7 @@ namespace ts {
|
|||
};
|
||||
}
|
||||
|
||||
function getFullMatch(candidateContainers: readonly string[], candidate: string, dotSeparatedSegments: readonly Segment[], stringToWordSpans: Map<TextSpan[]>): PatternMatch | undefined {
|
||||
function getFullMatch(candidateContainers: readonly string[], candidate: string, dotSeparatedSegments: readonly Segment[], stringToWordSpans: Map<string, TextSpan[]>): PatternMatch | undefined {
|
||||
// First, check that the last part of the dot separated pattern matches the name of the
|
||||
// candidate. If not, then there's no point in proceeding and doing the more
|
||||
// expensive work.
|
||||
|
@ -141,7 +141,7 @@ namespace ts {
|
|||
return bestMatch;
|
||||
}
|
||||
|
||||
function getWordSpans(word: string, stringToWordSpans: Map<TextSpan[]>): TextSpan[] {
|
||||
function getWordSpans(word: string, stringToWordSpans: Map<string, TextSpan[]>): TextSpan[] {
|
||||
let spans = stringToWordSpans.get(word);
|
||||
if (!spans) {
|
||||
stringToWordSpans.set(word, spans = breakIntoWordSpans(word));
|
||||
|
@ -149,7 +149,7 @@ namespace ts {
|
|||
return spans;
|
||||
}
|
||||
|
||||
function matchTextChunk(candidate: string, chunk: TextChunk, stringToWordSpans: Map<TextSpan[]>): PatternMatch | undefined {
|
||||
function matchTextChunk(candidate: string, chunk: TextChunk, stringToWordSpans: Map<string, TextSpan[]>): PatternMatch | undefined {
|
||||
const index = indexOfIgnoringCase(candidate, chunk.textLowerCase);
|
||||
if (index === 0) {
|
||||
// a) Check if the word is a prefix of the candidate, in a case insensitive or
|
||||
|
@ -201,7 +201,7 @@ namespace ts {
|
|||
}
|
||||
}
|
||||
|
||||
function matchSegment(candidate: string, segment: Segment, stringToWordSpans: Map<TextSpan[]>): PatternMatch | undefined {
|
||||
function matchSegment(candidate: string, segment: Segment, stringToWordSpans: Map<string, TextSpan[]>): PatternMatch | undefined {
|
||||
// First check if the segment matches as is. This is also useful if the segment contains
|
||||
// characters we would normally strip when splitting into parts that we also may want to
|
||||
// match in the candidate. For example if the segment is "@int" and the candidate is
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
namespace ts.refactor {
|
||||
// A map with the refactor code as key, the refactor itself as value
|
||||
// e.g. nonSuggestableRefactors[refactorCode] -> the refactor you want
|
||||
const refactors: Map<Refactor> = createMap<Refactor>();
|
||||
const refactors: Map<string, Refactor> = createMap<Refactor>();
|
||||
|
||||
/** @param name An unique code associated with each refactor. Does not have to be human-readable. */
|
||||
export function registerRefactor(name: string, refactor: Refactor) {
|
||||
|
|
|
@ -43,11 +43,11 @@ namespace ts.refactor.extractSymbol {
|
|||
}
|
||||
|
||||
const functionActions: RefactorActionInfo[] = [];
|
||||
const usedFunctionNames: Map<boolean> = createMap();
|
||||
const usedFunctionNames: Map<string, boolean> = createMap();
|
||||
let innermostErrorFunctionAction: RefactorActionInfo | undefined;
|
||||
|
||||
const constantActions: RefactorActionInfo[] = [];
|
||||
const usedConstantNames: Map<boolean> = createMap();
|
||||
const usedConstantNames: Map<string, boolean> = createMap();
|
||||
let innermostErrorConstantAction: RefactorActionInfo | undefined;
|
||||
|
||||
let i = 0;
|
||||
|
@ -1321,7 +1321,7 @@ namespace ts.refactor.extractSymbol {
|
|||
}
|
||||
}
|
||||
|
||||
function transformFunctionBody(body: Node, exposedVariableDeclarations: readonly VariableDeclaration[], writes: readonly UsageEntry[] | undefined, substitutions: ReadonlyMap<Node>, hasReturn: boolean): { body: Block, returnValueProperty: string | undefined } {
|
||||
function transformFunctionBody(body: Node, exposedVariableDeclarations: readonly VariableDeclaration[], writes: readonly UsageEntry[] | undefined, substitutions: ReadonlyMap<string, Node>, hasReturn: boolean): { body: Block, returnValueProperty: string | undefined } {
|
||||
const hasWritesOrVariableDeclarations = writes !== undefined || exposedVariableDeclarations.length > 0;
|
||||
if (isBlock(body) && !hasWritesOrVariableDeclarations && substitutions.size === 0) {
|
||||
// already block, no declarations or writes to propagate back, no substitutions - can use node as is
|
||||
|
@ -1377,7 +1377,7 @@ namespace ts.refactor.extractSymbol {
|
|||
}
|
||||
}
|
||||
|
||||
function transformConstantInitializer(initializer: Expression, substitutions: ReadonlyMap<Node>): Expression {
|
||||
function transformConstantInitializer(initializer: Expression, substitutions: ReadonlyMap<string, Node>): Expression {
|
||||
return substitutions.size
|
||||
? visitor(initializer) as Expression
|
||||
: initializer;
|
||||
|
@ -1525,9 +1525,9 @@ namespace ts.refactor.extractSymbol {
|
|||
}
|
||||
|
||||
interface ScopeUsages {
|
||||
readonly usages: Map<UsageEntry>;
|
||||
readonly typeParameterUsages: Map<TypeParameter>; // Key is type ID
|
||||
readonly substitutions: Map<Node>;
|
||||
readonly usages: Map<string, UsageEntry>;
|
||||
readonly typeParameterUsages: Map<string, TypeParameter>; // Key is type ID
|
||||
readonly substitutions: Map<string, Node>;
|
||||
}
|
||||
|
||||
interface ReadsAndWrites {
|
||||
|
@ -1547,7 +1547,7 @@ namespace ts.refactor.extractSymbol {
|
|||
|
||||
const allTypeParameterUsages = createMap<TypeParameter>(); // Key is type ID
|
||||
const usagesPerScope: ScopeUsages[] = [];
|
||||
const substitutionsPerScope: Map<Node>[] = [];
|
||||
const substitutionsPerScope: Map<string, Node>[] = [];
|
||||
const functionErrorsPerScope: Diagnostic[][] = [];
|
||||
const constantErrorsPerScope: Diagnostic[][] = [];
|
||||
const visibleDeclarationsInExtractedRange: NamedDeclaration[] = [];
|
||||
|
|
|
@ -634,13 +634,13 @@ namespace ts {
|
|||
public scriptKind!: ScriptKind;
|
||||
public languageVersion!: ScriptTarget;
|
||||
public languageVariant!: LanguageVariant;
|
||||
public identifiers!: Map<string>;
|
||||
public identifiers!: Map<string, string>;
|
||||
public nameTable: UnderscoreEscapedMap<number> | undefined;
|
||||
public resolvedModules: Map<ResolvedModuleFull> | undefined;
|
||||
public resolvedTypeReferenceDirectiveNames!: Map<ResolvedTypeReferenceDirective>;
|
||||
public resolvedModules: Map<string, ResolvedModuleFull> | undefined;
|
||||
public resolvedTypeReferenceDirectiveNames!: Map<string, ResolvedTypeReferenceDirective>;
|
||||
public imports!: readonly StringLiteralLike[];
|
||||
public moduleAugmentations!: StringLiteral[];
|
||||
private namedDeclarations: Map<Declaration[]> | undefined;
|
||||
private namedDeclarations: Map<string, Declaration[]> | undefined;
|
||||
public ambientModuleNames!: string[];
|
||||
public checkJsDirective: CheckJsDirective | undefined;
|
||||
public errorExpectations: TextRange[] | undefined;
|
||||
|
@ -686,7 +686,7 @@ namespace ts {
|
|||
return fullText[lastCharPos] === "\n" && fullText[lastCharPos - 1] === "\r" ? lastCharPos - 1 : lastCharPos;
|
||||
}
|
||||
|
||||
public getNamedDeclarations(): Map<Declaration[]> {
|
||||
public getNamedDeclarations(): Map<string, Declaration[]> {
|
||||
if (!this.namedDeclarations) {
|
||||
this.namedDeclarations = this.computeNamedDeclarations();
|
||||
}
|
||||
|
@ -694,7 +694,7 @@ namespace ts {
|
|||
return this.namedDeclarations;
|
||||
}
|
||||
|
||||
private computeNamedDeclarations(): Map<Declaration[]> {
|
||||
private computeNamedDeclarations(): Map<string, Declaration[]> {
|
||||
const result = createMultiMap<Declaration>();
|
||||
|
||||
this.forEachChild(visit);
|
||||
|
@ -937,14 +937,14 @@ namespace ts {
|
|||
// at each language service public entry point, since we don't know when
|
||||
// the set of scripts handled by the host changes.
|
||||
class HostCache {
|
||||
private fileNameToEntry: Map<CachedHostFileInformation>;
|
||||
private fileNameToEntry: Map<Path, CachedHostFileInformation>;
|
||||
private _compilationSettings: CompilerOptions;
|
||||
private currentDirectory: string;
|
||||
|
||||
constructor(private host: LanguageServiceHost, getCanonicalFileName: GetCanonicalFileName) {
|
||||
// script id => script index
|
||||
this.currentDirectory = host.getCurrentDirectory();
|
||||
this.fileNameToEntry = createMap<CachedHostFileInformation>();
|
||||
this.fileNameToEntry = new Map();
|
||||
|
||||
// Initialize the list with the root file names
|
||||
const rootFileNames = host.getScriptFileNames();
|
||||
|
|
|
@ -25,11 +25,11 @@ namespace ts {
|
|||
fileNames: string[]; // The file names that belong to the same project.
|
||||
projectRootPath: string; // The path to the project root directory
|
||||
safeListPath: string; // The path used to retrieve the safe list
|
||||
packageNameToTypingLocation: Map<JsTyping.CachedTyping>; // The map of package names to their cached typing locations and installed versions
|
||||
packageNameToTypingLocation: Map<string, JsTyping.CachedTyping>; // The map of package names to their cached typing locations and installed versions
|
||||
typeAcquisition: TypeAcquisition; // Used to customize the type acquisition process
|
||||
compilerOptions: CompilerOptions; // Used as a source for typing inference
|
||||
unresolvedImports: readonly string[]; // List of unresolved module ids from imports
|
||||
typesRegistry: ReadonlyMap<MapLike<string>>; // The map of available typings in npm to maps of TS versions to their latest supported versions
|
||||
typesRegistry: ReadonlyMap<string, MapLike<string>>; // The map of available typings in npm to maps of TS versions to their latest supported versions
|
||||
}
|
||||
|
||||
export interface ScriptSnapshotShim {
|
||||
|
|
|
@ -92,7 +92,7 @@ namespace ts {
|
|||
/* @internal */ scriptSnapshot: IScriptSnapshot | undefined;
|
||||
/* @internal */ nameTable: UnderscoreEscapedMap<number> | undefined;
|
||||
|
||||
/* @internal */ getNamedDeclarations(): Map<readonly Declaration[]>;
|
||||
/* @internal */ getNamedDeclarations(): Map<string, readonly Declaration[]>;
|
||||
|
||||
getLineAndCharacterOfPosition(pos: number): LineAndCharacter;
|
||||
getLineEndOfPosition(pos: number): number;
|
||||
|
@ -196,10 +196,10 @@ namespace ts {
|
|||
export interface PackageJsonInfo {
|
||||
fileName: string;
|
||||
parseable: boolean;
|
||||
dependencies?: Map<string>;
|
||||
devDependencies?: Map<string>;
|
||||
peerDependencies?: Map<string>;
|
||||
optionalDependencies?: Map<string>;
|
||||
dependencies?: Map<string, string>;
|
||||
devDependencies?: Map<string, string>;
|
||||
peerDependencies?: Map<string, string>;
|
||||
optionalDependencies?: Map<string, string>;
|
||||
get(dependencyName: string, inGroups?: PackageJsonDependencyGroup): string | undefined;
|
||||
has(dependencyName: string, inGroups?: PackageJsonDependencyGroup): boolean;
|
||||
}
|
||||
|
@ -271,7 +271,7 @@ namespace ts {
|
|||
/* @internal */
|
||||
getGlobalTypingsCacheLocation?(): string | undefined;
|
||||
/* @internal */
|
||||
getProbableSymlinks?(files: readonly SourceFile[]): ReadonlyMap<string>;
|
||||
getProbableSymlinks?(files: readonly SourceFile[]): ReadonlyMap<string, string>;
|
||||
|
||||
/*
|
||||
* Required for full import and type reference completions.
|
||||
|
|
|
@ -2186,7 +2186,7 @@ namespace ts {
|
|||
return clone;
|
||||
}
|
||||
|
||||
export function getSynthesizedDeepCloneWithRenames<T extends Node>(node: T, includeTrivia = true, renameMap?: Map<Identifier>, checker?: TypeChecker, callback?: (originalNode: Node, clone: Node) => any): T {
|
||||
export function getSynthesizedDeepCloneWithRenames<T extends Node>(node: T, includeTrivia = true, renameMap?: Map<string, Identifier>, checker?: TypeChecker, callback?: (originalNode: Node, clone: Node) => any): T {
|
||||
let clone;
|
||||
if (renameMap && checker && isBindingElement(node) && isIdentifier(node.name) && isObjectBindingPattern(node.parent)) {
|
||||
const symbol = checker.getSymbolAtLocation(node.name);
|
||||
|
@ -2222,7 +2222,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
|
||||
function getSynthesizedDeepCloneWorker<T extends Node>(node: T, renameMap?: Map<Identifier>, checker?: TypeChecker, callback?: (originalNode: Node, clone: Node) => any): T {
|
||||
function getSynthesizedDeepCloneWorker<T extends Node>(node: T, renameMap?: Map<string, Identifier>, checker?: TypeChecker, callback?: (originalNode: Node, clone: Node) => any): T {
|
||||
const visited = (renameMap || checker || callback) ?
|
||||
visitEachChild(node, wrapper, nullTransformationContext) :
|
||||
visitEachChild(node, getSynthesizedDeepClone, nullTransformationContext);
|
||||
|
|
267
src/shims/collectionShims.ts
Normal file
267
src/shims/collectionShims.ts
Normal file
|
@ -0,0 +1,267 @@
|
|||
/* @internal */
|
||||
namespace ts {
|
||||
type GetIteratorCallback = <I extends readonly any[] | ReadonlySetShim<any> | ReadonlyMapShim<any, any> | undefined>(iterable: I) => IteratorShim<
|
||||
I extends ReadonlyMapShim<infer K, infer V> ? [K, V] :
|
||||
I extends ReadonlySetShim<infer T> ? T :
|
||||
I extends readonly (infer T)[] ? T :
|
||||
I extends undefined ? undefined :
|
||||
never>;
|
||||
|
||||
type IteratorResultShim<T> =
|
||||
| { value: T, done?: false }
|
||||
| { value: never, done: true };
|
||||
|
||||
interface IteratorShim<T> {
|
||||
next(): IteratorResultShim<T>;
|
||||
}
|
||||
|
||||
interface ReadonlyMapShim<K, V> {
|
||||
readonly size: number;
|
||||
get(key: K): V | undefined;
|
||||
has(key: K): boolean;
|
||||
keys(): IteratorShim<K>;
|
||||
values(): IteratorShim<V>;
|
||||
entries(): IteratorShim<[K, V]>;
|
||||
forEach(action: (value: V, key: K) => void): void;
|
||||
}
|
||||
|
||||
interface MapShim<K, V> extends ReadonlyMapShim<K, V> {
|
||||
set(key: K, value: V): this;
|
||||
delete(key: K): boolean;
|
||||
clear(): void;
|
||||
}
|
||||
|
||||
type MapShimConstructor = new <K, V>(iterable?: readonly (readonly [K, V])[] | ReadonlyMapShim<K, V>) => MapShim<K, V>;
|
||||
|
||||
interface ReadonlySetShim<T> {
|
||||
readonly size: number;
|
||||
has(value: T): boolean;
|
||||
keys(): IteratorShim<T>;
|
||||
values(): IteratorShim<T>;
|
||||
entries(): IteratorShim<[T, T]>;
|
||||
forEach(action: (value: T, key: T) => void): void;
|
||||
}
|
||||
|
||||
interface SetShim<T> extends ReadonlySetShim<T> {
|
||||
add(value: T): this;
|
||||
delete(value: T): boolean;
|
||||
clear(): void;
|
||||
}
|
||||
|
||||
type SetShimConstructor = new <T>(iterable?: readonly T[] | ReadonlySetShim<T>) => SetShim<T>;
|
||||
|
||||
interface MapData<K, V> {
|
||||
size: number;
|
||||
readonly head: MapEntry<K, V>;
|
||||
tail: MapEntry<K, V>;
|
||||
}
|
||||
|
||||
interface MapEntry<K, V> {
|
||||
readonly key?: K;
|
||||
value?: V;
|
||||
/**
|
||||
* Specifies the next entry in the linked list.
|
||||
*/
|
||||
next?: MapEntry<K, V>;
|
||||
/**
|
||||
* Specifies the previous entry in the linked list.
|
||||
* Must be set when the entry is part of a Map/Set.
|
||||
* When 'undefined', iterators should skip the next entry.
|
||||
* This will be set to 'undefined' when an entry is deleted.
|
||||
* See https://github.com/Microsoft/TypeScript/pull/27292 for more information.
|
||||
*/
|
||||
prev?: MapEntry<K, V>;
|
||||
}
|
||||
|
||||
interface IteratorData<K, V, U extends (K | V | [K, V])> {
|
||||
current?: MapEntry<K, V>;
|
||||
selector: (key: K, value: V) => U;
|
||||
}
|
||||
|
||||
function createMapData<K, V>(): MapData<K, V> {
|
||||
const sentinel: MapEntry<K, V> = {};
|
||||
sentinel.prev = sentinel;
|
||||
return { head: sentinel, tail: sentinel, size: 0 };
|
||||
}
|
||||
|
||||
function createMapEntry<K, V>(key: K, value: V): MapEntry<K, V> {
|
||||
return { key, value, next: undefined, prev: undefined };
|
||||
}
|
||||
|
||||
function sameValueZero(x: unknown, y: unknown) {
|
||||
// Treats -0 === 0 and NaN === NaN
|
||||
return x === y || x !== x && y !== y;
|
||||
}
|
||||
|
||||
function getPrev<K, V>(entry: MapEntry<K, V>) {
|
||||
const prev = entry.prev;
|
||||
// Entries without a 'prev' have been removed from the map.
|
||||
// An entry whose 'prev' points to itself is the head of the list and is invalid here.
|
||||
if (!prev || prev === entry) throw new Error("Illegal state");
|
||||
return prev;
|
||||
}
|
||||
|
||||
function getNext<K, V>(entry: MapEntry<K, V> | undefined) {
|
||||
while (entry) {
|
||||
// Entries without a 'prev' have been removed from the map. Their 'next'
|
||||
// pointer should point to the previous entry prior to deletion and
|
||||
// that entry should be skipped to resume iteration.
|
||||
const skipNext = !entry.prev;
|
||||
entry = entry.next;
|
||||
if (skipNext) {
|
||||
continue;
|
||||
}
|
||||
return entry;
|
||||
}
|
||||
}
|
||||
|
||||
function getEntry<K, V>(data: MapData<K, V>, key: K): MapEntry<K, V> | undefined {
|
||||
// We walk backwards from 'tail' to prioritize recently added entries.
|
||||
// We skip 'head' because it is an empty entry used to track iteration start.
|
||||
for (let entry = data.tail; entry !== data.head; entry = getPrev(entry)) {
|
||||
if (sameValueZero(entry.key, key)) {
|
||||
return entry;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function addOrUpdateEntry<K, V>(data: MapData<K, V>, key: K, value: V): MapEntry<K, V> | undefined {
|
||||
const existing = getEntry(data, key);
|
||||
if (existing) {
|
||||
existing.value = value;
|
||||
return;
|
||||
}
|
||||
|
||||
const entry = createMapEntry(key, value);
|
||||
entry.prev = data.tail;
|
||||
data.tail.next = entry;
|
||||
data.tail = entry;
|
||||
data.size++;
|
||||
return entry;
|
||||
}
|
||||
|
||||
function deleteEntry<K, V>(data: MapData<K, V>, key: K): MapEntry<K, V> | undefined {
|
||||
// We walk backwards from 'tail' to prioritize recently added entries.
|
||||
// We skip 'head' because it is an empty entry used to track iteration start.
|
||||
for (let entry = data.tail; entry !== data.head; entry = getPrev(entry)) {
|
||||
// all entries in the map should have a 'prev' pointer.
|
||||
if (entry.prev === undefined) throw new Error("Illegal state");
|
||||
if (sameValueZero(entry.key, key)) {
|
||||
if (entry.next) {
|
||||
entry.next.prev = entry.prev;
|
||||
}
|
||||
else {
|
||||
// an entry in the map without a 'next' pointer must be the 'tail'.
|
||||
if (data.tail !== entry) throw new Error("Illegal state");
|
||||
data.tail = entry.prev;
|
||||
}
|
||||
|
||||
entry.prev.next = entry.next;
|
||||
entry.next = entry.prev;
|
||||
entry.prev = undefined;
|
||||
data.size--;
|
||||
return entry;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function clearEntries<K, V>(data: MapData<K, V>) {
|
||||
let node = data.tail;
|
||||
while (node !== data.head) {
|
||||
const prev = getPrev(node);
|
||||
node.next = data.head;
|
||||
node.prev = undefined;
|
||||
node = prev;
|
||||
}
|
||||
data.head.next = undefined;
|
||||
data.tail = data.head;
|
||||
data.size = 0;
|
||||
}
|
||||
|
||||
function forEachEntry<K, V>(data: MapData<K, V>, action: (value: V, key: K) => void) {
|
||||
let entry: MapEntry<K, V> | undefined = data.head;
|
||||
while (entry) {
|
||||
entry = getNext(entry);
|
||||
if (entry) {
|
||||
action(entry.value!, entry.key!);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function forEachIteration<T>(iterator: IteratorShim<T> | undefined, action: (value: any) => void) {
|
||||
if (iterator) {
|
||||
for (let step = iterator.next(); !step.done; step = iterator.next()) {
|
||||
action(step.value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function createIteratorData<K, V, U extends (K | V | [K, V])>(data: MapData<K, V>, selector: (key: K, value: V) => U): IteratorData<K, V, U> {
|
||||
return { current: data.head, selector };
|
||||
}
|
||||
|
||||
function iteratorNext<K, V, U extends (K | V | [K, V])>(data: IteratorData<K, V, U>): IteratorResultShim<U> {
|
||||
// Navigate to the next entry.
|
||||
data.current = getNext(data.current);
|
||||
if (data.current) {
|
||||
return { value: data.selector(data.current.key!, data.current.value!), done: false };
|
||||
}
|
||||
else {
|
||||
return { value: undefined as never, done: true };
|
||||
}
|
||||
}
|
||||
|
||||
/* @internal */
|
||||
export namespace ShimCollections {
|
||||
export function createMapShim(getIterator: GetIteratorCallback): MapShimConstructor {
|
||||
class MapIterator<K, V, U extends (K | V | [K, V])> {
|
||||
private _data: IteratorData<K, V, U>;
|
||||
constructor(data: MapData<K, V>, selector: (key: K, value: V) => U) {
|
||||
this._data = createIteratorData(data, selector);
|
||||
}
|
||||
next() { return iteratorNext(this._data); }
|
||||
}
|
||||
return class Map<K, V> implements MapShim<K, V> {
|
||||
private _mapData = createMapData<K, V>();
|
||||
constructor(iterable?: readonly (readonly [K, V])[] | ReadonlyMapShim<K, V>) {
|
||||
forEachIteration(getIterator(iterable), ([key, value]) => this.set(key, value));
|
||||
}
|
||||
get size() { return this._mapData.size; }
|
||||
get(key: K): V | undefined { return getEntry(this._mapData, key)?.value; }
|
||||
set(key: K, value: V): this { return addOrUpdateEntry(this._mapData, key, value), this; }
|
||||
has(key: K): boolean { return !!getEntry(this._mapData, key); }
|
||||
delete(key: K): boolean { return !!deleteEntry(this._mapData, key); }
|
||||
clear(): void { clearEntries(this._mapData); }
|
||||
keys(): IteratorShim<K> { return new MapIterator(this._mapData, (key, _value) => key); }
|
||||
values(): IteratorShim<V> { return new MapIterator(this._mapData, (_key, value) => value); }
|
||||
entries(): IteratorShim<[K, V]> { return new MapIterator(this._mapData, (key, value) => [key, value]); }
|
||||
forEach(action: (value: V, key: K) => void): void { forEachEntry(this._mapData, action); }
|
||||
};
|
||||
}
|
||||
|
||||
export function createSetShim(getIterator: GetIteratorCallback): SetShimConstructor {
|
||||
class SetIterator<K, V, U extends (K | V | [K, V])> {
|
||||
private _data: IteratorData<K, V, U>;
|
||||
constructor(data: MapData<K, V>, selector: (key: K, value: V) => U) {
|
||||
this._data = createIteratorData(data, selector);
|
||||
}
|
||||
next() { return iteratorNext(this._data); }
|
||||
}
|
||||
return class Set<T> implements SetShim<T> {
|
||||
private _mapData = createMapData<T, T>();
|
||||
constructor(iterable?: readonly T[] | ReadonlySetShim<T>) {
|
||||
forEachIteration(getIterator(iterable), value => this.add(value));
|
||||
}
|
||||
get size() { return this._mapData.size; }
|
||||
add(value: T): this { return addOrUpdateEntry(this._mapData, value, value), this; }
|
||||
has(value: T): boolean { return !!getEntry(this._mapData, value); }
|
||||
delete(value: T): boolean { return !!deleteEntry(this._mapData, value); }
|
||||
clear(): void { clearEntries(this._mapData); }
|
||||
keys(): IteratorShim<T> { return new SetIterator(this._mapData, (key, _value) => key); }
|
||||
values(): IteratorShim<T> { return new SetIterator(this._mapData, (_key, value) => value); }
|
||||
entries(): IteratorShim<[T, T]> { return new SetIterator(this._mapData, (key, value) => [key, value]); }
|
||||
forEach(action: (value: T, key: T) => void): void { forEachEntry(this._mapData, action); }
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,220 +0,0 @@
|
|||
/* @internal */
|
||||
namespace ts {
|
||||
interface IteratorShim<T> {
|
||||
next(): { value: T, done?: false } | { value: never, done: true };
|
||||
}
|
||||
interface MapShim<T> {
|
||||
readonly size: number;
|
||||
get(key: string): T | undefined;
|
||||
set(key: string, value: T): this;
|
||||
has(key: string): boolean;
|
||||
delete(key: string): boolean;
|
||||
clear(): void;
|
||||
keys(): IteratorShim<string>;
|
||||
values(): IteratorShim<T>;
|
||||
entries(): IteratorShim<[string, T]>;
|
||||
forEach(action: (value: T, key: string) => void): void;
|
||||
}
|
||||
export function createMapShim(): new <T>() => MapShim<T> {
|
||||
/** Create a MapLike with good performance. */
|
||||
function createDictionaryObject<T>(): Record<string, T> {
|
||||
const map = Object.create(/*prototype*/ null); // eslint-disable-line no-null/no-null
|
||||
|
||||
// Using 'delete' on an object causes V8 to put the object in dictionary mode.
|
||||
// This disables creation of hidden classes, which are expensive when an object is
|
||||
// constantly changing shape.
|
||||
map.__ = undefined;
|
||||
delete map.__;
|
||||
|
||||
return map;
|
||||
}
|
||||
|
||||
interface MapEntry<T> {
|
||||
readonly key?: string;
|
||||
value?: T;
|
||||
|
||||
// Linked list references for iterators.
|
||||
nextEntry?: MapEntry<T>;
|
||||
previousEntry?: MapEntry<T>;
|
||||
|
||||
/**
|
||||
* Specifies if iterators should skip the next entry.
|
||||
* This will be set when an entry is deleted.
|
||||
* See https://github.com/Microsoft/TypeScript/pull/27292 for more information.
|
||||
*/
|
||||
skipNext?: boolean;
|
||||
}
|
||||
|
||||
class MapIterator<T, U extends (string | T | [string, T])> {
|
||||
private currentEntry?: MapEntry<T>;
|
||||
private selector: (key: string, value: T) => U;
|
||||
|
||||
constructor(currentEntry: MapEntry<T>, selector: (key: string, value: T) => U) {
|
||||
this.currentEntry = currentEntry;
|
||||
this.selector = selector;
|
||||
}
|
||||
|
||||
public next(): { value: U, done?: false } | { value: never, done: true } {
|
||||
// Navigate to the next entry.
|
||||
while (this.currentEntry) {
|
||||
const skipNext = !!this.currentEntry.skipNext;
|
||||
this.currentEntry = this.currentEntry.nextEntry;
|
||||
|
||||
if (!skipNext) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (this.currentEntry) {
|
||||
return { value: this.selector(this.currentEntry.key!, this.currentEntry.value!), done: false };
|
||||
}
|
||||
else {
|
||||
return { value: undefined as never, done: true };
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return class <T> implements MapShim<T> {
|
||||
private data = createDictionaryObject<MapEntry<T>>();
|
||||
public size = 0;
|
||||
|
||||
// Linked list references for iterators.
|
||||
// See https://github.com/Microsoft/TypeScript/pull/27292
|
||||
// for more information.
|
||||
|
||||
/**
|
||||
* The first entry in the linked list.
|
||||
* Note that this is only a stub that serves as starting point
|
||||
* for iterators and doesn't contain a key and a value.
|
||||
*/
|
||||
private readonly firstEntry: MapEntry<T>;
|
||||
private lastEntry: MapEntry<T>;
|
||||
|
||||
constructor() {
|
||||
// Create a first (stub) map entry that will not contain a key
|
||||
// and value but serves as starting point for iterators.
|
||||
this.firstEntry = {};
|
||||
// When the map is empty, the last entry is the same as the
|
||||
// first one.
|
||||
this.lastEntry = this.firstEntry;
|
||||
}
|
||||
|
||||
get(key: string): T | undefined {
|
||||
const entry = this.data[key] as MapEntry<T> | undefined;
|
||||
return entry && entry.value!;
|
||||
}
|
||||
|
||||
set(key: string, value: T): this {
|
||||
if (!this.has(key)) {
|
||||
this.size++;
|
||||
|
||||
// Create a new entry that will be appended at the
|
||||
// end of the linked list.
|
||||
const newEntry: MapEntry<T> = {
|
||||
key,
|
||||
value
|
||||
};
|
||||
this.data[key] = newEntry;
|
||||
|
||||
// Adjust the references.
|
||||
const previousLastEntry = this.lastEntry;
|
||||
previousLastEntry.nextEntry = newEntry;
|
||||
newEntry.previousEntry = previousLastEntry;
|
||||
this.lastEntry = newEntry;
|
||||
}
|
||||
else {
|
||||
this.data[key].value = value;
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
has(key: string): boolean {
|
||||
// eslint-disable-next-line no-in-operator
|
||||
return key in this.data;
|
||||
}
|
||||
|
||||
delete(key: string): boolean {
|
||||
if (this.has(key)) {
|
||||
this.size--;
|
||||
const entry = this.data[key];
|
||||
delete this.data[key];
|
||||
|
||||
// Adjust the linked list references of the neighbor entries.
|
||||
const previousEntry = entry.previousEntry!;
|
||||
previousEntry.nextEntry = entry.nextEntry;
|
||||
if (entry.nextEntry) {
|
||||
entry.nextEntry.previousEntry = previousEntry;
|
||||
}
|
||||
|
||||
// When the deleted entry was the last one, we need to
|
||||
// adjust the lastEntry reference.
|
||||
if (this.lastEntry === entry) {
|
||||
this.lastEntry = previousEntry;
|
||||
}
|
||||
|
||||
// Adjust the forward reference of the deleted entry
|
||||
// in case an iterator still references it. This allows us
|
||||
// to throw away the entry, but when an active iterator
|
||||
// (which points to the current entry) continues, it will
|
||||
// navigate to the entry that originally came before the
|
||||
// current one and skip it.
|
||||
entry.previousEntry = undefined;
|
||||
entry.nextEntry = previousEntry;
|
||||
entry.skipNext = true;
|
||||
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
clear(): void {
|
||||
this.data = createDictionaryObject<MapEntry<T>>();
|
||||
this.size = 0;
|
||||
|
||||
// Reset the linked list. Note that we must adjust the forward
|
||||
// references of the deleted entries to ensure iterators stuck
|
||||
// in the middle of the list don't continue with deleted entries,
|
||||
// but can continue with new entries added after the clear()
|
||||
// operation.
|
||||
const firstEntry = this.firstEntry;
|
||||
let currentEntry = firstEntry.nextEntry;
|
||||
while (currentEntry) {
|
||||
const nextEntry = currentEntry.nextEntry;
|
||||
currentEntry.previousEntry = undefined;
|
||||
currentEntry.nextEntry = firstEntry;
|
||||
currentEntry.skipNext = true;
|
||||
|
||||
currentEntry = nextEntry;
|
||||
}
|
||||
firstEntry.nextEntry = undefined;
|
||||
this.lastEntry = firstEntry;
|
||||
}
|
||||
|
||||
keys(): IteratorShim<string> {
|
||||
return new MapIterator(this.firstEntry, key => key);
|
||||
}
|
||||
|
||||
values(): IteratorShim<T> {
|
||||
return new MapIterator(this.firstEntry, (_key, value) => value);
|
||||
}
|
||||
|
||||
entries(): IteratorShim<[string, T]> {
|
||||
return new MapIterator(this.firstEntry, (key, value) => [key, value] as [string, T]);
|
||||
}
|
||||
|
||||
forEach(action: (value: T, key: string) => void): void {
|
||||
const iterator = this.entries();
|
||||
while (true) {
|
||||
const iterResult = iterator.next();
|
||||
if (iterResult.done) {
|
||||
break;
|
||||
}
|
||||
|
||||
const [key, value] = iterResult.value;
|
||||
action(value, key);
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
|
@ -4,6 +4,6 @@
|
|||
"outFile": "../../built/local/shims.js"
|
||||
},
|
||||
"files": [
|
||||
"mapShim.ts"
|
||||
"collectionShims.ts"
|
||||
]
|
||||
}
|
||||
|
|
|
@ -301,9 +301,9 @@ namespace Harness.Parallel.Worker {
|
|||
|
||||
// The root suite for all unit tests.
|
||||
let unitTestSuite: Suite;
|
||||
let unitTestSuiteMap: ts.Map<Mocha.Suite>;
|
||||
let unitTestSuiteMap: ts.Map<string, Mocha.Suite>;
|
||||
// (Unit) Tests directly within the root suite
|
||||
let unitTestTestMap: ts.Map<Mocha.Test>;
|
||||
let unitTestTestMap: ts.Map<string, Mocha.Test>;
|
||||
|
||||
if (runUnitTests) {
|
||||
unitTestSuite = new Suite("", new Mocha.Context());
|
||||
|
|
|
@ -73,6 +73,7 @@
|
|||
"unittests/reuseProgramStructure.ts",
|
||||
"unittests/semver.ts",
|
||||
"unittests/createMapShim.ts",
|
||||
"unittests/createSetShim.ts",
|
||||
"unittests/transform.ts",
|
||||
"unittests/config/commandLineParsing.ts",
|
||||
"unittests/config/configurationExtension.ts",
|
||||
|
|
|
@ -493,7 +493,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
interface VerifyNullNonIncludedOption {
|
||||
type: () => "string" | "number" | Map<number | string>;
|
||||
type: () => "string" | "number" | Map<string, number | string>;
|
||||
nonNullValue?: string;
|
||||
}
|
||||
function verifyNullNonIncludedOption({ type, nonNullValue }: VerifyNullNonIncludedOption) {
|
||||
|
|
|
@ -1,63 +1,97 @@
|
|||
namespace ts {
|
||||
describe("unittests:: createMapShim", () => {
|
||||
|
||||
function testMapIterationAddedValues(map: Map<string>, useForEach: boolean): string {
|
||||
const stringKeys = [
|
||||
"1",
|
||||
"3",
|
||||
"2",
|
||||
"4",
|
||||
"0",
|
||||
"999",
|
||||
"A",
|
||||
"B",
|
||||
"C",
|
||||
"Z",
|
||||
"X",
|
||||
"X1",
|
||||
"X2",
|
||||
"Y"
|
||||
];
|
||||
|
||||
const mixedKeys = [
|
||||
true,
|
||||
3,
|
||||
{ toString() { return "2"; } },
|
||||
"4",
|
||||
false,
|
||||
null, // eslint-disable-line no-null/no-null
|
||||
undefined,
|
||||
"B",
|
||||
{ toString() { return "C"; } },
|
||||
"Z",
|
||||
"X",
|
||||
{ toString() { return "X1"; } },
|
||||
"X2",
|
||||
"Y"
|
||||
];
|
||||
|
||||
function testMapIterationAddedValues<K>(keys: K[], map: Map<K, string>, useForEach: boolean): string {
|
||||
let resultString = "";
|
||||
|
||||
map.set("1", "1");
|
||||
map.set("3", "3");
|
||||
map.set("2", "2");
|
||||
map.set("4", "4");
|
||||
map.set(keys[0], "1");
|
||||
map.set(keys[1], "3");
|
||||
map.set(keys[2], "2");
|
||||
map.set(keys[3], "4");
|
||||
|
||||
let addedThree = false;
|
||||
const doForEach = (value: string, key: string) => {
|
||||
const doForEach = (value: string, key: K) => {
|
||||
resultString += `${key}:${value};`;
|
||||
|
||||
// Add a new key ("0") - the map should provide this
|
||||
// one in the next iteration.
|
||||
if (key === "1") {
|
||||
map.set("1", "X1");
|
||||
map.set("0", "X0");
|
||||
map.set("4", "X4");
|
||||
if (key === keys[0]) {
|
||||
map.set(keys[0], "X1");
|
||||
map.set(keys[4], "X0");
|
||||
map.set(keys[3], "X4");
|
||||
}
|
||||
else if (key === "3") {
|
||||
else if (key === keys[1]) {
|
||||
if (!addedThree) {
|
||||
addedThree = true;
|
||||
|
||||
// Remove and re-add key "3"; the map should
|
||||
// visit it after "0".
|
||||
map.delete("3");
|
||||
map.set("3", "Y3");
|
||||
map.delete(keys[1]);
|
||||
map.set(keys[1], "Y3");
|
||||
|
||||
// Change the value of "2"; the map should provide
|
||||
// it when visiting the key.
|
||||
map.set("2", "Y2");
|
||||
map.set(keys[2], "Y2");
|
||||
}
|
||||
else {
|
||||
// Check that an entry added when we visit the
|
||||
// currently last entry will still be visited.
|
||||
map.set("999", "999");
|
||||
map.set(keys[5], "999");
|
||||
}
|
||||
}
|
||||
else if (key === "999") {
|
||||
else if (key === keys[5]) {
|
||||
// Ensure that clear() behaves correctly same as removing all keys.
|
||||
map.set("A", "A");
|
||||
map.set("B", "B");
|
||||
map.set("C", "C");
|
||||
map.set(keys[6], "A");
|
||||
map.set(keys[7], "B");
|
||||
map.set(keys[8], "C");
|
||||
}
|
||||
else if (key === "A") {
|
||||
else if (key === keys[6]) {
|
||||
map.clear();
|
||||
map.set("Z", "Z");
|
||||
map.set(keys[9], "Z");
|
||||
}
|
||||
else if (key === "Z") {
|
||||
else if (key === keys[9]) {
|
||||
// Check that the map behaves correctly when two items are
|
||||
// added and removed immediately.
|
||||
map.set("X", "X");
|
||||
map.set("X1", "X1");
|
||||
map.set("X2", "X2");
|
||||
map.delete("X1");
|
||||
map.delete("X2");
|
||||
map.set("Y", "Y");
|
||||
map.set(keys[10], "X");
|
||||
map.set(keys[11], "X1");
|
||||
map.set(keys[12], "X2");
|
||||
map.delete(keys[11]);
|
||||
map.delete(keys[12]);
|
||||
map.set(keys[13], "Y");
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -81,27 +115,212 @@ namespace ts {
|
|||
return resultString;
|
||||
}
|
||||
|
||||
it("iterates values in insertion order and handles changes", () => {
|
||||
let MapShim!: MapConstructor;
|
||||
beforeEach(() => {
|
||||
function getIterator<I extends readonly any[] | ReadonlySet<any> | ReadonlyMap<any, any> | undefined>(iterable: I): Iterator<
|
||||
I extends ReadonlyMap<infer K, infer V> ? [K, V] :
|
||||
I extends ReadonlySet<infer T> ? T :
|
||||
I extends readonly (infer T)[] ? T :
|
||||
I extends undefined ? undefined :
|
||||
never>;
|
||||
function getIterator(iterable: readonly any[] | ReadonlySet<any> | ReadonlyMap<any, any> | undefined): Iterator<any> | undefined {
|
||||
// override `ts.getIterator` with a version that allows us to iterate over a `MapShim` in an environment with a native `Map`.
|
||||
if (iterable instanceof MapShim) return iterable.entries();
|
||||
return ts.getIterator(iterable);
|
||||
}
|
||||
|
||||
MapShim = ShimCollections.createMapShim(getIterator);
|
||||
afterEach(() => {
|
||||
MapShim = undefined!;
|
||||
});
|
||||
});
|
||||
|
||||
it("iterates values in insertion order and handles changes with string keys", () => {
|
||||
const expectedResult = "1:1;3:3;2:Y2;4:X4;0:X0;3:Y3;999:999;A:A;Z:Z;X:X;Y:Y;";
|
||||
|
||||
// First, ensure the test actually has the same behavior as a native Map.
|
||||
let nativeMap = createMap<string>();
|
||||
const nativeMapForEachResult = testMapIterationAddedValues(nativeMap, /* useForEach */ true);
|
||||
const nativeMapForEachResult = testMapIterationAddedValues(stringKeys, nativeMap, /* useForEach */ true);
|
||||
assert.equal(nativeMapForEachResult, expectedResult, "nativeMap-forEach");
|
||||
|
||||
nativeMap = createMap<string>();
|
||||
const nativeMapIteratorResult = testMapIterationAddedValues(nativeMap, /* useForEach */ false);
|
||||
const nativeMapIteratorResult = testMapIterationAddedValues(stringKeys, nativeMap, /* useForEach */ false);
|
||||
assert.equal(nativeMapIteratorResult, expectedResult, "nativeMap-iterator");
|
||||
|
||||
// Then, test the map shim.
|
||||
const MapShim = createMapShim(); // tslint:disable-line variable-name
|
||||
let localShimMap = new MapShim<string>();
|
||||
const shimMapForEachResult = testMapIterationAddedValues(localShimMap, /* useForEach */ true);
|
||||
let localShimMap = new MapShim<string, string>();
|
||||
const shimMapForEachResult = testMapIterationAddedValues(stringKeys, localShimMap, /* useForEach */ true);
|
||||
assert.equal(shimMapForEachResult, expectedResult, "shimMap-forEach");
|
||||
|
||||
localShimMap = new MapShim<string>();
|
||||
const shimMapIteratorResult = testMapIterationAddedValues(localShimMap, /* useForEach */ false);
|
||||
localShimMap = new MapShim<string, string>();
|
||||
const shimMapIteratorResult = testMapIterationAddedValues(stringKeys, localShimMap, /* useForEach */ false);
|
||||
assert.equal(shimMapIteratorResult, expectedResult, "shimMap-iterator");
|
||||
});
|
||||
|
||||
it("iterates values in insertion order and handles changes with mixed-type keys", () => {
|
||||
const expectedResult = "true:1;3:3;2:Y2;4:X4;false:X0;3:Y3;null:999;undefined:A;Z:Z;X:X;Y:Y;";
|
||||
|
||||
// First, ensure the test actually has the same behavior as a native Map.
|
||||
let nativeMap = createMap<any, string>();
|
||||
const nativeMapForEachResult = testMapIterationAddedValues(mixedKeys, nativeMap, /* useForEach */ true);
|
||||
assert.equal(nativeMapForEachResult, expectedResult, "nativeMap-forEach");
|
||||
|
||||
nativeMap = createMap<any, string>();
|
||||
const nativeMapIteratorResult = testMapIterationAddedValues(mixedKeys, nativeMap, /* useForEach */ false);
|
||||
assert.equal(nativeMapIteratorResult, expectedResult, "nativeMap-iterator");
|
||||
|
||||
// Then, test the map shim.
|
||||
let localShimMap = new MapShim<any, string>();
|
||||
const shimMapForEachResult = testMapIterationAddedValues(mixedKeys, localShimMap, /* useForEach */ true);
|
||||
assert.equal(shimMapForEachResult, expectedResult, "shimMap-forEach");
|
||||
|
||||
localShimMap = new MapShim<any, string>();
|
||||
const shimMapIteratorResult = testMapIterationAddedValues(mixedKeys, localShimMap, /* useForEach */ false);
|
||||
assert.equal(shimMapIteratorResult, expectedResult, "shimMap-iterator");
|
||||
});
|
||||
|
||||
it("create from Array", () => {
|
||||
const map = new MapShim([["a", "b"]]);
|
||||
assert.equal(map.size, 1);
|
||||
assert.isTrue(map.has("a"));
|
||||
assert.equal(map.get("a"), "b");
|
||||
});
|
||||
|
||||
it("create from Map", () => {
|
||||
const map1 = new MapShim([["a", "b"]]);
|
||||
const map2 = new MapShim(map1);
|
||||
assert.equal(map1.size, 1);
|
||||
assert.equal(map2.size, 1);
|
||||
assert.isTrue(map2.has("a"));
|
||||
assert.equal(map2.get("a"), "b");
|
||||
});
|
||||
|
||||
it("set when not present", () => {
|
||||
const map = new MapShim<string, string>();
|
||||
const result = map.set("a", "b");
|
||||
assert.equal(map.size, 1);
|
||||
assert.strictEqual(result, map);
|
||||
assert.isTrue(map.has("a"));
|
||||
assert.equal(map.get("a"), "b");
|
||||
});
|
||||
|
||||
it("set when present", () => {
|
||||
const map = new MapShim<string, string>();
|
||||
map.set("a", "z");
|
||||
const result = map.set("a", "b");
|
||||
assert.equal(map.size, 1);
|
||||
assert.strictEqual(result, map);
|
||||
assert.isTrue(map.has("a"));
|
||||
assert.equal(map.get("a"), "b");
|
||||
});
|
||||
|
||||
it("has when not present", () => {
|
||||
const map = new MapShim<string, string>();
|
||||
assert.isFalse(map.has("a"));
|
||||
});
|
||||
|
||||
it("has when present", () => {
|
||||
const map = new MapShim<string, string>();
|
||||
map.set("a", "b");
|
||||
assert.isTrue(map.has("a"));
|
||||
});
|
||||
|
||||
it("get when not present", () => {
|
||||
const map = new MapShim<string, string>();
|
||||
assert.isUndefined(map.get("a"));
|
||||
});
|
||||
|
||||
it("get when present", () => {
|
||||
const map = new MapShim<string, string>();
|
||||
map.set("a", "b");
|
||||
assert.equal(map.get("a"), "b");
|
||||
});
|
||||
|
||||
it("delete when not present", () => {
|
||||
const map = new MapShim<string, string>();
|
||||
assert.isFalse(map.delete("a"));
|
||||
});
|
||||
|
||||
it("delete when present", () => {
|
||||
const map = new MapShim<string, string>();
|
||||
map.set("a", "b");
|
||||
assert.isTrue(map.delete("a"));
|
||||
});
|
||||
|
||||
it("delete twice when present", () => {
|
||||
const map = new MapShim<string, string>();
|
||||
map.set("a", "b");
|
||||
assert.isTrue(map.delete("a"));
|
||||
assert.isFalse(map.delete("a"));
|
||||
});
|
||||
|
||||
it("remove only item and iterate", () => {
|
||||
const map = new MapShim<string, string>();
|
||||
map.set("a", "b");
|
||||
map.delete("a");
|
||||
const actual = arrayFrom(map.keys());
|
||||
assert.deepEqual(actual, []);
|
||||
});
|
||||
|
||||
it("remove first item and iterate", () => {
|
||||
const map = new MapShim<string, string>();
|
||||
map.set("a", "b");
|
||||
map.set("c", "d");
|
||||
map.delete("a");
|
||||
assert.deepEqual(arrayFrom(map.keys()), ["c"]);
|
||||
assert.deepEqual(arrayFrom(map.values()), ["d"]);
|
||||
assert.deepEqual(arrayFrom(map.entries()), [["c", "d"]]);
|
||||
});
|
||||
|
||||
it("remove last item and iterate", () => {
|
||||
const map = new MapShim<string, string>();
|
||||
map.set("a", "b");
|
||||
map.set("c", "d");
|
||||
map.delete("c");
|
||||
assert.deepEqual(arrayFrom(map.keys()), ["a"]);
|
||||
assert.deepEqual(arrayFrom(map.values()), ["b"]);
|
||||
assert.deepEqual(arrayFrom(map.entries()), [["a", "b"]]);
|
||||
});
|
||||
|
||||
it("remove middle item and iterate", () => {
|
||||
const map = new MapShim<string, string>();
|
||||
map.set("a", "b");
|
||||
map.set("c", "d");
|
||||
map.set("e", "f");
|
||||
map.delete("c");
|
||||
assert.deepEqual(arrayFrom(map.keys()), ["a", "e"]);
|
||||
assert.deepEqual(arrayFrom(map.values()), ["b", "f"]);
|
||||
assert.deepEqual(arrayFrom(map.entries()), [["a", "b"], ["e", "f"]]);
|
||||
});
|
||||
|
||||
it("keys", () => {
|
||||
const map = new MapShim<string, string>();
|
||||
map.set("c", "d");
|
||||
map.set("a", "b");
|
||||
assert.deepEqual(arrayFrom(map.keys()), ["c", "a"]);
|
||||
});
|
||||
|
||||
it("values", () => {
|
||||
const map = new MapShim<string, string>();
|
||||
map.set("c", "d");
|
||||
map.set("a", "b");
|
||||
assert.deepEqual(arrayFrom(map.values()), ["d", "b"]);
|
||||
});
|
||||
|
||||
it("entries", () => {
|
||||
const map = new MapShim<string, string>();
|
||||
map.set("c", "d");
|
||||
map.set("a", "b");
|
||||
assert.deepEqual(arrayFrom(map.entries()), [["c", "d"], ["a", "b"]]);
|
||||
});
|
||||
|
||||
it("forEach", () => {
|
||||
const map = new MapShim<string, string>();
|
||||
map.set("c", "d");
|
||||
map.set("a", "b");
|
||||
const actual: [string, string][] = [];
|
||||
map.forEach((value, key) => { actual.push([key, value]); });
|
||||
assert.deepEqual(actual, [["c", "d"], ["a", "b"]]);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
|
309
src/testRunner/unittests/createSetShim.ts
Normal file
309
src/testRunner/unittests/createSetShim.ts
Normal file
|
@ -0,0 +1,309 @@
|
|||
namespace ts {
|
||||
describe("unittests:: createSetShim", () => {
|
||||
const stringKeys = [
|
||||
"1",
|
||||
"3",
|
||||
"2",
|
||||
"4",
|
||||
"0",
|
||||
"999",
|
||||
"A",
|
||||
"B",
|
||||
"C",
|
||||
"Z",
|
||||
"X",
|
||||
"X1",
|
||||
"X2",
|
||||
"Y"
|
||||
];
|
||||
|
||||
const mixedKeys = [
|
||||
true,
|
||||
3,
|
||||
{ toString() { return "2"; } },
|
||||
"4",
|
||||
false,
|
||||
null, // eslint-disable-line no-null/no-null
|
||||
undefined,
|
||||
"B",
|
||||
{ toString() { return "C"; } },
|
||||
"Z",
|
||||
"X",
|
||||
{ toString() { return "X1"; } },
|
||||
"X2",
|
||||
"Y"
|
||||
];
|
||||
|
||||
function testSetIterationAddedValues<K>(keys: K[], set: Set<K>, useForEach: boolean): string {
|
||||
let resultString = "";
|
||||
|
||||
set.add(keys[0]);
|
||||
set.add(keys[1]);
|
||||
set.add(keys[2]);
|
||||
set.add(keys[3]);
|
||||
|
||||
let addedThree = false;
|
||||
const doForEach = (key: K) => {
|
||||
resultString += `${key};`;
|
||||
|
||||
// Add a new key ("0") - the set should provide this
|
||||
// one in the next iteration.
|
||||
if (key === keys[0]) {
|
||||
set.add(keys[0]);
|
||||
set.add(keys[4]);
|
||||
set.add(keys[3]);
|
||||
}
|
||||
else if (key === keys[1]) {
|
||||
if (!addedThree) {
|
||||
addedThree = true;
|
||||
|
||||
// Remove and re-add key "3"; the set should
|
||||
// visit it after "0".
|
||||
set.delete(keys[1]);
|
||||
set.add(keys[1]);
|
||||
|
||||
// Change the value of "2"; the set should provide
|
||||
// it when visiting the key.
|
||||
set.add(keys[2]);
|
||||
}
|
||||
else {
|
||||
// Check that an entry added when we visit the
|
||||
// currently last entry will still be visited.
|
||||
set.add(keys[5]);
|
||||
}
|
||||
}
|
||||
else if (key === keys[5]) {
|
||||
// Ensure that clear() behaves correctly same as removing all keys.
|
||||
set.add(keys[6]);
|
||||
set.add(keys[7]);
|
||||
set.add(keys[8]);
|
||||
}
|
||||
else if (key === keys[6]) {
|
||||
set.clear();
|
||||
set.add(keys[9]);
|
||||
}
|
||||
else if (key === keys[9]) {
|
||||
// Check that the set behaves correctly when two items are
|
||||
// added and removed immediately.
|
||||
set.add(keys[10]);
|
||||
set.add(keys[11]);
|
||||
set.add(keys[12]);
|
||||
set.delete(keys[11]);
|
||||
set.delete(keys[12]);
|
||||
set.add(keys[13]);
|
||||
}
|
||||
};
|
||||
|
||||
if (useForEach) {
|
||||
set.forEach(doForEach);
|
||||
}
|
||||
else {
|
||||
// Use an iterator.
|
||||
const iterator = set.values();
|
||||
while (true) {
|
||||
const iterResult = iterator.next();
|
||||
if (iterResult.done) {
|
||||
break;
|
||||
}
|
||||
|
||||
doForEach(iterResult.value);
|
||||
}
|
||||
}
|
||||
|
||||
return resultString;
|
||||
}
|
||||
|
||||
let SetShim!: SetConstructor;
|
||||
beforeEach(() => {
|
||||
function getIterator<I extends readonly any[] | ReadonlySet<any> | ReadonlyMap<any, any> | undefined>(iterable: I): Iterator<
|
||||
I extends ReadonlyMap<infer K, infer V> ? [K, V] :
|
||||
I extends ReadonlySet<infer T> ? T :
|
||||
I extends readonly (infer T)[] ? T :
|
||||
I extends undefined ? undefined :
|
||||
never>;
|
||||
function getIterator(iterable: readonly any[] | ReadonlySet<any> | ReadonlyMap<any, any> | undefined): Iterator<any> | undefined {
|
||||
// override `ts.getIterator` with a version that allows us to iterate over a `SetShim` in an environment with a native `Set`.
|
||||
if (iterable instanceof SetShim) return iterable.values();
|
||||
return ts.getIterator(iterable);
|
||||
}
|
||||
|
||||
SetShim = ShimCollections.createSetShim(getIterator);
|
||||
afterEach(() => {
|
||||
SetShim = undefined!;
|
||||
});
|
||||
});
|
||||
|
||||
it("iterates values in insertion order and handles changes with string keys", () => {
|
||||
const expectedResult = "1;3;2;4;0;3;999;A;Z;X;Y;";
|
||||
|
||||
// First, ensure the test actually has the same behavior as a native Set.
|
||||
let nativeSet = new Set<string>();
|
||||
const nativeSetForEachResult = testSetIterationAddedValues(stringKeys, nativeSet, /* useForEach */ true);
|
||||
assert.equal(nativeSetForEachResult, expectedResult, "nativeSet-forEach");
|
||||
|
||||
nativeSet = new Set<string>();
|
||||
const nativeSetIteratorResult = testSetIterationAddedValues(stringKeys, nativeSet, /* useForEach */ false);
|
||||
assert.equal(nativeSetIteratorResult, expectedResult, "nativeSet-iterator");
|
||||
|
||||
// Then, test the set shim.
|
||||
let localShimSet = new SetShim<string>();
|
||||
const shimSetForEachResult = testSetIterationAddedValues(stringKeys, localShimSet, /* useForEach */ true);
|
||||
assert.equal(shimSetForEachResult, expectedResult, "shimSet-forEach");
|
||||
|
||||
localShimSet = new SetShim<string>();
|
||||
const shimSetIteratorResult = testSetIterationAddedValues(stringKeys, localShimSet, /* useForEach */ false);
|
||||
assert.equal(shimSetIteratorResult, expectedResult, "shimSet-iterator");
|
||||
});
|
||||
|
||||
it("iterates values in insertion order and handles changes with mixed-type keys", () => {
|
||||
const expectedResult = "true;3;2;4;false;3;null;undefined;Z;X;Y;";
|
||||
|
||||
// First, ensure the test actually has the same behavior as a native Set.
|
||||
let nativeSet = new Set<any>();
|
||||
const nativeSetForEachResult = testSetIterationAddedValues(mixedKeys, nativeSet, /* useForEach */ true);
|
||||
assert.equal(nativeSetForEachResult, expectedResult, "nativeSet-forEach");
|
||||
|
||||
nativeSet = new Set<any>();
|
||||
const nativeSetIteratorResult = testSetIterationAddedValues(mixedKeys, nativeSet, /* useForEach */ false);
|
||||
assert.equal(nativeSetIteratorResult, expectedResult, "nativeSet-iterator");
|
||||
|
||||
// Then, test the set shim.
|
||||
let localshimSet = new SetShim<any>();
|
||||
const shimSetForEachResult = testSetIterationAddedValues(mixedKeys, localshimSet, /* useForEach */ true);
|
||||
assert.equal(shimSetForEachResult, expectedResult, "shimSet-forEach");
|
||||
|
||||
localshimSet = new SetShim<any>();
|
||||
const shimSetIteratorResult = testSetIterationAddedValues(mixedKeys, localshimSet, /* useForEach */ false);
|
||||
assert.equal(shimSetIteratorResult, expectedResult, "shimSet-iterator");
|
||||
});
|
||||
|
||||
it("create from Array", () => {
|
||||
const set = new SetShim(["a"]);
|
||||
assert.equal(set.size, 1);
|
||||
assert.isTrue(set.has("a"));
|
||||
});
|
||||
|
||||
it("create from set", () => {
|
||||
const set1 = new SetShim(["a"]);
|
||||
const set2 = new SetShim(set1);
|
||||
assert.equal(set1.size, 1);
|
||||
assert.equal(set2.size, 1);
|
||||
assert.isTrue(set2.has("a"));
|
||||
});
|
||||
|
||||
it("add when not present", () => {
|
||||
const set = new SetShim<string>();
|
||||
const result = set.add("a");
|
||||
assert.equal(set.size, 1);
|
||||
assert.strictEqual(result, set);
|
||||
assert.isTrue(set.has("a"));
|
||||
});
|
||||
|
||||
it("add when present", () => {
|
||||
const set = new SetShim<string>();
|
||||
set.add("a");
|
||||
const result = set.add("a");
|
||||
assert.equal(set.size, 1);
|
||||
assert.strictEqual(result, set);
|
||||
assert.isTrue(set.has("a"));
|
||||
});
|
||||
|
||||
it("has when not present", () => {
|
||||
const set = new SetShim<string>();
|
||||
assert.isFalse(set.has("a"));
|
||||
});
|
||||
|
||||
it("has when present", () => {
|
||||
const set = new SetShim<string>();
|
||||
set.add("a");
|
||||
assert.isTrue(set.has("a"));
|
||||
});
|
||||
|
||||
it("delete when not present", () => {
|
||||
const set = new SetShim<string>();
|
||||
assert.isFalse(set.delete("a"));
|
||||
});
|
||||
|
||||
it("delete when present", () => {
|
||||
const set = new SetShim<string>();
|
||||
set.add("a");
|
||||
assert.isTrue(set.delete("a"));
|
||||
});
|
||||
|
||||
it("delete twice when present", () => {
|
||||
const set = new SetShim<string>();
|
||||
set.add("a");
|
||||
assert.isTrue(set.delete("a"));
|
||||
assert.isFalse(set.delete("a"));
|
||||
});
|
||||
|
||||
it("remove only item and iterate", () => {
|
||||
const set = new SetShim<string>();
|
||||
set.add("a");
|
||||
set.delete("a");
|
||||
const actual = arrayFrom(set.keys());
|
||||
assert.deepEqual(actual, []);
|
||||
});
|
||||
|
||||
it("remove first item and iterate", () => {
|
||||
const set = new SetShim<string>();
|
||||
set.add("a");
|
||||
set.add("c");
|
||||
set.delete("a");
|
||||
assert.deepEqual(arrayFrom(set.keys()), ["c"]);
|
||||
assert.deepEqual(arrayFrom(set.values()), ["c"]);
|
||||
assert.deepEqual(arrayFrom(set.entries()), [["c", "c"]]);
|
||||
});
|
||||
|
||||
it("remove last item and iterate", () => {
|
||||
const set = new SetShim<string>();
|
||||
set.add("a");
|
||||
set.add("c");
|
||||
set.delete("c");
|
||||
assert.deepEqual(arrayFrom(set.keys()), ["a"]);
|
||||
assert.deepEqual(arrayFrom(set.values()), ["a"]);
|
||||
assert.deepEqual(arrayFrom(set.entries()), [["a", "a"]]);
|
||||
});
|
||||
|
||||
it("remove middle item and iterate", () => {
|
||||
const set = new SetShim<string>();
|
||||
set.add("a");
|
||||
set.add("c");
|
||||
set.add("e");
|
||||
set.delete("c");
|
||||
assert.deepEqual(arrayFrom(set.keys()), ["a", "e"]);
|
||||
assert.deepEqual(arrayFrom(set.values()), ["a", "e"]);
|
||||
assert.deepEqual(arrayFrom(set.entries()), [["a", "a"], ["e", "e"]]);
|
||||
});
|
||||
|
||||
it("keys", () => {
|
||||
const set = new SetShim<string>();
|
||||
set.add("c");
|
||||
set.add("a");
|
||||
assert.deepEqual(arrayFrom(set.keys()), ["c", "a"]);
|
||||
});
|
||||
|
||||
it("values", () => {
|
||||
const set = new SetShim<string>();
|
||||
set.add("c");
|
||||
set.add("a");
|
||||
assert.deepEqual(arrayFrom(set.values()), ["c", "a"]);
|
||||
});
|
||||
|
||||
it("entries", () => {
|
||||
const set = new SetShim<string>();
|
||||
set.add("c");
|
||||
set.add("a");
|
||||
assert.deepEqual(arrayFrom(set.entries()), [["c", "c"], ["a", "a"]]);
|
||||
});
|
||||
|
||||
it("forEach", () => {
|
||||
const set = new SetShim<string>();
|
||||
set.add("c");
|
||||
set.add("a");
|
||||
const actual: [string, string][] = [];
|
||||
set.forEach((value, key) => { actual.push([key, value]); });
|
||||
assert.deepEqual(actual, [["c", "c"], ["a", "a"]]);
|
||||
});
|
||||
});
|
||||
}
|
|
@ -458,7 +458,7 @@ namespace ts {
|
|||
});
|
||||
|
||||
describe("unittests:: moduleResolution:: Relative imports", () => {
|
||||
function test(files: Map<string>, currentDirectory: string, rootFiles: string[], expectedFilesCount: number, relativeNamesToCheck: string[]) {
|
||||
function test(files: Map<string, string>, currentDirectory: string, rootFiles: string[], expectedFilesCount: number, relativeNamesToCheck: string[]) {
|
||||
const options: CompilerOptions = { module: ModuleKind.CommonJS };
|
||||
const host: CompilerHost = {
|
||||
getSourceFile: (fileName: string, languageVersion: ScriptTarget) => {
|
||||
|
@ -533,7 +533,7 @@ export = C;
|
|||
describe("unittests:: moduleResolution:: Files with different casing with forceConsistentCasingInFileNames", () => {
|
||||
let library: SourceFile;
|
||||
function test(
|
||||
files: Map<string>,
|
||||
files: Map<string, string>,
|
||||
options: CompilerOptions,
|
||||
currentDirectory: string,
|
||||
useCaseSensitiveFileNames: boolean,
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
namespace ts {
|
||||
function verifyMissingFilePaths(missingPaths: readonly Path[], expected: readonly string[]) {
|
||||
assert.isDefined(missingPaths);
|
||||
const map = arrayToSet(expected) as Map<boolean>;
|
||||
const map = arrayToSet(expected) as Map<string, boolean>;
|
||||
for (const missing of missingPaths) {
|
||||
const value = map.get(missing);
|
||||
assert.isTrue(value, `${missing} to be ${value === undefined ? "not present" : "present only once"}, in actual: ${missingPaths} expected: ${expected}`);
|
||||
|
|
|
@ -175,7 +175,7 @@ namespace ts {
|
|||
return true;
|
||||
}
|
||||
|
||||
function checkCache<T>(caption: string, program: Program, fileName: string, expectedContent: Map<T> | undefined, getCache: (f: SourceFile) => Map<T> | undefined, entryChecker: (expected: T, original: T) => boolean): void {
|
||||
function checkCache<T>(caption: string, program: Program, fileName: string, expectedContent: Map<string, T> | undefined, getCache: (f: SourceFile) => Map<string, T> | undefined, entryChecker: (expected: T, original: T) => boolean): void {
|
||||
const file = program.getSourceFile(fileName);
|
||||
assert.isTrue(file !== undefined, `cannot find file ${fileName}`);
|
||||
const cache = getCache(file!);
|
||||
|
@ -189,7 +189,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
/** True if the maps have the same keys and values. */
|
||||
function mapsAreEqual<T>(left: Map<T>, right: Map<T>, valuesAreEqual?: (left: T, right: T) => boolean): boolean {
|
||||
function mapsAreEqual<T>(left: Map<string, T>, right: Map<string, T>, valuesAreEqual?: (left: T, right: T) => boolean): boolean {
|
||||
if (left === right) return true;
|
||||
if (!left || !right) return false;
|
||||
const someInLeftHasNoMatch = forEachEntry(left, (leftValue, leftKey) => {
|
||||
|
@ -202,11 +202,11 @@ namespace ts {
|
|||
return !someInRightHasNoMatch;
|
||||
}
|
||||
|
||||
function checkResolvedModulesCache(program: Program, fileName: string, expectedContent: Map<ResolvedModule | undefined> | undefined): void {
|
||||
function checkResolvedModulesCache(program: Program, fileName: string, expectedContent: Map<string, ResolvedModule | undefined> | undefined): void {
|
||||
checkCache("resolved modules", program, fileName, expectedContent, f => f.resolvedModules, checkResolvedModule);
|
||||
}
|
||||
|
||||
function checkResolvedTypeDirectivesCache(program: Program, fileName: string, expectedContent: Map<ResolvedTypeReferenceDirective> | undefined): void {
|
||||
function checkResolvedTypeDirectivesCache(program: Program, fileName: string, expectedContent: Map<string, ResolvedTypeReferenceDirective> | undefined): void {
|
||||
checkCache("resolved type directives", program, fileName, expectedContent, f => f.resolvedTypeReferenceDirectiveNames, checkResolvedTypeDirective);
|
||||
}
|
||||
|
||||
|
|
|
@ -7,7 +7,7 @@ namespace ts {
|
|||
|
||||
interface Test {
|
||||
source: string;
|
||||
ranges: Map<Range>;
|
||||
ranges: Map<string, Range>;
|
||||
}
|
||||
|
||||
export function extractTest(source: string): Test {
|
||||
|
|
|
@ -186,7 +186,7 @@ interface Symbol {
|
|||
}
|
||||
}
|
||||
|
||||
export function generateSourceMapBaselineFiles(sys: System & { writtenFiles: Map<any>; }) {
|
||||
export function generateSourceMapBaselineFiles(sys: System & { writtenFiles: ReadonlyCollection<string>; }) {
|
||||
const mapFileNames = mapDefinedIterator(sys.writtenFiles.keys(), f => f.endsWith(".map") ? f : undefined);
|
||||
while (true) {
|
||||
const { value: mapFile, done } = mapFileNames.next();
|
||||
|
@ -237,7 +237,7 @@ interface Symbol {
|
|||
|
||||
export function baselineBuildInfo(
|
||||
options: CompilerOptions,
|
||||
sys: System & { writtenFiles: Map<any>; },
|
||||
sys: System & { writtenFiles: ReadonlyCollection<string>; },
|
||||
originalReadCall?: System["readFile"]
|
||||
) {
|
||||
const out = outFile(options);
|
||||
|
|
|
@ -11,7 +11,7 @@ namespace ts.tscWatch {
|
|||
);
|
||||
});
|
||||
|
||||
function verifyWatchFileOnMultipleProjects(singleWatchPerFile: boolean, environmentVariables?: Map<string>) {
|
||||
function verifyWatchFileOnMultipleProjects(singleWatchPerFile: boolean, environmentVariables?: Map<string, string>) {
|
||||
it("watchFile on same file multiple times because file is part of multiple projects", () => {
|
||||
const project = `${TestFSWithWatch.tsbuildProjectsLocation}/myproject`;
|
||||
let maxPkgs = 4;
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
namespace ts {
|
||||
export type TscCompileSystem = fakes.System & {
|
||||
writtenFiles: Map<true>;
|
||||
writtenFiles: Set<string>;
|
||||
baseLine(): { file: string; text: string; };
|
||||
};
|
||||
|
||||
|
@ -44,7 +44,7 @@ namespace ts {
|
|||
return !!(program as Program | EmitAndSemanticDiagnosticsBuilderProgram).getCompilerOptions;
|
||||
}
|
||||
export function commandLineCallbacks(
|
||||
sys: System & { writtenFiles: Map<any>; },
|
||||
sys: System & { writtenFiles: ReadonlyCollection<string>; },
|
||||
originalReadCall?: System["readFile"]
|
||||
): CommandLineCallbacks {
|
||||
let programs: CommandLineProgram[] | undefined;
|
||||
|
@ -85,11 +85,11 @@ namespace ts {
|
|||
// Create system
|
||||
const sys = new fakes.System(fs, { executingFilePath: "/lib/tsc" }) as TscCompileSystem;
|
||||
fakes.patchHostForBuildInfoReadWrite(sys);
|
||||
const writtenFiles = sys.writtenFiles = createMap<true>();
|
||||
const writtenFiles = sys.writtenFiles = new Set<string>();
|
||||
const originalWriteFile = sys.writeFile;
|
||||
sys.writeFile = (fileName, content, writeByteOrderMark) => {
|
||||
assert.isFalse(writtenFiles.has(fileName));
|
||||
writtenFiles.set(fileName, true);
|
||||
writtenFiles.add(fileName);
|
||||
return originalWriteFile.call(sys, fileName, content, writeByteOrderMark);
|
||||
};
|
||||
const actualReadFileMap: MapLike<number> = {};
|
||||
|
|
|
@ -157,17 +157,17 @@ namespace ts.tscWatch {
|
|||
assert.equal(state.changedFilesSet!.size, 0, "changes");
|
||||
|
||||
assert.equal(state.fileInfos.size, 3, "FileInfo size");
|
||||
assert.deepEqual(state.fileInfos.get(libFile.path), {
|
||||
assert.deepEqual(state.fileInfos.get(libFile.path as Path), {
|
||||
version: system.createHash(libFile.content),
|
||||
signature: system.createHash(libFile.content),
|
||||
affectsGlobalScope: true,
|
||||
});
|
||||
assert.deepEqual(state.fileInfos.get(file1.path), {
|
||||
assert.deepEqual(state.fileInfos.get(file1.path as Path), {
|
||||
version: system.createHash(file1.content),
|
||||
signature: system.createHash(`${file1.content.replace("export ", "export declare ")}\n`),
|
||||
affectsGlobalScope: false,
|
||||
});
|
||||
assert.deepEqual(state.fileInfos.get(file2.path), {
|
||||
assert.deepEqual(state.fileInfos.get(file2.path as Path), {
|
||||
version: system.createHash(fileModified.content),
|
||||
signature: system.createHash("export declare const y: string;\n"),
|
||||
affectsGlobalScope: false,
|
||||
|
@ -183,9 +183,9 @@ namespace ts.tscWatch {
|
|||
assert.equal(state.exportedModulesMap!.size, 0);
|
||||
|
||||
assert.equal(state.semanticDiagnosticsPerFile!.size, 3);
|
||||
assert.deepEqual(state.semanticDiagnosticsPerFile!.get(libFile.path), emptyArray);
|
||||
assert.deepEqual(state.semanticDiagnosticsPerFile!.get(file1.path), emptyArray);
|
||||
assert.deepEqual(state.semanticDiagnosticsPerFile!.get(file2.path), [{
|
||||
assert.deepEqual(state.semanticDiagnosticsPerFile!.get(libFile.path as Path), emptyArray);
|
||||
assert.deepEqual(state.semanticDiagnosticsPerFile!.get(file1.path as Path), emptyArray);
|
||||
assert.deepEqual(state.semanticDiagnosticsPerFile!.get(file2.path as Path), [{
|
||||
file: state.program!.getSourceFileByPath(file2.path as Path)!,
|
||||
start: 13,
|
||||
length: 1,
|
||||
|
|
|
@ -16,7 +16,7 @@ namespace ts.projectSystem {
|
|||
type CalledMaps = CalledMapsWithSingleArg | CalledMapsWithFiveArgs;
|
||||
type CalledWithFiveArgs = [readonly string[], readonly string[], readonly string[], number];
|
||||
function createCallsTrackingHost(host: TestServerHost) {
|
||||
const calledMaps: Record<CalledMapsWithSingleArg, MultiMap<true>> & Record<CalledMapsWithFiveArgs, MultiMap<CalledWithFiveArgs>> = {
|
||||
const calledMaps: Record<CalledMapsWithSingleArg, MultiMap<string, true>> & Record<CalledMapsWithFiveArgs, MultiMap<string, CalledWithFiveArgs>> = {
|
||||
fileExists: setCallsTrackingWithSingleArgFn(CalledMapsWithSingleArg.fileExists),
|
||||
directoryExists: setCallsTrackingWithSingleArgFn(CalledMapsWithSingleArg.directoryExists),
|
||||
getDirectories: setCallsTrackingWithSingleArgFn(CalledMapsWithSingleArg.getDirectories),
|
||||
|
@ -65,7 +65,7 @@ namespace ts.projectSystem {
|
|||
assert.equal(calledMap.size, 0, `${callback} shouldn't be called: ${arrayFrom(calledMap.keys())}`);
|
||||
}
|
||||
|
||||
function verifyCalledOnEachEntry(callback: CalledMaps, expectedKeys: Map<number>) {
|
||||
function verifyCalledOnEachEntry(callback: CalledMaps, expectedKeys: Map<string, number>) {
|
||||
TestFSWithWatch.checkMap<true | CalledWithFiveArgs>(callback, calledMaps[callback], expectedKeys);
|
||||
}
|
||||
|
||||
|
|
|
@ -165,7 +165,7 @@ namespace ts.projectSystem {
|
|||
return JSON.stringify({ dependencies });
|
||||
}
|
||||
|
||||
export function createTypesRegistry(...list: string[]): Map<MapLike<string>> {
|
||||
export function createTypesRegistry(...list: string[]): Map<string, MapLike<string>> {
|
||||
const versionMap = {
|
||||
"latest": "1.3.0",
|
||||
"ts2.0": "1.0.0",
|
||||
|
@ -489,7 +489,7 @@ namespace ts.projectSystem {
|
|||
}
|
||||
|
||||
export function checkOpenFiles(projectService: server.ProjectService, expectedFiles: File[]) {
|
||||
checkArray("Open files", arrayFrom(projectService.openFiles.keys(), path => projectService.getScriptInfoForPath(path as Path)!.fileName), expectedFiles.map(file => file.path));
|
||||
checkArray("Open files", arrayFrom(projectService.openFiles.keys(), path => projectService.getScriptInfoForPath(path)!.fileName), expectedFiles.map(file => file.path));
|
||||
}
|
||||
|
||||
export function checkScriptInfos(projectService: server.ProjectService, expectedFiles: readonly string[], additionInfo?: string) {
|
||||
|
|
|
@ -58,7 +58,7 @@ fn2();
|
|||
arguments: { file: dependencyTs.path }
|
||||
});
|
||||
const { file, insertString } = change();
|
||||
if (session.getProjectService().openFiles.has(file.path)) {
|
||||
if (session.getProjectService().openFiles.has(file.path as Path)) {
|
||||
const toLocation = protocolToLocation(file.content);
|
||||
const location = toLocation(file.content.length);
|
||||
session.executeCommandSeq<protocol.ChangeRequest>({
|
||||
|
@ -95,7 +95,7 @@ fn2();
|
|||
assert.equal(host.writtenFiles.size, expectedFiles.length);
|
||||
for (const file of expectedFiles) {
|
||||
assert.equal(host.readFile(file.path), file.content, `Expected to write ${file.path}`);
|
||||
assert.isTrue(host.writtenFiles.has(file.path), `${file.path} is newly written`);
|
||||
assert.isTrue(host.writtenFiles.has(file.path as Path), `${file.path} is newly written`);
|
||||
}
|
||||
|
||||
// Verify EmitOutput
|
||||
|
|
|
@ -161,7 +161,7 @@ new C();`
|
|||
});
|
||||
}
|
||||
|
||||
function verifyWatchedFilesAndDirectories(host: TestServerHost, files: string[], recursiveDirectories: ReadonlyMap<number>, nonRecursiveDirectories: string[]) {
|
||||
function verifyWatchedFilesAndDirectories(host: TestServerHost, files: string[], recursiveDirectories: ReadonlyMap<string, number>, nonRecursiveDirectories: string[]) {
|
||||
checkWatchedFilesDetailed(host, files.filter(f => f !== recognizersDateTimeSrcFile.path), 1);
|
||||
checkWatchedDirectoriesDetailed(host, nonRecursiveDirectories, 1, /*recursive*/ false);
|
||||
checkWatchedDirectoriesDetailed(host, recursiveDirectories, /*recursive*/ true);
|
||||
|
|
|
@ -5,7 +5,7 @@ namespace ts.projectSystem {
|
|||
interface InstallerParams {
|
||||
globalTypingsCacheLocation?: string;
|
||||
throttleLimit?: number;
|
||||
typesRegistry?: Map<MapLike<string>>;
|
||||
typesRegistry?: Map<string, MapLike<string>>;
|
||||
}
|
||||
|
||||
class Installer extends TestTypingsInstaller {
|
||||
|
|
|
@ -233,7 +233,7 @@ namespace ts.server {
|
|||
private requestMap = createMap<QueuedOperation>(); // Maps operation ID to newest requestQueue entry with that ID
|
||||
/** We will lazily request the types registry on the first call to `isKnownTypesPackageName` and store it in `typesRegistryCache`. */
|
||||
private requestedRegistry = false;
|
||||
private typesRegistryCache: Map<MapLike<string>> | undefined;
|
||||
private typesRegistryCache: Map<string, MapLike<string>> | undefined;
|
||||
|
||||
// This number is essentially arbitrary. Processing more than one typings request
|
||||
// at a time makes sense, but having too many in the pipe results in a hang
|
||||
|
|
|
@ -46,7 +46,7 @@ namespace ts.server.typingsInstaller {
|
|||
entries: MapLike<MapLike<string>>;
|
||||
}
|
||||
|
||||
function loadTypesRegistryFile(typesRegistryFilePath: string, host: InstallTypingHost, log: Log): Map<MapLike<string>> {
|
||||
function loadTypesRegistryFile(typesRegistryFilePath: string, host: InstallTypingHost, log: Log): Map<string, MapLike<string>> {
|
||||
if (!host.fileExists(typesRegistryFilePath)) {
|
||||
if (log.isEnabled()) {
|
||||
log.writeLine(`Types registry file '${typesRegistryFilePath}' does not exist`);
|
||||
|
@ -79,7 +79,7 @@ namespace ts.server.typingsInstaller {
|
|||
export class NodeTypingsInstaller extends TypingsInstaller {
|
||||
private readonly nodeExecSync: ExecSync;
|
||||
private readonly npmPath: string;
|
||||
readonly typesRegistry: Map<MapLike<string>>;
|
||||
readonly typesRegistry: Map<string, MapLike<string>>;
|
||||
|
||||
private delayedInitializationError: InitializationFailedResponse | undefined;
|
||||
|
||||
|
|
|
@ -84,12 +84,12 @@ namespace ts.server.typingsInstaller {
|
|||
DirectoryWatcher = "DirectoryWatcher"
|
||||
}
|
||||
|
||||
type ProjectWatchers = Map<FileWatcher> & { isInvoked?: boolean; };
|
||||
type ProjectWatchers = Map<string, FileWatcher> & { isInvoked?: boolean; };
|
||||
|
||||
export abstract class TypingsInstaller {
|
||||
private readonly packageNameToTypingLocation: Map<JsTyping.CachedTyping> = createMap<JsTyping.CachedTyping>();
|
||||
private readonly missingTypingsSet: Map<true> = createMap<true>();
|
||||
private readonly knownCachesSet: Map<true> = createMap<true>();
|
||||
private readonly packageNameToTypingLocation: Map<string, JsTyping.CachedTyping> = createMap<JsTyping.CachedTyping>();
|
||||
private readonly missingTypingsSet = new Set<string>();
|
||||
private readonly knownCachesSet = new Set<string>();
|
||||
private readonly projectWatchers = createMap<ProjectWatchers>();
|
||||
private safeList: JsTyping.SafeList | undefined;
|
||||
readonly pendingRunRequests: PendingRequest[] = [];
|
||||
|
@ -99,7 +99,7 @@ namespace ts.server.typingsInstaller {
|
|||
private installRunCount = 1;
|
||||
private inFlightRequestCount = 0;
|
||||
|
||||
abstract readonly typesRegistry: Map<MapLike<string>>;
|
||||
abstract readonly typesRegistry: Map<string, MapLike<string>>;
|
||||
|
||||
constructor(
|
||||
protected readonly installTypingHost: InstallTypingHost,
|
||||
|
@ -234,7 +234,7 @@ namespace ts.server.typingsInstaller {
|
|||
}
|
||||
const typingFile = typingToFileName(cacheLocation, packageName, this.installTypingHost, this.log);
|
||||
if (!typingFile) {
|
||||
this.missingTypingsSet.set(packageName, true);
|
||||
this.missingTypingsSet.add(packageName);
|
||||
continue;
|
||||
}
|
||||
const existingTypingFile = this.packageNameToTypingLocation.get(packageName);
|
||||
|
@ -264,20 +264,20 @@ namespace ts.server.typingsInstaller {
|
|||
if (this.log.isEnabled()) {
|
||||
this.log.writeLine(`Finished processing cache location '${cacheLocation}'`);
|
||||
}
|
||||
this.knownCachesSet.set(cacheLocation, true);
|
||||
this.knownCachesSet.add(cacheLocation);
|
||||
}
|
||||
|
||||
private filterTypings(typingsToInstall: readonly string[]): readonly string[] {
|
||||
return mapDefined(typingsToInstall, typing => {
|
||||
const typingKey = mangleScopedPackageName(typing);
|
||||
if (this.missingTypingsSet.get(typingKey)) {
|
||||
if (this.missingTypingsSet.has(typingKey)) {
|
||||
if (this.log.isEnabled()) this.log.writeLine(`'${typing}':: '${typingKey}' is in missingTypingsSet - skipping...`);
|
||||
return undefined;
|
||||
}
|
||||
const validationResult = JsTyping.validatePackageName(typing);
|
||||
if (validationResult !== JsTyping.NameValidationResult.Ok) {
|
||||
// add typing name to missing set so we won't process it again
|
||||
this.missingTypingsSet.set(typingKey, true);
|
||||
this.missingTypingsSet.add(typingKey);
|
||||
if (this.log.isEnabled()) this.log.writeLine(JsTyping.renderPackageNameValidationFailure(validationResult, typing));
|
||||
return undefined;
|
||||
}
|
||||
|
@ -343,7 +343,7 @@ namespace ts.server.typingsInstaller {
|
|||
this.log.writeLine(`install request failed, marking packages as missing to prevent repeated requests: ${JSON.stringify(filteredTypings)}`);
|
||||
}
|
||||
for (const typing of filteredTypings) {
|
||||
this.missingTypingsSet.set(typing, true);
|
||||
this.missingTypingsSet.add(typing);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
@ -356,7 +356,7 @@ namespace ts.server.typingsInstaller {
|
|||
for (const packageName of filteredTypings) {
|
||||
const typingFile = typingToFileName(cachePath, packageName, this.installTypingHost, this.log);
|
||||
if (!typingFile) {
|
||||
this.missingTypingsSet.set(packageName, true);
|
||||
this.missingTypingsSet.add(packageName);
|
||||
continue;
|
||||
}
|
||||
|
||||
|
|
|
@ -31,21 +31,39 @@ declare namespace ts {
|
|||
interface SortedArray<T> extends Array<T> {
|
||||
" __sortedArrayBrand": any;
|
||||
}
|
||||
/** ES6 Map interface, only read methods included. */
|
||||
interface ReadonlyMap<T> {
|
||||
get(key: string): T | undefined;
|
||||
has(key: string): boolean;
|
||||
forEach(action: (value: T, key: string) => void): void;
|
||||
/** Common read methods for ES6 Map/Set. */
|
||||
interface ReadonlyCollection<K> {
|
||||
readonly size: number;
|
||||
keys(): Iterator<string>;
|
||||
values(): Iterator<T>;
|
||||
entries(): Iterator<[string, T]>;
|
||||
has(key: K): boolean;
|
||||
keys(): Iterator<K>;
|
||||
}
|
||||
/** Common write methods for ES6 Map/Set. */
|
||||
interface Collection<K> extends ReadonlyCollection<K> {
|
||||
delete(key: K): boolean;
|
||||
clear(): void;
|
||||
}
|
||||
/** ES6 Map interface, only read methods included. */
|
||||
interface ReadonlyMap<K, V> extends ReadonlyCollection<K> {
|
||||
get(key: K): V | undefined;
|
||||
values(): Iterator<V>;
|
||||
entries(): Iterator<[K, V]>;
|
||||
forEach(action: (value: V, key: K) => void): void;
|
||||
}
|
||||
/** ES6 Map interface. */
|
||||
interface Map<T> extends ReadonlyMap<T> {
|
||||
set(key: string, value: T): this;
|
||||
delete(key: string): boolean;
|
||||
clear(): void;
|
||||
interface Map<K, V> extends ReadonlyMap<K, V>, Collection<K> {
|
||||
set(key: K, value: V): this;
|
||||
}
|
||||
/** ES6 Set interface, only read methods included. */
|
||||
interface ReadonlySet<T> extends ReadonlyCollection<T> {
|
||||
has(value: T): boolean;
|
||||
values(): Iterator<T>;
|
||||
entries(): Iterator<[T, T]>;
|
||||
forEach(action: (value: T, key: T) => void): void;
|
||||
}
|
||||
/** ES6 Set interface. */
|
||||
interface Set<T> extends ReadonlySet<T>, Collection<T> {
|
||||
add(value: T): this;
|
||||
delete(value: T): boolean;
|
||||
}
|
||||
/** ES6 Iterator type. */
|
||||
interface Iterator<T> {
|
||||
|
@ -2387,20 +2405,10 @@ declare namespace ts {
|
|||
__escapedIdentifier: void;
|
||||
}) | InternalSymbolName;
|
||||
/** ReadonlyMap where keys are `__String`s. */
|
||||
export interface ReadonlyUnderscoreEscapedMap<T> {
|
||||
get(key: __String): T | undefined;
|
||||
has(key: __String): boolean;
|
||||
forEach(action: (value: T, key: __String) => void): void;
|
||||
readonly size: number;
|
||||
keys(): Iterator<__String>;
|
||||
values(): Iterator<T>;
|
||||
entries(): Iterator<[__String, T]>;
|
||||
export interface ReadonlyUnderscoreEscapedMap<T> extends ReadonlyMap<__String, T> {
|
||||
}
|
||||
/** Map where keys are `__String`s. */
|
||||
export interface UnderscoreEscapedMap<T> extends ReadonlyUnderscoreEscapedMap<T> {
|
||||
set(key: __String, value: T): this;
|
||||
delete(key: __String): boolean;
|
||||
clear(): void;
|
||||
export interface UnderscoreEscapedMap<T> extends Map<__String, T>, ReadonlyUnderscoreEscapedMap<T> {
|
||||
}
|
||||
/** SymbolTable based on ES6 Map interface. */
|
||||
export type SymbolTable = UnderscoreEscapedMap<Symbol>;
|
||||
|
@ -2593,7 +2601,7 @@ declare namespace ts {
|
|||
isDistributive: boolean;
|
||||
inferTypeParameters?: TypeParameter[];
|
||||
outerTypeParameters?: TypeParameter[];
|
||||
instantiations?: Map<Type>;
|
||||
instantiations?: Map<string, Type>;
|
||||
aliasSymbol?: Symbol;
|
||||
aliasTypeArguments?: Type[];
|
||||
}
|
||||
|
@ -4485,7 +4493,7 @@ declare namespace ts {
|
|||
/**
|
||||
* Reads the config file, reports errors if any and exits if the config file cannot be found
|
||||
*/
|
||||
export function getParsedCommandLineOfConfigFile(configFileName: string, optionsToExtend: CompilerOptions, host: ParseConfigFileHost, extendedConfigCache?: Map<ExtendedConfigCacheEntry>, watchOptionsToExtend?: WatchOptions, extraFileExtensions?: readonly FileExtensionInfo[]): ParsedCommandLine | undefined;
|
||||
export function getParsedCommandLineOfConfigFile(configFileName: string, optionsToExtend: CompilerOptions, host: ParseConfigFileHost, extendedConfigCache?: Map<string, ExtendedConfigCacheEntry>, watchOptionsToExtend?: WatchOptions, extraFileExtensions?: readonly FileExtensionInfo[]): ParsedCommandLine | undefined;
|
||||
/**
|
||||
* Read tsconfig.json file
|
||||
* @param fileName The path to the config file
|
||||
|
@ -4519,7 +4527,7 @@ declare namespace ts {
|
|||
* @param basePath A root directory to resolve relative path entries in the config
|
||||
* file to. e.g. outDir
|
||||
*/
|
||||
export function parseJsonConfigFileContent(json: any, host: ParseConfigHost, basePath: string, existingOptions?: CompilerOptions, configFileName?: string, resolutionStack?: Path[], extraFileExtensions?: readonly FileExtensionInfo[], extendedConfigCache?: Map<ExtendedConfigCacheEntry>, existingWatchOptions?: WatchOptions): ParsedCommandLine;
|
||||
export function parseJsonConfigFileContent(json: any, host: ParseConfigHost, basePath: string, existingOptions?: CompilerOptions, configFileName?: string, resolutionStack?: Path[], extraFileExtensions?: readonly FileExtensionInfo[], extendedConfigCache?: Map<string, ExtendedConfigCacheEntry>, existingWatchOptions?: WatchOptions): ParsedCommandLine;
|
||||
/**
|
||||
* Parse the contents of a config file (tsconfig.json).
|
||||
* @param jsonNode The contents of the config file to parse
|
||||
|
@ -4527,7 +4535,7 @@ declare namespace ts {
|
|||
* @param basePath A root directory to resolve relative path entries in the config
|
||||
* file to. e.g. outDir
|
||||
*/
|
||||
export function parseJsonSourceFileConfigFileContent(sourceFile: TsConfigSourceFile, host: ParseConfigHost, basePath: string, existingOptions?: CompilerOptions, configFileName?: string, resolutionStack?: Path[], extraFileExtensions?: readonly FileExtensionInfo[], extendedConfigCache?: Map<ExtendedConfigCacheEntry>, existingWatchOptions?: WatchOptions): ParsedCommandLine;
|
||||
export function parseJsonSourceFileConfigFileContent(sourceFile: TsConfigSourceFile, host: ParseConfigHost, basePath: string, existingOptions?: CompilerOptions, configFileName?: string, resolutionStack?: Path[], extraFileExtensions?: readonly FileExtensionInfo[], extendedConfigCache?: Map<string, ExtendedConfigCacheEntry>, existingWatchOptions?: WatchOptions): ParsedCommandLine;
|
||||
export interface ParsedTsconfig {
|
||||
raw: any;
|
||||
options?: CompilerOptions;
|
||||
|
@ -4574,7 +4582,7 @@ declare namespace ts {
|
|||
* This assumes that any module id will have the same resolution for sibling files located in the same folder.
|
||||
*/
|
||||
interface ModuleResolutionCache extends NonRelativeModuleNameResolutionCache {
|
||||
getOrCreateCacheForDirectory(directoryName: string, redirectedReference?: ResolvedProjectReference): Map<ResolvedModuleWithFailedLookupLocations>;
|
||||
getOrCreateCacheForDirectory(directoryName: string, redirectedReference?: ResolvedProjectReference): Map<string, ResolvedModuleWithFailedLookupLocations>;
|
||||
}
|
||||
/**
|
||||
* Stored map from non-relative module name to a table: directory -> result of module lookup in this directory
|
||||
|
@ -9193,8 +9201,8 @@ declare namespace ts.server {
|
|||
filesToString(writeProjectFileNames: boolean): string;
|
||||
setCompilerOptions(compilerOptions: CompilerOptions): void;
|
||||
protected removeRoot(info: ScriptInfo): void;
|
||||
protected enableGlobalPlugins(options: CompilerOptions, pluginConfigOverrides: Map<any> | undefined): void;
|
||||
protected enablePlugin(pluginConfigEntry: PluginImport, searchPaths: string[], pluginConfigOverrides: Map<any> | undefined): void;
|
||||
protected enableGlobalPlugins(options: CompilerOptions, pluginConfigOverrides: Map<string, any> | undefined): void;
|
||||
protected enablePlugin(pluginConfigEntry: PluginImport, searchPaths: string[], pluginConfigOverrides: Map<string, any> | undefined): void;
|
||||
private enableProxy;
|
||||
/** Starts a new check for diagnostics. Call this if some file has updated that would cause diagnostics to be changed. */
|
||||
refreshDiagnostics(): void;
|
||||
|
@ -9462,11 +9470,11 @@ declare namespace ts.server {
|
|||
/**
|
||||
* projects specified by a tsconfig.json file
|
||||
*/
|
||||
readonly configuredProjects: Map<ConfiguredProject>;
|
||||
readonly configuredProjects: Map<string, ConfiguredProject>;
|
||||
/**
|
||||
* Open files: with value being project root path, and key being Path of the file that is open
|
||||
*/
|
||||
readonly openFiles: Map<NormalizedPath | undefined>;
|
||||
readonly openFiles: Map<Path, NormalizedPath | undefined>;
|
||||
/**
|
||||
* Map of open files that are opened without complete path but have projectRoot as current directory
|
||||
*/
|
||||
|
|
66
tests/baselines/reference/api/typescript.d.ts
vendored
66
tests/baselines/reference/api/typescript.d.ts
vendored
|
@ -31,21 +31,39 @@ declare namespace ts {
|
|||
interface SortedArray<T> extends Array<T> {
|
||||
" __sortedArrayBrand": any;
|
||||
}
|
||||
/** ES6 Map interface, only read methods included. */
|
||||
interface ReadonlyMap<T> {
|
||||
get(key: string): T | undefined;
|
||||
has(key: string): boolean;
|
||||
forEach(action: (value: T, key: string) => void): void;
|
||||
/** Common read methods for ES6 Map/Set. */
|
||||
interface ReadonlyCollection<K> {
|
||||
readonly size: number;
|
||||
keys(): Iterator<string>;
|
||||
values(): Iterator<T>;
|
||||
entries(): Iterator<[string, T]>;
|
||||
has(key: K): boolean;
|
||||
keys(): Iterator<K>;
|
||||
}
|
||||
/** Common write methods for ES6 Map/Set. */
|
||||
interface Collection<K> extends ReadonlyCollection<K> {
|
||||
delete(key: K): boolean;
|
||||
clear(): void;
|
||||
}
|
||||
/** ES6 Map interface, only read methods included. */
|
||||
interface ReadonlyMap<K, V> extends ReadonlyCollection<K> {
|
||||
get(key: K): V | undefined;
|
||||
values(): Iterator<V>;
|
||||
entries(): Iterator<[K, V]>;
|
||||
forEach(action: (value: V, key: K) => void): void;
|
||||
}
|
||||
/** ES6 Map interface. */
|
||||
interface Map<T> extends ReadonlyMap<T> {
|
||||
set(key: string, value: T): this;
|
||||
delete(key: string): boolean;
|
||||
clear(): void;
|
||||
interface Map<K, V> extends ReadonlyMap<K, V>, Collection<K> {
|
||||
set(key: K, value: V): this;
|
||||
}
|
||||
/** ES6 Set interface, only read methods included. */
|
||||
interface ReadonlySet<T> extends ReadonlyCollection<T> {
|
||||
has(value: T): boolean;
|
||||
values(): Iterator<T>;
|
||||
entries(): Iterator<[T, T]>;
|
||||
forEach(action: (value: T, key: T) => void): void;
|
||||
}
|
||||
/** ES6 Set interface. */
|
||||
interface Set<T> extends ReadonlySet<T>, Collection<T> {
|
||||
add(value: T): this;
|
||||
delete(value: T): boolean;
|
||||
}
|
||||
/** ES6 Iterator type. */
|
||||
interface Iterator<T> {
|
||||
|
@ -2387,20 +2405,10 @@ declare namespace ts {
|
|||
__escapedIdentifier: void;
|
||||
}) | InternalSymbolName;
|
||||
/** ReadonlyMap where keys are `__String`s. */
|
||||
export interface ReadonlyUnderscoreEscapedMap<T> {
|
||||
get(key: __String): T | undefined;
|
||||
has(key: __String): boolean;
|
||||
forEach(action: (value: T, key: __String) => void): void;
|
||||
readonly size: number;
|
||||
keys(): Iterator<__String>;
|
||||
values(): Iterator<T>;
|
||||
entries(): Iterator<[__String, T]>;
|
||||
export interface ReadonlyUnderscoreEscapedMap<T> extends ReadonlyMap<__String, T> {
|
||||
}
|
||||
/** Map where keys are `__String`s. */
|
||||
export interface UnderscoreEscapedMap<T> extends ReadonlyUnderscoreEscapedMap<T> {
|
||||
set(key: __String, value: T): this;
|
||||
delete(key: __String): boolean;
|
||||
clear(): void;
|
||||
export interface UnderscoreEscapedMap<T> extends Map<__String, T>, ReadonlyUnderscoreEscapedMap<T> {
|
||||
}
|
||||
/** SymbolTable based on ES6 Map interface. */
|
||||
export type SymbolTable = UnderscoreEscapedMap<Symbol>;
|
||||
|
@ -2593,7 +2601,7 @@ declare namespace ts {
|
|||
isDistributive: boolean;
|
||||
inferTypeParameters?: TypeParameter[];
|
||||
outerTypeParameters?: TypeParameter[];
|
||||
instantiations?: Map<Type>;
|
||||
instantiations?: Map<string, Type>;
|
||||
aliasSymbol?: Symbol;
|
||||
aliasTypeArguments?: Type[];
|
||||
}
|
||||
|
@ -4485,7 +4493,7 @@ declare namespace ts {
|
|||
/**
|
||||
* Reads the config file, reports errors if any and exits if the config file cannot be found
|
||||
*/
|
||||
export function getParsedCommandLineOfConfigFile(configFileName: string, optionsToExtend: CompilerOptions, host: ParseConfigFileHost, extendedConfigCache?: Map<ExtendedConfigCacheEntry>, watchOptionsToExtend?: WatchOptions, extraFileExtensions?: readonly FileExtensionInfo[]): ParsedCommandLine | undefined;
|
||||
export function getParsedCommandLineOfConfigFile(configFileName: string, optionsToExtend: CompilerOptions, host: ParseConfigFileHost, extendedConfigCache?: Map<string, ExtendedConfigCacheEntry>, watchOptionsToExtend?: WatchOptions, extraFileExtensions?: readonly FileExtensionInfo[]): ParsedCommandLine | undefined;
|
||||
/**
|
||||
* Read tsconfig.json file
|
||||
* @param fileName The path to the config file
|
||||
|
@ -4519,7 +4527,7 @@ declare namespace ts {
|
|||
* @param basePath A root directory to resolve relative path entries in the config
|
||||
* file to. e.g. outDir
|
||||
*/
|
||||
export function parseJsonConfigFileContent(json: any, host: ParseConfigHost, basePath: string, existingOptions?: CompilerOptions, configFileName?: string, resolutionStack?: Path[], extraFileExtensions?: readonly FileExtensionInfo[], extendedConfigCache?: Map<ExtendedConfigCacheEntry>, existingWatchOptions?: WatchOptions): ParsedCommandLine;
|
||||
export function parseJsonConfigFileContent(json: any, host: ParseConfigHost, basePath: string, existingOptions?: CompilerOptions, configFileName?: string, resolutionStack?: Path[], extraFileExtensions?: readonly FileExtensionInfo[], extendedConfigCache?: Map<string, ExtendedConfigCacheEntry>, existingWatchOptions?: WatchOptions): ParsedCommandLine;
|
||||
/**
|
||||
* Parse the contents of a config file (tsconfig.json).
|
||||
* @param jsonNode The contents of the config file to parse
|
||||
|
@ -4527,7 +4535,7 @@ declare namespace ts {
|
|||
* @param basePath A root directory to resolve relative path entries in the config
|
||||
* file to. e.g. outDir
|
||||
*/
|
||||
export function parseJsonSourceFileConfigFileContent(sourceFile: TsConfigSourceFile, host: ParseConfigHost, basePath: string, existingOptions?: CompilerOptions, configFileName?: string, resolutionStack?: Path[], extraFileExtensions?: readonly FileExtensionInfo[], extendedConfigCache?: Map<ExtendedConfigCacheEntry>, existingWatchOptions?: WatchOptions): ParsedCommandLine;
|
||||
export function parseJsonSourceFileConfigFileContent(sourceFile: TsConfigSourceFile, host: ParseConfigHost, basePath: string, existingOptions?: CompilerOptions, configFileName?: string, resolutionStack?: Path[], extraFileExtensions?: readonly FileExtensionInfo[], extendedConfigCache?: Map<string, ExtendedConfigCacheEntry>, existingWatchOptions?: WatchOptions): ParsedCommandLine;
|
||||
export interface ParsedTsconfig {
|
||||
raw: any;
|
||||
options?: CompilerOptions;
|
||||
|
@ -4574,7 +4582,7 @@ declare namespace ts {
|
|||
* This assumes that any module id will have the same resolution for sibling files located in the same folder.
|
||||
*/
|
||||
interface ModuleResolutionCache extends NonRelativeModuleNameResolutionCache {
|
||||
getOrCreateCacheForDirectory(directoryName: string, redirectedReference?: ResolvedProjectReference): Map<ResolvedModuleWithFailedLookupLocations>;
|
||||
getOrCreateCacheForDirectory(directoryName: string, redirectedReference?: ResolvedProjectReference): Map<string, ResolvedModuleWithFailedLookupLocations>;
|
||||
}
|
||||
/**
|
||||
* Stored map from non-relative module name to a table: directory -> result of module lookup in this directory
|
||||
|
|
Loading…
Reference in a new issue