merge with master
This commit is contained in:
commit
572db9c403
|
@ -859,6 +859,7 @@ var tslintRuleDir = "scripts/tslint";
|
|||
var tslintRules = ([
|
||||
"nextLineRule",
|
||||
"noNullRule",
|
||||
"preferConstRule",
|
||||
"booleanTriviaRule",
|
||||
"typeOperatorSpacingRule"
|
||||
]);
|
||||
|
|
228
scripts/tslint/preferConstRule.ts
Normal file
228
scripts/tslint/preferConstRule.ts
Normal file
|
@ -0,0 +1,228 @@
|
|||
/// <reference path="../../node_modules/tslint/typings/typescriptServices.d.ts" />
|
||||
/// <reference path="../../node_modules/tslint/lib/tslint.d.ts" />
|
||||
|
||||
|
||||
export class Rule extends Lint.Rules.AbstractRule {
|
||||
public static FAILURE_STRING_FACTORY = (identifier: string) => `Identifier '${identifier}' never appears on the LHS of an assignment - use const instead of let for its declaration.`;
|
||||
|
||||
public apply(sourceFile: ts.SourceFile): Lint.RuleFailure[] {
|
||||
return this.applyWithWalker(new PreferConstWalker(sourceFile, this.getOptions()));
|
||||
}
|
||||
}
|
||||
|
||||
function isBindingPattern(node: ts.Node): node is ts.BindingPattern {
|
||||
return !!node && (node.kind === ts.SyntaxKind.ArrayBindingPattern || node.kind === ts.SyntaxKind.ObjectBindingPattern);
|
||||
}
|
||||
|
||||
function walkUpBindingElementsAndPatterns(node: ts.Node): ts.Node {
|
||||
while (node && (node.kind === ts.SyntaxKind.BindingElement || isBindingPattern(node))) {
|
||||
node = node.parent;
|
||||
}
|
||||
|
||||
return node;
|
||||
}
|
||||
|
||||
function getCombinedNodeFlags(node: ts.Node): ts.NodeFlags {
|
||||
node = walkUpBindingElementsAndPatterns(node);
|
||||
|
||||
let flags = node.flags;
|
||||
if (node.kind === ts.SyntaxKind.VariableDeclaration) {
|
||||
node = node.parent;
|
||||
}
|
||||
|
||||
if (node && node.kind === ts.SyntaxKind.VariableDeclarationList) {
|
||||
flags |= node.flags;
|
||||
node = node.parent;
|
||||
}
|
||||
|
||||
if (node && node.kind === ts.SyntaxKind.VariableStatement) {
|
||||
flags |= node.flags;
|
||||
}
|
||||
|
||||
return flags;
|
||||
}
|
||||
|
||||
function isLet(node: ts.Node) {
|
||||
return !!(getCombinedNodeFlags(node) & ts.NodeFlags.Let);
|
||||
}
|
||||
|
||||
function isExported(node: ts.Node) {
|
||||
return !!(getCombinedNodeFlags(node) & ts.NodeFlags.Export);
|
||||
}
|
||||
|
||||
function isAssignmentOperator(token: ts.SyntaxKind): boolean {
|
||||
return token >= ts.SyntaxKind.FirstAssignment && token <= ts.SyntaxKind.LastAssignment;
|
||||
}
|
||||
|
||||
function isBindingLiteralExpression(node: ts.Node): node is (ts.ArrayLiteralExpression | ts.ObjectLiteralExpression) {
|
||||
return (!!node) && (node.kind === ts.SyntaxKind.ObjectLiteralExpression || node.kind === ts.SyntaxKind.ArrayLiteralExpression);
|
||||
}
|
||||
|
||||
/** Pairs a `let` declaration with the number of assignments made to it after declaration. */
interface DeclarationUsages {
    // The declaration site reported if the identifier is never reassigned.
    declaration: ts.VariableDeclaration;
    // Count of post-declaration assignments/mutations observed during the walk.
    usages: number;
}
|
||||
|
||||
class PreferConstWalker extends Lint.RuleWalker {
|
||||
private inScopeLetDeclarations: ts.Map<DeclarationUsages>[] = [];
|
||||
private errors: Lint.RuleFailure[] = [];
|
||||
private markAssignment(identifier: ts.Identifier) {
|
||||
const name = identifier.text;
|
||||
for (let i = this.inScopeLetDeclarations.length - 1; i >= 0; i--) {
|
||||
const declarations = this.inScopeLetDeclarations[i];
|
||||
if (declarations[name]) {
|
||||
declarations[name].usages++;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
visitSourceFile(node: ts.SourceFile) {
|
||||
super.visitSourceFile(node);
|
||||
// Sort errors by position because tslint doesn't
|
||||
this.errors.sort((a, b) => a.getStartPosition().getPosition() - b.getStartPosition().getPosition()).forEach(e => this.addFailure(e));
|
||||
}
|
||||
|
||||
visitBinaryExpression(node: ts.BinaryExpression) {
|
||||
if (isAssignmentOperator(node.operatorToken.kind)) {
|
||||
this.visitLHSExpressions(node.left);
|
||||
}
|
||||
super.visitBinaryExpression(node);
|
||||
}
|
||||
|
||||
private visitLHSExpressions(node: ts.Expression) {
|
||||
while (node.kind === ts.SyntaxKind.ParenthesizedExpression) {
|
||||
node = (node as ts.ParenthesizedExpression).expression;
|
||||
}
|
||||
if (node.kind === ts.SyntaxKind.Identifier) {
|
||||
this.markAssignment(node as ts.Identifier);
|
||||
}
|
||||
else if (isBindingLiteralExpression(node)) {
|
||||
this.visitBindingLiteralExpression(node as (ts.ArrayLiteralExpression | ts.ObjectLiteralExpression));
|
||||
}
|
||||
}
|
||||
|
||||
private visitBindingLiteralExpression(node: ts.ArrayLiteralExpression | ts.ObjectLiteralExpression) {
|
||||
if (node.kind === ts.SyntaxKind.ObjectLiteralExpression) {
|
||||
const pattern = node as ts.ObjectLiteralExpression;
|
||||
for (const element of pattern.properties) {
|
||||
if (element.name.kind === ts.SyntaxKind.Identifier) {
|
||||
this.markAssignment(element.name as ts.Identifier)
|
||||
}
|
||||
else if (isBindingPattern(element.name)) {
|
||||
this.visitBindingPatternIdentifiers(element.name as ts.BindingPattern);
|
||||
}
|
||||
}
|
||||
}
|
||||
else if (node.kind === ts.SyntaxKind.ArrayLiteralExpression) {
|
||||
const pattern = node as ts.ArrayLiteralExpression;
|
||||
for (const element of pattern.elements) {
|
||||
this.visitLHSExpressions(element);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private visitBindingPatternIdentifiers(pattern: ts.BindingPattern) {
|
||||
for (const element of pattern.elements) {
|
||||
if (element.name.kind === ts.SyntaxKind.Identifier) {
|
||||
this.markAssignment(element.name as ts.Identifier);
|
||||
}
|
||||
else {
|
||||
this.visitBindingPatternIdentifiers(element.name as ts.BindingPattern);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
visitPrefixUnaryExpression(node: ts.PrefixUnaryExpression) {
|
||||
this.visitAnyUnaryExpression(node);
|
||||
super.visitPrefixUnaryExpression(node);
|
||||
}
|
||||
|
||||
visitPostfixUnaryExpression(node: ts.PostfixUnaryExpression) {
|
||||
this.visitAnyUnaryExpression(node);
|
||||
super.visitPostfixUnaryExpression(node);
|
||||
}
|
||||
|
||||
private visitAnyUnaryExpression(node: ts.PrefixUnaryExpression | ts.PostfixUnaryExpression) {
|
||||
if (node.operator === ts.SyntaxKind.PlusPlusToken || node.operator === ts.SyntaxKind.MinusMinusToken) {
|
||||
this.visitLHSExpressions(node.operand);
|
||||
}
|
||||
}
|
||||
|
||||
visitModuleDeclaration(node: ts.ModuleDeclaration) {
|
||||
if (node.body.kind === ts.SyntaxKind.ModuleBlock) {
|
||||
// For some reason module blocks are left out of the visit block traversal
|
||||
this.visitBlock(node.body as ts.ModuleBlock);
|
||||
}
|
||||
super.visitModuleDeclaration(node);
|
||||
}
|
||||
|
||||
visitForOfStatement(node: ts.ForOfStatement) {
|
||||
this.visitAnyForStatement(node);
|
||||
super.visitForOfStatement(node);
|
||||
this.popDeclarations();
|
||||
}
|
||||
|
||||
visitForInStatement(node: ts.ForInStatement) {
|
||||
this.visitAnyForStatement(node);
|
||||
super.visitForInStatement(node);
|
||||
this.popDeclarations();
|
||||
}
|
||||
|
||||
private visitAnyForStatement(node: ts.ForOfStatement | ts.ForInStatement) {
|
||||
const names: ts.Map<DeclarationUsages> = {};
|
||||
if (isLet(node.initializer)) {
|
||||
if (node.initializer.kind === ts.SyntaxKind.VariableDeclarationList) {
|
||||
this.collectLetIdentifiers(node.initializer as ts.VariableDeclarationList, names);
|
||||
}
|
||||
}
|
||||
this.inScopeLetDeclarations.push(names);
|
||||
}
|
||||
|
||||
private popDeclarations() {
|
||||
const completed = this.inScopeLetDeclarations.pop();
|
||||
for (const name in completed) {
|
||||
if (Object.hasOwnProperty.call(completed, name)) {
|
||||
const element = completed[name];
|
||||
if (element.usages === 0) {
|
||||
this.errors.push(this.createFailure(element.declaration.getStart(this.getSourceFile()), element.declaration.getWidth(this.getSourceFile()), Rule.FAILURE_STRING_FACTORY(name)));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
visitBlock(node: ts.Block) {
|
||||
const names: ts.Map<DeclarationUsages> = {};
|
||||
for (const statement of node.statements) {
|
||||
if (statement.kind === ts.SyntaxKind.VariableStatement) {
|
||||
this.collectLetIdentifiers((statement as ts.VariableStatement).declarationList, names);
|
||||
}
|
||||
}
|
||||
this.inScopeLetDeclarations.push(names);
|
||||
super.visitBlock(node);
|
||||
this.popDeclarations();
|
||||
}
|
||||
|
||||
private collectLetIdentifiers(list: ts.VariableDeclarationList, ret: ts.Map<DeclarationUsages>) {
|
||||
for (const node of list.declarations) {
|
||||
if (isLet(node) && !isExported(node)) {
|
||||
this.collectNameIdentifiers(node, node.name, ret);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private collectNameIdentifiers(value: ts.VariableDeclaration, node: ts.Identifier | ts.BindingPattern, table: ts.Map<DeclarationUsages>) {
|
||||
if (node.kind === ts.SyntaxKind.Identifier) {
|
||||
table[(node as ts.Identifier).text] = {declaration: value, usages: 0};
|
||||
}
|
||||
else {
|
||||
this.collectBindingPatternIdentifiers(value, node as ts.BindingPattern, table);
|
||||
}
|
||||
}
|
||||
|
||||
private collectBindingPatternIdentifiers(value: ts.VariableDeclaration, pattern: ts.BindingPattern, table: ts.Map<DeclarationUsages>) {
|
||||
for (const element of pattern.elements) {
|
||||
this.collectNameIdentifiers(value, element.name, table);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -95,7 +95,7 @@ namespace ts {
|
|||
const binder = createBinder();
|
||||
|
||||
export function bindSourceFile(file: SourceFile, options: CompilerOptions) {
|
||||
let start = new Date().getTime();
|
||||
const start = new Date().getTime();
|
||||
binder(file, options);
|
||||
bindTime += new Date().getTime() - start;
|
||||
}
|
||||
|
@ -187,7 +187,7 @@ namespace ts {
|
|||
return `"${(<LiteralExpression>node.name).text}"`;
|
||||
}
|
||||
if (node.name.kind === SyntaxKind.ComputedPropertyName) {
|
||||
let nameExpression = (<ComputedPropertyName>node.name).expression;
|
||||
const nameExpression = (<ComputedPropertyName>node.name).expression;
|
||||
// treat computed property names where expression is string/numeric literal as just string/numeric literal
|
||||
if (isStringOrNumericLiteral(nameExpression.kind)) {
|
||||
return (<LiteralExpression>nameExpression).text;
|
||||
|
@ -234,9 +234,9 @@ namespace ts {
|
|||
function declareSymbol(symbolTable: SymbolTable, parent: Symbol, node: Declaration, includes: SymbolFlags, excludes: SymbolFlags): Symbol {
|
||||
Debug.assert(!hasDynamicName(node));
|
||||
|
||||
let isDefaultExport = node.flags & NodeFlags.Default;
|
||||
const isDefaultExport = node.flags & NodeFlags.Default;
|
||||
// The exported symbol for an export default function/class node is always named "default"
|
||||
let name = isDefaultExport && parent ? "default" : getDeclarationName(node);
|
||||
const name = isDefaultExport && parent ? "default" : getDeclarationName(node);
|
||||
|
||||
let symbol: Symbol;
|
||||
if (name !== undefined) {
|
||||
|
@ -303,7 +303,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
function declareModuleMember(node: Declaration, symbolFlags: SymbolFlags, symbolExcludes: SymbolFlags): Symbol {
|
||||
let hasExportModifier = getCombinedNodeFlags(node) & NodeFlags.Export;
|
||||
const hasExportModifier = getCombinedNodeFlags(node) & NodeFlags.Export;
|
||||
if (symbolFlags & SymbolFlags.Alias) {
|
||||
if (node.kind === SyntaxKind.ExportSpecifier || (node.kind === SyntaxKind.ImportEqualsDeclaration && hasExportModifier)) {
|
||||
return declareSymbol(container.symbol.exports, container.symbol, node, symbolFlags, symbolExcludes);
|
||||
|
@ -325,11 +325,11 @@ namespace ts {
|
|||
// but return the export symbol (by calling getExportSymbolOfValueSymbolIfExported). That way
|
||||
// when the emitter comes back to it, it knows not to qualify the name if it was found in a containing scope.
|
||||
if (hasExportModifier || container.flags & NodeFlags.ExportContext) {
|
||||
let exportKind =
|
||||
const exportKind =
|
||||
(symbolFlags & SymbolFlags.Value ? SymbolFlags.ExportValue : 0) |
|
||||
(symbolFlags & SymbolFlags.Type ? SymbolFlags.ExportType : 0) |
|
||||
(symbolFlags & SymbolFlags.Namespace ? SymbolFlags.ExportNamespace : 0);
|
||||
let local = declareSymbol(container.locals, undefined, node, exportKind, symbolExcludes);
|
||||
const local = declareSymbol(container.locals, undefined, node, exportKind, symbolExcludes);
|
||||
local.exportSymbol = declareSymbol(container.symbol.exports, container.symbol, node, symbolFlags, symbolExcludes);
|
||||
node.localSymbol = local;
|
||||
return local;
|
||||
|
@ -347,9 +347,9 @@ namespace ts {
|
|||
// Before we recurse into a node's children, we first save the existing parent, container
|
||||
// and block-container. Then after we pop out of processing the children, we restore
|
||||
// these saved values.
|
||||
let saveParent = parent;
|
||||
let saveContainer = container;
|
||||
let savedBlockScopeContainer = blockScopeContainer;
|
||||
const saveParent = parent;
|
||||
const saveContainer = container;
|
||||
const savedBlockScopeContainer = blockScopeContainer;
|
||||
|
||||
// This node will now be set as the parent of all of its children as we recurse into them.
|
||||
parent = node;
|
||||
|
@ -371,7 +371,7 @@ namespace ts {
|
|||
// reusing a node from a previous compilation, that node may have had 'locals' created
|
||||
// for it. We must clear this so we don't accidently move any stale data forward from
|
||||
// a previous compilation.
|
||||
let containerFlags = getContainerFlags(node);
|
||||
const containerFlags = getContainerFlags(node);
|
||||
if (containerFlags & ContainerFlags.IsContainer) {
|
||||
container = blockScopeContainer = node;
|
||||
|
||||
|
@ -403,7 +403,7 @@ namespace ts {
|
|||
seenThisKeyword = false;
|
||||
}
|
||||
|
||||
let saveState = kind === SyntaxKind.SourceFile || kind === SyntaxKind.ModuleBlock || isFunctionLikeKind(kind);
|
||||
const saveState = kind === SyntaxKind.SourceFile || kind === SyntaxKind.ModuleBlock || isFunctionLikeKind(kind);
|
||||
if (saveState) {
|
||||
savedReachabilityState = currentReachabilityState;
|
||||
savedLabelStack = labelStack;
|
||||
|
@ -638,7 +638,7 @@ namespace ts {
|
|||
function bindCaseBlock(n: CaseBlock): void {
|
||||
const startState = currentReachabilityState;
|
||||
|
||||
for (let clause of n.clauses) {
|
||||
for (const clause of n.clauses) {
|
||||
currentReachabilityState = startState;
|
||||
bind(clause);
|
||||
if (clause.statements.length && currentReachabilityState === Reachability.Reachable && options.noFallthroughCasesInSwitch) {
|
||||
|
@ -795,9 +795,9 @@ namespace ts {
|
|||
}
|
||||
|
||||
function hasExportDeclarations(node: ModuleDeclaration | SourceFile): boolean {
|
||||
let body = node.kind === SyntaxKind.SourceFile ? node : (<ModuleDeclaration>node).body;
|
||||
const body = node.kind === SyntaxKind.SourceFile ? node : (<ModuleDeclaration>node).body;
|
||||
if (body.kind === SyntaxKind.SourceFile || body.kind === SyntaxKind.ModuleBlock) {
|
||||
for (let stat of (<Block>body).statements) {
|
||||
for (const stat of (<Block>body).statements) {
|
||||
if (stat.kind === SyntaxKind.ExportDeclaration || stat.kind === SyntaxKind.ExportAssignment) {
|
||||
return true;
|
||||
}
|
||||
|
@ -823,7 +823,7 @@ namespace ts {
|
|||
declareSymbolAndAddToSymbolTable(node, SymbolFlags.ValueModule, SymbolFlags.ValueModuleExcludes);
|
||||
}
|
||||
else {
|
||||
let state = getModuleInstanceState(node);
|
||||
const state = getModuleInstanceState(node);
|
||||
if (state === ModuleInstanceState.NonInstantiated) {
|
||||
declareSymbolAndAddToSymbolTable(node, SymbolFlags.NamespaceModule, SymbolFlags.NamespaceModuleExcludes);
|
||||
}
|
||||
|
@ -835,7 +835,7 @@ namespace ts {
|
|||
node.symbol.constEnumOnlyModule = false;
|
||||
}
|
||||
else {
|
||||
let currentModuleIsConstEnumOnly = state === ModuleInstanceState.ConstEnumOnly;
|
||||
const currentModuleIsConstEnumOnly = state === ModuleInstanceState.ConstEnumOnly;
|
||||
if (node.symbol.constEnumOnlyModule === undefined) {
|
||||
// non-merged case - use the current state
|
||||
node.symbol.constEnumOnlyModule = currentModuleIsConstEnumOnly;
|
||||
|
@ -856,10 +856,10 @@ namespace ts {
|
|||
// We do that by making an anonymous type literal symbol, and then setting the function
|
||||
// symbol as its sole member. To the rest of the system, this symbol will be indistinguishable
|
||||
// from an actual type literal symbol you would have gotten had you used the long form.
|
||||
let symbol = createSymbol(SymbolFlags.Signature, getDeclarationName(node));
|
||||
const symbol = createSymbol(SymbolFlags.Signature, getDeclarationName(node));
|
||||
addDeclarationToSymbol(symbol, node, SymbolFlags.Signature);
|
||||
|
||||
let typeLiteralSymbol = createSymbol(SymbolFlags.TypeLiteral, "__type");
|
||||
const typeLiteralSymbol = createSymbol(SymbolFlags.TypeLiteral, "__type");
|
||||
addDeclarationToSymbol(typeLiteralSymbol, node, SymbolFlags.TypeLiteral);
|
||||
typeLiteralSymbol.members = { [symbol.name]: symbol };
|
||||
}
|
||||
|
@ -871,14 +871,14 @@ namespace ts {
|
|||
}
|
||||
|
||||
if (inStrictMode) {
|
||||
let seen: Map<ElementKind> = {};
|
||||
const seen: Map<ElementKind> = {};
|
||||
|
||||
for (let prop of node.properties) {
|
||||
for (const prop of node.properties) {
|
||||
if (prop.name.kind !== SyntaxKind.Identifier) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let identifier = <Identifier>prop.name;
|
||||
const identifier = <Identifier>prop.name;
|
||||
|
||||
// ECMA-262 11.1.5 Object Initialiser
|
||||
// If previous is not undefined then throw a SyntaxError exception if any of the following conditions are true
|
||||
|
@ -888,18 +888,18 @@ namespace ts {
|
|||
// c.IsAccessorDescriptor(previous) is true and IsDataDescriptor(propId.descriptor) is true.
|
||||
// d.IsAccessorDescriptor(previous) is true and IsAccessorDescriptor(propId.descriptor) is true
|
||||
// and either both previous and propId.descriptor have[[Get]] fields or both previous and propId.descriptor have[[Set]] fields
|
||||
let currentKind = prop.kind === SyntaxKind.PropertyAssignment || prop.kind === SyntaxKind.ShorthandPropertyAssignment || prop.kind === SyntaxKind.MethodDeclaration
|
||||
const currentKind = prop.kind === SyntaxKind.PropertyAssignment || prop.kind === SyntaxKind.ShorthandPropertyAssignment || prop.kind === SyntaxKind.MethodDeclaration
|
||||
? ElementKind.Property
|
||||
: ElementKind.Accessor;
|
||||
|
||||
let existingKind = seen[identifier.text];
|
||||
const existingKind = seen[identifier.text];
|
||||
if (!existingKind) {
|
||||
seen[identifier.text] = currentKind;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (currentKind === ElementKind.Property && existingKind === ElementKind.Property) {
|
||||
let span = getErrorSpanForNode(file, identifier);
|
||||
const span = getErrorSpanForNode(file, identifier);
|
||||
file.bindDiagnostics.push(createFileDiagnostic(file, span.start, span.length,
|
||||
Diagnostics.An_object_literal_cannot_have_multiple_properties_with_the_same_name_in_strict_mode));
|
||||
}
|
||||
|
@ -910,7 +910,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
function bindAnonymousDeclaration(node: Declaration, symbolFlags: SymbolFlags, name: string) {
|
||||
let symbol = createSymbol(symbolFlags, name);
|
||||
const symbol = createSymbol(symbolFlags, name);
|
||||
addDeclarationToSymbol(symbol, node, symbolFlags);
|
||||
}
|
||||
|
||||
|
@ -989,7 +989,7 @@ namespace ts {
|
|||
if (inStrictMode && node.expression.kind === SyntaxKind.Identifier) {
|
||||
// When a delete operator occurs within strict mode code, a SyntaxError is thrown if its
|
||||
// UnaryExpression is a direct reference to a variable, function argument, or function name
|
||||
let span = getErrorSpanForNode(file, node.expression);
|
||||
const span = getErrorSpanForNode(file, node.expression);
|
||||
file.bindDiagnostics.push(createFileDiagnostic(file, span.start, span.length, Diagnostics.delete_cannot_be_called_on_an_identifier_in_strict_mode));
|
||||
}
|
||||
}
|
||||
|
@ -1001,11 +1001,11 @@ namespace ts {
|
|||
|
||||
function checkStrictModeEvalOrArguments(contextNode: Node, name: Node) {
|
||||
if (name && name.kind === SyntaxKind.Identifier) {
|
||||
let identifier = <Identifier>name;
|
||||
const identifier = <Identifier>name;
|
||||
if (isEvalOrArgumentsIdentifier(identifier)) {
|
||||
// We check first if the name is inside class declaration or class expression; if so give explicit message
|
||||
// otherwise report generic error message.
|
||||
let span = getErrorSpanForNode(file, name);
|
||||
const span = getErrorSpanForNode(file, name);
|
||||
file.bindDiagnostics.push(createFileDiagnostic(file, span.start, span.length,
|
||||
getStrictModeEvalOrArgumentsMessage(contextNode), identifier.text));
|
||||
}
|
||||
|
@ -1066,7 +1066,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
function errorOnFirstToken(node: Node, message: DiagnosticMessage, arg0?: any, arg1?: any, arg2?: any) {
|
||||
let span = getSpanOfTokenAtPosition(file, node.pos);
|
||||
const span = getSpanOfTokenAtPosition(file, node.pos);
|
||||
file.bindDiagnostics.push(createFileDiagnostic(file, span.start, span.length, message, arg0, arg1, arg2));
|
||||
}
|
||||
|
||||
|
@ -1081,7 +1081,7 @@ namespace ts {
|
|||
|
||||
node.parent = parent;
|
||||
|
||||
let savedInStrictMode = inStrictMode;
|
||||
const savedInStrictMode = inStrictMode;
|
||||
if (!savedInStrictMode) {
|
||||
updateStrictMode(node);
|
||||
}
|
||||
|
@ -1127,7 +1127,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
function updateStrictModeStatementList(statements: NodeArray<Statement>) {
|
||||
for (let statement of statements) {
|
||||
for (const statement of statements) {
|
||||
if (!isPrologueDirective(statement)) {
|
||||
return;
|
||||
}
|
||||
|
@ -1141,7 +1141,7 @@ namespace ts {
|
|||
|
||||
/// Should be called only on prologue directives (isPrologueDirective(node) should be true)
|
||||
function isUseStrictPrologueDirective(node: ExpressionStatement): boolean {
|
||||
let nodeText = getTextOfNodeFromSourceText(file.text, node.expression);
|
||||
const nodeText = getTextOfNodeFromSourceText(file.text, node.expression);
|
||||
|
||||
// Note: the node text must be exactly "use strict" or 'use strict'. It is not ok for the
|
||||
// string to contain unicode escapes (as per ES5).
|
||||
|
@ -1216,7 +1216,7 @@ namespace ts {
|
|||
case SyntaxKind.FunctionExpression:
|
||||
case SyntaxKind.ArrowFunction:
|
||||
checkStrictModeFunctionName(<FunctionExpression>node);
|
||||
let bindingName = (<FunctionExpression>node).name ? (<FunctionExpression>node).name.text : "__function";
|
||||
const bindingName = (<FunctionExpression>node).name ? (<FunctionExpression>node).name.text : "__function";
|
||||
return bindAnonymousDeclaration(<FunctionExpression>node, SymbolFlags.Function, bindingName);
|
||||
case SyntaxKind.ClassExpression:
|
||||
case SyntaxKind.ClassDeclaration:
|
||||
|
@ -1289,7 +1289,7 @@ namespace ts {
|
|||
bindBlockScopedDeclaration(node, SymbolFlags.Class, SymbolFlags.ClassExcludes);
|
||||
}
|
||||
else {
|
||||
let bindingName = node.name ? node.name.text : "__class";
|
||||
const bindingName = node.name ? node.name.text : "__class";
|
||||
bindAnonymousDeclaration(node, SymbolFlags.Class, bindingName);
|
||||
// Add name of class expression into the map for semantic classifier
|
||||
if (node.name) {
|
||||
|
@ -1297,7 +1297,7 @@ namespace ts {
|
|||
}
|
||||
}
|
||||
|
||||
let symbol = node.symbol;
|
||||
const symbol = node.symbol;
|
||||
|
||||
// TypeScript 1.0 spec (April 2014): 8.4
|
||||
// Every class automatically contains a static property member named 'prototype', the
|
||||
|
@ -1308,7 +1308,7 @@ namespace ts {
|
|||
// Note: we check for this here because this class may be merging into a module. The
|
||||
// module might have an exported variable called 'prototype'. We can't allow that as
|
||||
// that would clash with the built-in 'prototype' for the class.
|
||||
let prototypeSymbol = createSymbol(SymbolFlags.Property | SymbolFlags.Prototype, "prototype");
|
||||
const prototypeSymbol = createSymbol(SymbolFlags.Property | SymbolFlags.Prototype, "prototype");
|
||||
if (hasProperty(symbol.exports, prototypeSymbol.name)) {
|
||||
if (node.name) {
|
||||
node.name.parent = node;
|
||||
|
@ -1373,7 +1373,7 @@ namespace ts {
|
|||
node.parent.kind === SyntaxKind.Constructor &&
|
||||
isClassLike(node.parent.parent)) {
|
||||
|
||||
let classDeclaration = <ClassLikeDeclaration>node.parent.parent;
|
||||
const classDeclaration = <ClassLikeDeclaration>node.parent.parent;
|
||||
declareSymbol(classDeclaration.symbol.members, classDeclaration.symbol, node, SymbolFlags.Property, SymbolFlags.PropertyExcludes);
|
||||
}
|
||||
}
|
||||
|
@ -1399,13 +1399,13 @@ namespace ts {
|
|||
function pushImplicitLabel(): number {
|
||||
initializeReachabilityStateIfNecessary();
|
||||
|
||||
let index = labelStack.push(Reachability.Unintialized) - 1;
|
||||
const index = labelStack.push(Reachability.Unintialized) - 1;
|
||||
implicitLabels.push(index);
|
||||
return index;
|
||||
}
|
||||
|
||||
function popNamedLabel(label: Identifier, outerState: Reachability): void {
|
||||
let index = labelIndexMap[label.text];
|
||||
const index = labelIndexMap[label.text];
|
||||
Debug.assert(index !== undefined);
|
||||
Debug.assert(labelStack.length == index + 1);
|
||||
|
||||
|
@ -1419,7 +1419,7 @@ namespace ts {
|
|||
Debug.assert(false, `Label stack: ${labelStack.length}, index:${implicitLabelIndex}`);
|
||||
}
|
||||
|
||||
let i = implicitLabels.pop();
|
||||
const i = implicitLabels.pop();
|
||||
|
||||
if (implicitLabelIndex !== i) {
|
||||
Debug.assert(false, `i: ${i}, index: ${implicitLabelIndex}`);
|
||||
|
|
File diff suppressed because it is too large
Load diff
|
@ -295,8 +295,8 @@ namespace ts {
|
|||
return optionNameMapCache;
|
||||
}
|
||||
|
||||
let optionNameMap: Map<CommandLineOption> = {};
|
||||
let shortOptionNames: Map<string> = {};
|
||||
const optionNameMap: Map<CommandLineOption> = {};
|
||||
const shortOptionNames: Map<string> = {};
|
||||
forEach(optionDeclarations, option => {
|
||||
optionNameMap[option.name.toLowerCase()] = option;
|
||||
if (option.shortName) {
|
||||
|
@ -309,10 +309,10 @@ namespace ts {
|
|||
}
|
||||
|
||||
export function parseCommandLine(commandLine: string[], readFile?: (path: string) => string): ParsedCommandLine {
|
||||
let options: CompilerOptions = {};
|
||||
let fileNames: string[] = [];
|
||||
let errors: Diagnostic[] = [];
|
||||
let { optionNameMap, shortOptionNames } = getOptionNameMap();
|
||||
const options: CompilerOptions = {};
|
||||
const fileNames: string[] = [];
|
||||
const errors: Diagnostic[] = [];
|
||||
const { optionNameMap, shortOptionNames } = getOptionNameMap();
|
||||
|
||||
parseStrings(commandLine);
|
||||
return {
|
||||
|
@ -337,7 +337,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
if (hasProperty(optionNameMap, s)) {
|
||||
let opt = optionNameMap[s];
|
||||
const opt = optionNameMap[s];
|
||||
|
||||
// Check to see if no argument was provided (e.g. "--locale" is the last command-line argument).
|
||||
if (!args[i] && opt.type !== "boolean") {
|
||||
|
@ -377,19 +377,19 @@ namespace ts {
|
|||
}
|
||||
|
||||
function parseResponseFile(fileName: string) {
|
||||
let text = readFile ? readFile(fileName) : sys.readFile(fileName);
|
||||
const text = readFile ? readFile(fileName) : sys.readFile(fileName);
|
||||
|
||||
if (!text) {
|
||||
errors.push(createCompilerDiagnostic(Diagnostics.File_0_not_found, fileName));
|
||||
return;
|
||||
}
|
||||
|
||||
let args: string[] = [];
|
||||
const args: string[] = [];
|
||||
let pos = 0;
|
||||
while (true) {
|
||||
while (pos < text.length && text.charCodeAt(pos) <= CharacterCodes.space) pos++;
|
||||
if (pos >= text.length) break;
|
||||
let start = pos;
|
||||
const start = pos;
|
||||
if (text.charCodeAt(start) === CharacterCodes.doubleQuote) {
|
||||
pos++;
|
||||
while (pos < text.length && text.charCodeAt(pos) !== CharacterCodes.doubleQuote) pos++;
|
||||
|
@ -432,7 +432,7 @@ namespace ts {
|
|||
*/
|
||||
export function parseConfigFileTextToJson(fileName: string, jsonText: string): { config?: any; error?: Diagnostic } {
|
||||
try {
|
||||
let jsonTextWithoutComments = removeComments(jsonText);
|
||||
const jsonTextWithoutComments = removeComments(jsonText);
|
||||
return { config: /\S/.test(jsonTextWithoutComments) ? JSON.parse(jsonTextWithoutComments) : {} };
|
||||
}
|
||||
catch (e) {
|
||||
|
@ -449,7 +449,7 @@ namespace ts {
|
|||
*/
|
||||
function removeComments(jsonText: string): string {
|
||||
let output = "";
|
||||
let scanner = createScanner(ScriptTarget.ES5, /* skipTrivia */ false, LanguageVariant.Standard, jsonText);
|
||||
const scanner = createScanner(ScriptTarget.ES5, /* skipTrivia */ false, LanguageVariant.Standard, jsonText);
|
||||
let token: SyntaxKind;
|
||||
while ((token = scanner.scan()) !== SyntaxKind.EndOfFileToken) {
|
||||
switch (token) {
|
||||
|
@ -475,7 +475,7 @@ namespace ts {
|
|||
* file to. e.g. outDir
|
||||
*/
|
||||
export function parseJsonConfigFileContent(json: any, host: ParseConfigHost, basePath: string): ParsedCommandLine {
|
||||
let { options, errors } = convertCompilerOptionsFromJson(json["compilerOptions"], basePath);
|
||||
const { options, errors } = convertCompilerOptionsFromJson(json["compilerOptions"], basePath);
|
||||
|
||||
return {
|
||||
options,
|
||||
|
@ -494,12 +494,12 @@ namespace ts {
|
|||
}
|
||||
}
|
||||
else {
|
||||
let exclude = json["exclude"] instanceof Array ? map(<string[]>json["exclude"], normalizeSlashes) : undefined;
|
||||
let sysFiles = host.readDirectory(basePath, ".ts", exclude).concat(host.readDirectory(basePath, ".tsx", exclude));
|
||||
const exclude = json["exclude"] instanceof Array ? map(<string[]>json["exclude"], normalizeSlashes) : undefined;
|
||||
const sysFiles = host.readDirectory(basePath, ".ts", exclude).concat(host.readDirectory(basePath, ".tsx", exclude));
|
||||
for (let i = 0; i < sysFiles.length; i++) {
|
||||
let name = sysFiles[i];
|
||||
const name = sysFiles[i];
|
||||
if (fileExtensionIs(name, ".d.ts")) {
|
||||
let baseName = name.substr(0, name.length - ".d.ts".length);
|
||||
const baseName = name.substr(0, name.length - ".d.ts".length);
|
||||
if (!contains(sysFiles, baseName + ".tsx") && !contains(sysFiles, baseName + ".ts")) {
|
||||
fileNames.push(name);
|
||||
}
|
||||
|
@ -519,24 +519,24 @@ namespace ts {
|
|||
}
|
||||
|
||||
export function convertCompilerOptionsFromJson(jsonOptions: any, basePath: string): { options: CompilerOptions, errors: Diagnostic[] } {
|
||||
let options: CompilerOptions = {};
|
||||
let errors: Diagnostic[] = [];
|
||||
const options: CompilerOptions = {};
|
||||
const errors: Diagnostic[] = [];
|
||||
|
||||
if (!jsonOptions) {
|
||||
return { options, errors };
|
||||
}
|
||||
|
||||
let optionNameMap = arrayToMap(optionDeclarations, opt => opt.name);
|
||||
const optionNameMap = arrayToMap(optionDeclarations, opt => opt.name);
|
||||
|
||||
for (let id in jsonOptions) {
|
||||
for (const id in jsonOptions) {
|
||||
if (hasProperty(optionNameMap, id)) {
|
||||
let opt = optionNameMap[id];
|
||||
let optType = opt.type;
|
||||
const opt = optionNameMap[id];
|
||||
const optType = opt.type;
|
||||
let value = jsonOptions[id];
|
||||
let expectedType = typeof optType === "string" ? optType : "string";
|
||||
const expectedType = typeof optType === "string" ? optType : "string";
|
||||
if (typeof value === expectedType) {
|
||||
if (typeof optType !== "string") {
|
||||
let key = value.toLowerCase();
|
||||
const key = value.toLowerCase();
|
||||
if (hasProperty(optType, key)) {
|
||||
value = optType[key];
|
||||
}
|
||||
|
|
|
@ -29,7 +29,7 @@ namespace ts {
|
|||
};
|
||||
|
||||
function forEachValueInMap(f: (key: Path, value: T) => void) {
|
||||
for (let key in files) {
|
||||
for (const key in files) {
|
||||
f(<Path>key, files[key]);
|
||||
}
|
||||
}
|
||||
|
@ -84,7 +84,7 @@ namespace ts {
|
|||
export function forEach<T, U>(array: T[], callback: (element: T, index: number) => U): U {
|
||||
if (array) {
|
||||
for (let i = 0, len = array.length; i < len; i++) {
|
||||
let result = callback(array[i], i);
|
||||
const result = callback(array[i], i);
|
||||
if (result) {
|
||||
return result;
|
||||
}
|
||||
|
@ -95,7 +95,7 @@ namespace ts {
|
|||
|
||||
export function contains<T>(array: T[], value: T): boolean {
|
||||
if (array) {
|
||||
for (let v of array) {
|
||||
for (const v of array) {
|
||||
if (v === value) {
|
||||
return true;
|
||||
}
|
||||
|
@ -118,7 +118,7 @@ namespace ts {
|
|||
export function countWhere<T>(array: T[], predicate: (x: T) => boolean): number {
|
||||
let count = 0;
|
||||
if (array) {
|
||||
for (let v of array) {
|
||||
for (const v of array) {
|
||||
if (predicate(v)) {
|
||||
count++;
|
||||
}
|
||||
|
@ -131,7 +131,7 @@ namespace ts {
|
|||
let result: T[];
|
||||
if (array) {
|
||||
result = [];
|
||||
for (let item of array) {
|
||||
for (const item of array) {
|
||||
if (f(item)) {
|
||||
result.push(item);
|
||||
}
|
||||
|
@ -144,7 +144,7 @@ namespace ts {
|
|||
let result: U[];
|
||||
if (array) {
|
||||
result = [];
|
||||
for (let v of array) {
|
||||
for (const v of array) {
|
||||
result.push(f(v));
|
||||
}
|
||||
}
|
||||
|
@ -162,7 +162,7 @@ namespace ts {
|
|||
let result: T[];
|
||||
if (array) {
|
||||
result = [];
|
||||
for (let item of array) {
|
||||
for (const item of array) {
|
||||
if (!contains(result, item)) {
|
||||
result.push(item);
|
||||
}
|
||||
|
@ -173,7 +173,7 @@ namespace ts {
|
|||
|
||||
export function sum(array: any[], prop: string): number {
|
||||
let result = 0;
|
||||
for (let v of array) {
|
||||
for (const v of array) {
|
||||
result += v[prop];
|
||||
}
|
||||
return result;
|
||||
|
@ -181,7 +181,7 @@ namespace ts {
|
|||
|
||||
export function addRange<T>(to: T[], from: T[]): void {
|
||||
if (to && from) {
|
||||
for (let v of from) {
|
||||
for (const v of from) {
|
||||
to.push(v);
|
||||
}
|
||||
}
|
||||
|
@ -220,8 +220,8 @@ namespace ts {
|
|||
let high = array.length - 1;
|
||||
|
||||
while (low <= high) {
|
||||
let middle = low + ((high - low) >> 1);
|
||||
let midValue = array[middle];
|
||||
const middle = low + ((high - low) >> 1);
|
||||
const midValue = array[middle];
|
||||
|
||||
if (midValue === value) {
|
||||
return middle;
|
||||
|
@ -270,7 +270,7 @@ namespace ts {
|
|||
return initial;
|
||||
}
|
||||
|
||||
let hasOwnProperty = Object.prototype.hasOwnProperty;
|
||||
const hasOwnProperty = Object.prototype.hasOwnProperty;
|
||||
|
||||
export function hasProperty<T>(map: Map<T>, key: string): boolean {
|
||||
return hasOwnProperty.call(map, key);
|
||||
|
@ -281,7 +281,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
export function isEmpty<T>(map: Map<T>) {
|
||||
for (let id in map) {
|
||||
for (const id in map) {
|
||||
if (hasProperty(map, id)) {
|
||||
return false;
|
||||
}
|
||||
|
@ -290,19 +290,19 @@ namespace ts {
|
|||
}
|
||||
|
||||
export function clone<T>(object: T): T {
|
||||
let result: any = {};
|
||||
for (let id in object) {
|
||||
const result: any = {};
|
||||
for (const id in object) {
|
||||
result[id] = (<any>object)[id];
|
||||
}
|
||||
return <T>result;
|
||||
}
|
||||
|
||||
export function extend<T1, T2>(first: Map<T1>, second: Map<T2>): Map<T1 & T2> {
|
||||
let result: Map<T1 & T2> = {};
|
||||
for (let id in first) {
|
||||
const result: Map<T1 & T2> = {};
|
||||
for (const id in first) {
|
||||
(result as any)[id] = first[id];
|
||||
}
|
||||
for (let id in second) {
|
||||
for (const id in second) {
|
||||
if (!hasProperty(result, id)) {
|
||||
(result as any)[id] = second[id];
|
||||
}
|
||||
|
@ -312,7 +312,7 @@ namespace ts {
|
|||
|
||||
export function forEachValue<T, U>(map: Map<T>, callback: (value: T) => U): U {
|
||||
let result: U;
|
||||
for (let id in map) {
|
||||
for (const id in map) {
|
||||
if (result = callback(map[id])) break;
|
||||
}
|
||||
return result;
|
||||
|
@ -320,7 +320,7 @@ namespace ts {
|
|||
|
||||
export function forEachKey<T, U>(map: Map<T>, callback: (key: string) => U): U {
|
||||
let result: U;
|
||||
for (let id in map) {
|
||||
for (const id in map) {
|
||||
if (result = callback(id)) break;
|
||||
}
|
||||
return result;
|
||||
|
@ -331,7 +331,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
export function copyMap<T>(source: Map<T>, target: Map<T>): void {
|
||||
for (let p in source) {
|
||||
for (const p in source) {
|
||||
target[p] = source[p];
|
||||
}
|
||||
}
|
||||
|
@ -347,7 +347,7 @@ namespace ts {
|
|||
* index in the array will be the one associated with the produced key.
|
||||
*/
|
||||
export function arrayToMap<T>(array: T[], makeKey: (value: T) => string): Map<T> {
|
||||
let result: Map<T> = {};
|
||||
const result: Map<T> = {};
|
||||
|
||||
forEach(array, value => {
|
||||
result[makeKey(value)] = value;
|
||||
|
@ -383,7 +383,7 @@ namespace ts {
|
|||
|
||||
export function createFileDiagnostic(file: SourceFile, start: number, length: number, message: DiagnosticMessage, ...args: any[]): Diagnostic;
|
||||
export function createFileDiagnostic(file: SourceFile, start: number, length: number, message: DiagnosticMessage): Diagnostic {
|
||||
let end = start + length;
|
||||
const end = start + length;
|
||||
|
||||
Debug.assert(start >= 0, "start must be non-negative, is " + start);
|
||||
Debug.assert(length >= 0, "length must be non-negative, is " + length);
|
||||
|
@ -479,10 +479,10 @@ namespace ts {
|
|||
function compareMessageText(text1: string | DiagnosticMessageChain, text2: string | DiagnosticMessageChain): Comparison {
|
||||
while (text1 && text2) {
|
||||
// We still have both chains.
|
||||
let string1 = typeof text1 === "string" ? text1 : text1.messageText;
|
||||
let string2 = typeof text2 === "string" ? text2 : text2.messageText;
|
||||
const string1 = typeof text1 === "string" ? text1 : text1.messageText;
|
||||
const string2 = typeof text2 === "string" ? text2 : text2.messageText;
|
||||
|
||||
let res = compareValues(string1, string2);
|
||||
const res = compareValues(string1, string2);
|
||||
if (res) {
|
||||
return res;
|
||||
}
|
||||
|
@ -509,11 +509,11 @@ namespace ts {
|
|||
return diagnostics;
|
||||
}
|
||||
|
||||
let newDiagnostics = [diagnostics[0]];
|
||||
const newDiagnostics = [diagnostics[0]];
|
||||
let previousDiagnostic = diagnostics[0];
|
||||
for (let i = 1; i < diagnostics.length; i++) {
|
||||
let currentDiagnostic = diagnostics[i];
|
||||
let isDupe = compareDiagnostics(currentDiagnostic, previousDiagnostic) === Comparison.EqualTo;
|
||||
const currentDiagnostic = diagnostics[i];
|
||||
const isDupe = compareDiagnostics(currentDiagnostic, previousDiagnostic) === Comparison.EqualTo;
|
||||
if (!isDupe) {
|
||||
newDiagnostics.push(currentDiagnostic);
|
||||
previousDiagnostic = currentDiagnostic;
|
||||
|
@ -531,9 +531,9 @@ namespace ts {
|
|||
export function getRootLength(path: string): number {
|
||||
if (path.charCodeAt(0) === CharacterCodes.slash) {
|
||||
if (path.charCodeAt(1) !== CharacterCodes.slash) return 1;
|
||||
let p1 = path.indexOf("/", 2);
|
||||
const p1 = path.indexOf("/", 2);
|
||||
if (p1 < 0) return 2;
|
||||
let p2 = path.indexOf("/", p1 + 1);
|
||||
const p2 = path.indexOf("/", p1 + 1);
|
||||
if (p2 < 0) return p1 + 1;
|
||||
return p2 + 1;
|
||||
}
|
||||
|
@ -549,7 +549,7 @@ namespace ts {
|
|||
if (path.lastIndexOf("file:///", 0) === 0) {
|
||||
return "file:///".length;
|
||||
}
|
||||
let idx = path.indexOf("://");
|
||||
const idx = path.indexOf("://");
|
||||
if (idx !== -1) {
|
||||
return idx + "://".length;
|
||||
}
|
||||
|
@ -558,9 +558,9 @@ namespace ts {
|
|||
|
||||
export let directorySeparator = "/";
|
||||
function getNormalizedParts(normalizedSlashedPath: string, rootLength: number) {
|
||||
let parts = normalizedSlashedPath.substr(rootLength).split(directorySeparator);
|
||||
let normalized: string[] = [];
|
||||
for (let part of parts) {
|
||||
const parts = normalizedSlashedPath.substr(rootLength).split(directorySeparator);
|
||||
const normalized: string[] = [];
|
||||
for (const part of parts) {
|
||||
if (part !== ".") {
|
||||
if (part === ".." && normalized.length > 0 && lastOrUndefined(normalized) !== "..") {
|
||||
normalized.pop();
|
||||
|
@ -580,8 +580,8 @@ namespace ts {
|
|||
|
||||
export function normalizePath(path: string): string {
|
||||
path = normalizeSlashes(path);
|
||||
let rootLength = getRootLength(path);
|
||||
let normalized = getNormalizedParts(path, rootLength);
|
||||
const rootLength = getRootLength(path);
|
||||
const normalized = getNormalizedParts(path, rootLength);
|
||||
return path.substr(0, rootLength) + normalized.join(directorySeparator);
|
||||
}
|
||||
|
||||
|
@ -598,7 +598,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
function normalizedPathComponents(path: string, rootLength: number) {
|
||||
let normalizedParts = getNormalizedParts(path, rootLength);
|
||||
const normalizedParts = getNormalizedParts(path, rootLength);
|
||||
return [path.substr(0, rootLength)].concat(normalizedParts);
|
||||
}
|
||||
|
||||
|
@ -629,7 +629,7 @@ namespace ts {
|
|||
// In this example the root is: http://www.website.com/
|
||||
// normalized path components should be ["http://www.website.com/", "folder1", "folder2"]
|
||||
|
||||
let urlLength = url.length;
|
||||
const urlLength = url.length;
|
||||
// Initial root length is http:// part
|
||||
let rootLength = url.indexOf("://") + "://".length;
|
||||
while (rootLength < urlLength) {
|
||||
|
@ -650,7 +650,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
// Find the index of "/" after website.com so the root can be http://www.website.com/ (from existing http://)
|
||||
let indexOfNextSlash = url.indexOf(directorySeparator, rootLength);
|
||||
const indexOfNextSlash = url.indexOf(directorySeparator, rootLength);
|
||||
if (indexOfNextSlash !== -1) {
|
||||
// Found the "/" after the website.com so the root is length of http://www.website.com/
|
||||
// and get components afetr the root normally like any other folder components
|
||||
|
@ -676,8 +676,8 @@ namespace ts {
|
|||
}
|
||||
|
||||
export function getRelativePathToDirectoryOrUrl(directoryPathOrUrl: string, relativeOrAbsolutePath: string, currentDirectory: string, getCanonicalFileName: (fileName: string) => string, isAbsolutePathAnUrl: boolean) {
|
||||
let pathComponents = getNormalizedPathOrUrlComponents(relativeOrAbsolutePath, currentDirectory);
|
||||
let directoryComponents = getNormalizedPathOrUrlComponents(directoryPathOrUrl, currentDirectory);
|
||||
const pathComponents = getNormalizedPathOrUrlComponents(relativeOrAbsolutePath, currentDirectory);
|
||||
const directoryComponents = getNormalizedPathOrUrlComponents(directoryPathOrUrl, currentDirectory);
|
||||
if (directoryComponents.length > 1 && lastOrUndefined(directoryComponents) === "") {
|
||||
// If the directory path given was of type test/cases/ then we really need components of directory to be only till its name
|
||||
// that is ["test", "cases", ""] needs to be actually ["test", "cases"]
|
||||
|
@ -694,7 +694,7 @@ namespace ts {
|
|||
// Get the relative path
|
||||
if (joinStartIndex) {
|
||||
let relativePath = "";
|
||||
let relativePathComponents = pathComponents.slice(joinStartIndex, pathComponents.length);
|
||||
const relativePathComponents = pathComponents.slice(joinStartIndex, pathComponents.length);
|
||||
for (; joinStartIndex < directoryComponents.length; joinStartIndex++) {
|
||||
if (directoryComponents[joinStartIndex] !== "") {
|
||||
relativePath = relativePath + ".." + directorySeparator;
|
||||
|
@ -717,7 +717,7 @@ namespace ts {
|
|||
if (!path) {
|
||||
return undefined;
|
||||
}
|
||||
let i = path.lastIndexOf(directorySeparator);
|
||||
const i = path.lastIndexOf(directorySeparator);
|
||||
return i < 0 ? path : path.substring(i + 1);
|
||||
}
|
||||
|
||||
|
@ -730,8 +730,8 @@ namespace ts {
|
|||
}
|
||||
|
||||
export function fileExtensionIs(path: string, extension: string): boolean {
|
||||
let pathLen = path.length;
|
||||
let extLen = extension.length;
|
||||
const pathLen = path.length;
|
||||
const extLen = extension.length;
|
||||
return pathLen > extLen && path.substr(pathLen - extLen, extLen) === extension;
|
||||
}
|
||||
|
||||
|
@ -749,7 +749,7 @@ namespace ts {
|
|||
export function isSupportedSourceFileName(fileName: string) {
|
||||
if (!fileName) { return false; }
|
||||
|
||||
for (let extension of supportedExtensions) {
|
||||
for (const extension of supportedExtensions) {
|
||||
if (fileExtensionIs(fileName, extension)) {
|
||||
return true;
|
||||
}
|
||||
|
@ -759,7 +759,7 @@ namespace ts {
|
|||
|
||||
const extensionsToRemove = [".d.ts", ".ts", ".js", ".tsx", ".jsx"];
|
||||
export function removeFileExtension(path: string): string {
|
||||
for (let ext of extensionsToRemove) {
|
||||
for (const ext of extensionsToRemove) {
|
||||
if (fileExtensionIs(path, ext)) {
|
||||
return path.substr(0, path.length - ext.length);
|
||||
}
|
||||
|
@ -767,9 +767,9 @@ namespace ts {
|
|||
return path;
|
||||
}
|
||||
|
||||
let backslashOrDoubleQuote = /[\"\\]/g;
|
||||
let escapedCharsRegExp = /[\u0000-\u001f\t\v\f\b\r\n\u2028\u2029\u0085]/g;
|
||||
let escapedCharsMap: Map<string> = {
|
||||
const backslashOrDoubleQuote = /[\"\\]/g;
|
||||
const escapedCharsRegExp = /[\u0000-\u001f\t\v\f\b\r\n\u2028\u2029\u0085]/g;
|
||||
const escapedCharsMap: Map<string> = {
|
||||
"\0": "\\0",
|
||||
"\t": "\\t",
|
||||
"\v": "\\v",
|
||||
|
@ -828,7 +828,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
export namespace Debug {
|
||||
let currentAssertionLevel = AssertionLevel.None;
|
||||
const currentAssertionLevel = AssertionLevel.None;
|
||||
|
||||
export function shouldAssert(level: AssertionLevel): boolean {
|
||||
return currentAssertionLevel >= level;
|
||||
|
@ -851,8 +851,8 @@ namespace ts {
|
|||
}
|
||||
|
||||
export function copyListRemovingItem<T>(item: T, list: T[]) {
|
||||
let copiedList: T[] = [];
|
||||
for (let e of list) {
|
||||
const copiedList: T[] = [];
|
||||
for (const e of list) {
|
||||
if (e !== item) {
|
||||
copiedList.push(e);
|
||||
}
|
||||
|
|
|
@ -31,15 +31,15 @@ namespace ts {
|
|||
}
|
||||
|
||||
export function getDeclarationDiagnostics(host: EmitHost, resolver: EmitResolver, targetSourceFile: SourceFile): Diagnostic[] {
|
||||
let diagnostics: Diagnostic[] = [];
|
||||
let jsFilePath = getOwnEmitOutputFilePath(targetSourceFile, host, ".js");
|
||||
const diagnostics: Diagnostic[] = [];
|
||||
const jsFilePath = getOwnEmitOutputFilePath(targetSourceFile, host, ".js");
|
||||
emitDeclarations(host, resolver, diagnostics, jsFilePath, targetSourceFile);
|
||||
return diagnostics;
|
||||
}
|
||||
|
||||
function emitDeclarations(host: EmitHost, resolver: EmitResolver, diagnostics: Diagnostic[], jsFilePath: string, root?: SourceFile): DeclarationEmit {
|
||||
let newLine = host.getNewLine();
|
||||
let compilerOptions = host.getCompilerOptions();
|
||||
const newLine = host.getNewLine();
|
||||
const compilerOptions = host.getCompilerOptions();
|
||||
|
||||
let write: (s: string) => void;
|
||||
let writeLine: () => void;
|
||||
|
@ -53,10 +53,10 @@ namespace ts {
|
|||
let currentSourceFile: SourceFile;
|
||||
let reportedDeclarationError = false;
|
||||
let errorNameNode: DeclarationName;
|
||||
let emitJsDocComments = compilerOptions.removeComments ? function (declaration: Node) { } : writeJsDocComments;
|
||||
let emit = compilerOptions.stripInternal ? stripInternal : emitNode;
|
||||
const emitJsDocComments = compilerOptions.removeComments ? function (declaration: Node) { } : writeJsDocComments;
|
||||
const emit = compilerOptions.stripInternal ? stripInternal : emitNode;
|
||||
|
||||
let moduleElementDeclarationEmitInfo: ModuleElementDeclarationEmitInfo[] = [];
|
||||
const moduleElementDeclarationEmitInfo: ModuleElementDeclarationEmitInfo[] = [];
|
||||
let asynchronousSubModuleDeclarationEmitInfo: ModuleElementDeclarationEmitInfo[];
|
||||
|
||||
// Contains the reference paths that needs to go in the declaration file.
|
||||
|
@ -69,7 +69,7 @@ namespace ts {
|
|||
if (!compilerOptions.noResolve) {
|
||||
let addedGlobalFileReference = false;
|
||||
forEach(root.referencedFiles, fileReference => {
|
||||
let referencedFile = tryResolveScriptReference(host, root, fileReference);
|
||||
const referencedFile = tryResolveScriptReference(host, root, fileReference);
|
||||
|
||||
// All the references that are not going to be part of same file
|
||||
if (referencedFile && ((referencedFile.flags & NodeFlags.DeclarationFile) || // This is a declare file reference
|
||||
|
@ -88,7 +88,7 @@ namespace ts {
|
|||
|
||||
// create asynchronous output for the importDeclarations
|
||||
if (moduleElementDeclarationEmitInfo.length) {
|
||||
let oldWriter = writer;
|
||||
const oldWriter = writer;
|
||||
forEach(moduleElementDeclarationEmitInfo, aliasEmitInfo => {
|
||||
if (aliasEmitInfo.isVisible) {
|
||||
Debug.assert(aliasEmitInfo.node.kind === SyntaxKind.ImportDeclaration);
|
||||
|
@ -103,13 +103,13 @@ namespace ts {
|
|||
}
|
||||
else {
|
||||
// Emit references corresponding to this file
|
||||
let emittedReferencedFiles: SourceFile[] = [];
|
||||
const emittedReferencedFiles: SourceFile[] = [];
|
||||
forEach(host.getSourceFiles(), sourceFile => {
|
||||
if (!isExternalModuleOrDeclarationFile(sourceFile)) {
|
||||
// Check what references need to be added
|
||||
if (!compilerOptions.noResolve) {
|
||||
forEach(sourceFile.referencedFiles, fileReference => {
|
||||
let referencedFile = tryResolveScriptReference(host, sourceFile, fileReference);
|
||||
const referencedFile = tryResolveScriptReference(host, sourceFile, fileReference);
|
||||
|
||||
// If the reference file is a declaration file or an external module, emit that reference
|
||||
if (referencedFile && (isExternalModuleOrDeclarationFile(referencedFile) &&
|
||||
|
@ -134,14 +134,14 @@ namespace ts {
|
|||
};
|
||||
|
||||
function hasInternalAnnotation(range: CommentRange) {
|
||||
let text = currentSourceFile.text;
|
||||
let comment = text.substring(range.pos, range.end);
|
||||
const text = currentSourceFile.text;
|
||||
const comment = text.substring(range.pos, range.end);
|
||||
return comment.indexOf("@internal") >= 0;
|
||||
}
|
||||
|
||||
function stripInternal(node: Node) {
|
||||
if (node) {
|
||||
let leadingCommentRanges = getLeadingCommentRanges(currentSourceFile.text, node.pos);
|
||||
const leadingCommentRanges = getLeadingCommentRanges(currentSourceFile.text, node.pos);
|
||||
if (forEach(leadingCommentRanges, hasInternalAnnotation)) {
|
||||
return;
|
||||
}
|
||||
|
@ -151,7 +151,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
function createAndSetNewTextWriterWithSymbolWriter(): EmitTextWriterWithSymbolWriter {
|
||||
let writer = <EmitTextWriterWithSymbolWriter>createTextWriter(newLine);
|
||||
const writer = <EmitTextWriterWithSymbolWriter>createTextWriter(newLine);
|
||||
writer.trackSymbol = trackSymbol;
|
||||
writer.reportInaccessibleThisError = reportInaccessibleThisError;
|
||||
writer.writeKeyword = writer.write;
|
||||
|
@ -175,7 +175,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
function writeAsynchronousModuleElements(nodes: Node[]) {
|
||||
let oldWriter = writer;
|
||||
const oldWriter = writer;
|
||||
forEach(nodes, declaration => {
|
||||
let nodeToCheck: Node;
|
||||
if (declaration.kind === SyntaxKind.VariableDeclaration) {
|
||||
|
@ -238,7 +238,7 @@ namespace ts {
|
|||
else {
|
||||
// Report error
|
||||
reportedDeclarationError = true;
|
||||
let errorInfo = writer.getSymbolAccessibilityDiagnostic(symbolAccesibilityResult);
|
||||
const errorInfo = writer.getSymbolAccessibilityDiagnostic(symbolAccesibilityResult);
|
||||
if (errorInfo) {
|
||||
if (errorInfo.typeName) {
|
||||
diagnostics.push(createDiagnosticForNode(symbolAccesibilityResult.errorNode || errorInfo.errorNode,
|
||||
|
@ -297,14 +297,14 @@ namespace ts {
|
|||
}
|
||||
|
||||
function emitLines(nodes: Node[]) {
|
||||
for (let node of nodes) {
|
||||
for (const node of nodes) {
|
||||
emit(node);
|
||||
}
|
||||
}
|
||||
|
||||
function emitSeparatedList(nodes: Node[], separator: string, eachNodeEmitFn: (node: Node) => void, canEmitFn?: (node: Node) => boolean) {
|
||||
let currentWriterPos = writer.getTextPos();
|
||||
for (let node of nodes) {
|
||||
for (const node of nodes) {
|
||||
if (!canEmitFn || canEmitFn(node)) {
|
||||
if (currentWriterPos !== writer.getTextPos()) {
|
||||
write(separator);
|
||||
|
@ -321,7 +321,7 @@ namespace ts {
|
|||
|
||||
function writeJsDocComments(declaration: Node) {
|
||||
if (declaration) {
|
||||
let jsDocComments = getJsDocComments(declaration, currentSourceFile);
|
||||
const jsDocComments = getJsDocComments(declaration, currentSourceFile);
|
||||
emitNewLineBeforeLeadingComments(currentSourceFile, writer, declaration, jsDocComments);
|
||||
// jsDoc comments are emitted at /*leading comment1 */space/*leading comment*/space
|
||||
emitComments(currentSourceFile, writer, jsDocComments, /*trailingSeparator*/ true, newLine, writeCommentRange);
|
||||
|
@ -378,8 +378,8 @@ namespace ts {
|
|||
writeTextOfNode(currentSourceFile, entityName);
|
||||
}
|
||||
else {
|
||||
let left = entityName.kind === SyntaxKind.QualifiedName ? (<QualifiedName>entityName).left : (<PropertyAccessExpression>entityName).expression;
|
||||
let right = entityName.kind === SyntaxKind.QualifiedName ? (<QualifiedName>entityName).right : (<PropertyAccessExpression>entityName).name;
|
||||
const left = entityName.kind === SyntaxKind.QualifiedName ? (<QualifiedName>entityName).left : (<PropertyAccessExpression>entityName).expression;
|
||||
const right = entityName.kind === SyntaxKind.QualifiedName ? (<QualifiedName>entityName).right : (<PropertyAccessExpression>entityName).name;
|
||||
writeEntityName(left);
|
||||
write(".");
|
||||
writeTextOfNode(currentSourceFile, right);
|
||||
|
@ -387,7 +387,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
function emitEntityName(entityName: EntityName | PropertyAccessExpression) {
|
||||
let visibilityResult = resolver.isEntityNameVisible(entityName,
|
||||
const visibilityResult = resolver.isEntityNameVisible(entityName,
|
||||
// Aliases can be written asynchronously so use correct enclosing declaration
|
||||
entityName.parent.kind === SyntaxKind.ImportEqualsDeclaration ? entityName.parent : enclosingDeclaration);
|
||||
|
||||
|
@ -477,13 +477,13 @@ namespace ts {
|
|||
// Note that export default is only allowed at most once in a module, so we
|
||||
// do not need to keep track of created temp names.
|
||||
function getExportDefaultTempVariableName(): string {
|
||||
let baseName = "_default";
|
||||
const baseName = "_default";
|
||||
if (!hasProperty(currentSourceFile.identifiers, baseName)) {
|
||||
return baseName;
|
||||
}
|
||||
let count = 0;
|
||||
while (true) {
|
||||
let name = baseName + "_" + (++count);
|
||||
const name = baseName + "_" + (++count);
|
||||
if (!hasProperty(currentSourceFile.identifiers, name)) {
|
||||
return name;
|
||||
}
|
||||
|
@ -497,7 +497,7 @@ namespace ts {
|
|||
}
|
||||
else {
|
||||
// Expression
|
||||
let tempVarName = getExportDefaultTempVariableName();
|
||||
const tempVarName = getExportDefaultTempVariableName();
|
||||
write("declare var ");
|
||||
write(tempVarName);
|
||||
write(": ");
|
||||
|
@ -513,7 +513,7 @@ namespace ts {
|
|||
|
||||
// Make all the declarations visible for the export name
|
||||
if (node.expression.kind === SyntaxKind.Identifier) {
|
||||
let nodes = resolver.collectLinkedAliases(<Identifier>node.expression);
|
||||
const nodes = resolver.collectLinkedAliases(<Identifier>node.expression);
|
||||
|
||||
// write each of these declarations asynchronously
|
||||
writeAsynchronousModuleElements(nodes);
|
||||
|
@ -550,7 +550,7 @@ namespace ts {
|
|||
}
|
||||
else {
|
||||
if (node.kind === SyntaxKind.ImportDeclaration) {
|
||||
let importDeclaration = <ImportDeclaration>node;
|
||||
const importDeclaration = <ImportDeclaration>node;
|
||||
if (importDeclaration.importClause) {
|
||||
isVisible = (importDeclaration.importClause.name && resolver.isDeclarationVisible(importDeclaration.importClause)) ||
|
||||
isVisibleNamedBinding(importDeclaration.importClause.namedBindings);
|
||||
|
@ -676,7 +676,7 @@ namespace ts {
|
|||
}
|
||||
write("import ");
|
||||
if (node.importClause) {
|
||||
let currentWriterPos = writer.getTextPos();
|
||||
const currentWriterPos = writer.getTextPos();
|
||||
if (node.importClause.name && resolver.isDeclarationVisible(node.importClause)) {
|
||||
writeTextOfNode(currentSourceFile, node.importClause.name);
|
||||
}
|
||||
|
@ -714,7 +714,7 @@ namespace ts {
|
|||
emitImportOrExportSpecifier(node);
|
||||
|
||||
// Make all the declarations visible for the export name
|
||||
let nodes = resolver.collectLinkedAliases(node.propertyName || node.name);
|
||||
const nodes = resolver.collectLinkedAliases(node.propertyName || node.name);
|
||||
|
||||
// write each of these declarations asynchronously
|
||||
writeAsynchronousModuleElements(nodes);
|
||||
|
@ -754,7 +754,7 @@ namespace ts {
|
|||
write(".");
|
||||
writeTextOfNode(currentSourceFile, node.name);
|
||||
}
|
||||
let prevEnclosingDeclaration = enclosingDeclaration;
|
||||
const prevEnclosingDeclaration = enclosingDeclaration;
|
||||
enclosingDeclaration = node;
|
||||
write(" {");
|
||||
writeLine();
|
||||
|
@ -767,7 +767,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
function writeTypeAliasDeclaration(node: TypeAliasDeclaration) {
|
||||
let prevEnclosingDeclaration = enclosingDeclaration;
|
||||
const prevEnclosingDeclaration = enclosingDeclaration;
|
||||
enclosingDeclaration = node;
|
||||
emitJsDocComments(node);
|
||||
emitModuleElementDeclarationFlags(node);
|
||||
|
@ -809,7 +809,7 @@ namespace ts {
|
|||
function emitEnumMemberDeclaration(node: EnumMember) {
|
||||
emitJsDocComments(node);
|
||||
writeTextOfNode(currentSourceFile, node.name);
|
||||
let enumMemberValue = resolver.getConstantValue(node);
|
||||
const enumMemberValue = resolver.getConstantValue(node);
|
||||
if (enumMemberValue !== undefined) {
|
||||
write(" = ");
|
||||
write(enumMemberValue.toString());
|
||||
|
@ -959,10 +959,10 @@ namespace ts {
|
|||
|
||||
write("class ");
|
||||
writeTextOfNode(currentSourceFile, node.name);
|
||||
let prevEnclosingDeclaration = enclosingDeclaration;
|
||||
const prevEnclosingDeclaration = enclosingDeclaration;
|
||||
enclosingDeclaration = node;
|
||||
emitTypeParameters(node.typeParameters);
|
||||
let baseTypeNode = getClassExtendsHeritageClauseElement(node);
|
||||
const baseTypeNode = getClassExtendsHeritageClauseElement(node);
|
||||
if (baseTypeNode) {
|
||||
emitHeritageClause([baseTypeNode], /*isImplementsList*/ false);
|
||||
}
|
||||
|
@ -983,7 +983,7 @@ namespace ts {
|
|||
emitModuleElementDeclarationFlags(node);
|
||||
write("interface ");
|
||||
writeTextOfNode(currentSourceFile, node.name);
|
||||
let prevEnclosingDeclaration = enclosingDeclaration;
|
||||
const prevEnclosingDeclaration = enclosingDeclaration;
|
||||
enclosingDeclaration = node;
|
||||
emitTypeParameters(node.typeParameters);
|
||||
emitHeritageClause(getInterfaceBaseTypeNodes(node), /*isImplementsList*/ false);
|
||||
|
@ -1069,7 +1069,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
function getVariableDeclarationTypeVisibilityError(symbolAccesibilityResult: SymbolAccessiblityResult): SymbolAccessibilityDiagnostic {
|
||||
let diagnosticMessage = getVariableDeclarationTypeVisibilityDiagnosticMessage(symbolAccesibilityResult);
|
||||
const diagnosticMessage = getVariableDeclarationTypeVisibilityDiagnosticMessage(symbolAccesibilityResult);
|
||||
return diagnosticMessage !== undefined ? {
|
||||
diagnosticMessage,
|
||||
errorNode: node,
|
||||
|
@ -1083,8 +1083,8 @@ namespace ts {
|
|||
// For example:
|
||||
// original: var [, c,,] = [ 2,3,4]
|
||||
// emitted: declare var c: number; // instead of declare var c:number, ;
|
||||
let elements: Node[] = [];
|
||||
for (let element of bindingPattern.elements) {
|
||||
const elements: Node[] = [];
|
||||
for (const element of bindingPattern.elements) {
|
||||
if (element.kind !== SyntaxKind.OmittedExpression) {
|
||||
elements.push(element);
|
||||
}
|
||||
|
@ -1094,7 +1094,7 @@ namespace ts {
|
|||
|
||||
function emitBindingElement(bindingElement: BindingElement) {
|
||||
function getBindingElementTypeVisibilityError(symbolAccesibilityResult: SymbolAccessiblityResult): SymbolAccessibilityDiagnostic {
|
||||
let diagnosticMessage = getVariableDeclarationTypeVisibilityDiagnosticMessage(symbolAccesibilityResult);
|
||||
const diagnosticMessage = getVariableDeclarationTypeVisibilityDiagnosticMessage(symbolAccesibilityResult);
|
||||
return diagnosticMessage !== undefined ? {
|
||||
diagnosticMessage,
|
||||
errorNode: bindingElement,
|
||||
|
@ -1150,7 +1150,7 @@ namespace ts {
|
|||
return;
|
||||
}
|
||||
|
||||
let accessors = getAllAccessorDeclarations((<ClassDeclaration>node.parent).members, node);
|
||||
const accessors = getAllAccessorDeclarations((<ClassDeclaration>node.parent).members, node);
|
||||
let accessorWithTypeAnnotation: AccessorDeclaration;
|
||||
|
||||
if (node === accessors.firstAccessor) {
|
||||
|
@ -1163,7 +1163,7 @@ namespace ts {
|
|||
let type = getTypeAnnotationFromAccessor(node);
|
||||
if (!type) {
|
||||
// couldn't get type for the first accessor, try the another one
|
||||
let anotherAccessor = node.kind === SyntaxKind.GetAccessor ? accessors.setAccessor : accessors.getAccessor;
|
||||
const anotherAccessor = node.kind === SyntaxKind.GetAccessor ? accessors.setAccessor : accessors.getAccessor;
|
||||
type = getTypeAnnotationFromAccessor(anotherAccessor);
|
||||
if (type) {
|
||||
accessorWithTypeAnnotation = anotherAccessor;
|
||||
|
@ -1280,7 +1280,7 @@ namespace ts {
|
|||
write("(");
|
||||
}
|
||||
|
||||
let prevEnclosingDeclaration = enclosingDeclaration;
|
||||
const prevEnclosingDeclaration = enclosingDeclaration;
|
||||
enclosingDeclaration = node;
|
||||
|
||||
// Parameters
|
||||
|
@ -1294,7 +1294,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
// If this is not a constructor and is not private, emit the return type
|
||||
let isFunctionTypeOrConstructorType = node.kind === SyntaxKind.FunctionType || node.kind === SyntaxKind.ConstructorType;
|
||||
const isFunctionTypeOrConstructorType = node.kind === SyntaxKind.FunctionType || node.kind === SyntaxKind.ConstructorType;
|
||||
if (isFunctionTypeOrConstructorType || node.parent.kind === SyntaxKind.TypeLiteral) {
|
||||
// Emit type literal signature return type only if specified
|
||||
if (node.type) {
|
||||
|
@ -1410,7 +1410,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
function getParameterDeclarationTypeVisibilityError(symbolAccesibilityResult: SymbolAccessiblityResult): SymbolAccessibilityDiagnostic {
|
||||
let diagnosticMessage: DiagnosticMessage = getParameterDeclarationTypeVisibilityDiagnosticMessage(symbolAccesibilityResult);
|
||||
const diagnosticMessage: DiagnosticMessage = getParameterDeclarationTypeVisibilityDiagnosticMessage(symbolAccesibilityResult);
|
||||
return diagnosticMessage !== undefined ? {
|
||||
diagnosticMessage,
|
||||
errorNode: node,
|
||||
|
@ -1483,7 +1483,7 @@ namespace ts {
|
|||
}
|
||||
else if (bindingPattern.kind === SyntaxKind.ArrayBindingPattern) {
|
||||
write("[");
|
||||
let elements = bindingPattern.elements;
|
||||
const elements = bindingPattern.elements;
|
||||
emitCommaList(elements, emitBindingElement);
|
||||
if (elements && elements.hasTrailingComma) {
|
||||
write(", ");
|
||||
|
@ -1494,7 +1494,7 @@ namespace ts {
|
|||
|
||||
function emitBindingElement(bindingElement: BindingElement) {
|
||||
function getBindingElementTypeVisibilityError(symbolAccesibilityResult: SymbolAccessiblityResult): SymbolAccessibilityDiagnostic {
|
||||
let diagnosticMessage = getParameterDeclarationTypeVisibilityDiagnosticMessage(symbolAccesibilityResult);
|
||||
const diagnosticMessage = getParameterDeclarationTypeVisibilityDiagnosticMessage(symbolAccesibilityResult);
|
||||
return diagnosticMessage !== undefined ? {
|
||||
diagnosticMessage,
|
||||
errorNode: bindingElement,
|
||||
|
@ -1609,11 +1609,11 @@ namespace ts {
|
|||
|
||||
/* @internal */
|
||||
export function writeDeclarationFile(jsFilePath: string, sourceFile: SourceFile, host: EmitHost, resolver: EmitResolver, diagnostics: Diagnostic[]) {
|
||||
let emitDeclarationResult = emitDeclarations(host, resolver, diagnostics, jsFilePath, sourceFile);
|
||||
const emitDeclarationResult = emitDeclarations(host, resolver, diagnostics, jsFilePath, sourceFile);
|
||||
// TODO(shkamat): Should we not write any declaration file if any of them can produce error,
|
||||
// or should we just not write this file like we are doing now
|
||||
if (!emitDeclarationResult.reportedDeclarationError) {
|
||||
let declarationOutput = emitDeclarationResult.referencePathsOutput
|
||||
const declarationOutput = emitDeclarationResult.referencePathsOutput
|
||||
+ getDeclarationOutput(emitDeclarationResult.synchronousDeclarationOutput, emitDeclarationResult.moduleElementDeclarationEmitInfo);
|
||||
writeFile(host, diagnostics, removeFileExtension(jsFilePath) + ".d.ts", declarationOutput, host.getCompilerOptions().emitBOM);
|
||||
}
|
||||
|
|
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
|
@ -10,7 +10,7 @@ namespace ts {
|
|||
|
||||
/** The version of the TypeScript compiler release */
|
||||
|
||||
let emptyArray: any[] = [];
|
||||
const emptyArray: any[] = [];
|
||||
|
||||
export const version = "1.8.0";
|
||||
|
||||
|
@ -20,7 +20,7 @@ namespace ts {
|
|||
if (sys.fileExists(fileName)) {
|
||||
return fileName;
|
||||
}
|
||||
let parentPath = getDirectoryPath(searchPath);
|
||||
const parentPath = getDirectoryPath(searchPath);
|
||||
if (parentPath === searchPath) {
|
||||
break;
|
||||
}
|
||||
|
@ -31,13 +31,13 @@ namespace ts {
|
|||
}
|
||||
|
||||
export function resolveTripleslashReference(moduleName: string, containingFile: string): string {
|
||||
let basePath = getDirectoryPath(containingFile);
|
||||
let referencedFileName = isRootedDiskPath(moduleName) ? moduleName : combinePaths(basePath, moduleName);
|
||||
const basePath = getDirectoryPath(containingFile);
|
||||
const referencedFileName = isRootedDiskPath(moduleName) ? moduleName : combinePaths(basePath, moduleName);
|
||||
return normalizePath(referencedFileName);
|
||||
}
|
||||
|
||||
export function resolveModuleName(moduleName: string, containingFile: string, compilerOptions: CompilerOptions, host: ModuleResolutionHost): ResolvedModuleWithFailedLookupLocations {
|
||||
let moduleResolution = compilerOptions.moduleResolution !== undefined
|
||||
const moduleResolution = compilerOptions.moduleResolution !== undefined
|
||||
? compilerOptions.moduleResolution
|
||||
: compilerOptions.module === ModuleKind.CommonJS ? ModuleResolutionKind.NodeJs : ModuleResolutionKind.Classic;
|
||||
|
||||
|
@ -48,11 +48,11 @@ namespace ts {
|
|||
}
|
||||
|
||||
export function nodeModuleNameResolver(moduleName: string, containingFile: string, host: ModuleResolutionHost): ResolvedModuleWithFailedLookupLocations {
|
||||
let containingDirectory = getDirectoryPath(containingFile);
|
||||
const containingDirectory = getDirectoryPath(containingFile);
|
||||
|
||||
if (getRootLength(moduleName) !== 0 || nameStartsWithDotSlashOrDotDotSlash(moduleName)) {
|
||||
let failedLookupLocations: string[] = [];
|
||||
let candidate = normalizePath(combinePaths(containingDirectory, moduleName));
|
||||
const failedLookupLocations: string[] = [];
|
||||
const candidate = normalizePath(combinePaths(containingDirectory, moduleName));
|
||||
let resolvedFileName = loadNodeModuleFromFile(candidate, failedLookupLocations, host);
|
||||
|
||||
if (resolvedFileName) {
|
||||
|
@ -73,7 +73,7 @@ namespace ts {
|
|||
return forEach(moduleFileExtensions, tryLoad);
|
||||
|
||||
function tryLoad(ext: string): string {
|
||||
let fileName = fileExtensionIs(candidate, ext) ? candidate : candidate + ext;
|
||||
const fileName = fileExtensionIs(candidate, ext) ? candidate : candidate + ext;
|
||||
if (host.fileExists(fileName)) {
|
||||
return fileName;
|
||||
}
|
||||
|
@ -85,13 +85,13 @@ namespace ts {
|
|||
}
|
||||
|
||||
function loadNodeModuleFromDirectory(candidate: string, failedLookupLocation: string[], host: ModuleResolutionHost): string {
|
||||
let packageJsonPath = combinePaths(candidate, "package.json");
|
||||
const packageJsonPath = combinePaths(candidate, "package.json");
|
||||
if (host.fileExists(packageJsonPath)) {
|
||||
|
||||
let jsonContent: { typings?: string };
|
||||
|
||||
try {
|
||||
let jsonText = host.readFile(packageJsonPath);
|
||||
const jsonText = host.readFile(packageJsonPath);
|
||||
jsonContent = jsonText ? <{ typings?: string }>JSON.parse(jsonText) : { typings: undefined };
|
||||
}
|
||||
catch (e) {
|
||||
|
@ -100,7 +100,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
if (jsonContent.typings) {
|
||||
let result = loadNodeModuleFromFile(normalizePath(combinePaths(candidate, jsonContent.typings)), failedLookupLocation, host);
|
||||
const result = loadNodeModuleFromFile(normalizePath(combinePaths(candidate, jsonContent.typings)), failedLookupLocation, host);
|
||||
if (result) {
|
||||
return result;
|
||||
}
|
||||
|
@ -115,13 +115,13 @@ namespace ts {
|
|||
}
|
||||
|
||||
function loadModuleFromNodeModules(moduleName: string, directory: string, host: ModuleResolutionHost): ResolvedModuleWithFailedLookupLocations {
|
||||
let failedLookupLocations: string[] = [];
|
||||
const failedLookupLocations: string[] = [];
|
||||
directory = normalizeSlashes(directory);
|
||||
while (true) {
|
||||
let baseName = getBaseFileName(directory);
|
||||
const baseName = getBaseFileName(directory);
|
||||
if (baseName !== "node_modules") {
|
||||
let nodeModulesFolder = combinePaths(directory, "node_modules");
|
||||
let candidate = normalizePath(combinePaths(nodeModulesFolder, moduleName));
|
||||
const nodeModulesFolder = combinePaths(directory, "node_modules");
|
||||
const candidate = normalizePath(combinePaths(nodeModulesFolder, moduleName));
|
||||
let result = loadNodeModuleFromFile(candidate, failedLookupLocations, host);
|
||||
if (result) {
|
||||
return { resolvedModule: { resolvedFileName: result, isExternalLibraryImport: true }, failedLookupLocations };
|
||||
|
@ -133,7 +133,7 @@ namespace ts {
|
|||
}
|
||||
}
|
||||
|
||||
let parentPath = getDirectoryPath(directory);
|
||||
const parentPath = getDirectoryPath(directory);
|
||||
if (parentPath === directory) {
|
||||
break;
|
||||
}
|
||||
|
@ -145,7 +145,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
function nameStartsWithDotSlashOrDotDotSlash(name: string) {
|
||||
let i = name.lastIndexOf("./", 1);
|
||||
const i = name.lastIndexOf("./", 1);
|
||||
return i === 0 || (i === 1 && name.charCodeAt(0) === CharacterCodes.dot);
|
||||
}
|
||||
|
||||
|
@ -159,7 +159,7 @@ namespace ts {
|
|||
let searchPath = getDirectoryPath(containingFile);
|
||||
let searchName: string;
|
||||
|
||||
let failedLookupLocations: string[] = [];
|
||||
const failedLookupLocations: string[] = [];
|
||||
|
||||
let referencedSourceFile: string;
|
||||
while (true) {
|
||||
|
@ -171,7 +171,7 @@ namespace ts {
|
|||
return undefined;
|
||||
}
|
||||
|
||||
let candidate = searchName + extension;
|
||||
const candidate = searchName + extension;
|
||||
if (host.fileExists(candidate)) {
|
||||
return candidate;
|
||||
}
|
||||
|
@ -184,7 +184,7 @@ namespace ts {
|
|||
break;
|
||||
}
|
||||
|
||||
let parentPath = getDirectoryPath(searchPath);
|
||||
const parentPath = getDirectoryPath(searchPath);
|
||||
if (parentPath === searchPath) {
|
||||
break;
|
||||
}
|
||||
|
@ -205,7 +205,7 @@ namespace ts {
|
|||
};
|
||||
|
||||
export function createCompilerHost(options: CompilerOptions, setParentNodes?: boolean): CompilerHost {
|
||||
let existingDirectories: Map<boolean> = {};
|
||||
const existingDirectories: Map<boolean> = {};
|
||||
|
||||
function getCanonicalFileName(fileName: string): string {
|
||||
// if underlying system can distinguish between two files whose names differs only in cases then file name already in canonical form.
|
||||
|
@ -214,12 +214,12 @@ namespace ts {
|
|||
}
|
||||
|
||||
// returned by CScript sys environment
|
||||
let unsupportedFileEncodingErrorCode = -2147024809;
|
||||
const unsupportedFileEncodingErrorCode = -2147024809;
|
||||
|
||||
function getSourceFile(fileName: string, languageVersion: ScriptTarget, onError?: (message: string) => void): SourceFile {
|
||||
let text: string;
|
||||
try {
|
||||
let start = new Date().getTime();
|
||||
const start = new Date().getTime();
|
||||
text = sys.readFile(fileName, options.charset);
|
||||
ioReadTime += new Date().getTime() - start;
|
||||
}
|
||||
|
@ -248,7 +248,7 @@ namespace ts {
|
|||
|
||||
function ensureDirectoriesExist(directoryPath: string) {
|
||||
if (directoryPath.length > getRootLength(directoryPath) && !directoryExists(directoryPath)) {
|
||||
let parentDirectory = getDirectoryPath(directoryPath);
|
||||
const parentDirectory = getDirectoryPath(directoryPath);
|
||||
ensureDirectoriesExist(parentDirectory);
|
||||
sys.createDirectory(directoryPath);
|
||||
}
|
||||
|
@ -256,7 +256,7 @@ namespace ts {
|
|||
|
||||
function writeFile(fileName: string, data: string, writeByteOrderMark: boolean, onError?: (message: string) => void) {
|
||||
try {
|
||||
let start = new Date().getTime();
|
||||
const start = new Date().getTime();
|
||||
ensureDirectoriesExist(getDirectoryPath(normalizePath(fileName)));
|
||||
sys.writeFile(fileName, data, writeByteOrderMark);
|
||||
ioWriteTime += new Date().getTime() - start;
|
||||
|
@ -284,7 +284,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
export function getPreEmitDiagnostics(program: Program, sourceFile?: SourceFile, cancellationToken?: CancellationToken): Diagnostic[] {
|
||||
let diagnostics = program.getOptionsDiagnostics(cancellationToken).concat(
|
||||
const diagnostics = program.getOptionsDiagnostics(cancellationToken).concat(
|
||||
program.getSyntacticDiagnostics(sourceFile, cancellationToken),
|
||||
program.getGlobalDiagnostics(cancellationToken),
|
||||
program.getSemanticDiagnostics(sourceFile, cancellationToken));
|
||||
|
@ -326,7 +326,7 @@ namespace ts {
|
|||
let program: Program;
|
||||
let files: SourceFile[] = [];
|
||||
let fileProcessingDiagnostics = createDiagnosticCollection();
|
||||
let programDiagnostics = createDiagnosticCollection();
|
||||
const programDiagnostics = createDiagnosticCollection();
|
||||
|
||||
let commonSourceDirectory: string;
|
||||
let diagnosticsProducingTypeChecker: TypeChecker;
|
||||
|
@ -335,7 +335,7 @@ namespace ts {
|
|||
|
||||
let skipDefaultLib = options.noLib;
|
||||
|
||||
let start = new Date().getTime();
|
||||
const start = new Date().getTime();
|
||||
|
||||
host = host || createCompilerHost(options);
|
||||
|
||||
|
@ -344,15 +344,15 @@ namespace ts {
|
|||
? ((moduleNames: string[], containingFile: string) => host.resolveModuleNames(moduleNames, containingFile))
|
||||
: ((moduleNames: string[], containingFile: string) => map(moduleNames, moduleName => resolveModuleName(moduleName, containingFile, options, host).resolvedModule));
|
||||
|
||||
let filesByName = createFileMap<SourceFile>();
|
||||
const filesByName = createFileMap<SourceFile>();
|
||||
// stores 'filename -> file association' ignoring case
|
||||
// used to track cases when two file names differ only in casing
|
||||
let filesByNameIgnoreCase = host.useCaseSensitiveFileNames() ? createFileMap<SourceFile>(fileName => fileName.toLowerCase()) : undefined;
|
||||
const filesByNameIgnoreCase = host.useCaseSensitiveFileNames() ? createFileMap<SourceFile>(fileName => fileName.toLowerCase()) : undefined;
|
||||
|
||||
if (oldProgram) {
|
||||
// check properties that can affect structure of the program or module resolution strategy
|
||||
// if any of these properties has changed - structure cannot be reused
|
||||
let oldOptions = oldProgram.getCompilerOptions();
|
||||
const oldOptions = oldProgram.getCompilerOptions();
|
||||
if ((oldOptions.module !== options.module) ||
|
||||
(oldOptions.noResolve !== options.noResolve) ||
|
||||
(oldOptions.target !== options.target) ||
|
||||
|
@ -410,7 +410,7 @@ namespace ts {
|
|||
getTypeChecker();
|
||||
classifiableNames = {};
|
||||
|
||||
for (let sourceFile of files) {
|
||||
for (const sourceFile of files) {
|
||||
copyMap(sourceFile.classifiableNames, classifiableNames);
|
||||
}
|
||||
}
|
||||
|
@ -426,17 +426,17 @@ namespace ts {
|
|||
Debug.assert(!oldProgram.structureIsReused);
|
||||
|
||||
// there is an old program, check if we can reuse its structure
|
||||
let oldRootNames = oldProgram.getRootFileNames();
|
||||
const oldRootNames = oldProgram.getRootFileNames();
|
||||
if (!arrayIsEqualTo(oldRootNames, rootNames)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// check if program source files has changed in the way that can affect structure of the program
|
||||
let newSourceFiles: SourceFile[] = [];
|
||||
let filePaths: Path[] = [];
|
||||
let modifiedSourceFiles: SourceFile[] = [];
|
||||
const newSourceFiles: SourceFile[] = [];
|
||||
const filePaths: Path[] = [];
|
||||
const modifiedSourceFiles: SourceFile[] = [];
|
||||
|
||||
for (let oldSourceFile of oldProgram.getSourceFiles()) {
|
||||
for (const oldSourceFile of oldProgram.getSourceFiles()) {
|
||||
let newSourceFile = host.getSourceFile(oldSourceFile.fileName, options.target);
|
||||
if (!newSourceFile) {
|
||||
return false;
|
||||
|
@ -466,13 +466,13 @@ namespace ts {
|
|||
}
|
||||
|
||||
if (resolveModuleNamesWorker) {
|
||||
let moduleNames = map(newSourceFile.imports, name => name.text);
|
||||
let resolutions = resolveModuleNamesWorker(moduleNames, getNormalizedAbsolutePath(newSourceFile.fileName, currentDirectory));
|
||||
const moduleNames = map(newSourceFile.imports, name => name.text);
|
||||
const resolutions = resolveModuleNamesWorker(moduleNames, getNormalizedAbsolutePath(newSourceFile.fileName, currentDirectory));
|
||||
// ensure that module resolution results are still correct
|
||||
for (let i = 0; i < moduleNames.length; ++i) {
|
||||
let newResolution = resolutions[i];
|
||||
let oldResolution = getResolvedModule(oldSourceFile, moduleNames[i]);
|
||||
let resolutionChanged = oldResolution
|
||||
const newResolution = resolutions[i];
|
||||
const oldResolution = getResolvedModule(oldSourceFile, moduleNames[i]);
|
||||
const resolutionChanged = oldResolution
|
||||
? !newResolution ||
|
||||
oldResolution.resolvedFileName !== newResolution.resolvedFileName ||
|
||||
!!oldResolution.isExternalLibraryImport !== !!newResolution.isExternalLibraryImport
|
||||
|
@ -504,7 +504,7 @@ namespace ts {
|
|||
files = newSourceFiles;
|
||||
fileProcessingDiagnostics = oldProgram.getFileProcessingDiagnostics();
|
||||
|
||||
for (let modifiedFile of modifiedSourceFiles) {
|
||||
for (const modifiedFile of modifiedSourceFiles) {
|
||||
fileProcessingDiagnostics.reattachFileDiagnostics(modifiedFile);
|
||||
}
|
||||
oldProgram.structureIsReused = true;
|
||||
|
@ -554,11 +554,11 @@ namespace ts {
|
|||
// This is because in the -out scenario all files need to be emitted, and therefore all
|
||||
// files need to be type checked. And the way to specify that all files need to be type
|
||||
// checked is to not pass the file to getEmitResolver.
|
||||
let emitResolver = getDiagnosticsProducingTypeChecker().getEmitResolver((options.outFile || options.out) ? undefined : sourceFile);
|
||||
const emitResolver = getDiagnosticsProducingTypeChecker().getEmitResolver((options.outFile || options.out) ? undefined : sourceFile);
|
||||
|
||||
let start = new Date().getTime();
|
||||
const start = new Date().getTime();
|
||||
|
||||
let emitResult = emitFiles(
|
||||
const emitResult = emitFiles(
|
||||
emitResolver,
|
||||
getEmitHost(writeFileCallback),
|
||||
sourceFile);
|
||||
|
@ -579,7 +579,7 @@ namespace ts {
|
|||
return getDiagnostics(sourceFile, cancellationToken);
|
||||
}
|
||||
|
||||
let allDiagnostics: Diagnostic[] = [];
|
||||
const allDiagnostics: Diagnostic[] = [];
|
||||
forEach(program.getSourceFiles(), sourceFile => {
|
||||
if (cancellationToken) {
|
||||
cancellationToken.throwIfCancellationRequested();
|
||||
|
@ -631,13 +631,13 @@ namespace ts {
|
|||
|
||||
function getSemanticDiagnosticsForFile(sourceFile: SourceFile, cancellationToken: CancellationToken): Diagnostic[] {
|
||||
return runWithCancellationToken(() => {
|
||||
let typeChecker = getDiagnosticsProducingTypeChecker();
|
||||
const typeChecker = getDiagnosticsProducingTypeChecker();
|
||||
|
||||
Debug.assert(!!sourceFile.bindDiagnostics);
|
||||
let bindDiagnostics = sourceFile.bindDiagnostics;
|
||||
let checkDiagnostics = typeChecker.getDiagnostics(sourceFile, cancellationToken);
|
||||
let fileProcessingDiagnosticsInFile = fileProcessingDiagnostics.getDiagnostics(sourceFile.fileName);
|
||||
let programDiagnosticsInFile = programDiagnostics.getDiagnostics(sourceFile.fileName);
|
||||
const bindDiagnostics = sourceFile.bindDiagnostics;
|
||||
const checkDiagnostics = typeChecker.getDiagnostics(sourceFile, cancellationToken);
|
||||
const fileProcessingDiagnosticsInFile = fileProcessingDiagnostics.getDiagnostics(sourceFile.fileName);
|
||||
const programDiagnosticsInFile = programDiagnostics.getDiagnostics(sourceFile.fileName);
|
||||
|
||||
return bindDiagnostics.concat(checkDiagnostics).concat(fileProcessingDiagnosticsInFile).concat(programDiagnosticsInFile);
|
||||
});
|
||||
|
@ -646,23 +646,23 @@ namespace ts {
|
|||
function getDeclarationDiagnosticsForFile(sourceFile: SourceFile, cancellationToken: CancellationToken): Diagnostic[] {
|
||||
return runWithCancellationToken(() => {
|
||||
if (!isDeclarationFile(sourceFile)) {
|
||||
let resolver = getDiagnosticsProducingTypeChecker().getEmitResolver(sourceFile, cancellationToken);
|
||||
const resolver = getDiagnosticsProducingTypeChecker().getEmitResolver(sourceFile, cancellationToken);
|
||||
// Don't actually write any files since we're just getting diagnostics.
|
||||
let writeFile: WriteFileCallback = () => { };
|
||||
const writeFile: WriteFileCallback = () => { };
|
||||
return ts.getDeclarationDiagnostics(getEmitHost(writeFile), resolver, sourceFile);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function getOptionsDiagnostics(): Diagnostic[] {
|
||||
let allDiagnostics: Diagnostic[] = [];
|
||||
const allDiagnostics: Diagnostic[] = [];
|
||||
addRange(allDiagnostics, fileProcessingDiagnostics.getGlobalDiagnostics());
|
||||
addRange(allDiagnostics, programDiagnostics.getGlobalDiagnostics());
|
||||
return sortAndDeduplicateDiagnostics(allDiagnostics);
|
||||
}
|
||||
|
||||
function getGlobalDiagnostics(): Diagnostic[] {
|
||||
let allDiagnostics: Diagnostic[] = [];
|
||||
const allDiagnostics: Diagnostic[] = [];
|
||||
addRange(allDiagnostics, getDiagnosticsProducingTypeChecker().getGlobalDiagnostics());
|
||||
return sortAndDeduplicateDiagnostics(allDiagnostics);
|
||||
}
|
||||
|
@ -689,7 +689,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
let imports: LiteralExpression[];
|
||||
for (let node of file.statements) {
|
||||
for (const node of file.statements) {
|
||||
collect(node, /* allowRelativeModuleNames */ true);
|
||||
}
|
||||
|
||||
|
@ -749,7 +749,7 @@ namespace ts {
|
|||
}
|
||||
}
|
||||
else {
|
||||
let nonTsFile: SourceFile = options.allowNonTsExtensions && findSourceFile(fileName, toPath(fileName, currentDirectory, getCanonicalFileName), isDefaultLib, refFile, refPos, refEnd);
|
||||
const nonTsFile: SourceFile = options.allowNonTsExtensions && findSourceFile(fileName, toPath(fileName, currentDirectory, getCanonicalFileName), isDefaultLib, refFile, refPos, refEnd);
|
||||
if (!nonTsFile) {
|
||||
if (options.allowNonTsExtensions) {
|
||||
diagnostic = Diagnostics.File_0_not_found;
|
||||
|
@ -797,7 +797,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
// We haven't looked for this file, do so now and cache result
|
||||
let file = host.getSourceFile(fileName, options.target, hostErrorMessage => {
|
||||
const file = host.getSourceFile(fileName, options.target, hostErrorMessage => {
|
||||
if (refFile !== undefined && refPos !== undefined && refEnd !== undefined) {
|
||||
fileProcessingDiagnostics.add(createFileDiagnostic(refFile, refPos, refEnd - refPos,
|
||||
Diagnostics.Cannot_read_file_0_Colon_1, fileName, hostErrorMessage));
|
||||
|
@ -824,7 +824,7 @@ namespace ts {
|
|||
|
||||
skipDefaultLib = skipDefaultLib || file.hasNoDefaultLib;
|
||||
|
||||
let basePath = getDirectoryPath(fileName);
|
||||
const basePath = getDirectoryPath(fileName);
|
||||
if (!options.noResolve) {
|
||||
processReferencedFiles(file, basePath);
|
||||
}
|
||||
|
@ -846,7 +846,7 @@ namespace ts {
|
|||
|
||||
function processReferencedFiles(file: SourceFile, basePath: string) {
|
||||
forEach(file.referencedFiles, ref => {
|
||||
let referencedFileName = resolveTripleslashReference(ref.fileName, file.fileName);
|
||||
const referencedFileName = resolveTripleslashReference(ref.fileName, file.fileName);
|
||||
processSourceFile(referencedFileName, /* isDefaultLib */ false, file, ref.pos, ref.end);
|
||||
});
|
||||
}
|
||||
|
@ -859,21 +859,21 @@ namespace ts {
|
|||
collectExternalModuleReferences(file);
|
||||
if (file.imports.length) {
|
||||
file.resolvedModules = {};
|
||||
let moduleNames = map(file.imports, name => name.text);
|
||||
let resolutions = resolveModuleNamesWorker(moduleNames, getNormalizedAbsolutePath(file.fileName, currentDirectory));
|
||||
const moduleNames = map(file.imports, name => name.text);
|
||||
const resolutions = resolveModuleNamesWorker(moduleNames, getNormalizedAbsolutePath(file.fileName, currentDirectory));
|
||||
for (let i = 0; i < file.imports.length; ++i) {
|
||||
let resolution = resolutions[i];
|
||||
const resolution = resolutions[i];
|
||||
setResolvedModule(file, moduleNames[i], resolution);
|
||||
if (resolution && !options.noResolve) {
|
||||
const importedFile = findSourceFile(resolution.resolvedFileName, toPath(resolution.resolvedFileName, currentDirectory, getCanonicalFileName), /* isDefaultLib */ false, file, skipTrivia(file.text, file.imports[i].pos), file.imports[i].end);
|
||||
|
||||
if (importedFile && resolution.isExternalLibraryImport) {
|
||||
if (!isExternalModule(importedFile)) {
|
||||
let start = getTokenPosOfNode(file.imports[i], file);
|
||||
const start = getTokenPosOfNode(file.imports[i], file);
|
||||
fileProcessingDiagnostics.add(createFileDiagnostic(file, start, file.imports[i].end - start, Diagnostics.Exported_external_package_typings_file_0_is_not_a_module_Please_contact_the_package_author_to_update_the_package_definition, importedFile.fileName));
|
||||
}
|
||||
else if (importedFile.referencedFiles.length) {
|
||||
let firstRef = importedFile.referencedFiles[0];
|
||||
const firstRef = importedFile.referencedFiles[0];
|
||||
fileProcessingDiagnostics.add(createFileDiagnostic(importedFile, firstRef.pos, firstRef.end - firstRef.pos, Diagnostics.Exported_external_package_typings_file_cannot_contain_tripleslash_references_Please_contact_the_package_author_to_update_the_package_definition));
|
||||
}
|
||||
}
|
||||
|
@ -895,7 +895,7 @@ namespace ts {
|
|||
return;
|
||||
}
|
||||
|
||||
let sourcePathComponents = getNormalizedPathComponents(sourceFile.fileName, currentDirectory);
|
||||
const sourcePathComponents = getNormalizedPathComponents(sourceFile.fileName, currentDirectory);
|
||||
sourcePathComponents.pop(); // The base file name is not part of the common directory path
|
||||
|
||||
if (!commonPathComponents) {
|
||||
|
@ -929,11 +929,11 @@ namespace ts {
|
|||
function checkSourceFilesBelongToPath(sourceFiles: SourceFile[], rootDirectory: string): boolean {
|
||||
let allFilesBelongToPath = true;
|
||||
if (sourceFiles) {
|
||||
let absoluteRootDirectoryPath = host.getCanonicalFileName(getNormalizedAbsolutePath(rootDirectory, currentDirectory));
|
||||
const absoluteRootDirectoryPath = host.getCanonicalFileName(getNormalizedAbsolutePath(rootDirectory, currentDirectory));
|
||||
|
||||
for (var sourceFile of sourceFiles) {
|
||||
if (!isDeclarationFile(sourceFile)) {
|
||||
let absoluteSourceFilePath = host.getCanonicalFileName(getNormalizedAbsolutePath(sourceFile.fileName, currentDirectory));
|
||||
const absoluteSourceFilePath = host.getCanonicalFileName(getNormalizedAbsolutePath(sourceFile.fileName, currentDirectory));
|
||||
if (absoluteSourceFilePath.indexOf(absoluteRootDirectoryPath) !== 0) {
|
||||
programDiagnostics.add(createCompilerDiagnostic(Diagnostics.File_0_is_not_under_rootDir_1_rootDir_is_expected_to_contain_all_source_files, sourceFile.fileName, options.rootDir));
|
||||
allFilesBelongToPath = false;
|
||||
|
@ -998,24 +998,24 @@ namespace ts {
|
|||
return;
|
||||
}
|
||||
|
||||
let languageVersion = options.target || ScriptTarget.ES3;
|
||||
let outFile = options.outFile || options.out;
|
||||
const languageVersion = options.target || ScriptTarget.ES3;
|
||||
const outFile = options.outFile || options.out;
|
||||
|
||||
let firstExternalModuleSourceFile = forEach(files, f => isExternalModule(f) ? f : undefined);
|
||||
const firstExternalModuleSourceFile = forEach(files, f => isExternalModule(f) ? f : undefined);
|
||||
if (options.isolatedModules) {
|
||||
if (!options.module && languageVersion < ScriptTarget.ES6) {
|
||||
programDiagnostics.add(createCompilerDiagnostic(Diagnostics.Option_isolatedModules_can_only_be_used_when_either_option_module_is_provided_or_option_target_is_ES2015_or_higher));
|
||||
}
|
||||
|
||||
let firstNonExternalModuleSourceFile = forEach(files, f => !isExternalModule(f) && !isDeclarationFile(f) ? f : undefined);
|
||||
const firstNonExternalModuleSourceFile = forEach(files, f => !isExternalModule(f) && !isDeclarationFile(f) ? f : undefined);
|
||||
if (firstNonExternalModuleSourceFile) {
|
||||
let span = getErrorSpanForNode(firstNonExternalModuleSourceFile, firstNonExternalModuleSourceFile);
|
||||
const span = getErrorSpanForNode(firstNonExternalModuleSourceFile, firstNonExternalModuleSourceFile);
|
||||
programDiagnostics.add(createFileDiagnostic(firstNonExternalModuleSourceFile, span.start, span.length, Diagnostics.Cannot_compile_namespaces_when_the_isolatedModules_flag_is_provided));
|
||||
}
|
||||
}
|
||||
else if (firstExternalModuleSourceFile && languageVersion < ScriptTarget.ES6 && !options.module) {
|
||||
// We cannot use createDiagnosticFromNode because nodes do not have parents yet
|
||||
let span = getErrorSpanForNode(firstExternalModuleSourceFile, firstExternalModuleSourceFile.externalModuleIndicator);
|
||||
const span = getErrorSpanForNode(firstExternalModuleSourceFile, firstExternalModuleSourceFile.externalModuleIndicator);
|
||||
programDiagnostics.add(createFileDiagnostic(firstExternalModuleSourceFile, span.start, span.length, Diagnostics.Cannot_compile_modules_unless_the_module_flag_is_provided));
|
||||
}
|
||||
|
||||
|
|
File diff suppressed because one or more lines are too long
|
@ -51,15 +51,15 @@ namespace ts {
|
|||
|
||||
function getWScriptSystem(): System {
|
||||
|
||||
let fso = new ActiveXObject("Scripting.FileSystemObject");
|
||||
const fso = new ActiveXObject("Scripting.FileSystemObject");
|
||||
|
||||
let fileStream = new ActiveXObject("ADODB.Stream");
|
||||
const fileStream = new ActiveXObject("ADODB.Stream");
|
||||
fileStream.Type = 2 /*text*/;
|
||||
|
||||
let binaryStream = new ActiveXObject("ADODB.Stream");
|
||||
const binaryStream = new ActiveXObject("ADODB.Stream");
|
||||
binaryStream.Type = 1 /*binary*/;
|
||||
|
||||
let args: string[] = [];
|
||||
const args: string[] = [];
|
||||
for (let i = 0; i < WScript.Arguments.length; i++) {
|
||||
args[i] = WScript.Arguments.Item(i);
|
||||
}
|
||||
|
@ -78,7 +78,7 @@ namespace ts {
|
|||
// Load file and read the first two bytes into a string with no interpretation
|
||||
fileStream.Charset = "x-ansi";
|
||||
fileStream.LoadFromFile(fileName);
|
||||
let bom = fileStream.ReadText(2) || "";
|
||||
const bom = fileStream.ReadText(2) || "";
|
||||
// Position must be at 0 before encoding can be changed
|
||||
fileStream.Position = 0;
|
||||
// [0xFF,0xFE] and [0xFE,0xFF] mean utf-16 (little or big endian), otherwise default to utf-8
|
||||
|
@ -124,7 +124,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
function getNames(collection: any): string[] {
|
||||
let result: string[] = [];
|
||||
const result: string[] = [];
|
||||
for (let e = new Enumerator(collection); !e.atEnd(); e.moveNext()) {
|
||||
result.push(e.item().Name);
|
||||
}
|
||||
|
@ -132,22 +132,22 @@ namespace ts {
|
|||
}
|
||||
|
||||
function readDirectory(path: string, extension?: string, exclude?: string[]): string[] {
|
||||
let result: string[] = [];
|
||||
const result: string[] = [];
|
||||
exclude = map(exclude, s => getCanonicalPath(combinePaths(path, s)));
|
||||
visitDirectory(path);
|
||||
return result;
|
||||
function visitDirectory(path: string) {
|
||||
let folder = fso.GetFolder(path || ".");
|
||||
let files = getNames(folder.files);
|
||||
for (let current of files) {
|
||||
let name = combinePaths(path, current);
|
||||
const folder = fso.GetFolder(path || ".");
|
||||
const files = getNames(folder.files);
|
||||
for (const current of files) {
|
||||
const name = combinePaths(path, current);
|
||||
if ((!extension || fileExtensionIs(name, extension)) && !contains(exclude, getCanonicalPath(name))) {
|
||||
result.push(name);
|
||||
}
|
||||
}
|
||||
let subfolders = getNames(folder.subfolders);
|
||||
for (let current of subfolders) {
|
||||
let name = combinePaths(path, current);
|
||||
const subfolders = getNames(folder.subfolders);
|
||||
for (const current of subfolders) {
|
||||
const name = combinePaths(path, current);
|
||||
if (!contains(exclude, getCanonicalPath(name))) {
|
||||
visitDirectory(name);
|
||||
}
|
||||
|
@ -212,7 +212,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
function poll(checkedIndex: number) {
|
||||
let watchedFile = watchedFiles[checkedIndex];
|
||||
const watchedFile = watchedFiles[checkedIndex];
|
||||
if (!watchedFile) {
|
||||
return;
|
||||
}
|
||||
|
@ -252,7 +252,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
function addFile(fileName: string, callback: (fileName: string, removed?: boolean) => void): WatchedFile {
|
||||
let file: WatchedFile = {
|
||||
const file: WatchedFile = {
|
||||
fileName,
|
||||
callback,
|
||||
mtime: getModifiedTime(fileName)
|
||||
|
@ -291,7 +291,7 @@ namespace ts {
|
|||
// changes for large reference sets? If so, do we want
|
||||
// to increase the chunk size or decrease the interval
|
||||
// time dynamically to match the large reference set?
|
||||
let watchedFileSet = createWatchedFileSet();
|
||||
const watchedFileSet = createWatchedFileSet();
|
||||
|
||||
function isNode4OrLater(): Boolean {
|
||||
return parseInt(process.version.charAt(1)) >= 4;
|
||||
|
@ -305,14 +305,14 @@ namespace ts {
|
|||
if (!_fs.existsSync(fileName)) {
|
||||
return undefined;
|
||||
}
|
||||
let buffer = _fs.readFileSync(fileName);
|
||||
const buffer = _fs.readFileSync(fileName);
|
||||
let len = buffer.length;
|
||||
if (len >= 2 && buffer[0] === 0xFE && buffer[1] === 0xFF) {
|
||||
// Big endian UTF-16 byte order mark detected. Since big endian is not supported by node.js,
|
||||
// flip all byte pairs and treat as little endian.
|
||||
len &= ~1;
|
||||
for (let i = 0; i < len; i += 2) {
|
||||
let temp = buffer[i];
|
||||
const temp = buffer[i];
|
||||
buffer[i] = buffer[i + 1];
|
||||
buffer[i + 1] = temp;
|
||||
}
|
||||
|
@ -354,17 +354,17 @@ namespace ts {
|
|||
}
|
||||
|
||||
function readDirectory(path: string, extension?: string, exclude?: string[]): string[] {
|
||||
let result: string[] = [];
|
||||
const result: string[] = [];
|
||||
exclude = map(exclude, s => getCanonicalPath(combinePaths(path, s)));
|
||||
visitDirectory(path);
|
||||
return result;
|
||||
function visitDirectory(path: string) {
|
||||
let files = _fs.readdirSync(path || ".").sort();
|
||||
let directories: string[] = [];
|
||||
for (let current of files) {
|
||||
let name = combinePaths(path, current);
|
||||
const files = _fs.readdirSync(path || ".").sort();
|
||||
const directories: string[] = [];
|
||||
for (const current of files) {
|
||||
const name = combinePaths(path, current);
|
||||
if (!contains(exclude, getCanonicalPath(name))) {
|
||||
let stat = _fs.statSync(name);
|
||||
const stat = _fs.statSync(name);
|
||||
if (stat.isFile()) {
|
||||
if (!extension || fileExtensionIs(name, extension)) {
|
||||
result.push(name);
|
||||
|
@ -375,7 +375,7 @@ namespace ts {
|
|||
}
|
||||
}
|
||||
}
|
||||
for (let current of directories) {
|
||||
for (const current of directories) {
|
||||
visitDirectory(current);
|
||||
}
|
||||
}
|
||||
|
@ -400,7 +400,7 @@ namespace ts {
|
|||
return _fs.watch(fileName, (eventName: string, relativeFileName: string) => callback(fileName));
|
||||
}
|
||||
|
||||
let watchedFile = watchedFileSet.addFile(fileName, callback);
|
||||
const watchedFile = watchedFileSet.addFile(fileName, callback);
|
||||
return {
|
||||
close: () => watchedFileSet.removeFile(watchedFile)
|
||||
};
|
||||
|
|
|
@ -9,7 +9,7 @@ namespace ts {
|
|||
let reportDiagnostic = reportDiagnosticSimply;
|
||||
|
||||
function reportDiagnostics(diagnostics: Diagnostic[], host: CompilerHost): void {
|
||||
for (let diagnostic of diagnostics) {
|
||||
for (const diagnostic of diagnostics) {
|
||||
reportDiagnostic(diagnostic, host);
|
||||
}
|
||||
}
|
||||
|
@ -19,15 +19,15 @@ namespace ts {
|
|||
* and if it is, attempts to set the appropriate language.
|
||||
*/
|
||||
function validateLocaleAndSetLanguage(locale: string, errors: Diagnostic[]): boolean {
|
||||
let matchResult = /^([a-z]+)([_\-]([a-z]+))?$/.exec(locale.toLowerCase());
|
||||
const matchResult = /^([a-z]+)([_\-]([a-z]+))?$/.exec(locale.toLowerCase());
|
||||
|
||||
if (!matchResult) {
|
||||
errors.push(createCompilerDiagnostic(Diagnostics.Locale_must_be_of_the_form_language_or_language_territory_For_example_0_or_1, "en", "ja-jp"));
|
||||
return false;
|
||||
}
|
||||
|
||||
let language = matchResult[1];
|
||||
let territory = matchResult[3];
|
||||
const language = matchResult[1];
|
||||
const territory = matchResult[3];
|
||||
|
||||
// First try the entire locale, then fall back to just language if that's all we have.
|
||||
if (!trySetLanguageAndTerritory(language, territory, errors) &&
|
||||
|
@ -41,8 +41,8 @@ namespace ts {
|
|||
}
|
||||
|
||||
function trySetLanguageAndTerritory(language: string, territory: string, errors: Diagnostic[]): boolean {
|
||||
let compilerFilePath = normalizePath(sys.getExecutingFilePath());
|
||||
let containingDirectoryPath = getDirectoryPath(compilerFilePath);
|
||||
const compilerFilePath = normalizePath(sys.getExecutingFilePath());
|
||||
const containingDirectoryPath = getDirectoryPath(compilerFilePath);
|
||||
|
||||
let filePath = combinePaths(containingDirectoryPath, language);
|
||||
|
||||
|
@ -85,7 +85,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
function getDiagnosticText(message: DiagnosticMessage, ...args: any[]): string {
|
||||
let diagnostic = createCompilerDiagnostic.apply(undefined, arguments);
|
||||
const diagnostic = createCompilerDiagnostic.apply(undefined, arguments);
|
||||
return <string>diagnostic.messageText;
|
||||
}
|
||||
|
||||
|
@ -102,7 +102,7 @@ namespace ts {
|
|||
output += `${ relativeFileName }(${ line + 1 },${ character + 1 }): `;
|
||||
}
|
||||
|
||||
let category = DiagnosticCategory[diagnostic.category].toLowerCase();
|
||||
const category = DiagnosticCategory[diagnostic.category].toLowerCase();
|
||||
output += `${ category } TS${ diagnostic.code }: ${ flattenDiagnosticMessageText(diagnostic.messageText, sys.newLine) }${ sys.newLine }`;
|
||||
|
||||
sys.write(output);
|
||||
|
@ -130,13 +130,13 @@ namespace ts {
|
|||
let output = "";
|
||||
|
||||
if (diagnostic.file) {
|
||||
let { start, length, file } = diagnostic;
|
||||
let { line: firstLine, character: firstLineChar } = getLineAndCharacterOfPosition(file, start);
|
||||
let { line: lastLine, character: lastLineChar } = getLineAndCharacterOfPosition(file, start + length);
|
||||
const { start, length, file } = diagnostic;
|
||||
const { line: firstLine, character: firstLineChar } = getLineAndCharacterOfPosition(file, start);
|
||||
const { line: lastLine, character: lastLineChar } = getLineAndCharacterOfPosition(file, start + length);
|
||||
const lastLineInFile = getLineAndCharacterOfPosition(file, file.text.length).line;
|
||||
const relativeFileName = getRelativeFileName(file.fileName, host);
|
||||
|
||||
let hasMoreThanFiveLines = (lastLine - firstLine) >= 4;
|
||||
const hasMoreThanFiveLines = (lastLine - firstLine) >= 4;
|
||||
let gutterWidth = (lastLine + 1 + "").length;
|
||||
if (hasMoreThanFiveLines) {
|
||||
gutterWidth = Math.max(elipsis.length, gutterWidth);
|
||||
|
@ -151,8 +151,8 @@ namespace ts {
|
|||
i = lastLine - 1;
|
||||
}
|
||||
|
||||
let lineStart = getPositionOfLineAndCharacter(file, i, 0);
|
||||
let lineEnd = i < lastLineInFile ? getPositionOfLineAndCharacter(file, i + 1, 0) : file.text.length;
|
||||
const lineStart = getPositionOfLineAndCharacter(file, i, 0);
|
||||
const lineEnd = i < lastLineInFile ? getPositionOfLineAndCharacter(file, i + 1, 0) : file.text.length;
|
||||
let lineContent = file.text.slice(lineStart, lineEnd);
|
||||
lineContent = lineContent.replace(/\s+$/g, ""); // trim from end
|
||||
lineContent = lineContent.replace("\t", " "); // convert tabs to single spaces
|
||||
|
@ -200,7 +200,7 @@ namespace ts {
|
|||
let output = new Date().toLocaleTimeString() + " - ";
|
||||
|
||||
if (diagnostic.file) {
|
||||
let loc = getLineAndCharacterOfPosition(diagnostic.file, diagnostic.start);
|
||||
const loc = getLineAndCharacterOfPosition(diagnostic.file, diagnostic.start);
|
||||
output += `${ diagnostic.file.fileName }(${ loc.line + 1 },${ loc.character + 1 }): `;
|
||||
}
|
||||
|
||||
|
@ -241,7 +241,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
export function executeCommandLine(args: string[]): void {
|
||||
let commandLine = parseCommandLine(args);
|
||||
const commandLine = parseCommandLine(args);
|
||||
let configFileName: string; // Configuration file name (if any)
|
||||
let cachedConfigFileText: string; // Cached configuration file text, used for reparsing (if any)
|
||||
let configFileWatcher: FileWatcher; // Configuration file watcher
|
||||
|
@ -302,7 +302,7 @@ namespace ts {
|
|||
}
|
||||
}
|
||||
else if (commandLine.fileNames.length === 0 && isJSONSupported()) {
|
||||
let searchPath = normalizePath(sys.getCurrentDirectory());
|
||||
const searchPath = normalizePath(sys.getCurrentDirectory());
|
||||
configFileName = findConfigFile(searchPath);
|
||||
}
|
||||
|
||||
|
@ -322,7 +322,7 @@ namespace ts {
|
|||
configFileWatcher = sys.watchFile(configFileName, configFileChanged);
|
||||
}
|
||||
if (sys.watchDirectory && configFileName) {
|
||||
let directory = ts.getDirectoryPath(configFileName);
|
||||
const directory = ts.getDirectoryPath(configFileName);
|
||||
directoryWatcher = sys.watchDirectory(
|
||||
// When the configFileName is just "tsconfig.json", the watched directory should be
|
||||
// the current direcotry; if there is a given "project" parameter, then the configFileName
|
||||
|
@ -340,16 +340,16 @@ namespace ts {
|
|||
cachedConfigFileText = sys.readFile(configFileName);
|
||||
}
|
||||
catch (e) {
|
||||
let error = createCompilerDiagnostic(Diagnostics.Cannot_read_file_0_Colon_1, configFileName, e.message);
|
||||
const error = createCompilerDiagnostic(Diagnostics.Cannot_read_file_0_Colon_1, configFileName, e.message);
|
||||
reportWatchDiagnostic(error);
|
||||
sys.exit(ExitStatus.DiagnosticsPresent_OutputsSkipped);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
let result = parseConfigFileTextToJson(configFileName, cachedConfigFileText);
|
||||
let configObject = result.config;
|
||||
let configParseResult = parseJsonConfigFileContent(configObject, sys, getDirectoryPath(configFileName));
|
||||
const result = parseConfigFileTextToJson(configFileName, cachedConfigFileText);
|
||||
const configObject = result.config;
|
||||
const configParseResult = parseJsonConfigFileContent(configObject, sys, getDirectoryPath(configFileName));
|
||||
if (configParseResult.errors.length > 0) {
|
||||
reportDiagnostics(configParseResult.errors, /* compilerHost */ undefined);
|
||||
sys.exit(ExitStatus.DiagnosticsPresent_OutputsSkipped);
|
||||
|
@ -363,7 +363,7 @@ namespace ts {
|
|||
|
||||
if (!cachedProgram) {
|
||||
if (configFileName) {
|
||||
let configParseResult = parseConfigFile();
|
||||
const configParseResult = parseConfigFile();
|
||||
rootFileNames = configParseResult.fileNames;
|
||||
compilerOptions = extend(commandLine.options, configParseResult.options);
|
||||
}
|
||||
|
@ -386,7 +386,7 @@ namespace ts {
|
|||
// reset the cache of existing files
|
||||
cachedExistingFiles = {};
|
||||
|
||||
let compileResult = compile(rootFileNames, compilerOptions, compilerHost);
|
||||
const compileResult = compile(rootFileNames, compilerOptions, compilerHost);
|
||||
|
||||
if (!compilerOptions.watch) {
|
||||
return sys.exit(compileResult.exitStatus);
|
||||
|
@ -406,14 +406,14 @@ namespace ts {
|
|||
function getSourceFile(fileName: string, languageVersion: ScriptTarget, onError?: (message: string) => void) {
|
||||
// Return existing SourceFile object if one is available
|
||||
if (cachedProgram) {
|
||||
let sourceFile = cachedProgram.getSourceFile(fileName);
|
||||
const sourceFile = cachedProgram.getSourceFile(fileName);
|
||||
// A modified source file has no watcher and should not be reused
|
||||
if (sourceFile && sourceFile.fileWatcher) {
|
||||
return sourceFile;
|
||||
}
|
||||
}
|
||||
// Use default host function
|
||||
let sourceFile = hostGetSourceFile(fileName, languageVersion, onError);
|
||||
const sourceFile = hostGetSourceFile(fileName, languageVersion, onError);
|
||||
if (sourceFile && compilerOptions.watch) {
|
||||
// Attach a file watcher
|
||||
sourceFile.fileWatcher = sys.watchFile(sourceFile.fileName, (fileName: string, removed?: boolean) => sourceFileChanged(sourceFile, removed));
|
||||
|
@ -424,7 +424,7 @@ namespace ts {
|
|||
// Change cached program to the given program
|
||||
function setCachedProgram(program: Program) {
|
||||
if (cachedProgram) {
|
||||
let newSourceFiles = program ? program.getSourceFiles() : undefined;
|
||||
const newSourceFiles = program ? program.getSourceFiles() : undefined;
|
||||
forEach(cachedProgram.getSourceFiles(), sourceFile => {
|
||||
if (!(newSourceFiles && contains(newSourceFiles, sourceFile))) {
|
||||
if (sourceFile.fileWatcher) {
|
||||
|
@ -442,7 +442,7 @@ namespace ts {
|
|||
sourceFile.fileWatcher.close();
|
||||
sourceFile.fileWatcher = undefined;
|
||||
if (removed) {
|
||||
let index = rootFileNames.indexOf(sourceFile.fileName);
|
||||
const index = rootFileNames.indexOf(sourceFile.fileName);
|
||||
if (index >= 0) {
|
||||
rootFileNames.splice(index, 1);
|
||||
}
|
||||
|
@ -473,9 +473,9 @@ namespace ts {
|
|||
}
|
||||
|
||||
function directoryChangeHandler() {
|
||||
let parsedCommandLine = parseConfigFile();
|
||||
let newFileNames = ts.map(parsedCommandLine.fileNames, compilerHost.getCanonicalFileName);
|
||||
let canonicalRootFileNames = ts.map(rootFileNames, compilerHost.getCanonicalFileName);
|
||||
const parsedCommandLine = parseConfigFile();
|
||||
const newFileNames = ts.map(parsedCommandLine.fileNames, compilerHost.getCanonicalFileName);
|
||||
const canonicalRootFileNames = ts.map(rootFileNames, compilerHost.getCanonicalFileName);
|
||||
|
||||
// We check if the project file list has changed. If so, we just throw away the old program and start fresh.
|
||||
if (!arrayIsEqualTo(newFileNames && newFileNames.sort(), canonicalRootFileNames && canonicalRootFileNames.sort())) {
|
||||
|
@ -509,8 +509,8 @@ namespace ts {
|
|||
checkTime = 0;
|
||||
emitTime = 0;
|
||||
|
||||
let program = createProgram(fileNames, compilerOptions, compilerHost);
|
||||
let exitStatus = compileProgram();
|
||||
const program = createProgram(fileNames, compilerOptions, compilerHost);
|
||||
const exitStatus = compileProgram();
|
||||
|
||||
if (compilerOptions.listFiles) {
|
||||
forEach(program.getSourceFiles(), file => {
|
||||
|
@ -519,7 +519,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
if (compilerOptions.diagnostics) {
|
||||
let memoryUsed = sys.getMemoryUsage ? sys.getMemoryUsage() : -1;
|
||||
const memoryUsed = sys.getMemoryUsage ? sys.getMemoryUsage() : -1;
|
||||
reportCountStatistic("Files", program.getSourceFiles().length);
|
||||
reportCountStatistic("Lines", countLines(program));
|
||||
reportCountStatistic("Nodes", program.getNodeCount());
|
||||
|
@ -572,7 +572,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
// Otherwise, emit and report any errors we ran into.
|
||||
let emitOutput = program.emit();
|
||||
const emitOutput = program.emit();
|
||||
reportDiagnostics(emitOutput.diagnostics, compilerHost);
|
||||
|
||||
// If the emitter didn't emit anything, then pass that value along.
|
||||
|
@ -598,8 +598,8 @@ namespace ts {
|
|||
let output = "";
|
||||
|
||||
// We want to align our "syntax" and "examples" commands to a certain margin.
|
||||
let syntaxLength = getDiagnosticText(Diagnostics.Syntax_Colon_0, "").length;
|
||||
let examplesLength = getDiagnosticText(Diagnostics.Examples_Colon_0, "").length;
|
||||
const syntaxLength = getDiagnosticText(Diagnostics.Syntax_Colon_0, "").length;
|
||||
const examplesLength = getDiagnosticText(Diagnostics.Examples_Colon_0, "").length;
|
||||
let marginLength = Math.max(syntaxLength, examplesLength);
|
||||
|
||||
// Build up the syntactic skeleton.
|
||||
|
@ -610,7 +610,7 @@ namespace ts {
|
|||
output += sys.newLine + sys.newLine;
|
||||
|
||||
// Build up the list of examples.
|
||||
let padding = makePadding(marginLength);
|
||||
const padding = makePadding(marginLength);
|
||||
output += getDiagnosticText(Diagnostics.Examples_Colon_0, makePadding(marginLength - examplesLength) + "tsc hello.ts") + sys.newLine;
|
||||
output += padding + "tsc --out file.js file.ts" + sys.newLine;
|
||||
output += padding + "tsc @args.txt" + sys.newLine;
|
||||
|
@ -619,17 +619,17 @@ namespace ts {
|
|||
output += getDiagnosticText(Diagnostics.Options_Colon) + sys.newLine;
|
||||
|
||||
// Sort our options by their names, (e.g. "--noImplicitAny" comes before "--watch")
|
||||
let optsList = filter(optionDeclarations.slice(), v => !v.experimental);
|
||||
const optsList = filter(optionDeclarations.slice(), v => !v.experimental);
|
||||
optsList.sort((a, b) => compareValues<string>(a.name.toLowerCase(), b.name.toLowerCase()));
|
||||
|
||||
// We want our descriptions to align at the same column in our output,
|
||||
// so we keep track of the longest option usage string.
|
||||
marginLength = 0;
|
||||
let usageColumn: string[] = []; // Things like "-d, --declaration" go in here.
|
||||
let descriptionColumn: string[] = [];
|
||||
const usageColumn: string[] = []; // Things like "-d, --declaration" go in here.
|
||||
const descriptionColumn: string[] = [];
|
||||
|
||||
for (let i = 0; i < optsList.length; i++) {
|
||||
let option = optsList[i];
|
||||
const option = optsList[i];
|
||||
|
||||
// If an option lacks a description,
|
||||
// it is not officially supported.
|
||||
|
@ -655,15 +655,15 @@ namespace ts {
|
|||
}
|
||||
|
||||
// Special case that can't fit in the loop.
|
||||
let usageText = " @<" + getDiagnosticText(Diagnostics.file) + ">";
|
||||
const usageText = " @<" + getDiagnosticText(Diagnostics.file) + ">";
|
||||
usageColumn.push(usageText);
|
||||
descriptionColumn.push(getDiagnosticText(Diagnostics.Insert_command_line_options_and_files_from_a_file));
|
||||
marginLength = Math.max(usageText.length, marginLength);
|
||||
|
||||
// Print out each row, aligning all the descriptions on the same column.
|
||||
for (let i = 0; i < usageColumn.length; i++) {
|
||||
let usage = usageColumn[i];
|
||||
let description = descriptionColumn[i];
|
||||
const usage = usageColumn[i];
|
||||
const description = descriptionColumn[i];
|
||||
output += usage + makePadding(marginLength - usage.length + 2) + description + sys.newLine;
|
||||
}
|
||||
|
||||
|
@ -683,14 +683,14 @@ namespace ts {
|
|||
}
|
||||
|
||||
function writeConfigFile(options: CompilerOptions, fileNames: string[]) {
|
||||
let currentDirectory = sys.getCurrentDirectory();
|
||||
let file = normalizePath(combinePaths(currentDirectory, "tsconfig.json"));
|
||||
const currentDirectory = sys.getCurrentDirectory();
|
||||
const file = normalizePath(combinePaths(currentDirectory, "tsconfig.json"));
|
||||
if (sys.fileExists(file)) {
|
||||
reportDiagnostic(createCompilerDiagnostic(Diagnostics.A_tsconfig_json_file_is_already_defined_at_Colon_0, file), /* compilerHost */ undefined);
|
||||
}
|
||||
else {
|
||||
let compilerOptions = extend(options, defaultInitCompilerOptions);
|
||||
let configurations: any = {
|
||||
const compilerOptions = extend(options, defaultInitCompilerOptions);
|
||||
const configurations: any = {
|
||||
compilerOptions: serializeCompilerOptions(compilerOptions),
|
||||
exclude: ["node_modules"]
|
||||
};
|
||||
|
@ -707,12 +707,12 @@ namespace ts {
|
|||
return;
|
||||
|
||||
function serializeCompilerOptions(options: CompilerOptions): Map<string | number | boolean> {
|
||||
let result: Map<string | number | boolean> = {};
|
||||
let optionsNameMap = getOptionNameMap().optionNameMap;
|
||||
const result: Map<string | number | boolean> = {};
|
||||
const optionsNameMap = getOptionNameMap().optionNameMap;
|
||||
|
||||
for (let name in options) {
|
||||
for (const name in options) {
|
||||
if (hasProperty(options, name)) {
|
||||
let value = options[name];
|
||||
const value = options[name];
|
||||
switch (name) {
|
||||
case "init":
|
||||
case "watch":
|
||||
|
@ -729,8 +729,8 @@ namespace ts {
|
|||
}
|
||||
else {
|
||||
// Enum
|
||||
let typeMap = <Map<number>>optionDefinition.type;
|
||||
for (let key in typeMap) {
|
||||
const typeMap = <Map<number>>optionDefinition.type;
|
||||
for (const key in typeMap) {
|
||||
if (hasProperty(typeMap, key)) {
|
||||
if (typeMap[key] === value)
|
||||
result[name] = key;
|
||||
|
|
|
@ -16,9 +16,9 @@ namespace ts {
|
|||
}
|
||||
|
||||
export function getDeclarationOfKind(symbol: Symbol, kind: SyntaxKind): Declaration {
|
||||
let declarations = symbol.declarations;
|
||||
const declarations = symbol.declarations;
|
||||
if (declarations) {
|
||||
for (let declaration of declarations) {
|
||||
for (const declaration of declarations) {
|
||||
if (declaration.kind === kind) {
|
||||
return declaration;
|
||||
}
|
||||
|
@ -43,12 +43,12 @@ namespace ts {
|
|||
}
|
||||
|
||||
// Pool writers to avoid needing to allocate them for every symbol we write.
|
||||
let stringWriters: StringSymbolWriter[] = [];
|
||||
const stringWriters: StringSymbolWriter[] = [];
|
||||
export function getSingleLineStringWriter(): StringSymbolWriter {
|
||||
if (stringWriters.length === 0) {
|
||||
let str = "";
|
||||
|
||||
let writeText: (text: string) => void = text => str += text;
|
||||
const writeText: (text: string) => void = text => str += text;
|
||||
return {
|
||||
string: () => str,
|
||||
writeKeyword: writeText,
|
||||
|
@ -92,7 +92,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
for (let i = 0; i < array1.length; ++i) {
|
||||
let equals = equaler ? equaler(array1[i], array2[i]) : array1[i] === array2[i];
|
||||
const equals = equaler ? equaler(array1[i], array2[i]) : array1[i] === array2[i];
|
||||
if (!equals) {
|
||||
return false;
|
||||
}
|
||||
|
@ -128,7 +128,7 @@ namespace ts {
|
|||
// A node is considered to contain a parse error if:
|
||||
// a) the parser explicitly marked that it had an error
|
||||
// b) any of it's children reported that it had an error.
|
||||
let thisNodeOrAnySubNodesHasError = ((node.parserContextFlags & ParserContextFlags.ThisNodeHasError) !== 0) ||
|
||||
const thisNodeOrAnySubNodesHasError = ((node.parserContextFlags & ParserContextFlags.ThisNodeHasError) !== 0) ||
|
||||
forEachChild(node, containsParseError);
|
||||
|
||||
// If so, mark ourselves accordingly.
|
||||
|
@ -157,8 +157,8 @@ namespace ts {
|
|||
|
||||
// This is a useful function for debugging purposes.
|
||||
export function nodePosToString(node: Node): string {
|
||||
let file = getSourceFileOfNode(node);
|
||||
let loc = getLineAndCharacterOfPosition(file, node.pos);
|
||||
const file = getSourceFileOfNode(node);
|
||||
const loc = getLineAndCharacterOfPosition(file, node.pos);
|
||||
return `${ file.fileName }(${ loc.line + 1 },${ loc.character + 1 })`;
|
||||
}
|
||||
|
||||
|
@ -213,7 +213,7 @@ namespace ts {
|
|||
return "";
|
||||
}
|
||||
|
||||
let text = sourceFile.text;
|
||||
const text = sourceFile.text;
|
||||
return text.substring(includeTrivia ? node.pos : skipTrivia(text, node.pos), node.end);
|
||||
}
|
||||
|
||||
|
@ -294,14 +294,14 @@ namespace ts {
|
|||
}
|
||||
|
||||
export function createDiagnosticForNode(node: Node, message: DiagnosticMessage, arg0?: any, arg1?: any, arg2?: any): Diagnostic {
|
||||
let sourceFile = getSourceFileOfNode(node);
|
||||
let span = getErrorSpanForNode(sourceFile, node);
|
||||
const sourceFile = getSourceFileOfNode(node);
|
||||
const span = getErrorSpanForNode(sourceFile, node);
|
||||
return createFileDiagnostic(sourceFile, span.start, span.length, message, arg0, arg1, arg2);
|
||||
}
|
||||
|
||||
export function createDiagnosticForNodeFromMessageChain(node: Node, messageChain: DiagnosticMessageChain): Diagnostic {
|
||||
let sourceFile = getSourceFileOfNode(node);
|
||||
let span = getErrorSpanForNode(sourceFile, node);
|
||||
const sourceFile = getSourceFileOfNode(node);
|
||||
const span = getErrorSpanForNode(sourceFile, node);
|
||||
return {
|
||||
file: sourceFile,
|
||||
start: span.start,
|
||||
|
@ -313,9 +313,9 @@ namespace ts {
|
|||
}
|
||||
|
||||
export function getSpanOfTokenAtPosition(sourceFile: SourceFile, pos: number): TextSpan {
|
||||
let scanner = createScanner(sourceFile.languageVersion, /*skipTrivia*/ true, sourceFile.languageVariant, sourceFile.text, /*onError:*/ undefined, pos);
|
||||
const scanner = createScanner(sourceFile.languageVersion, /*skipTrivia*/ true, sourceFile.languageVariant, sourceFile.text, /*onError:*/ undefined, pos);
|
||||
scanner.scan();
|
||||
let start = scanner.getTokenPos();
|
||||
const start = scanner.getTokenPos();
|
||||
return createTextSpanFromBounds(start, scanner.getTextPos());
|
||||
}
|
||||
|
||||
|
@ -351,7 +351,7 @@ namespace ts {
|
|||
return getSpanOfTokenAtPosition(sourceFile, node.pos);
|
||||
}
|
||||
|
||||
let pos = nodeIsMissing(errorNode)
|
||||
const pos = nodeIsMissing(errorNode)
|
||||
? errorNode.pos
|
||||
: skipTrivia(sourceFile.text, errorNode.pos);
|
||||
|
||||
|
@ -422,7 +422,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
export function getJsDocComments(node: Node, sourceFileOfNode: SourceFile) {
|
||||
let commentRanges = (node.kind === SyntaxKind.Parameter || node.kind === SyntaxKind.TypeParameter) ?
|
||||
const commentRanges = (node.kind === SyntaxKind.Parameter || node.kind === SyntaxKind.TypeParameter) ?
|
||||
concatenate(getTrailingCommentRanges(sourceFileOfNode.text, node.pos),
|
||||
getLeadingCommentRanges(sourceFileOfNode.text, node.pos)) :
|
||||
getLeadingCommentRangesOfNode(node, sourceFileOfNode);
|
||||
|
@ -579,7 +579,7 @@ namespace ts {
|
|||
return;
|
||||
default:
|
||||
if (isFunctionLike(node)) {
|
||||
let name = (<FunctionLikeDeclaration>node).name;
|
||||
const name = (<FunctionLikeDeclaration>node).name;
|
||||
if (name && name.kind === SyntaxKind.ComputedPropertyName) {
|
||||
// Note that we will not include methods/accessors of a class because they would require
|
||||
// first descending into the class. This is by design.
|
||||
|
@ -1030,7 +1030,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
export function isInstantiatedModule(node: ModuleDeclaration, preserveConstEnums: boolean) {
|
||||
let moduleState = getModuleInstanceState(node);
|
||||
const moduleState = getModuleInstanceState(node);
|
||||
return moduleState === ModuleInstanceState.Instantiated ||
|
||||
(preserveConstEnums && moduleState === ModuleInstanceState.ConstEnumOnly);
|
||||
}
|
||||
|
@ -1053,7 +1053,7 @@ namespace ts {
|
|||
return (<ImportDeclaration>node).moduleSpecifier;
|
||||
}
|
||||
if (node.kind === SyntaxKind.ImportEqualsDeclaration) {
|
||||
let reference = (<ImportEqualsDeclaration>node).moduleReference;
|
||||
const reference = (<ImportEqualsDeclaration>node).moduleReference;
|
||||
if (reference.kind === SyntaxKind.ExternalModuleReference) {
|
||||
return (<ExternalModuleReference>reference).expression;
|
||||
}
|
||||
|
@ -1088,7 +1088,7 @@ namespace ts {
|
|||
|
||||
function getJSDocTag(node: Node, kind: SyntaxKind): JSDocTag {
|
||||
if (node && node.jsDocComment) {
|
||||
for (let tag of node.jsDocComment.tags) {
|
||||
for (const tag of node.jsDocComment.tags) {
|
||||
if (tag.kind === kind) {
|
||||
return tag;
|
||||
}
|
||||
|
@ -1112,14 +1112,14 @@ namespace ts {
|
|||
if (parameter.name && parameter.name.kind === SyntaxKind.Identifier) {
|
||||
// If it's a parameter, see if the parent has a jsdoc comment with an @param
|
||||
// annotation.
|
||||
let parameterName = (<Identifier>parameter.name).text;
|
||||
const parameterName = (<Identifier>parameter.name).text;
|
||||
|
||||
let docComment = parameter.parent.jsDocComment;
|
||||
const docComment = parameter.parent.jsDocComment;
|
||||
if (docComment) {
|
||||
return <JSDocParameterTag>forEach(docComment.tags, t => {
|
||||
if (t.kind === SyntaxKind.JSDocParameterTag) {
|
||||
let parameterTag = <JSDocParameterTag>t;
|
||||
let name = parameterTag.preParameterName || parameterTag.postParameterName;
|
||||
const parameterTag = <JSDocParameterTag>t;
|
||||
const name = parameterTag.preParameterName || parameterTag.postParameterName;
|
||||
if (name.text === parameterName) {
|
||||
return t;
|
||||
}
|
||||
|
@ -1140,7 +1140,7 @@ namespace ts {
|
|||
return true;
|
||||
}
|
||||
|
||||
let paramTag = getCorrespondingJSDocParameterTag(node);
|
||||
const paramTag = getCorrespondingJSDocParameterTag(node);
|
||||
if (paramTag && paramTag.typeExpression) {
|
||||
return paramTag.typeExpression.type.kind === SyntaxKind.JSDocVariadicType;
|
||||
}
|
||||
|
@ -1270,7 +1270,7 @@ namespace ts {
|
|||
return false;
|
||||
}
|
||||
|
||||
let parent = name.parent;
|
||||
const parent = name.parent;
|
||||
if (parent.kind === SyntaxKind.ImportSpecifier || parent.kind === SyntaxKind.ExportSpecifier) {
|
||||
if ((<ImportOrExportSpecifier>parent).propertyName) {
|
||||
return true;
|
||||
|
@ -1337,23 +1337,23 @@ namespace ts {
|
|||
}
|
||||
|
||||
export function getClassExtendsHeritageClauseElement(node: ClassLikeDeclaration) {
|
||||
let heritageClause = getHeritageClause(node.heritageClauses, SyntaxKind.ExtendsKeyword);
|
||||
const heritageClause = getHeritageClause(node.heritageClauses, SyntaxKind.ExtendsKeyword);
|
||||
return heritageClause && heritageClause.types.length > 0 ? heritageClause.types[0] : undefined;
|
||||
}
|
||||
|
||||
export function getClassImplementsHeritageClauseElements(node: ClassLikeDeclaration) {
|
||||
let heritageClause = getHeritageClause(node.heritageClauses, SyntaxKind.ImplementsKeyword);
|
||||
const heritageClause = getHeritageClause(node.heritageClauses, SyntaxKind.ImplementsKeyword);
|
||||
return heritageClause ? heritageClause.types : undefined;
|
||||
}
|
||||
|
||||
export function getInterfaceBaseTypeNodes(node: InterfaceDeclaration) {
|
||||
let heritageClause = getHeritageClause(node.heritageClauses, SyntaxKind.ExtendsKeyword);
|
||||
const heritageClause = getHeritageClause(node.heritageClauses, SyntaxKind.ExtendsKeyword);
|
||||
return heritageClause ? heritageClause.types : undefined;
|
||||
}
|
||||
|
||||
export function getHeritageClause(clauses: NodeArray<HeritageClause>, kind: SyntaxKind) {
|
||||
if (clauses) {
|
||||
for (let clause of clauses) {
|
||||
for (const clause of clauses) {
|
||||
if (clause.token === kind) {
|
||||
return clause;
|
||||
}
|
||||
|
@ -1365,7 +1365,7 @@ namespace ts {
|
|||
|
||||
export function tryResolveScriptReference(host: ScriptReferenceHost, sourceFile: SourceFile, reference: FileReference) {
|
||||
if (!host.getCompilerOptions().noResolve) {
|
||||
let referenceFileName = isRootedDiskPath(reference.fileName) ? reference.fileName : combinePaths(getDirectoryPath(sourceFile.fileName), reference.fileName);
|
||||
const referenceFileName = isRootedDiskPath(reference.fileName) ? reference.fileName : combinePaths(getDirectoryPath(sourceFile.fileName), reference.fileName);
|
||||
return host.getSourceFile(referenceFileName);
|
||||
}
|
||||
}
|
||||
|
@ -1381,8 +1381,8 @@ namespace ts {
|
|||
}
|
||||
|
||||
export function getFileReferenceFromReferencePath(comment: string, commentRange: CommentRange): ReferencePathMatchResult {
|
||||
let simpleReferenceRegEx = /^\/\/\/\s*<reference\s+/gim;
|
||||
let isNoDefaultLibRegEx = /^(\/\/\/\s*<reference\s+no-default-lib\s*=\s*)('|")(.+?)\2\s*\/>/gim;
|
||||
const simpleReferenceRegEx = /^\/\/\/\s*<reference\s+/gim;
|
||||
const isNoDefaultLibRegEx = /^(\/\/\/\s*<reference\s+no-default-lib\s*=\s*)('|")(.+?)\2\s*\/>/gim;
|
||||
if (simpleReferenceRegEx.exec(comment)) {
|
||||
if (isNoDefaultLibRegEx.exec(comment)) {
|
||||
return {
|
||||
|
@ -1390,10 +1390,10 @@ namespace ts {
|
|||
};
|
||||
}
|
||||
else {
|
||||
let matchResult = fullTripleSlashReferencePathRegEx.exec(comment);
|
||||
const matchResult = fullTripleSlashReferencePathRegEx.exec(comment);
|
||||
if (matchResult) {
|
||||
let start = commentRange.pos;
|
||||
let end = commentRange.end;
|
||||
const start = commentRange.pos;
|
||||
const end = commentRange.end;
|
||||
return {
|
||||
fileReference: {
|
||||
pos: start,
|
||||
|
@ -1462,9 +1462,9 @@ namespace ts {
|
|||
return (<Identifier | LiteralExpression>name).text;
|
||||
}
|
||||
if (name.kind === SyntaxKind.ComputedPropertyName) {
|
||||
let nameExpression = (<ComputedPropertyName>name).expression;
|
||||
const nameExpression = (<ComputedPropertyName>name).expression;
|
||||
if (isWellKnownSymbolSyntactically(nameExpression)) {
|
||||
let rightHandSideName = (<PropertyAccessExpression>nameExpression).name.text;
|
||||
const rightHandSideName = (<PropertyAccessExpression>nameExpression).name.text;
|
||||
return getPropertyNameForKnownSymbolName(rightHandSideName);
|
||||
}
|
||||
}
|
||||
|
@ -1501,7 +1501,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
export function isParameterDeclaration(node: VariableLikeDeclaration) {
|
||||
let root = getRootDeclaration(node);
|
||||
const root = getRootDeclaration(node);
|
||||
return root.kind === SyntaxKind.Parameter;
|
||||
}
|
||||
|
||||
|
@ -1518,12 +1518,12 @@ namespace ts {
|
|||
|
||||
export function cloneEntityName(node: EntityName): EntityName {
|
||||
if (node.kind === SyntaxKind.Identifier) {
|
||||
let clone = <Identifier>createSynthesizedNode(SyntaxKind.Identifier);
|
||||
const clone = <Identifier>createSynthesizedNode(SyntaxKind.Identifier);
|
||||
clone.text = (<Identifier>node).text;
|
||||
return clone;
|
||||
}
|
||||
else {
|
||||
let clone = <QualifiedName>createSynthesizedNode(SyntaxKind.QualifiedName);
|
||||
const clone = <QualifiedName>createSynthesizedNode(SyntaxKind.QualifiedName);
|
||||
clone.left = cloneEntityName((<QualifiedName>node).left);
|
||||
clone.left.parent = clone;
|
||||
clone.right = <Identifier>cloneEntityName((<QualifiedName>node).right);
|
||||
|
@ -1537,13 +1537,13 @@ namespace ts {
|
|||
}
|
||||
|
||||
export function createSynthesizedNode(kind: SyntaxKind, startsOnNewLine?: boolean): Node {
|
||||
let node = <SynthesizedNode>createNode(kind, /* pos */ -1, /* end */ -1);
|
||||
const node = <SynthesizedNode>createNode(kind, /* pos */ -1, /* end */ -1);
|
||||
node.startsOnNewLine = startsOnNewLine;
|
||||
return node;
|
||||
}
|
||||
|
||||
export function createSynthesizedNodeArray(): NodeArray<any> {
|
||||
let array = <NodeArray<any>>[];
|
||||
const array = <NodeArray<any>>[];
|
||||
array.pos = -1;
|
||||
array.end = -1;
|
||||
return array;
|
||||
|
@ -1551,7 +1551,7 @@ namespace ts {
|
|||
|
||||
export function createDiagnosticCollection(): DiagnosticCollection {
|
||||
let nonFileDiagnostics: Diagnostic[] = [];
|
||||
let fileDiagnostics: Map<Diagnostic[]> = {};
|
||||
const fileDiagnostics: Map<Diagnostic[]> = {};
|
||||
|
||||
let diagnosticsModified = false;
|
||||
let modificationCount = 0;
|
||||
|
@ -1573,7 +1573,7 @@ namespace ts {
|
|||
return;
|
||||
}
|
||||
|
||||
for (let diagnostic of fileDiagnostics[newFile.fileName]) {
|
||||
for (const diagnostic of fileDiagnostics[newFile.fileName]) {
|
||||
diagnostic.file = newFile;
|
||||
}
|
||||
}
|
||||
|
@ -1607,14 +1607,14 @@ namespace ts {
|
|||
return fileDiagnostics[fileName] || [];
|
||||
}
|
||||
|
||||
let allDiagnostics: Diagnostic[] = [];
|
||||
const allDiagnostics: Diagnostic[] = [];
|
||||
function pushDiagnostic(d: Diagnostic) {
|
||||
allDiagnostics.push(d);
|
||||
}
|
||||
|
||||
forEach(nonFileDiagnostics, pushDiagnostic);
|
||||
|
||||
for (let key in fileDiagnostics) {
|
||||
for (const key in fileDiagnostics) {
|
||||
if (hasProperty(fileDiagnostics, key)) {
|
||||
forEach(fileDiagnostics[key], pushDiagnostic);
|
||||
}
|
||||
|
@ -1631,7 +1631,7 @@ namespace ts {
|
|||
diagnosticsModified = false;
|
||||
nonFileDiagnostics = sortAndDeduplicateDiagnostics(nonFileDiagnostics);
|
||||
|
||||
for (let key in fileDiagnostics) {
|
||||
for (const key in fileDiagnostics) {
|
||||
if (hasProperty(fileDiagnostics, key)) {
|
||||
fileDiagnostics[key] = sortAndDeduplicateDiagnostics(fileDiagnostics[key]);
|
||||
}
|
||||
|
@ -1644,8 +1644,8 @@ namespace ts {
|
|||
// the language service. These characters should be escaped when printing, and if any characters are added,
|
||||
// the map below must be updated. Note that this regexp *does not* include the 'delete' character.
|
||||
// There is no reason for this other than that JSON.stringify does not handle it either.
|
||||
let escapedCharsRegExp = /[\\\"\u0000-\u001f\t\v\f\b\r\n\u2028\u2029\u0085]/g;
|
||||
let escapedCharsMap: Map<string> = {
|
||||
const escapedCharsRegExp = /[\\\"\u0000-\u001f\t\v\f\b\r\n\u2028\u2029\u0085]/g;
|
||||
const escapedCharsMap: Map<string> = {
|
||||
"\0": "\\0",
|
||||
"\t": "\\t",
|
||||
"\v": "\\v",
|
||||
|
@ -1676,17 +1676,17 @@ namespace ts {
|
|||
}
|
||||
|
||||
export function isIntrinsicJsxName(name: string) {
|
||||
let ch = name.substr(0, 1);
|
||||
const ch = name.substr(0, 1);
|
||||
return ch.toLowerCase() === ch;
|
||||
}
|
||||
|
||||
function get16BitUnicodeEscapeSequence(charCode: number): string {
|
||||
let hexCharCode = charCode.toString(16).toUpperCase();
|
||||
let paddedHexCode = ("0000" + hexCharCode).slice(-4);
|
||||
const hexCharCode = charCode.toString(16).toUpperCase();
|
||||
const paddedHexCode = ("0000" + hexCharCode).slice(-4);
|
||||
return "\\u" + paddedHexCode;
|
||||
}
|
||||
|
||||
let nonAsciiCharacters = /[^\u0000-\u007F]/g;
|
||||
const nonAsciiCharacters = /[^\u0000-\u007F]/g;
|
||||
export function escapeNonAsciiCharacters(s: string): string {
|
||||
// Replace non-ASCII characters with '\uNNNN' escapes if any exist.
|
||||
// Otherwise just return the original string.
|
||||
|
@ -1710,7 +1710,7 @@ namespace ts {
|
|||
getIndent(): number;
|
||||
}
|
||||
|
||||
let indentStrings: string[] = ["", " "];
|
||||
const indentStrings: string[] = ["", " "];
|
||||
export function getIndentString(level: number) {
|
||||
if (indentStrings[level] === undefined) {
|
||||
indentStrings[level] = getIndentString(level - 1) + indentStrings[1];
|
||||
|
@ -1751,7 +1751,7 @@ namespace ts {
|
|||
function writeLiteral(s: string) {
|
||||
if (s && s.length) {
|
||||
write(s);
|
||||
let lineStartsOfS = computeLineStarts(s);
|
||||
const lineStartsOfS = computeLineStarts(s);
|
||||
if (lineStartsOfS.length > 1) {
|
||||
lineCount = lineCount + lineStartsOfS.length - 1;
|
||||
linePos = output.length - s.length + lastOrUndefined(lineStartsOfS);
|
||||
|
@ -1789,7 +1789,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
export function getOwnEmitOutputFilePath(sourceFile: SourceFile, host: EmitHost, extension: string) {
|
||||
let compilerOptions = host.getCompilerOptions();
|
||||
const compilerOptions = host.getCompilerOptions();
|
||||
let emitOutputFilePathWithoutExtension: string;
|
||||
if (compilerOptions.outDir) {
|
||||
emitOutputFilePathWithoutExtension = removeFileExtension(getSourceFilePathInNewDir(sourceFile, host, compilerOptions.outDir));
|
||||
|
@ -1862,8 +1862,8 @@ namespace ts {
|
|||
forEach(declarations, (member: Declaration) => {
|
||||
if ((member.kind === SyntaxKind.GetAccessor || member.kind === SyntaxKind.SetAccessor)
|
||||
&& (member.flags & NodeFlags.Static) === (accessor.flags & NodeFlags.Static)) {
|
||||
let memberName = getPropertyNameForPropertyNameNode(member.name);
|
||||
let accessorName = getPropertyNameForPropertyNameNode(accessor.name);
|
||||
const memberName = getPropertyNameForPropertyNameNode(member.name);
|
||||
const accessorName = getPropertyNameForPropertyNameNode(accessor.name);
|
||||
if (memberName === accessorName) {
|
||||
if (!firstAccessor) {
|
||||
firstAccessor = <AccessorDeclaration>member;
|
||||
|
@ -1946,13 +1946,13 @@ namespace ts {
|
|||
}
|
||||
|
||||
if (leadingComments) {
|
||||
let detachedComments: CommentRange[] = [];
|
||||
const detachedComments: CommentRange[] = [];
|
||||
let lastComment: CommentRange;
|
||||
|
||||
for (let comment of leadingComments) {
|
||||
for (const comment of leadingComments) {
|
||||
if (lastComment) {
|
||||
let lastCommentLine = getLineOfLocalPosition(currentSourceFile, lastComment.end);
|
||||
let commentLine = getLineOfLocalPosition(currentSourceFile, comment.pos);
|
||||
const lastCommentLine = getLineOfLocalPosition(currentSourceFile, lastComment.end);
|
||||
const commentLine = getLineOfLocalPosition(currentSourceFile, comment.pos);
|
||||
|
||||
if (commentLine >= lastCommentLine + 2) {
|
||||
// There was a blank line between the last comment and this comment. This
|
||||
|
@ -1970,8 +1970,8 @@ namespace ts {
|
|||
// All comments look like they could have been part of the copyright header. Make
|
||||
// sure there is at least one blank line between it and the node. If not, it's not
|
||||
// a copyright header.
|
||||
let lastCommentLine = getLineOfLocalPosition(currentSourceFile, lastOrUndefined(detachedComments).end);
|
||||
let nodeLine = getLineOfLocalPosition(currentSourceFile, skipTrivia(currentSourceFile.text, node.pos));
|
||||
const lastCommentLine = getLineOfLocalPosition(currentSourceFile, lastOrUndefined(detachedComments).end);
|
||||
const nodeLine = getLineOfLocalPosition(currentSourceFile, skipTrivia(currentSourceFile.text, node.pos));
|
||||
if (nodeLine >= lastCommentLine + 2) {
|
||||
// Valid detachedComments
|
||||
emitNewLineBeforeLeadingComments(currentSourceFile, writer, node, leadingComments);
|
||||
|
@ -1991,11 +1991,11 @@ namespace ts {
|
|||
|
||||
export function writeCommentRange(currentSourceFile: SourceFile, writer: EmitTextWriter, comment: CommentRange, newLine: string) {
|
||||
if (currentSourceFile.text.charCodeAt(comment.pos + 1) === CharacterCodes.asterisk) {
|
||||
let firstCommentLineAndCharacter = getLineAndCharacterOfPosition(currentSourceFile, comment.pos);
|
||||
let lineCount = getLineStarts(currentSourceFile).length;
|
||||
const firstCommentLineAndCharacter = getLineAndCharacterOfPosition(currentSourceFile, comment.pos);
|
||||
const lineCount = getLineStarts(currentSourceFile).length;
|
||||
let firstCommentLineIndent: number;
|
||||
for (let pos = comment.pos, currentLine = firstCommentLineAndCharacter.line; pos < comment.end; currentLine++) {
|
||||
let nextLineStart = (currentLine + 1) === lineCount
|
||||
const nextLineStart = (currentLine + 1) === lineCount
|
||||
? currentSourceFile.text.length + 1
|
||||
: getStartPositionOfLine(currentLine + 1, currentSourceFile);
|
||||
|
||||
|
@ -2006,7 +2006,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
// These are number of spaces writer is going to write at current indent
|
||||
let currentWriterIndentSpacing = writer.getIndent() * getIndentSize();
|
||||
const currentWriterIndentSpacing = writer.getIndent() * getIndentSize();
|
||||
|
||||
// Number of spaces we want to be writing
|
||||
// eg: Assume writer indent
|
||||
|
@ -2022,10 +2022,10 @@ namespace ts {
|
|||
// More right indented comment */ --4 = 8 - 4 + 11
|
||||
// class c { }
|
||||
// }
|
||||
let spacesToEmit = currentWriterIndentSpacing - firstCommentLineIndent + calculateIndent(pos, nextLineStart);
|
||||
const spacesToEmit = currentWriterIndentSpacing - firstCommentLineIndent + calculateIndent(pos, nextLineStart);
|
||||
if (spacesToEmit > 0) {
|
||||
let numberOfSingleSpacesToEmit = spacesToEmit % getIndentSize();
|
||||
let indentSizeSpaceString = getIndentString((spacesToEmit - numberOfSingleSpacesToEmit) / getIndentSize());
|
||||
const indentSizeSpaceString = getIndentString((spacesToEmit - numberOfSingleSpacesToEmit) / getIndentSize());
|
||||
|
||||
// Write indent size string ( in eg 1: = "", 2: "" , 3: string with 8 spaces 4: string with 12 spaces
|
||||
writer.rawWrite(indentSizeSpaceString);
|
||||
|
@ -2054,8 +2054,8 @@ namespace ts {
|
|||
}
|
||||
|
||||
function writeTrimmedCurrentLine(pos: number, nextLineStart: number) {
|
||||
let end = Math.min(comment.end, nextLineStart - 1);
|
||||
let currentLineText = currentSourceFile.text.substring(pos, end).replace(/^\s+|\s+$/g, "");
|
||||
const end = Math.min(comment.end, nextLineStart - 1);
|
||||
const currentLineText = currentSourceFile.text.substring(pos, end).replace(/^\s+|\s+$/g, "");
|
||||
if (currentLineText) {
|
||||
// trimmed forward and ending spaces text
|
||||
writer.write(currentLineText);
|
||||
|
@ -2169,7 +2169,7 @@ namespace ts {
|
|||
}
|
||||
|
||||
export function isEmptyObjectLiteralOrArrayLiteral(expression: Node): boolean {
|
||||
let kind = expression.kind;
|
||||
const kind = expression.kind;
|
||||
if (kind === SyntaxKind.ObjectLiteralExpression) {
|
||||
return (<ObjectLiteralExpression>expression).properties.length === 0;
|
||||
}
|
||||
|
@ -2196,11 +2196,11 @@ namespace ts {
|
|||
* representing the UTF-8 encoding of the character, and return the expanded char code list.
|
||||
*/
|
||||
function getExpandedCharCodes(input: string): number[] {
|
||||
let output: number[] = [];
|
||||
let length = input.length;
|
||||
const output: number[] = [];
|
||||
const length = input.length;
|
||||
|
||||
for (let i = 0; i < length; i++) {
|
||||
let charCode = input.charCodeAt(i);
|
||||
const charCode = input.charCodeAt(i);
|
||||
|
||||
// handel utf8
|
||||
if (charCode < 0x80) {
|
||||
|
@ -2236,9 +2236,9 @@ namespace ts {
|
|||
*/
|
||||
export function convertToBase64(input: string): string {
|
||||
let result = "";
|
||||
let charCodes = getExpandedCharCodes(input);
|
||||
const charCodes = getExpandedCharCodes(input);
|
||||
let i = 0;
|
||||
let length = charCodes.length;
|
||||
const length = charCodes.length;
|
||||
let byte1: number, byte2: number, byte3: number, byte4: number;
|
||||
|
||||
while (i < length) {
|
||||
|
@ -2312,14 +2312,14 @@ namespace ts {
|
|||
}
|
||||
|
||||
export function textSpanOverlapsWith(span: TextSpan, other: TextSpan) {
|
||||
let overlapStart = Math.max(span.start, other.start);
|
||||
let overlapEnd = Math.min(textSpanEnd(span), textSpanEnd(other));
|
||||
const overlapStart = Math.max(span.start, other.start);
|
||||
const overlapEnd = Math.min(textSpanEnd(span), textSpanEnd(other));
|
||||
return overlapStart < overlapEnd;
|
||||
}
|
||||
|
||||
export function textSpanOverlap(span1: TextSpan, span2: TextSpan) {
|
||||
let overlapStart = Math.max(span1.start, span2.start);
|
||||
let overlapEnd = Math.min(textSpanEnd(span1), textSpanEnd(span2));
|
||||
const overlapStart = Math.max(span1.start, span2.start);
|
||||
const overlapEnd = Math.min(textSpanEnd(span1), textSpanEnd(span2));
|
||||
if (overlapStart < overlapEnd) {
|
||||
return createTextSpanFromBounds(overlapStart, overlapEnd);
|
||||
}
|
||||
|
@ -2331,13 +2331,13 @@ namespace ts {
|
|||
}
|
||||
|
||||
export function textSpanIntersectsWith(span: TextSpan, start: number, length: number) {
|
||||
let end = start + length;
|
||||
const end = start + length;
|
||||
return start <= textSpanEnd(span) && end >= span.start;
|
||||
}
|
||||
|
||||
export function decodedTextSpanIntersectsWith(start1: number, length1: number, start2: number, length2: number) {
|
||||
let end1 = start1 + length1;
|
||||
let end2 = start2 + length2;
|
||||
const end1 = start1 + length1;
|
||||
const end2 = start2 + length2;
|
||||
return start2 <= end1 && end2 >= start1;
|
||||
}
|
||||
|
||||
|
@ -2346,8 +2346,8 @@ namespace ts {
|
|||
}
|
||||
|
||||
export function textSpanIntersection(span1: TextSpan, span2: TextSpan) {
|
||||
let intersectStart = Math.max(span1.start, span2.start);
|
||||
let intersectEnd = Math.min(textSpanEnd(span1), textSpanEnd(span2));
|
||||
const intersectStart = Math.max(span1.start, span2.start);
|
||||
const intersectEnd = Math.min(textSpanEnd(span1), textSpanEnd(span2));
|
||||
if (intersectStart <= intersectEnd) {
|
||||
return createTextSpanFromBounds(intersectStart, intersectEnd);
|
||||
}
|
||||
|
@ -2406,14 +2406,14 @@ namespace ts {
|
|||
|
||||
// We change from talking about { { oldStart, oldLength }, newLength } to { oldStart, oldEnd, newEnd }
|
||||
// as it makes things much easier to reason about.
|
||||
let change0 = changes[0];
|
||||
const change0 = changes[0];
|
||||
|
||||
let oldStartN = change0.span.start;
|
||||
let oldEndN = textSpanEnd(change0.span);
|
||||
let newEndN = oldStartN + change0.newLength;
|
||||
|
||||
for (let i = 1; i < changes.length; i++) {
|
||||
let nextChange = changes[i];
|
||||
const nextChange = changes[i];
|
||||
|
||||
// Consider the following case:
|
||||
// i.e. two edits. The first represents the text change range { { 10, 50 }, 30 }. i.e. The span starting
|
||||
|
@ -2495,13 +2495,13 @@ namespace ts {
|
|||
// newEnd3 : Max(newEnd2, newEnd2 + (newEnd1 - oldEnd2))
|
||||
// }
|
||||
|
||||
let oldStart1 = oldStartN;
|
||||
let oldEnd1 = oldEndN;
|
||||
let newEnd1 = newEndN;
|
||||
const oldStart1 = oldStartN;
|
||||
const oldEnd1 = oldEndN;
|
||||
const newEnd1 = newEndN;
|
||||
|
||||
let oldStart2 = nextChange.span.start;
|
||||
let oldEnd2 = textSpanEnd(nextChange.span);
|
||||
let newEnd2 = oldStart2 + nextChange.newLength;
|
||||
const oldStart2 = nextChange.span.start;
|
||||
const oldEnd2 = textSpanEnd(nextChange.span);
|
||||
const newEnd2 = oldStart2 + nextChange.newLength;
|
||||
|
||||
oldStartN = Math.min(oldStart1, oldStart2);
|
||||
oldEndN = Math.max(oldEnd1, oldEnd1 + (oldEnd2 - newEnd1));
|
||||
|
|
|
@ -140,7 +140,7 @@ class CompilerBaselineRunner extends RunnerBase {
|
|||
it("Correct sourcemap content for " + fileName, () => {
|
||||
if (options.sourceMap || options.inlineSourceMap) {
|
||||
Harness.Baseline.runBaseline("Correct sourcemap content for " + fileName, justName.replace(/\.tsx?$/, ".sourcemap.txt"), () => {
|
||||
let record = result.getSourceMapRecord();
|
||||
const record = result.getSourceMapRecord();
|
||||
if (options.noEmitOnError && result.errors.length !== 0 && record === undefined) {
|
||||
// Because of the noEmitOnError option no files are created. We need to return null because baselining isn"t required.
|
||||
return null;
|
||||
|
@ -159,7 +159,7 @@ class CompilerBaselineRunner extends RunnerBase {
|
|||
// check js output
|
||||
Harness.Baseline.runBaseline("Correct JS output for " + fileName, justName.replace(/\.tsx?/, ".js"), () => {
|
||||
let tsCode = "";
|
||||
let tsSources = otherFiles.concat(toBeCompiled);
|
||||
const tsSources = otherFiles.concat(toBeCompiled);
|
||||
if (tsSources.length > 1) {
|
||||
tsCode += "//// [" + fileName + "] ////\r\n\r\n";
|
||||
}
|
||||
|
@ -184,7 +184,7 @@ class CompilerBaselineRunner extends RunnerBase {
|
|||
}
|
||||
}
|
||||
|
||||
let declFileCompilationResult = harnessCompiler.compileDeclarationFiles(toBeCompiled, otherFiles, result, function (settings) {
|
||||
const declFileCompilationResult = harnessCompiler.compileDeclarationFiles(toBeCompiled, otherFiles, result, function (settings) {
|
||||
harnessCompiler.setCompilerSettings(tcSettings);
|
||||
}, options);
|
||||
|
||||
|
@ -257,15 +257,15 @@ class CompilerBaselineRunner extends RunnerBase {
|
|||
// These types are equivalent, but depend on what order the compiler observed
|
||||
// certain parts of the program.
|
||||
|
||||
let allFiles = toBeCompiled.concat(otherFiles).filter(file => !!program.getSourceFile(file.unitName));
|
||||
const allFiles = toBeCompiled.concat(otherFiles).filter(file => !!program.getSourceFile(file.unitName));
|
||||
|
||||
let fullWalker = new TypeWriterWalker(program, /*fullTypeCheck:*/ true);
|
||||
let pullWalker = new TypeWriterWalker(program, /*fullTypeCheck:*/ false);
|
||||
const fullWalker = new TypeWriterWalker(program, /*fullTypeCheck:*/ true);
|
||||
const pullWalker = new TypeWriterWalker(program, /*fullTypeCheck:*/ false);
|
||||
|
||||
let fullResults: ts.Map<TypeWriterResult[]> = {};
|
||||
let pullResults: ts.Map<TypeWriterResult[]> = {};
|
||||
const fullResults: ts.Map<TypeWriterResult[]> = {};
|
||||
const pullResults: ts.Map<TypeWriterResult[]> = {};
|
||||
|
||||
for (let sourceFile of allFiles) {
|
||||
for (const sourceFile of allFiles) {
|
||||
fullResults[sourceFile.unitName] = fullWalker.getTypeAndSymbols(sourceFile.unitName);
|
||||
pullResults[sourceFile.unitName] = fullWalker.getTypeAndSymbols(sourceFile.unitName);
|
||||
}
|
||||
|
@ -294,11 +294,11 @@ class CompilerBaselineRunner extends RunnerBase {
|
|||
return;
|
||||
|
||||
function checkBaseLines(isSymbolBaseLine: boolean) {
|
||||
let fullBaseLine = generateBaseLine(fullResults, isSymbolBaseLine);
|
||||
let pullBaseLine = generateBaseLine(pullResults, isSymbolBaseLine);
|
||||
const fullBaseLine = generateBaseLine(fullResults, isSymbolBaseLine);
|
||||
const pullBaseLine = generateBaseLine(pullResults, isSymbolBaseLine);
|
||||
|
||||
let fullExtension = isSymbolBaseLine ? ".symbols" : ".types";
|
||||
let pullExtension = isSymbolBaseLine ? ".symbols.pull" : ".types.pull";
|
||||
const fullExtension = isSymbolBaseLine ? ".symbols" : ".types";
|
||||
const pullExtension = isSymbolBaseLine ? ".symbols.pull" : ".types.pull";
|
||||
|
||||
if (fullBaseLine !== pullBaseLine) {
|
||||
Harness.Baseline.runBaseline("Correct full information for " + fileName, justName.replace(/\.tsx?/, fullExtension), () => fullBaseLine);
|
||||
|
@ -310,24 +310,24 @@ class CompilerBaselineRunner extends RunnerBase {
|
|||
}
|
||||
|
||||
function generateBaseLine(typeWriterResults: ts.Map<TypeWriterResult[]>, isSymbolBaseline: boolean): string {
|
||||
let typeLines: string[] = [];
|
||||
let typeMap: { [fileName: string]: { [lineNum: number]: string[]; } } = {};
|
||||
const typeLines: string[] = [];
|
||||
const typeMap: { [fileName: string]: { [lineNum: number]: string[]; } } = {};
|
||||
|
||||
allFiles.forEach(file => {
|
||||
let codeLines = file.content.split("\n");
|
||||
const codeLines = file.content.split("\n");
|
||||
typeWriterResults[file.unitName].forEach(result => {
|
||||
if (isSymbolBaseline && !result.symbol) {
|
||||
return;
|
||||
}
|
||||
|
||||
let typeOrSymbolString = isSymbolBaseline ? result.symbol : result.type;
|
||||
let formattedLine = result.sourceText.replace(/\r?\n/g, "") + " : " + typeOrSymbolString;
|
||||
const typeOrSymbolString = isSymbolBaseline ? result.symbol : result.type;
|
||||
const formattedLine = result.sourceText.replace(/\r?\n/g, "") + " : " + typeOrSymbolString;
|
||||
if (!typeMap[file.unitName]) {
|
||||
typeMap[file.unitName] = {};
|
||||
}
|
||||
|
||||
let typeInfo = [formattedLine];
|
||||
let existingTypeInfo = typeMap[file.unitName][result.line];
|
||||
const existingTypeInfo = typeMap[file.unitName][result.line];
|
||||
if (existingTypeInfo) {
|
||||
typeInfo = existingTypeInfo.concat(typeInfo);
|
||||
}
|
||||
|
@ -336,10 +336,10 @@ class CompilerBaselineRunner extends RunnerBase {
|
|||
|
||||
typeLines.push("=== " + file.unitName + " ===\r\n");
|
||||
for (let i = 0; i < codeLines.length; i++) {
|
||||
let currentCodeLine = codeLines[i];
|
||||
const currentCodeLine = codeLines[i];
|
||||
typeLines.push(currentCodeLine + "\r\n");
|
||||
if (typeMap[file.unitName]) {
|
||||
let typeInfo = typeMap[file.unitName][i];
|
||||
const typeInfo = typeMap[file.unitName][i];
|
||||
if (typeInfo) {
|
||||
typeInfo.forEach(ty => {
|
||||
typeLines.push(">" + ty + "\r\n");
|
||||
|
@ -367,13 +367,13 @@ class CompilerBaselineRunner extends RunnerBase {
|
|||
public initializeTests() {
|
||||
describe(this.testSuiteName + " tests", () => {
|
||||
describe("Setup compiler for compiler baselines", () => {
|
||||
let harnessCompiler = Harness.Compiler.getCompiler();
|
||||
const harnessCompiler = Harness.Compiler.getCompiler();
|
||||
this.parseOptions();
|
||||
});
|
||||
|
||||
// this will set up a series of describe/it blocks to run between the setup and cleanup phases
|
||||
if (this.tests.length === 0) {
|
||||
let testFiles = this.enumerateFiles(this.basePath, /\.tsx?$/, { recursive: true });
|
||||
const testFiles = this.enumerateFiles(this.basePath, /\.tsx?$/, { recursive: true });
|
||||
testFiles.forEach(fn => {
|
||||
fn = fn.replace(/\\/g, "/");
|
||||
this.checkTestCodeOutput(fn);
|
||||
|
@ -392,7 +392,7 @@ class CompilerBaselineRunner extends RunnerBase {
|
|||
this.decl = false;
|
||||
this.output = false;
|
||||
|
||||
let opts = this.options.split(",");
|
||||
const opts = this.options.split(",");
|
||||
for (let i = 0; i < opts.length; i++) {
|
||||
switch (opts[i]) {
|
||||
case "error":
|
||||
|
|
File diff suppressed because it is too large
Load diff
|
@ -45,10 +45,10 @@ class FourSlashRunner extends RunnerBase {
|
|||
this.tests.forEach((fn: string) => {
|
||||
describe(fn, () => {
|
||||
fn = ts.normalizeSlashes(fn);
|
||||
let justName = fn.replace(/^.*[\\\/]/, "");
|
||||
const justName = fn.replace(/^.*[\\\/]/, "");
|
||||
|
||||
// Convert to relative path
|
||||
let testIndex = fn.indexOf("tests/");
|
||||
const testIndex = fn.indexOf("tests/");
|
||||
if (testIndex >= 0) fn = fn.substr(testIndex);
|
||||
|
||||
if (justName && !justName.match(/fourslash\.ts$/i) && !justName.match(/\.d\.ts$/i)) {
|
||||
|
@ -60,21 +60,21 @@ class FourSlashRunner extends RunnerBase {
|
|||
});
|
||||
|
||||
describe("Generate Tao XML", () => {
|
||||
let invalidReasons: any = {};
|
||||
const invalidReasons: any = {};
|
||||
FourSlash.xmlData.forEach(xml => {
|
||||
if (xml.invalidReason !== null) {
|
||||
invalidReasons[xml.invalidReason] = (invalidReasons[xml.invalidReason] || 0) + 1;
|
||||
}
|
||||
});
|
||||
let invalidReport: { reason: string; count: number }[] = [];
|
||||
for (let reason in invalidReasons) {
|
||||
const invalidReport: { reason: string; count: number }[] = [];
|
||||
for (const reason in invalidReasons) {
|
||||
if (invalidReasons.hasOwnProperty(reason)) {
|
||||
invalidReport.push({ reason: reason, count: invalidReasons[reason] });
|
||||
}
|
||||
}
|
||||
invalidReport.sort((lhs, rhs) => lhs.count > rhs.count ? -1 : lhs.count === rhs.count ? 0 : 1);
|
||||
|
||||
let lines: string[] = [];
|
||||
const lines: string[] = [];
|
||||
lines.push("<!-- Blocked Test Report");
|
||||
invalidReport.forEach((reasonAndCount) => {
|
||||
lines.push(reasonAndCount.count + " tests blocked by " + reasonAndCount.reason);
|
||||
|
|
|
@ -67,7 +67,7 @@ namespace Utils {
|
|||
}
|
||||
|
||||
export function evalFile(fileContents: string, fileName: string, nodeContext?: any) {
|
||||
let environment = getExecutionEnvironment();
|
||||
const environment = getExecutionEnvironment();
|
||||
switch (environment) {
|
||||
case ExecutionEnvironment.CScript:
|
||||
case ExecutionEnvironment.Browser:
|
||||
|
@ -121,11 +121,11 @@ namespace Utils {
|
|||
}
|
||||
|
||||
export function memoize<T extends Function>(f: T): T {
|
||||
let cache: { [idx: string]: any } = {};
|
||||
const cache: { [idx: string]: any } = {};
|
||||
|
||||
return <any>(function () {
|
||||
let key = Array.prototype.join.call(arguments);
|
||||
let cachedResult = cache[key];
|
||||
const key = Array.prototype.join.call(arguments);
|
||||
const cachedResult = cache[key];
|
||||
if (cachedResult) {
|
||||
return cachedResult;
|
||||
}
|
||||
|
@ -172,14 +172,14 @@ namespace Utils {
|
|||
currentPos = array.end;
|
||||
});
|
||||
|
||||
let childNodesAndArrays: any[] = [];
|
||||
const childNodesAndArrays: any[] = [];
|
||||
ts.forEachChild(node, child => { childNodesAndArrays.push(child); }, array => { childNodesAndArrays.push(array); });
|
||||
|
||||
for (let childName in node) {
|
||||
for (const childName in node) {
|
||||
if (childName === "parent" || childName === "nextContainer" || childName === "modifiers" || childName === "externalModuleIndicator") {
|
||||
continue;
|
||||
}
|
||||
let child = (<any>node)[childName];
|
||||
const child = (<any>node)[childName];
|
||||
if (isNodeOrArray(child)) {
|
||||
assert.isFalse(childNodesAndArrays.indexOf(child) < 0,
|
||||
"Missing child when forEach'ing over node: " + (<any>ts).SyntaxKind[node.kind] + "-" + childName);
|
||||
|
@ -247,7 +247,7 @@ namespace Utils {
|
|||
function getParserContextFlagName(f: number) { return getFlagName((<any>ts).ParserContextFlags, f); }
|
||||
|
||||
function serializeNode(n: ts.Node): any {
|
||||
let o: any = { kind: getKindName(n.kind) };
|
||||
const o: any = { kind: getKindName(n.kind) };
|
||||
if (ts.containsParseError(n)) {
|
||||
o.containsParseError = true;
|
||||
}
|
||||
|
@ -329,8 +329,8 @@ namespace Utils {
|
|||
assert.equal(array1.length, array2.length, "array1.length !== array2.length");
|
||||
|
||||
for (let i = 0, n = array1.length; i < n; i++) {
|
||||
let d1 = array1[i];
|
||||
let d2 = array2[i];
|
||||
const d1 = array1[i];
|
||||
const d2 = array2[i];
|
||||
|
||||
assert.equal(d1.start, d2.start, "d1.start !== d2.start");
|
||||
assert.equal(d1.length, d2.length, "d1.length !== d2.length");
|
||||
|
@ -361,14 +361,14 @@ namespace Utils {
|
|||
|
||||
ts.forEachChild(node1,
|
||||
child1 => {
|
||||
let childName = findChildName(node1, child1);
|
||||
let child2: ts.Node = (<any>node2)[childName];
|
||||
const childName = findChildName(node1, child1);
|
||||
const child2: ts.Node = (<any>node2)[childName];
|
||||
|
||||
assertStructuralEquals(child1, child2);
|
||||
},
|
||||
(array1: ts.NodeArray<ts.Node>) => {
|
||||
let childName = findChildName(node1, array1);
|
||||
let array2: ts.NodeArray<ts.Node> = (<any>node2)[childName];
|
||||
const childName = findChildName(node1, array1);
|
||||
const array2: ts.NodeArray<ts.Node> = (<any>node2)[childName];
|
||||
|
||||
assertArrayStructuralEquals(array1, array2);
|
||||
});
|
||||
|
@ -391,7 +391,7 @@ namespace Utils {
|
|||
}
|
||||
|
||||
function findChildName(parent: any, child: any) {
|
||||
for (let name in parent) {
|
||||
for (const name in parent) {
|
||||
if (parent.hasOwnProperty(name) && parent[name] === child) {
|
||||
return name;
|
||||
}
|
||||
|
@ -408,8 +408,8 @@ namespace Harness.Path {
|
|||
|
||||
export function filePath(fullPath: string) {
|
||||
fullPath = ts.normalizeSlashes(fullPath);
|
||||
let components = fullPath.split("/");
|
||||
let path: string[] = components.slice(0, components.length - 1);
|
||||
const components = fullPath.split("/");
|
||||
const path: string[] = components.slice(0, components.length - 1);
|
||||
return path.join("/") + "/";
|
||||
}
|
||||
}
|
||||
|
@ -510,15 +510,15 @@ namespace Harness {
|
|||
return paths;
|
||||
}
|
||||
|
||||
let folder: any = fso.GetFolder(path);
|
||||
let paths: string[] = [];
|
||||
const folder: any = fso.GetFolder(path);
|
||||
const paths: string[] = [];
|
||||
|
||||
return filesInFolder(folder, path);
|
||||
};
|
||||
}
|
||||
|
||||
export namespace Node {
|
||||
declare let require: any;
|
||||
declare const require: any;
|
||||
let fs: any, pathModule: any;
|
||||
if (require) {
|
||||
fs = require("fs");
|
||||
|
@ -579,10 +579,10 @@ namespace Harness {
|
|||
function filesInFolder(folder: string): string[] {
|
||||
let paths: string[] = [];
|
||||
|
||||
let files = fs.readdirSync(folder);
|
||||
const files = fs.readdirSync(folder);
|
||||
for (let i = 0; i < files.length; i++) {
|
||||
let pathToFile = pathModule.join(folder, files[i]);
|
||||
let stat = fs.statSync(pathToFile);
|
||||
const pathToFile = pathModule.join(folder, files[i]);
|
||||
const stat = fs.statSync(pathToFile);
|
||||
if (options.recursive && stat.isDirectory()) {
|
||||
paths = paths.concat(filesInFolder(pathToFile));
|
||||
}
|
||||
|
@ -606,7 +606,7 @@ namespace Harness {
|
|||
}
|
||||
|
||||
export namespace Network {
|
||||
let serverRoot = "http://localhost:8888/";
|
||||
const serverRoot = "http://localhost:8888/";
|
||||
|
||||
export const newLine = () => harnessNewLine;
|
||||
export const useCaseSensitiveFileNames = () => false;
|
||||
|
@ -615,7 +615,7 @@ namespace Harness {
|
|||
export const getExecutingFilePath = () => "";
|
||||
export const exit = (exitCode: number) => {};
|
||||
|
||||
let supportsCodePage = () => false;
|
||||
const supportsCodePage = () => false;
|
||||
export let log = (s: string) => console.log(s);
|
||||
|
||||
namespace Http {
|
||||
|
@ -626,7 +626,7 @@ namespace Harness {
|
|||
|
||||
/// Ask the server to use node's path.resolve to resolve the given path
|
||||
function getResolvedPathFromServer(path: string) {
|
||||
let xhr = new XMLHttpRequest();
|
||||
const xhr = new XMLHttpRequest();
|
||||
try {
|
||||
xhr.open("GET", path + "?resolve", false);
|
||||
xhr.send();
|
||||
|
@ -645,7 +645,7 @@ namespace Harness {
|
|||
|
||||
/// Ask the server for the contents of the file at the given URL via a simple GET request
|
||||
export function getFileFromServerSync(url: string): XHRResponse {
|
||||
let xhr = new XMLHttpRequest();
|
||||
const xhr = new XMLHttpRequest();
|
||||
try {
|
||||
xhr.open("GET", url, false);
|
||||
xhr.send();
|
||||
|
@ -659,9 +659,9 @@ namespace Harness {
|
|||
|
||||
/// Submit a POST request to the server to do the given action (ex WRITE, DELETE) on the provided URL
|
||||
export function writeToServerSync(url: string, action: string, contents?: string): XHRResponse {
|
||||
let xhr = new XMLHttpRequest();
|
||||
const xhr = new XMLHttpRequest();
|
||||
try {
|
||||
let actionMsg = "?action=" + action;
|
||||
const actionMsg = "?action=" + action;
|
||||
xhr.open("POST", url + actionMsg, false);
|
||||
xhr.setRequestHeader("Access-Control-Allow-Origin", "*");
|
||||
xhr.send(contents);
|
||||
|
@ -712,14 +712,14 @@ namespace Harness {
|
|||
export const resolvePath = (path: string) => directoryName(path);
|
||||
|
||||
export function fileExists(path: string): boolean {
|
||||
let response = Http.getFileFromServerSync(serverRoot + path);
|
||||
const response = Http.getFileFromServerSync(serverRoot + path);
|
||||
return response.status === 200;
|
||||
}
|
||||
|
||||
export function _listFilesImpl(path: string, spec?: RegExp, options?: any) {
|
||||
let response = Http.getFileFromServerSync(serverRoot + path);
|
||||
const response = Http.getFileFromServerSync(serverRoot + path);
|
||||
if (response.status === 200) {
|
||||
let results = response.responseText.split(",");
|
||||
const results = response.responseText.split(",");
|
||||
if (spec) {
|
||||
return results.filter(file => spec.test(file));
|
||||
}
|
||||
|
@ -734,7 +734,7 @@ namespace Harness {
|
|||
export let listFiles = Utils.memoize(_listFilesImpl);
|
||||
|
||||
export function readFile(file: string) {
|
||||
let response = Http.getFileFromServerSync(serverRoot + file);
|
||||
const response = Http.getFileFromServerSync(serverRoot + file);
|
||||
if (response.status === 200) {
|
||||
return response.responseText;
|
||||
}
|
||||
|
@ -856,10 +856,10 @@ namespace Harness {
|
|||
public reset() { this.fileCollection = {}; }
|
||||
|
||||
public toArray(): { fileName: string; file: WriterAggregator; }[] {
|
||||
let result: { fileName: string; file: WriterAggregator; }[] = [];
|
||||
for (let p in this.fileCollection) {
|
||||
const result: { fileName: string; file: WriterAggregator; }[] = [];
|
||||
for (const p in this.fileCollection) {
|
||||
if (this.fileCollection.hasOwnProperty(p)) {
|
||||
let current = <Harness.Compiler.WriterAggregator>this.fileCollection[p];
|
||||
const current = <Harness.Compiler.WriterAggregator>this.fileCollection[p];
|
||||
if (current.lines.length > 0) {
|
||||
if (p.indexOf(".d.ts") !== -1) { current.lines.unshift(["////[", Path.getFileName(p), "]"].join("")); }
|
||||
result.push({ fileName: p, file: this.fileCollection[p] });
|
||||
|
@ -878,7 +878,7 @@ namespace Harness {
|
|||
const shouldAssertInvariants = !Harness.lightMode;
|
||||
|
||||
// Only set the parent nodes if we're asserting inletiants. We don't need them otherwise.
|
||||
let result = ts.createSourceFile(fileName, sourceText, languageVersion, /*setParentNodes:*/ shouldAssertInvariants);
|
||||
const result = ts.createSourceFile(fileName, sourceText, languageVersion, /*setParentNodes:*/ shouldAssertInvariants);
|
||||
|
||||
if (shouldAssertInvariants) {
|
||||
Utils.assertInvariants(result, /*parent:*/ undefined);
|
||||
|
@ -916,13 +916,13 @@ namespace Harness {
|
|||
return useCaseSensitiveFileNames ? fileName : fileName.toLowerCase();
|
||||
}
|
||||
|
||||
let filemap: { [fileName: string]: ts.SourceFile; } = {};
|
||||
let getCurrentDirectory = currentDirectory === undefined ? Harness.IO.getCurrentDirectory : () => currentDirectory;
|
||||
const filemap: { [fileName: string]: ts.SourceFile; } = {};
|
||||
const getCurrentDirectory = currentDirectory === undefined ? Harness.IO.getCurrentDirectory : () => currentDirectory;
|
||||
|
||||
// Register input files
|
||||
function register(file: { unitName: string; content: string; }) {
|
||||
if (file.content !== undefined) {
|
||||
let fileName = ts.normalizePath(file.unitName);
|
||||
const fileName = ts.normalizePath(file.unitName);
|
||||
const sourceFile = createSourceFileAndAssertInvariants(fileName, file.content, scriptTarget);
|
||||
filemap[getCanonicalFileName(fileName)] = sourceFile;
|
||||
filemap[getCanonicalFileName(ts.getNormalizedAbsolutePath(fileName, getCurrentDirectory()))] = sourceFile;
|
||||
|
@ -936,11 +936,11 @@ namespace Harness {
|
|||
return filemap[getCanonicalFileName(fn)];
|
||||
}
|
||||
else if (currentDirectory) {
|
||||
let canonicalAbsolutePath = getCanonicalFileName(ts.getNormalizedAbsolutePath(fn, currentDirectory));
|
||||
const canonicalAbsolutePath = getCanonicalFileName(ts.getNormalizedAbsolutePath(fn, currentDirectory));
|
||||
return Object.prototype.hasOwnProperty.call(filemap, getCanonicalFileName(canonicalAbsolutePath)) ? filemap[canonicalAbsolutePath] : undefined;
|
||||
}
|
||||
else if (fn === fourslashFileName) {
|
||||
let tsFn = "tests/cases/fourslash/" + fourslashFileName;
|
||||
const tsFn = "tests/cases/fourslash/" + fourslashFileName;
|
||||
fourslashSourceFile = fourslashSourceFile || createSourceFileAndAssertInvariants(tsFn, Harness.IO.readFile(tsFn), scriptTarget);
|
||||
return fourslashSourceFile;
|
||||
}
|
||||
|
@ -953,7 +953,7 @@ namespace Harness {
|
|||
}
|
||||
}
|
||||
|
||||
let newLine =
|
||||
const newLine =
|
||||
newLineKind === ts.NewLineKind.CarriageReturnLineFeed ? carriageReturnLineFeed :
|
||||
newLineKind === ts.NewLineKind.LineFeed ? lineFeed :
|
||||
Harness.IO.newLine();
|
||||
|
@ -991,8 +991,8 @@ namespace Harness {
|
|||
function getCommandLineOption(name: string): ts.CommandLineOption {
|
||||
if (!optionsIndex) {
|
||||
optionsIndex = {};
|
||||
let optionDeclarations = harnessOptionDeclarations.concat(ts.optionDeclarations);
|
||||
for (let option of optionDeclarations) {
|
||||
const optionDeclarations = harnessOptionDeclarations.concat(ts.optionDeclarations);
|
||||
for (const option of optionDeclarations) {
|
||||
optionsIndex[option.name.toLowerCase()] = option;
|
||||
}
|
||||
}
|
||||
|
@ -1000,13 +1000,13 @@ namespace Harness {
|
|||
}
|
||||
|
||||
export function setCompilerOptionsFromHarnessSetting(settings: Harness.TestCaseParser.CompilerSettings, options: ts.CompilerOptions & HarnessOptions): void {
|
||||
for (let name in settings) {
|
||||
for (const name in settings) {
|
||||
if (settings.hasOwnProperty(name)) {
|
||||
let value = settings[name];
|
||||
const value = settings[name];
|
||||
if (value === undefined) {
|
||||
throw new Error(`Cannot have undefined value for compiler option '${name}'.`);
|
||||
}
|
||||
let option = getCommandLineOption(name);
|
||||
const option = getCommandLineOption(name);
|
||||
if (option) {
|
||||
switch (option.type) {
|
||||
case "boolean":
|
||||
|
@ -1102,37 +1102,37 @@ namespace Harness {
|
|||
settingsCallback(null);
|
||||
}
|
||||
|
||||
let newLine = "\r\n";
|
||||
const newLine = "\r\n";
|
||||
|
||||
// Parse settings
|
||||
setCompilerOptionsFromHarnessSetting(this.settings, options);
|
||||
|
||||
// Files from built\local that are requested by test "@includeBuiltFiles" to be in the context.
|
||||
// Treat them as library files, so include them in build, but not in baselines.
|
||||
let includeBuiltFiles: { unitName: string; content: string }[] = [];
|
||||
const includeBuiltFiles: { unitName: string; content: string }[] = [];
|
||||
if (options.includeBuiltFile) {
|
||||
let builtFileName = libFolder + options.includeBuiltFile;
|
||||
const builtFileName = libFolder + options.includeBuiltFile;
|
||||
includeBuiltFiles.push({ unitName: builtFileName, content: normalizeLineEndings(IO.readFile(builtFileName), newLine) });
|
||||
}
|
||||
|
||||
let useCaseSensitiveFileNames = options.useCaseSensitiveFileNames !== undefined ? options.useCaseSensitiveFileNames : Harness.IO.useCaseSensitiveFileNames();
|
||||
const useCaseSensitiveFileNames = options.useCaseSensitiveFileNames !== undefined ? options.useCaseSensitiveFileNames : Harness.IO.useCaseSensitiveFileNames();
|
||||
|
||||
let fileOutputs: GeneratedFile[] = [];
|
||||
const fileOutputs: GeneratedFile[] = [];
|
||||
|
||||
let programFiles = inputFiles.concat(includeBuiltFiles).map(file => file.unitName);
|
||||
const programFiles = inputFiles.concat(includeBuiltFiles).map(file => file.unitName);
|
||||
|
||||
let compilerHost = createCompilerHost(
|
||||
const compilerHost = createCompilerHost(
|
||||
inputFiles.concat(includeBuiltFiles).concat(otherFiles),
|
||||
(fn, contents, writeByteOrderMark) => fileOutputs.push({ fileName: fn, code: contents, writeByteOrderMark: writeByteOrderMark }),
|
||||
options.target, useCaseSensitiveFileNames, currentDirectory, options.newLine);
|
||||
let program = ts.createProgram(programFiles, options, compilerHost);
|
||||
const program = ts.createProgram(programFiles, options, compilerHost);
|
||||
|
||||
let emitResult = program.emit();
|
||||
const emitResult = program.emit();
|
||||
|
||||
let errors = ts.getPreEmitDiagnostics(program).concat(emitResult.diagnostics);
|
||||
const errors = ts.getPreEmitDiagnostics(program).concat(emitResult.diagnostics);
|
||||
this.lastErrors = errors;
|
||||
|
||||
let result = new CompilerResult(fileOutputs, errors, program, Harness.IO.getCurrentDirectory(), emitResult.sourceMaps);
|
||||
const result = new CompilerResult(fileOutputs, errors, program, Harness.IO.getCurrentDirectory(), emitResult.sourceMaps);
|
||||
onComplete(result, program);
|
||||
|
||||
return options;
|
||||
|
@ -1149,8 +1149,8 @@ namespace Harness {
|
|||
throw new Error("There were no errors and declFiles generated did not match number of js files generated");
|
||||
}
|
||||
|
||||
let declInputFiles: { unitName: string; content: string }[] = [];
|
||||
let declOtherFiles: { unitName: string; content: string }[] = [];
|
||||
const declInputFiles: { unitName: string; content: string }[] = [];
|
||||
const declOtherFiles: { unitName: string; content: string }[] = [];
|
||||
let declResult: Harness.Compiler.CompilerResult;
|
||||
|
||||
// if the .d.ts is non-empty, confirm it compiles correctly as well
|
||||
|
@ -1168,18 +1168,18 @@ namespace Harness {
|
|||
dtsFiles.push(file);
|
||||
}
|
||||
else if (isTS(file.unitName)) {
|
||||
let declFile = findResultCodeFile(file.unitName);
|
||||
const declFile = findResultCodeFile(file.unitName);
|
||||
if (!findUnit(declFile.fileName, declInputFiles) && !findUnit(declFile.fileName, declOtherFiles)) {
|
||||
dtsFiles.push({ unitName: declFile.fileName, content: declFile.code });
|
||||
}
|
||||
}
|
||||
|
||||
function findResultCodeFile(fileName: string) {
|
||||
let sourceFile = result.program.getSourceFile(fileName);
|
||||
const sourceFile = result.program.getSourceFile(fileName);
|
||||
assert(sourceFile, "Program has no source file with name '" + fileName + "'");
|
||||
// Is this file going to be emitted separately
|
||||
let sourceFileName: string;
|
||||
let outFile = options.outFile || options.out;
|
||||
const outFile = options.outFile || options.out;
|
||||
if (ts.isExternalModule(sourceFile) || !outFile) {
|
||||
if (options.outDir) {
|
||||
let sourceFilePath = ts.getNormalizedAbsolutePath(sourceFile.fileName, result.currentDirectoryForProgram);
|
||||
|
@ -1195,7 +1195,7 @@ namespace Harness {
|
|||
sourceFileName = outFile;
|
||||
}
|
||||
|
||||
let dTsFileName = ts.removeFileExtension(sourceFileName) + ".d.ts";
|
||||
const dTsFileName = ts.removeFileExtension(sourceFileName) + ".d.ts";
|
||||
|
||||
return ts.forEach(result.declFilesCode, declFile => declFile.fileName === dTsFileName ? declFile : undefined);
|
||||
}
|
||||
|
@ -1220,7 +1220,7 @@ namespace Harness {
|
|||
let errorOutput = "";
|
||||
ts.forEach(diagnostics, diagnostic => {
|
||||
if (diagnostic.file) {
|
||||
let lineAndCharacter = diagnostic.file.getLineAndCharacterOfPosition(diagnostic.start);
|
||||
const lineAndCharacter = diagnostic.file.getLineAndCharacterOfPosition(diagnostic.start);
|
||||
errorOutput += diagnostic.file.fileName + "(" + (lineAndCharacter.line + 1) + "," + (lineAndCharacter.character + 1) + "): ";
|
||||
}
|
||||
|
||||
|
@ -1232,14 +1232,14 @@ namespace Harness {
|
|||
|
||||
export function getErrorBaseline(inputFiles: { unitName: string; content: string }[], diagnostics: ts.Diagnostic[]) {
|
||||
diagnostics.sort(ts.compareDiagnostics);
|
||||
let outputLines: string[] = [];
|
||||
const outputLines: string[] = [];
|
||||
// Count up all errors that were found in files other than lib.d.ts so we don't miss any
|
||||
let totalErrorsReportedInNonLibraryFiles = 0;
|
||||
|
||||
function outputErrorText(error: ts.Diagnostic) {
|
||||
let message = ts.flattenDiagnosticMessageText(error.messageText, Harness.IO.newLine());
|
||||
const message = ts.flattenDiagnosticMessageText(error.messageText, Harness.IO.newLine());
|
||||
|
||||
let errLines = RunnerBase.removeFullPaths(message)
|
||||
const errLines = RunnerBase.removeFullPaths(message)
|
||||
.split("\n")
|
||||
.map(s => s.length > 0 && s.charAt(s.length - 1) === "\r" ? s.substr(0, s.length - 1) : s)
|
||||
.filter(s => s.length > 0)
|
||||
|
@ -1257,14 +1257,14 @@ namespace Harness {
|
|||
}
|
||||
|
||||
// Report global errors
|
||||
let globalErrors = diagnostics.filter(err => !err.file);
|
||||
const globalErrors = diagnostics.filter(err => !err.file);
|
||||
globalErrors.forEach(outputErrorText);
|
||||
|
||||
// 'merge' the lines of each input file with any errors associated with it
|
||||
inputFiles.filter(f => f.content !== undefined).forEach(inputFile => {
|
||||
// Filter down to the errors in the file
|
||||
let fileErrors = diagnostics.filter(e => {
|
||||
let errFn = e.file;
|
||||
const fileErrors = diagnostics.filter(e => {
|
||||
const errFn = e.file;
|
||||
return errFn && errFn.fileName === inputFile.unitName;
|
||||
});
|
||||
|
||||
|
@ -1278,7 +1278,7 @@ namespace Harness {
|
|||
// Note: IE JS engine incorrectly handles consecutive delimiters here when using RegExp split, so
|
||||
// we have to string-based splitting instead and try to figure out the delimiting chars
|
||||
|
||||
let lineStarts = ts.computeLineStarts(inputFile.content);
|
||||
const lineStarts = ts.computeLineStarts(inputFile.content);
|
||||
let lines = inputFile.content.split("\n");
|
||||
if (lines.length === 1) {
|
||||
lines = lines[0].split("\r");
|
||||
|
@ -1289,7 +1289,7 @@ namespace Harness {
|
|||
line = line.substr(0, line.length - 1);
|
||||
}
|
||||
|
||||
let thisLineStart = lineStarts[lineIndex];
|
||||
const thisLineStart = lineStarts[lineIndex];
|
||||
let nextLineStart: number;
|
||||
// On the last line of the file, fake the next line start number so that we handle errors on the last character of the file correctly
|
||||
if (lineIndex === lines.length - 1) {
|
||||
|
@ -1302,14 +1302,14 @@ namespace Harness {
|
|||
outputLines.push(" " + line);
|
||||
fileErrors.forEach(err => {
|
||||
// Does any error start or continue on to this line? Emit squiggles
|
||||
let end = ts.textSpanEnd(err);
|
||||
const end = ts.textSpanEnd(err);
|
||||
if ((end >= thisLineStart) && ((err.start < nextLineStart) || (lineIndex === lines.length - 1))) {
|
||||
// How many characters from the start of this line the error starts at (could be positive or negative)
|
||||
let relativeOffset = err.start - thisLineStart;
|
||||
const relativeOffset = err.start - thisLineStart;
|
||||
// How many characters of the error are on this line (might be longer than this line in reality)
|
||||
let length = (end - err.start) - Math.max(0, thisLineStart - err.start);
|
||||
const length = (end - err.start) - Math.max(0, thisLineStart - err.start);
|
||||
// Calculate the start of the squiggle
|
||||
let squiggleStart = Math.max(0, relativeOffset);
|
||||
const squiggleStart = Math.max(0, relativeOffset);
|
||||
// TODO/REVIEW: this doesn't work quite right in the browser if a multi file test has files whose names are just the right length relative to one another
|
||||
outputLines.push(" " + line.substr(0, squiggleStart).replace(/[^\s]/g, " ") + new Array(Math.min(length, line.length - squiggleStart) + 1).join("~"));
|
||||
|
||||
|
@ -1329,11 +1329,11 @@ namespace Harness {
|
|||
assert.equal(markedErrorCount, fileErrors.length, "count of errors in " + inputFile.unitName);
|
||||
});
|
||||
|
||||
let numLibraryDiagnostics = ts.countWhere(diagnostics, diagnostic => {
|
||||
const numLibraryDiagnostics = ts.countWhere(diagnostics, diagnostic => {
|
||||
return diagnostic.file && (isLibraryFile(diagnostic.file.fileName) || isBuiltFile(diagnostic.file.fileName));
|
||||
});
|
||||
|
||||
let numTest262HarnessDiagnostics = ts.countWhere(diagnostics, diagnostic => {
|
||||
const numTest262HarnessDiagnostics = ts.countWhere(diagnostics, diagnostic => {
|
||||
// Count an error generated from tests262-harness folder.This should only apply for test262
|
||||
return diagnostic.file && diagnostic.file.fileName.indexOf("test262-harness") >= 0;
|
||||
});
|
||||
|
@ -1351,7 +1351,7 @@ namespace Harness {
|
|||
|
||||
// Emit them
|
||||
let result = "";
|
||||
for (let outputFile of outputFiles) {
|
||||
for (const outputFile of outputFiles) {
|
||||
// Some extra spacing if this isn't the first file
|
||||
if (result.length) {
|
||||
result += "\r\n\r\n";
|
||||
|
@ -1366,7 +1366,7 @@ namespace Harness {
|
|||
return result;
|
||||
|
||||
function cleanName(fn: string) {
|
||||
let lastSlash = ts.normalizeSlashes(fn).lastIndexOf("/");
|
||||
const lastSlash = ts.normalizeSlashes(fn).lastIndexOf("/");
|
||||
return fn.substr(lastSlash + 1).toLowerCase();
|
||||
}
|
||||
}
|
||||
|
@ -1475,10 +1475,10 @@ namespace Harness {
|
|||
}
|
||||
|
||||
// Regex for parsing options in the format "@Alpha: Value of any sort"
|
||||
let optionRegex = /^[\/]{2}\s*@(\w+)\s*:\s*(\S*)/gm; // multiple matches on multiple lines
|
||||
const optionRegex = /^[\/]{2}\s*@(\w+)\s*:\s*(\S*)/gm; // multiple matches on multiple lines
|
||||
|
||||
function extractCompilerSettings(content: string): CompilerSettings {
|
||||
let opts: CompilerSettings = {};
|
||||
const opts: CompilerSettings = {};
|
||||
|
||||
let match: RegExpExecArray;
|
||||
while ((match = optionRegex.exec(content)) != null) {
|
||||
|
@ -1490,12 +1490,12 @@ namespace Harness {
|
|||
|
||||
/** Given a test file containing // @FileName directives, return an array of named units of code to be added to an existing compiler instance */
|
||||
export function makeUnitsFromTest(code: string, fileName: string): { settings: CompilerSettings; testUnitData: TestUnitData[]; } {
|
||||
let settings = extractCompilerSettings(code);
|
||||
const settings = extractCompilerSettings(code);
|
||||
|
||||
// List of all the subfiles we've parsed out
|
||||
let testUnitData: TestUnitData[] = [];
|
||||
const testUnitData: TestUnitData[] = [];
|
||||
|
||||
let lines = Utils.splitContentByNewlines(code);
|
||||
const lines = Utils.splitContentByNewlines(code);
|
||||
|
||||
// Stuff related to the subfile we're parsing
|
||||
let currentFileContent: string = null;
|
||||
|
@ -1504,12 +1504,12 @@ namespace Harness {
|
|||
let refs: string[] = [];
|
||||
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
let line = lines[i];
|
||||
let testMetaData = optionRegex.exec(line);
|
||||
const line = lines[i];
|
||||
const testMetaData = optionRegex.exec(line);
|
||||
if (testMetaData) {
|
||||
// Comment line, check for global/file @options and record them
|
||||
optionRegex.lastIndex = 0;
|
||||
let metaDataName = testMetaData[1].toLowerCase();
|
||||
const metaDataName = testMetaData[1].toLowerCase();
|
||||
if (metaDataName === "filename") {
|
||||
currentFileOptions[testMetaData[1]] = testMetaData[2];
|
||||
}
|
||||
|
@ -1520,7 +1520,7 @@ namespace Harness {
|
|||
// New metadata statement after having collected some code to go with the previous metadata
|
||||
if (currentFileName) {
|
||||
// Store result file
|
||||
let newTestFile = {
|
||||
const newTestFile = {
|
||||
content: currentFileContent,
|
||||
name: currentFileName,
|
||||
fileOptions: currentFileOptions,
|
||||
|
@ -1558,7 +1558,7 @@ namespace Harness {
|
|||
currentFileName = testUnitData.length > 0 ? currentFileName : Path.getFileName(fileName);
|
||||
|
||||
// EOF, push whatever remains
|
||||
let newTestFile2 = {
|
||||
const newTestFile2 = {
|
||||
content: currentFileContent || "",
|
||||
name: currentFileName,
|
||||
fileOptions: currentFileOptions,
|
||||
|
@ -1606,7 +1606,7 @@ namespace Harness {
|
|||
}
|
||||
}
|
||||
|
||||
let fileCache: { [idx: string]: boolean } = {};
|
||||
const fileCache: { [idx: string]: boolean } = {};
|
||||
function generateActual(actualFileName: string, generateContent: () => string): string {
|
||||
// For now this is written using TypeScript, because sys is not available when running old test cases.
|
||||
// But we need to move to sys once we have
|
||||
|
@ -1617,7 +1617,7 @@ namespace Harness {
|
|||
return;
|
||||
}
|
||||
|
||||
let parentDirectory = IO.directoryName(dirName);
|
||||
const parentDirectory = IO.directoryName(dirName);
|
||||
if (parentDirectory != "") {
|
||||
createDirectoryStructure(parentDirectory);
|
||||
}
|
||||
|
@ -1633,7 +1633,7 @@ namespace Harness {
|
|||
IO.deleteFile(actualFileName);
|
||||
}
|
||||
|
||||
let actual = generateContent();
|
||||
const actual = generateContent();
|
||||
|
||||
if (actual === undefined) {
|
||||
throw new Error("The generated content was \"undefined\". Return \"null\" if no baselining is required.\"");
|
||||
|
@ -1656,7 +1656,7 @@ namespace Harness {
|
|||
return;
|
||||
}
|
||||
|
||||
let refFileName = referencePath(relativeFileName, opts && opts.Baselinefolder, opts && opts.Subfolder);
|
||||
const refFileName = referencePath(relativeFileName, opts && opts.Baselinefolder, opts && opts.Subfolder);
|
||||
|
||||
if (actual === null) {
|
||||
actual = "<no content>";
|
||||
|
@ -1671,10 +1671,10 @@ namespace Harness {
|
|||
}
|
||||
|
||||
function writeComparison(expected: string, actual: string, relativeFileName: string, actualFileName: string, descriptionForDescribe: string) {
|
||||
let encoded_actual = Utils.encodeString(actual);
|
||||
const encoded_actual = Utils.encodeString(actual);
|
||||
if (expected != encoded_actual) {
|
||||
// Overwrite & issue error
|
||||
let errMsg = "The baseline file " + relativeFileName + " has changed";
|
||||
const errMsg = "The baseline file " + relativeFileName + " has changed";
|
||||
throw new Error(errMsg);
|
||||
}
|
||||
}
|
||||
|
@ -1687,17 +1687,17 @@ namespace Harness {
|
|||
opts?: BaselineOptions): void {
|
||||
|
||||
let actual = <string>undefined;
|
||||
let actualFileName = localPath(relativeFileName, opts && opts.Baselinefolder, opts && opts.Subfolder);
|
||||
const actualFileName = localPath(relativeFileName, opts && opts.Baselinefolder, opts && opts.Subfolder);
|
||||
|
||||
if (runImmediately) {
|
||||
actual = generateActual(actualFileName, generateContent);
|
||||
let comparison = compareToBaseline(actual, relativeFileName, opts);
|
||||
const comparison = compareToBaseline(actual, relativeFileName, opts);
|
||||
writeComparison(comparison.expected, comparison.actual, relativeFileName, actualFileName, descriptionForDescribe);
|
||||
}
|
||||
else {
|
||||
actual = generateActual(actualFileName, generateContent);
|
||||
|
||||
let comparison = compareToBaseline(actual, relativeFileName, opts);
|
||||
const comparison = compareToBaseline(actual, relativeFileName, opts);
|
||||
writeComparison(comparison.expected, comparison.actual, relativeFileName, actualFileName, descriptionForDescribe);
|
||||
}
|
||||
}
|
||||
|
@ -1712,7 +1712,7 @@ namespace Harness {
|
|||
}
|
||||
|
||||
export function getDefaultLibraryFile(io: Harness.IO): { unitName: string, content: string } {
|
||||
let libFile = Harness.userSpecifiedRoot + Harness.libFolder + "lib.d.ts";
|
||||
const libFile = Harness.userSpecifiedRoot + Harness.libFolder + "lib.d.ts";
|
||||
return {
|
||||
unitName: libFile,
|
||||
content: io.readFile(libFile)
|
||||
|
|
|
@ -26,9 +26,9 @@ namespace Harness.LanguageService {
|
|||
|
||||
public editContent(start: number, end: number, newText: string): void {
|
||||
// Apply edits
|
||||
let prefix = this.content.substring(0, start);
|
||||
let middle = newText;
|
||||
let suffix = this.content.substring(end);
|
||||
const prefix = this.content.substring(0, start);
|
||||
const middle = newText;
|
||||
const suffix = this.content.substring(end);
|
||||
this.setContent(prefix + middle + suffix);
|
||||
|
||||
// Store edit range + new length of script
|
||||
|
@ -48,10 +48,10 @@ namespace Harness.LanguageService {
|
|||
return ts.unchangedTextChangeRange;
|
||||
}
|
||||
|
||||
let initialEditRangeIndex = this.editRanges.length - (this.version - startVersion);
|
||||
let lastEditRangeIndex = this.editRanges.length - (this.version - endVersion);
|
||||
const initialEditRangeIndex = this.editRanges.length - (this.version - startVersion);
|
||||
const lastEditRangeIndex = this.editRanges.length - (this.version - endVersion);
|
||||
|
||||
let entries = this.editRanges.slice(initialEditRangeIndex, lastEditRangeIndex);
|
||||
const entries = this.editRanges.slice(initialEditRangeIndex, lastEditRangeIndex);
|
||||
return ts.collapseTextChangeRangesAcrossMultipleVersions(entries.map(e => e.textChangeRange));
|
||||
}
|
||||
}
|
||||
|
@ -74,7 +74,7 @@ namespace Harness.LanguageService {
|
|||
}
|
||||
|
||||
public getChangeRange(oldScript: ts.IScriptSnapshot): ts.TextChangeRange {
|
||||
let oldShim = <ScriptSnapshot>oldScript;
|
||||
const oldShim = <ScriptSnapshot>oldScript;
|
||||
return this.scriptInfo.getTextChangeRangeBetweenVersions(oldShim.version, this.version);
|
||||
}
|
||||
}
|
||||
|
@ -92,9 +92,9 @@ namespace Harness.LanguageService {
|
|||
}
|
||||
|
||||
public getChangeRange(oldScript: ts.ScriptSnapshotShim): string {
|
||||
let oldShim = <ScriptSnapshotProxy>oldScript;
|
||||
const oldShim = <ScriptSnapshotProxy>oldScript;
|
||||
|
||||
let range = this.scriptSnapshot.getChangeRange(oldShim.scriptSnapshot);
|
||||
const range = this.scriptSnapshot.getChangeRange(oldShim.scriptSnapshot);
|
||||
if (range === undefined) {
|
||||
return undefined;
|
||||
}
|
||||
|
@ -130,7 +130,7 @@ namespace Harness.LanguageService {
|
|||
}
|
||||
|
||||
public getFilenames(): string[] {
|
||||
let fileNames: string[] = [];
|
||||
const fileNames: string[] = [];
|
||||
ts.forEachKey(this.fileNameToScript, (fileName) => { fileNames.push(fileName); });
|
||||
return fileNames;
|
||||
}
|
||||
|
@ -144,7 +144,7 @@ namespace Harness.LanguageService {
|
|||
}
|
||||
|
||||
public editScript(fileName: string, start: number, end: number, newText: string) {
|
||||
let script = this.getScriptInfo(fileName);
|
||||
const script = this.getScriptInfo(fileName);
|
||||
if (script !== undefined) {
|
||||
script.editContent(start, end, newText);
|
||||
return;
|
||||
|
@ -161,7 +161,7 @@ namespace Harness.LanguageService {
|
|||
* @param col 0 based index
|
||||
*/
|
||||
public positionToLineAndCharacter(fileName: string, position: number): ts.LineAndCharacter {
|
||||
let script: ScriptInfo = this.fileNameToScript[fileName];
|
||||
const script: ScriptInfo = this.fileNameToScript[fileName];
|
||||
assert.isNotNull(script);
|
||||
|
||||
return ts.computeLineAndCharacterOfPosition(script.lineMap, position);
|
||||
|
@ -176,11 +176,11 @@ namespace Harness.LanguageService {
|
|||
getDefaultLibFileName(): string { return ""; }
|
||||
getScriptFileNames(): string[] { return this.getFilenames(); }
|
||||
getScriptSnapshot(fileName: string): ts.IScriptSnapshot {
|
||||
let script = this.getScriptInfo(fileName);
|
||||
const script = this.getScriptInfo(fileName);
|
||||
return script ? new ScriptSnapshot(script) : undefined;
|
||||
}
|
||||
getScriptVersion(fileName: string): string {
|
||||
let script = this.getScriptInfo(fileName);
|
||||
const script = this.getScriptInfo(fileName);
|
||||
return script ? script.version.toString() : undefined;
|
||||
}
|
||||
|
||||
|
@ -211,20 +211,20 @@ namespace Harness.LanguageService {
|
|||
this.nativeHost = new NativeLanguageServiceHost(cancellationToken, options);
|
||||
|
||||
if (preprocessToResolve) {
|
||||
let compilerOptions = this.nativeHost.getCompilationSettings();
|
||||
let moduleResolutionHost: ts.ModuleResolutionHost = {
|
||||
const compilerOptions = this.nativeHost.getCompilationSettings();
|
||||
const moduleResolutionHost: ts.ModuleResolutionHost = {
|
||||
fileExists: fileName => this.getScriptInfo(fileName) !== undefined,
|
||||
readFile: fileName => {
|
||||
let scriptInfo = this.getScriptInfo(fileName);
|
||||
const scriptInfo = this.getScriptInfo(fileName);
|
||||
return scriptInfo && scriptInfo.content;
|
||||
}
|
||||
};
|
||||
this.getModuleResolutionsForFile = (fileName) => {
|
||||
let scriptInfo = this.getScriptInfo(fileName);
|
||||
let preprocessInfo = ts.preProcessFile(scriptInfo.content, /*readImportFiles*/ true);
|
||||
let imports: ts.Map<string> = {};
|
||||
for (let module of preprocessInfo.importedFiles) {
|
||||
let resolutionInfo = ts.resolveModuleName(module.fileName, fileName, compilerOptions, moduleResolutionHost);
|
||||
const scriptInfo = this.getScriptInfo(fileName);
|
||||
const preprocessInfo = ts.preProcessFile(scriptInfo.content, /*readImportFiles*/ true);
|
||||
const imports: ts.Map<string> = {};
|
||||
for (const module of preprocessInfo.importedFiles) {
|
||||
const resolutionInfo = ts.resolveModuleName(module.fileName, fileName, compilerOptions, moduleResolutionHost);
|
||||
if (resolutionInfo.resolvedModule) {
|
||||
imports[module.fileName] = resolutionInfo.resolvedModule.resolvedFileName;
|
||||
}
|
||||
|
@ -246,7 +246,7 @@ namespace Harness.LanguageService {
|
|||
getDefaultLibFileName(): string { return this.nativeHost.getDefaultLibFileName(); }
|
||||
getScriptFileNames(): string { return JSON.stringify(this.nativeHost.getScriptFileNames()); }
|
||||
getScriptSnapshot(fileName: string): ts.ScriptSnapshotShim {
|
||||
let nativeScriptSnapshot = this.nativeHost.getScriptSnapshot(fileName);
|
||||
const nativeScriptSnapshot = this.nativeHost.getScriptSnapshot(fileName);
|
||||
return nativeScriptSnapshot && new ScriptSnapshotProxy(nativeScriptSnapshot);
|
||||
}
|
||||
getScriptVersion(fileName: string): string { return this.nativeHost.getScriptVersion(fileName); }
|
||||
|
@ -257,7 +257,7 @@ namespace Harness.LanguageService {
|
|||
}
|
||||
fileExists(fileName: string) { return this.getScriptInfo(fileName) !== undefined; }
|
||||
readFile(fileName: string) {
|
||||
let snapshot = this.nativeHost.getScriptSnapshot(fileName);
|
||||
const snapshot = this.nativeHost.getScriptSnapshot(fileName);
|
||||
return snapshot && snapshot.getText(0, snapshot.getLength());
|
||||
}
|
||||
log(s: string): void { this.nativeHost.log(s); }
|
||||
|
@ -272,13 +272,13 @@ namespace Harness.LanguageService {
|
|||
throw new Error("NYI");
|
||||
}
|
||||
getClassificationsForLine(text: string, lexState: ts.EndOfLineState, classifyKeywordsInGenerics?: boolean): ts.ClassificationResult {
|
||||
let result = this.shim.getClassificationsForLine(text, lexState, classifyKeywordsInGenerics).split("\n");
|
||||
let entries: ts.ClassificationInfo[] = [];
|
||||
const result = this.shim.getClassificationsForLine(text, lexState, classifyKeywordsInGenerics).split("\n");
|
||||
const entries: ts.ClassificationInfo[] = [];
|
||||
let i = 0;
|
||||
let position = 0;
|
||||
|
||||
for (; i < result.length - 1; i += 2) {
|
||||
let t = entries[i / 2] = {
|
||||
const t = entries[i / 2] = {
|
||||
length: parseInt(result[i]),
|
||||
classification: parseInt(result[i + 1])
|
||||
};
|
||||
|
@ -286,7 +286,7 @@ namespace Harness.LanguageService {
|
|||
assert.isTrue(t.length > 0, "Result length should be greater than 0, got :" + t.length);
|
||||
position += t.length;
|
||||
}
|
||||
let finalLexState = parseInt(result[result.length - 1]);
|
||||
const finalLexState = parseInt(result[result.length - 1]);
|
||||
|
||||
assert.equal(position, text.length, "Expected cumulative length of all entries to match the length of the source. expected: " + text.length + ", but got: " + position);
|
||||
|
||||
|
@ -298,7 +298,7 @@ namespace Harness.LanguageService {
|
|||
}
|
||||
|
||||
function unwrapJSONCallResult(result: string): any {
|
||||
let parsedResult = JSON.parse(result);
|
||||
const parsedResult = JSON.parse(result);
|
||||
if (parsedResult.error) {
|
||||
throw new Error("Language Service Shim Error: " + JSON.stringify(parsedResult.error));
|
||||
}
|
||||
|
@ -312,7 +312,7 @@ namespace Harness.LanguageService {
|
|||
constructor(private shim: ts.LanguageServiceShim) {
|
||||
}
|
||||
private unwrappJSONCallResult(result: string): any {
|
||||
let parsedResult = JSON.parse(result);
|
||||
const parsedResult = JSON.parse(result);
|
||||
if (parsedResult.error) {
|
||||
throw new Error("Language Service Shim Error: " + JSON.stringify(parsedResult.error));
|
||||
}
|
||||
|
@ -443,10 +443,10 @@ namespace Harness.LanguageService {
|
|||
isLibFile: boolean;
|
||||
};
|
||||
|
||||
let coreServicesShim = this.factory.createCoreServicesShim(this.host);
|
||||
const coreServicesShim = this.factory.createCoreServicesShim(this.host);
|
||||
shimResult = unwrapJSONCallResult(coreServicesShim.getPreProcessedFileInfo(fileName, ts.ScriptSnapshot.fromString(fileContents)));
|
||||
|
||||
let convertResult: ts.PreProcessedFileInfo = {
|
||||
const convertResult: ts.PreProcessedFileInfo = {
|
||||
referencedFiles: [],
|
||||
importedFiles: [],
|
||||
ambientExternalModules: [],
|
||||
|
@ -530,7 +530,7 @@ namespace Harness.LanguageService {
|
|||
fileName = Harness.Compiler.defaultLibFileName;
|
||||
}
|
||||
|
||||
let snapshot = this.host.getScriptSnapshot(fileName);
|
||||
const snapshot = this.host.getScriptSnapshot(fileName);
|
||||
return snapshot && snapshot.getText(0, snapshot.getLength());
|
||||
}
|
||||
|
||||
|
@ -612,13 +612,13 @@ namespace Harness.LanguageService {
|
|||
private client: ts.server.SessionClient;
|
||||
constructor(cancellationToken?: ts.HostCancellationToken, options?: ts.CompilerOptions) {
|
||||
// This is the main host that tests use to direct tests
|
||||
let clientHost = new SessionClientHost(cancellationToken, options);
|
||||
let client = new ts.server.SessionClient(clientHost);
|
||||
const clientHost = new SessionClientHost(cancellationToken, options);
|
||||
const client = new ts.server.SessionClient(clientHost);
|
||||
|
||||
// This host is just a proxy for the clientHost, it uses the client
|
||||
// host to answer server queries about files on disk
|
||||
let serverHost = new SessionServerHost(clientHost);
|
||||
let server = new ts.server.Session(serverHost,
|
||||
const serverHost = new SessionServerHost(clientHost);
|
||||
const server = new ts.server.Session(serverHost,
|
||||
Buffer ? Buffer.byteLength : (string: string, encoding?: string) => string.length,
|
||||
process.hrtime, serverHost);
|
||||
|
||||
|
|
|
@ -89,7 +89,7 @@ namespace Playback {
|
|||
|
||||
function memoize<T>(func: (s: string) => T): Memoized<T> {
|
||||
let lookup: { [s: string]: T } = {};
|
||||
let run: Memoized<T> = <Memoized<T>>((s: string) => {
|
||||
const run: Memoized<T> = <Memoized<T>>((s: string) => {
|
||||
if (lookup.hasOwnProperty(s)) return lookup[s];
|
||||
return lookup[s] = func(s);
|
||||
});
|
||||
|
@ -159,7 +159,7 @@ namespace Playback {
|
|||
wrapper.endRecord = () => {
|
||||
if (recordLog !== undefined) {
|
||||
let i = 0;
|
||||
let fn = () => recordLogFileNameBase + i + ".json";
|
||||
const fn = () => recordLogFileNameBase + i + ".json";
|
||||
while (underlying.fileExists(fn())) i++;
|
||||
underlying.writeFile(fn(), JSON.stringify(recordLog));
|
||||
recordLog = undefined;
|
||||
|
@ -209,8 +209,8 @@ namespace Playback {
|
|||
|
||||
wrapper.readFile = recordReplay(wrapper.readFile, underlying)(
|
||||
path => {
|
||||
let result = underlying.readFile(path);
|
||||
let logEntry = { path, codepage: 0, result: { contents: result, codepage: 0 } };
|
||||
const result = underlying.readFile(path);
|
||||
const logEntry = { path, codepage: 0, result: { contents: result, codepage: 0 } };
|
||||
recordLog.filesRead.push(logEntry);
|
||||
return result;
|
||||
},
|
||||
|
@ -218,8 +218,8 @@ namespace Playback {
|
|||
|
||||
wrapper.readDirectory = recordReplay(wrapper.readDirectory, underlying)(
|
||||
(path, extension, exclude) => {
|
||||
let result = (<ts.System>underlying).readDirectory(path, extension, exclude);
|
||||
let logEntry = { path, extension, exclude, result };
|
||||
const result = (<ts.System>underlying).readDirectory(path, extension, exclude);
|
||||
const logEntry = { path, extension, exclude, result };
|
||||
recordLog.directoriesRead.push(logEntry);
|
||||
return result;
|
||||
},
|
||||
|
@ -263,10 +263,10 @@ namespace Playback {
|
|||
}
|
||||
|
||||
function findResultByFields<T>(logArray: { result?: T }[], expectedFields: {}, defaultValue?: T): T {
|
||||
let predicate = (entry: { result?: T }) => {
|
||||
const predicate = (entry: { result?: T }) => {
|
||||
return Object.getOwnPropertyNames(expectedFields).every((name) => (<any>entry)[name] === (<any>expectedFields)[name]);
|
||||
};
|
||||
let results = logArray.filter(entry => predicate(entry));
|
||||
const results = logArray.filter(entry => predicate(entry));
|
||||
if (results.length === 0) {
|
||||
if (defaultValue !== undefined) {
|
||||
return defaultValue;
|
||||
|
@ -279,7 +279,7 @@ namespace Playback {
|
|||
}
|
||||
|
||||
function findResultByPath<T>(wrapper: { resolvePath(s: string): string }, logArray: { path: string; result?: T }[], expectedPath: string, defaultValue?: T): T {
|
||||
let normalizedName = ts.normalizePath(expectedPath).toLowerCase();
|
||||
const normalizedName = ts.normalizePath(expectedPath).toLowerCase();
|
||||
// Try to find the result through normal fileName
|
||||
for (let i = 0; i < logArray.length; i++) {
|
||||
if (ts.normalizeSlashes(logArray[i].path).toLowerCase() === normalizedName) {
|
||||
|
@ -288,7 +288,7 @@ namespace Playback {
|
|||
}
|
||||
// Fallback, try to resolve the target paths as well
|
||||
if (replayLog.pathsResolved.length > 0) {
|
||||
let normalizedResolvedName = wrapper.resolvePath(expectedPath).toLowerCase();
|
||||
const normalizedResolvedName = wrapper.resolvePath(expectedPath).toLowerCase();
|
||||
for (let i = 0; i < logArray.length; i++) {
|
||||
if (wrapper.resolvePath(logArray[i].path).toLowerCase() === normalizedResolvedName) {
|
||||
return logArray[i].result;
|
||||
|
@ -305,9 +305,9 @@ namespace Playback {
|
|||
}
|
||||
}
|
||||
|
||||
let pathEquivCache: any = {};
|
||||
const pathEquivCache: any = {};
|
||||
function pathsAreEquivalent(left: string, right: string, wrapper: { resolvePath(s: string): string }) {
|
||||
let key = left + "-~~-" + right;
|
||||
const key = left + "-~~-" + right;
|
||||
function areSame(a: string, b: string) {
|
||||
return ts.normalizeSlashes(a).toLowerCase() === ts.normalizeSlashes(b).toLowerCase();
|
||||
}
|
||||
|
@ -329,7 +329,7 @@ namespace Playback {
|
|||
}
|
||||
|
||||
export function wrapIO(underlying: Harness.IO): PlaybackIO {
|
||||
let wrapper: PlaybackIO = <any>{};
|
||||
const wrapper: PlaybackIO = <any>{};
|
||||
initWrapper(wrapper, underlying);
|
||||
|
||||
wrapper.directoryName = (path): string => { throw new Error("NotSupported"); };
|
||||
|
@ -342,7 +342,7 @@ namespace Playback {
|
|||
}
|
||||
|
||||
export function wrapSystem(underlying: ts.System): PlaybackSystem {
|
||||
let wrapper: PlaybackSystem = <any>{};
|
||||
const wrapper: PlaybackSystem = <any>{};
|
||||
initWrapper(wrapper, underlying);
|
||||
return wrapper;
|
||||
}
|
||||
|
|
|
@ -47,7 +47,7 @@ interface BatchCompileProjectTestCaseResult extends CompileProjectFilesResult {
|
|||
class ProjectRunner extends RunnerBase {
|
||||
public initializeTests() {
|
||||
if (this.tests.length === 0) {
|
||||
let testFiles = this.enumerateFiles("tests/cases/project", /\.json$/, { recursive: true });
|
||||
const testFiles = this.enumerateFiles("tests/cases/project", /\.json$/, { recursive: true });
|
||||
testFiles.forEach(fn => {
|
||||
fn = fn.replace(/\\/g, "/");
|
||||
this.runProjectTestCase(fn);
|
||||
|
@ -101,7 +101,7 @@ class ProjectRunner extends RunnerBase {
|
|||
function cleanProjectUrl(url: string) {
|
||||
let diskProjectPath = ts.normalizeSlashes(Harness.IO.resolvePath(testCase.projectRoot));
|
||||
let projectRootUrl = "file:///" + diskProjectPath;
|
||||
let normalizedProjectRoot = ts.normalizeSlashes(testCase.projectRoot);
|
||||
const normalizedProjectRoot = ts.normalizeSlashes(testCase.projectRoot);
|
||||
diskProjectPath = diskProjectPath.substr(0, diskProjectPath.lastIndexOf(normalizedProjectRoot));
|
||||
projectRootUrl = projectRootUrl.substr(0, projectRootUrl.lastIndexOf(normalizedProjectRoot));
|
||||
if (url && url.length) {
|
||||
|
@ -130,12 +130,12 @@ class ProjectRunner extends RunnerBase {
|
|||
getSourceFileTextImpl: (fileName: string) => string,
|
||||
writeFile: (fileName: string, data: string, writeByteOrderMark: boolean) => void): CompileProjectFilesResult {
|
||||
|
||||
let program = ts.createProgram(getInputFiles(), createCompilerOptions(), createCompilerHost());
|
||||
const program = ts.createProgram(getInputFiles(), createCompilerOptions(), createCompilerHost());
|
||||
let errors = ts.getPreEmitDiagnostics(program);
|
||||
|
||||
let emitResult = program.emit();
|
||||
const emitResult = program.emit();
|
||||
errors = ts.concatenate(errors, emitResult.diagnostics);
|
||||
let sourceMapData = emitResult.sourceMaps;
|
||||
const sourceMapData = emitResult.sourceMaps;
|
||||
|
||||
// Clean up source map data that will be used in baselining
|
||||
if (sourceMapData) {
|
||||
|
@ -181,7 +181,7 @@ class ProjectRunner extends RunnerBase {
|
|||
sourceFile = languageVersion === ts.ScriptTarget.ES6 ? Harness.Compiler.defaultES6LibSourceFile : Harness.Compiler.defaultLibSourceFile;
|
||||
}
|
||||
else {
|
||||
let text = getSourceFileText(fileName);
|
||||
const text = getSourceFileText(fileName);
|
||||
if (text !== undefined) {
|
||||
sourceFile = Harness.Compiler.createSourceFileAndAssertInvariants(fileName, text, languageVersion);
|
||||
}
|
||||
|
@ -208,9 +208,9 @@ class ProjectRunner extends RunnerBase {
|
|||
function batchCompilerProjectTestCase(moduleKind: ts.ModuleKind): BatchCompileProjectTestCaseResult {
|
||||
let nonSubfolderDiskFiles = 0;
|
||||
|
||||
let outputFiles: BatchCompileProjectTestCaseEmittedFile[] = [];
|
||||
const outputFiles: BatchCompileProjectTestCaseEmittedFile[] = [];
|
||||
|
||||
let projectCompilerResult = compileProjectFiles(moduleKind, () => testCase.inputFiles, getSourceFileText, writeFile);
|
||||
const projectCompilerResult = compileProjectFiles(moduleKind, () => testCase.inputFiles, getSourceFileText, writeFile);
|
||||
return {
|
||||
moduleKind,
|
||||
program: projectCompilerResult.program,
|
||||
|
@ -236,11 +236,11 @@ class ProjectRunner extends RunnerBase {
|
|||
function writeFile(fileName: string, data: string, writeByteOrderMark: boolean) {
|
||||
// convert file name to rooted name
|
||||
// if filename is not rooted - concat it with project root and then expand project root relative to current directory
|
||||
let diskFileName = ts.isRootedDiskPath(fileName)
|
||||
const diskFileName = ts.isRootedDiskPath(fileName)
|
||||
? fileName
|
||||
: Harness.IO.resolvePath(ts.normalizeSlashes(testCase.projectRoot) + "/" + ts.normalizeSlashes(fileName));
|
||||
|
||||
let currentDirectory = getCurrentDirectory();
|
||||
const currentDirectory = getCurrentDirectory();
|
||||
// compute file name relative to current directory (expanded project root)
|
||||
let diskRelativeName = ts.getRelativePathToDirectoryOrUrl(currentDirectory, diskFileName, currentDirectory, Harness.Compiler.getCanonicalFileName, /*isAbsolutePathAnUrl*/ false);
|
||||
if (ts.isRootedDiskPath(diskRelativeName) || diskRelativeName.substr(0, 3) === "../") {
|
||||
|
@ -254,14 +254,14 @@ class ProjectRunner extends RunnerBase {
|
|||
|
||||
if (Harness.Compiler.isJS(fileName)) {
|
||||
// Make sure if there is URl we have it cleaned up
|
||||
let indexOfSourceMapUrl = data.lastIndexOf("//# sourceMappingURL=");
|
||||
const indexOfSourceMapUrl = data.lastIndexOf("//# sourceMappingURL=");
|
||||
if (indexOfSourceMapUrl !== -1) {
|
||||
data = data.substring(0, indexOfSourceMapUrl + 21) + cleanProjectUrl(data.substring(indexOfSourceMapUrl + 21));
|
||||
}
|
||||
}
|
||||
else if (Harness.Compiler.isJSMap(fileName)) {
|
||||
// Make sure sources list is cleaned
|
||||
let sourceMapData = JSON.parse(data);
|
||||
const sourceMapData = JSON.parse(data);
|
||||
for (let i = 0; i < sourceMapData.sources.length; i++) {
|
||||
sourceMapData.sources[i] = cleanProjectUrl(sourceMapData.sources[i]);
|
||||
}
|
||||
|
@ -269,7 +269,7 @@ class ProjectRunner extends RunnerBase {
|
|||
data = JSON.stringify(sourceMapData);
|
||||
}
|
||||
|
||||
let outputFilePath = getProjectOutputFolder(diskRelativeName, moduleKind);
|
||||
const outputFilePath = getProjectOutputFolder(diskRelativeName, moduleKind);
|
||||
// Actual writing of file as in tc.ts
|
||||
function ensureDirectoryStructure(directoryname: string) {
|
||||
if (directoryname) {
|
||||
|
@ -287,8 +287,8 @@ class ProjectRunner extends RunnerBase {
|
|||
}
|
||||
|
||||
function compileCompileDTsFiles(compilerResult: BatchCompileProjectTestCaseResult) {
|
||||
let allInputFiles: { emittedFileName: string; code: string; }[] = [];
|
||||
let compilerOptions = compilerResult.program.getCompilerOptions();
|
||||
const allInputFiles: { emittedFileName: string; code: string; }[] = [];
|
||||
const compilerOptions = compilerResult.program.getCompilerOptions();
|
||||
|
||||
ts.forEach(compilerResult.program.getSourceFiles(), sourceFile => {
|
||||
if (Harness.Compiler.isDTS(sourceFile.fileName)) {
|
||||
|
@ -305,12 +305,12 @@ class ProjectRunner extends RunnerBase {
|
|||
emitOutputFilePathWithoutExtension = ts.removeFileExtension(sourceFile.fileName);
|
||||
}
|
||||
|
||||
let outputDtsFileName = emitOutputFilePathWithoutExtension + ".d.ts";
|
||||
const outputDtsFileName = emitOutputFilePathWithoutExtension + ".d.ts";
|
||||
allInputFiles.unshift(findOutpuDtsFile(outputDtsFileName));
|
||||
}
|
||||
else {
|
||||
let outputDtsFileName = ts.removeFileExtension(compilerOptions.outFile || compilerOptions.out) + ".d.ts";
|
||||
let outputDtsFile = findOutpuDtsFile(outputDtsFileName);
|
||||
const outputDtsFileName = ts.removeFileExtension(compilerOptions.outFile || compilerOptions.out) + ".d.ts";
|
||||
const outputDtsFile = findOutpuDtsFile(outputDtsFileName);
|
||||
if (!ts.contains(allInputFiles, outputDtsFile)) {
|
||||
allInputFiles.unshift(outputDtsFile);
|
||||
}
|
||||
|
@ -343,7 +343,7 @@ class ProjectRunner extends RunnerBase {
|
|||
}
|
||||
|
||||
function getErrorsBaseline(compilerResult: CompileProjectFilesResult) {
|
||||
let inputFiles = ts.map(ts.filter(compilerResult.program.getSourceFiles(),
|
||||
const inputFiles = ts.map(ts.filter(compilerResult.program.getSourceFiles(),
|
||||
sourceFile => sourceFile.fileName !== "lib.d.ts"),
|
||||
sourceFile => {
|
||||
return { unitName: sourceFile.fileName, content: sourceFile.text };
|
||||
|
@ -352,7 +352,7 @@ class ProjectRunner extends RunnerBase {
|
|||
return Harness.Compiler.getErrorBaseline(inputFiles, compilerResult.errors);
|
||||
}
|
||||
|
||||
let name = "Compiling project for " + testCase.scenario + ": testcase " + testCaseFileName;
|
||||
const name = "Compiling project for " + testCase.scenario + ": testcase " + testCaseFileName;
|
||||
|
||||
describe("Projects tests", () => {
|
||||
describe(name, () => {
|
||||
|
@ -360,7 +360,7 @@ class ProjectRunner extends RunnerBase {
|
|||
let compilerResult: BatchCompileProjectTestCaseResult;
|
||||
|
||||
function getCompilerResolutionInfo() {
|
||||
let resolutionInfo: ProjectRunnerTestCaseResolutionInfo = {
|
||||
const resolutionInfo: ProjectRunnerTestCaseResolutionInfo = {
|
||||
scenario: testCase.scenario,
|
||||
projectRoot: testCase.projectRoot,
|
||||
inputFiles: testCase.inputFiles,
|
||||
|
@ -438,7 +438,7 @@ class ProjectRunner extends RunnerBase {
|
|||
|
||||
it("Errors in generated Dts files for (" + moduleNameToString(moduleKind) + "): " + testCaseFileName, () => {
|
||||
if (!compilerResult.errors.length && testCase.declaration) {
|
||||
let dTsCompileResult = compileCompileDTsFiles(compilerResult);
|
||||
const dTsCompileResult = compileCompileDTsFiles(compilerResult);
|
||||
if (dTsCompileResult.errors.length) {
|
||||
Harness.Baseline.runBaseline("Errors in generated Dts files for (" + moduleNameToString(compilerResult.moduleKind) + "): " + testCaseFileName, getBaselineFolder(compilerResult.moduleKind) + testCaseJustName + ".dts.errors.txt", () => {
|
||||
return getErrorsBaseline(dTsCompileResult);
|
||||
|
|
|
@ -41,13 +41,13 @@ let testConfigFile =
|
|||
(Harness.IO.fileExists(testconfig) ? Harness.IO.readFile(testconfig) : "");
|
||||
|
||||
if (testConfigFile !== "") {
|
||||
let testConfig = JSON.parse(testConfigFile);
|
||||
const testConfig = JSON.parse(testConfigFile);
|
||||
if (testConfig.light) {
|
||||
Harness.lightMode = true;
|
||||
}
|
||||
|
||||
if (testConfig.test && testConfig.test.length > 0) {
|
||||
for (let option of testConfig.test) {
|
||||
for (const option of testConfig.test) {
|
||||
if (!option) {
|
||||
continue;
|
||||
}
|
||||
|
|
|
@ -25,14 +25,14 @@ abstract class RunnerBase {
|
|||
let fixedPath = path;
|
||||
|
||||
// full paths either start with a drive letter or / for *nix, shouldn't have \ in the path at this point
|
||||
let fullPath = /(\w+:|\/)?([\w+\-\.]|\/)*\.tsx?/g;
|
||||
let fullPathList = fixedPath.match(fullPath);
|
||||
const fullPath = /(\w+:|\/)?([\w+\-\.]|\/)*\.tsx?/g;
|
||||
const fullPathList = fixedPath.match(fullPath);
|
||||
if (fullPathList) {
|
||||
fullPathList.forEach((match: string) => fixedPath = fixedPath.replace(match, Harness.Path.getFileName(match)));
|
||||
}
|
||||
|
||||
// when running in the browser the 'full path' is the host name, shows up in error baselines
|
||||
let localHost = /http:\/localhost:\d+/g;
|
||||
const localHost = /http:\/localhost:\d+/g;
|
||||
fixedPath = fixedPath.replace(localHost, "");
|
||||
return fixedPath;
|
||||
}
|
||||
|
|
|
@ -6,9 +6,9 @@
|
|||
|
||||
namespace RWC {
|
||||
function runWithIOLog(ioLog: IOLog, fn: (oldIO: Harness.IO) => void) {
|
||||
let oldIO = Harness.IO;
|
||||
const oldIO = Harness.IO;
|
||||
|
||||
let wrappedIO = Playback.wrapIO(oldIO);
|
||||
const wrappedIO = Playback.wrapIO(oldIO);
|
||||
wrappedIO.startReplayFromData(ioLog);
|
||||
Harness.IO = wrappedIO;
|
||||
|
||||
|
@ -55,10 +55,10 @@ namespace RWC {
|
|||
});
|
||||
|
||||
it("can compile", () => {
|
||||
let harnessCompiler = Harness.Compiler.getCompiler();
|
||||
const harnessCompiler = Harness.Compiler.getCompiler();
|
||||
let opts: ts.ParsedCommandLine;
|
||||
|
||||
let ioLog: IOLog = JSON.parse(Harness.IO.readFile(jsonPath));
|
||||
const ioLog: IOLog = JSON.parse(Harness.IO.readFile(jsonPath));
|
||||
currentDirectory = ioLog.currentDirectory;
|
||||
useCustomLibraryFile = ioLog.useCustomLibraryFile;
|
||||
runWithIOLog(ioLog, () => {
|
||||
|
@ -75,26 +75,26 @@ namespace RWC {
|
|||
|
||||
let fileNames = opts.fileNames;
|
||||
|
||||
let tsconfigFile = ts.forEach(ioLog.filesRead, f => isTsConfigFile(f) ? f : undefined);
|
||||
const tsconfigFile = ts.forEach(ioLog.filesRead, f => isTsConfigFile(f) ? f : undefined);
|
||||
if (tsconfigFile) {
|
||||
let tsconfigFileContents = getHarnessCompilerInputUnit(tsconfigFile.path);
|
||||
let parsedTsconfigFileContents = ts.parseConfigFileTextToJson(tsconfigFile.path, tsconfigFileContents.content);
|
||||
let configParseResult = ts.parseJsonConfigFileContent(parsedTsconfigFileContents.config, Harness.IO, ts.getDirectoryPath(tsconfigFile.path));
|
||||
const tsconfigFileContents = getHarnessCompilerInputUnit(tsconfigFile.path);
|
||||
const parsedTsconfigFileContents = ts.parseConfigFileTextToJson(tsconfigFile.path, tsconfigFileContents.content);
|
||||
const configParseResult = ts.parseJsonConfigFileContent(parsedTsconfigFileContents.config, Harness.IO, ts.getDirectoryPath(tsconfigFile.path));
|
||||
fileNames = configParseResult.fileNames;
|
||||
opts.options = ts.extend(opts.options, configParseResult.options);
|
||||
}
|
||||
|
||||
// Load the files
|
||||
for (let fileName of fileNames) {
|
||||
for (const fileName of fileNames) {
|
||||
inputFiles.push(getHarnessCompilerInputUnit(fileName));
|
||||
}
|
||||
|
||||
// Add files to compilation
|
||||
let isInInputList = (resolvedPath: string) => (inputFile: { unitName: string; content: string; }) => inputFile.unitName === resolvedPath;
|
||||
for (let fileRead of ioLog.filesRead) {
|
||||
const isInInputList = (resolvedPath: string) => (inputFile: { unitName: string; content: string; }) => inputFile.unitName === resolvedPath;
|
||||
for (const fileRead of ioLog.filesRead) {
|
||||
// Check if the file is already added into the set of input files.
|
||||
const resolvedPath = ts.normalizeSlashes(Harness.IO.resolvePath(fileRead.path));
|
||||
let inInputList = ts.forEach(inputFiles, isInInputList(resolvedPath));
|
||||
const inInputList = ts.forEach(inputFiles, isInInputList(resolvedPath));
|
||||
|
||||
if (isTsConfigFile(fileRead)) {
|
||||
continue;
|
||||
|
@ -139,7 +139,7 @@ namespace RWC {
|
|||
});
|
||||
|
||||
function getHarnessCompilerInputUnit(fileName: string) {
|
||||
let unitName = ts.normalizeSlashes(Harness.IO.resolvePath(fileName));
|
||||
const unitName = ts.normalizeSlashes(Harness.IO.resolvePath(fileName));
|
||||
let content: string = null;
|
||||
try {
|
||||
content = Harness.IO.readFile(unitName);
|
||||
|
@ -201,7 +201,7 @@ namespace RWC {
|
|||
it("has the expected errors in generated declaration files", () => {
|
||||
if (compilerOptions.declaration && !compilerResult.errors.length) {
|
||||
Harness.Baseline.runBaseline("has the expected errors in generated declaration files", baseName + ".dts.errors.txt", () => {
|
||||
let declFileCompilationResult = Harness.Compiler.getCompiler().compileDeclarationFiles(inputFiles, otherFiles, compilerResult,
|
||||
const declFileCompilationResult = Harness.Compiler.getCompiler().compileDeclarationFiles(inputFiles, otherFiles, compilerResult,
|
||||
/*settingscallback*/ undefined, compilerOptions, currentDirectory);
|
||||
if (declFileCompilationResult.declResult.errors.length === 0) {
|
||||
return null;
|
||||
|
@ -227,7 +227,7 @@ class RWCRunner extends RunnerBase {
|
|||
*/
|
||||
public initializeTests(): void {
|
||||
// Read in and evaluate the test list
|
||||
let testList = Harness.IO.listFiles(RWCRunner.sourcePath, /.+\.json$/);
|
||||
const testList = Harness.IO.listFiles(RWCRunner.sourcePath, /.+\.json$/);
|
||||
for (let i = 0; i < testList.length; i++) {
|
||||
this.runTest(testList[i]);
|
||||
}
|
||||
|
|
|
@ -92,7 +92,7 @@ namespace Harness.SourceMapRecoder {
|
|||
}
|
||||
|
||||
// 6 digit number
|
||||
let currentByte = base64FormatDecode();
|
||||
const currentByte = base64FormatDecode();
|
||||
|
||||
// If msb is set, we still have more bits to continue
|
||||
moreDigits = (currentByte & 32) !== 0;
|
||||
|
@ -259,7 +259,7 @@ namespace Harness.SourceMapRecoder {
|
|||
|
||||
export function recordSourceMapSpan(sourceMapSpan: ts.SourceMapSpan) {
|
||||
// verify the decoded span is same as the new span
|
||||
let decodeResult = SourceMapDecoder.decodeNextEncodedSourceMapSpan();
|
||||
const decodeResult = SourceMapDecoder.decodeNextEncodedSourceMapSpan();
|
||||
let decodedErrors: string[];
|
||||
if (decodeResult.error
|
||||
|| decodeResult.sourceMapSpan.emittedLine !== sourceMapSpan.emittedLine
|
||||
|
@ -317,8 +317,8 @@ namespace Harness.SourceMapRecoder {
|
|||
}
|
||||
|
||||
function getTextOfLine(line: number, lineMap: number[], code: string) {
|
||||
let startPos = lineMap[line];
|
||||
let endPos = lineMap[line + 1];
|
||||
const startPos = lineMap[line];
|
||||
const endPos = lineMap[line + 1];
|
||||
return code.substring(startPos, endPos);
|
||||
}
|
||||
|
||||
|
@ -329,7 +329,7 @@ namespace Harness.SourceMapRecoder {
|
|||
}
|
||||
|
||||
function writeRecordedSpans() {
|
||||
let markerIds: string[] = [];
|
||||
const markerIds: string[] = [];
|
||||
|
||||
function getMarkerId(markerIndex: number) {
|
||||
let markerId = "";
|
||||
|
@ -364,7 +364,7 @@ namespace Harness.SourceMapRecoder {
|
|||
}
|
||||
|
||||
function writeSourceMapMarker(currentSpan: SourceMapSpanWithDecodeErrors, index: number, endColumn = currentSpan.sourceMapSpan.emittedColumn, endContinues?: boolean) {
|
||||
let markerId = getMarkerId(index);
|
||||
const markerId = getMarkerId(index);
|
||||
markerIds.push(markerId);
|
||||
|
||||
writeSourceMapIndent(prevEmittedCol, markerId);
|
||||
|
@ -380,7 +380,7 @@ namespace Harness.SourceMapRecoder {
|
|||
}
|
||||
|
||||
function writeSourceMapSourceText(currentSpan: SourceMapSpanWithDecodeErrors, index: number) {
|
||||
let sourcePos = tsLineMap[currentSpan.sourceMapSpan.sourceLine - 1] + (currentSpan.sourceMapSpan.sourceColumn - 1);
|
||||
const sourcePos = tsLineMap[currentSpan.sourceMapSpan.sourceLine - 1] + (currentSpan.sourceMapSpan.sourceColumn - 1);
|
||||
let sourceText = "";
|
||||
if (prevWrittenSourcePos < sourcePos) {
|
||||
// Position that goes forward, get text
|
||||
|
@ -395,7 +395,7 @@ namespace Harness.SourceMapRecoder {
|
|||
}
|
||||
}
|
||||
|
||||
let tsCodeLineMap = ts.computeLineStarts(sourceText);
|
||||
const tsCodeLineMap = ts.computeLineStarts(sourceText);
|
||||
for (let i = 0; i < tsCodeLineMap.length; i++) {
|
||||
writeSourceMapIndent(prevEmittedCol, i === 0 ? markerIds[index] : " >");
|
||||
sourceMapRecoder.Write(getTextOfLine(i, tsCodeLineMap, sourceText));
|
||||
|
@ -412,7 +412,7 @@ namespace Harness.SourceMapRecoder {
|
|||
}
|
||||
|
||||
if (spansOnSingleLine.length) {
|
||||
let currentJsLine = spansOnSingleLine[0].sourceMapSpan.emittedLine;
|
||||
const currentJsLine = spansOnSingleLine[0].sourceMapSpan.emittedLine;
|
||||
|
||||
// Write js line
|
||||
writeJsFileLines(currentJsLine);
|
||||
|
@ -420,7 +420,7 @@ namespace Harness.SourceMapRecoder {
|
|||
// Emit markers
|
||||
iterateSpans(writeSourceMapMarker);
|
||||
|
||||
let jsFileText = getTextOfLine(currentJsLine, jsLineMap, jsFile.code);
|
||||
const jsFileText = getTextOfLine(currentJsLine, jsLineMap, jsFile.code);
|
||||
if (prevEmittedCol < jsFileText.length) {
|
||||
// There is remaining text on this line that will be part of next source span so write marker that continues
|
||||
writeSourceMapMarker(undefined, spansOnSingleLine.length, /*endColumn*/ jsFileText.length, /*endContinues*/ true);
|
||||
|
@ -438,16 +438,16 @@ namespace Harness.SourceMapRecoder {
|
|||
}
|
||||
|
||||
export function getSourceMapRecord(sourceMapDataList: ts.SourceMapData[], program: ts.Program, jsFiles: Compiler.GeneratedFile[]) {
|
||||
let sourceMapRecoder = new Compiler.WriterAggregator();
|
||||
const sourceMapRecoder = new Compiler.WriterAggregator();
|
||||
|
||||
for (let i = 0; i < sourceMapDataList.length; i++) {
|
||||
let sourceMapData = sourceMapDataList[i];
|
||||
const sourceMapData = sourceMapDataList[i];
|
||||
let prevSourceFile: ts.SourceFile;
|
||||
|
||||
SourceMapSpanWriter.intializeSourceMapSpanWriter(sourceMapRecoder, sourceMapData, jsFiles[i]);
|
||||
for (let j = 0; j < sourceMapData.sourceMapDecodedMappings.length; j++) {
|
||||
let decodedSourceMapping = sourceMapData.sourceMapDecodedMappings[j];
|
||||
let currentSourceFile = program.getSourceFile(sourceMapData.inputSourceFileNames[decodedSourceMapping.sourceIndex]);
|
||||
const decodedSourceMapping = sourceMapData.sourceMapDecodedMappings[j];
|
||||
const currentSourceFile = program.getSourceFile(sourceMapData.inputSourceFileNames[decodedSourceMapping.sourceIndex]);
|
||||
if (currentSourceFile !== prevSourceFile) {
|
||||
SourceMapSpanWriter.recordNewSourceFileSpan(decodedSourceMapping, currentSourceFile.text);
|
||||
prevSourceFile = currentSourceFile;
|
||||
|
|
|
@ -36,11 +36,11 @@ class Test262BaselineRunner extends RunnerBase {
|
|||
};
|
||||
|
||||
before(() => {
|
||||
let content = Harness.IO.readFile(filePath);
|
||||
let testFilename = ts.removeFileExtension(filePath).replace(/\//g, "_") + ".test";
|
||||
let testCaseContent = Harness.TestCaseParser.makeUnitsFromTest(content, testFilename);
|
||||
const content = Harness.IO.readFile(filePath);
|
||||
const testFilename = ts.removeFileExtension(filePath).replace(/\//g, "_") + ".test";
|
||||
const testCaseContent = Harness.TestCaseParser.makeUnitsFromTest(content, testFilename);
|
||||
|
||||
let inputFiles = testCaseContent.testUnitData.map(unit => {
|
||||
const inputFiles = testCaseContent.testUnitData.map(unit => {
|
||||
return { unitName: Test262BaselineRunner.getTestFilePath(unit.name), content: unit.content };
|
||||
});
|
||||
|
||||
|
@ -64,14 +64,14 @@ class Test262BaselineRunner extends RunnerBase {
|
|||
|
||||
it("has the expected emitted code", () => {
|
||||
Harness.Baseline.runBaseline("has the expected emitted code", testState.filename + ".output.js", () => {
|
||||
let files = testState.compilerResult.files.filter(f => f.fileName !== Test262BaselineRunner.helpersFilePath);
|
||||
const files = testState.compilerResult.files.filter(f => f.fileName !== Test262BaselineRunner.helpersFilePath);
|
||||
return Harness.Compiler.collateOutputs(files);
|
||||
}, false, Test262BaselineRunner.baselineOptions);
|
||||
});
|
||||
|
||||
it("has the expected errors", () => {
|
||||
Harness.Baseline.runBaseline("has the expected errors", testState.filename + ".errors.txt", () => {
|
||||
let errors = testState.compilerResult.errors;
|
||||
const errors = testState.compilerResult.errors;
|
||||
if (errors.length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
@ -81,13 +81,13 @@ class Test262BaselineRunner extends RunnerBase {
|
|||
});
|
||||
|
||||
it("satisfies inletiants", () => {
|
||||
let sourceFile = testState.program.getSourceFile(Test262BaselineRunner.getTestFilePath(testState.filename));
|
||||
const sourceFile = testState.program.getSourceFile(Test262BaselineRunner.getTestFilePath(testState.filename));
|
||||
Utils.assertInvariants(sourceFile, /*parent:*/ undefined);
|
||||
});
|
||||
|
||||
it("has the expected AST", () => {
|
||||
Harness.Baseline.runBaseline("has the expected AST", testState.filename + ".AST.txt", () => {
|
||||
let sourceFile = testState.program.getSourceFile(Test262BaselineRunner.getTestFilePath(testState.filename));
|
||||
const sourceFile = testState.program.getSourceFile(Test262BaselineRunner.getTestFilePath(testState.filename));
|
||||
return Utils.sourceFileToJSON(sourceFile);
|
||||
}, false, Test262BaselineRunner.baselineOptions);
|
||||
});
|
||||
|
@ -97,7 +97,7 @@ class Test262BaselineRunner extends RunnerBase {
|
|||
public initializeTests() {
|
||||
// this will set up a series of describe/it blocks to run between the setup and cleanup phases
|
||||
if (this.tests.length === 0) {
|
||||
let testFiles = this.enumerateFiles(Test262BaselineRunner.basePath, Test262BaselineRunner.testFileExtensionRegex, { recursive: true });
|
||||
const testFiles = this.enumerateFiles(Test262BaselineRunner.basePath, Test262BaselineRunner.testFileExtensionRegex, { recursive: true });
|
||||
testFiles.forEach(fn => {
|
||||
this.runTest(ts.normalizePath(fn));
|
||||
});
|
||||
|
|
|
@ -21,7 +21,7 @@ class TypeWriterWalker {
|
|||
}
|
||||
|
||||
public getTypeAndSymbols(fileName: string): TypeWriterResult[] {
|
||||
let sourceFile = this.program.getSourceFile(fileName);
|
||||
const sourceFile = this.program.getSourceFile(fileName);
|
||||
this.currentSourceFile = sourceFile;
|
||||
this.results = [];
|
||||
this.visitNode(sourceFile);
|
||||
|
@ -37,28 +37,28 @@ class TypeWriterWalker {
|
|||
}
|
||||
|
||||
private logTypeAndSymbol(node: ts.Node): void {
|
||||
let actualPos = ts.skipTrivia(this.currentSourceFile.text, node.pos);
|
||||
let lineAndCharacter = this.currentSourceFile.getLineAndCharacterOfPosition(actualPos);
|
||||
let sourceText = ts.getTextOfNodeFromSourceText(this.currentSourceFile.text, node);
|
||||
const actualPos = ts.skipTrivia(this.currentSourceFile.text, node.pos);
|
||||
const lineAndCharacter = this.currentSourceFile.getLineAndCharacterOfPosition(actualPos);
|
||||
const sourceText = ts.getTextOfNodeFromSourceText(this.currentSourceFile.text, node);
|
||||
|
||||
// Workaround to ensure we output 'C' instead of 'typeof C' for base class expressions
|
||||
// let type = this.checker.getTypeAtLocation(node);
|
||||
let type = node.parent && ts.isExpressionWithTypeArgumentsInClassExtendsClause(node.parent) && this.checker.getTypeAtLocation(node.parent) || this.checker.getTypeAtLocation(node);
|
||||
const type = node.parent && ts.isExpressionWithTypeArgumentsInClassExtendsClause(node.parent) && this.checker.getTypeAtLocation(node.parent) || this.checker.getTypeAtLocation(node);
|
||||
|
||||
ts.Debug.assert(type !== undefined, "type doesn't exist");
|
||||
let symbol = this.checker.getSymbolAtLocation(node);
|
||||
const symbol = this.checker.getSymbolAtLocation(node);
|
||||
|
||||
let typeString = this.checker.typeToString(type, node.parent, ts.TypeFormatFlags.NoTruncation);
|
||||
const typeString = this.checker.typeToString(type, node.parent, ts.TypeFormatFlags.NoTruncation);
|
||||
let symbolString: string;
|
||||
if (symbol) {
|
||||
symbolString = "Symbol(" + this.checker.symbolToString(symbol, node.parent);
|
||||
if (symbol.declarations) {
|
||||
for (let declaration of symbol.declarations) {
|
||||
for (const declaration of symbol.declarations) {
|
||||
symbolString += ", ";
|
||||
let declSourceFile = declaration.getSourceFile();
|
||||
let declLineAndCharacter = declSourceFile.getLineAndCharacterOfPosition(declaration.pos);
|
||||
let fileName = ts.getBaseFileName(declSourceFile.fileName);
|
||||
let isLibFile = /lib(.*)\.d\.ts/i.test(fileName);
|
||||
const declSourceFile = declaration.getSourceFile();
|
||||
const declLineAndCharacter = declSourceFile.getLineAndCharacterOfPosition(declaration.pos);
|
||||
const fileName = ts.getBaseFileName(declSourceFile.fileName);
|
||||
const isLibFile = /lib(.*)\.d\.ts/i.test(fileName);
|
||||
symbolString += `Decl(${ fileName }, ${ isLibFile ? "--" : declLineAndCharacter.line }, ${ isLibFile ? "--" : declLineAndCharacter.character })`;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -105,19 +105,19 @@ namespace ts.server {
|
|||
}
|
||||
|
||||
resolveModuleNames(moduleNames: string[], containingFile: string): ResolvedModule[] {
|
||||
let path = toPath(containingFile, this.host.getCurrentDirectory(), this.getCanonicalFileName);
|
||||
let currentResolutionsInFile = this.resolvedModuleNames.get(path);
|
||||
const path = toPath(containingFile, this.host.getCurrentDirectory(), this.getCanonicalFileName);
|
||||
const currentResolutionsInFile = this.resolvedModuleNames.get(path);
|
||||
|
||||
let newResolutions: Map<TimestampedResolvedModule> = {};
|
||||
let resolvedModules: ResolvedModule[] = [];
|
||||
const newResolutions: Map<TimestampedResolvedModule> = {};
|
||||
const resolvedModules: ResolvedModule[] = [];
|
||||
|
||||
let compilerOptions = this.getCompilationSettings();
|
||||
const compilerOptions = this.getCompilationSettings();
|
||||
|
||||
for (let moduleName of moduleNames) {
|
||||
for (const moduleName of moduleNames) {
|
||||
// check if this is a duplicate entry in the list
|
||||
let resolution = lookUp(newResolutions, moduleName);
|
||||
if (!resolution) {
|
||||
let existingResolution = currentResolutionsInFile && ts.lookUp(currentResolutionsInFile, moduleName);
|
||||
const existingResolution = currentResolutionsInFile && ts.lookUp(currentResolutionsInFile, moduleName);
|
||||
if (moduleResolutionIsValid(existingResolution)) {
|
||||
// ok, it is safe to use existing module resolution results
|
||||
resolution = existingResolution;
|
||||
|
@ -211,7 +211,7 @@ namespace ts.server {
|
|||
}
|
||||
|
||||
getScriptInfo(filename: string): ScriptInfo {
|
||||
let path = toPath(filename, this.host.getCurrentDirectory(), this.getCanonicalFileName);
|
||||
const path = toPath(filename, this.host.getCurrentDirectory(), this.getCanonicalFileName);
|
||||
let scriptInfo = this.filenameToScript.get(path);
|
||||
if (!scriptInfo) {
|
||||
scriptInfo = this.project.openReferencedFile(filename);
|
||||
|
@ -282,7 +282,7 @@ namespace ts.server {
|
|||
* @param line 1 based index
|
||||
*/
|
||||
lineToTextSpan(filename: string, line: number): ts.TextSpan {
|
||||
let path = toPath(filename, this.host.getCurrentDirectory(), this.getCanonicalFileName);
|
||||
const path = toPath(filename, this.host.getCurrentDirectory(), this.getCanonicalFileName);
|
||||
const script: ScriptInfo = this.filenameToScript.get(path);
|
||||
const index = script.snap().index;
|
||||
|
||||
|
@ -303,7 +303,7 @@ namespace ts.server {
|
|||
* @param offset 1 based index
|
||||
*/
|
||||
lineOffsetToPosition(filename: string, line: number, offset: number): number {
|
||||
let path = toPath(filename, this.host.getCurrentDirectory(), this.getCanonicalFileName);
|
||||
const path = toPath(filename, this.host.getCurrentDirectory(), this.getCanonicalFileName);
|
||||
const script: ScriptInfo = this.filenameToScript.get(path);
|
||||
const index = script.snap().index;
|
||||
|
||||
|
@ -317,7 +317,7 @@ namespace ts.server {
|
|||
* @param offset 1-based index
|
||||
*/
|
||||
positionToLineOffset(filename: string, position: number): ILineInfo {
|
||||
let path = toPath(filename, this.host.getCurrentDirectory(), this.getCanonicalFileName);
|
||||
const path = toPath(filename, this.host.getCurrentDirectory(), this.getCanonicalFileName);
|
||||
const script: ScriptInfo = this.filenameToScript.get(path);
|
||||
const index = script.snap().index;
|
||||
const lineOffset = index.charOffsetToLineNumberAndPos(position);
|
||||
|
@ -578,9 +578,9 @@ namespace ts.server {
|
|||
}
|
||||
|
||||
handleProjectFilelistChanges(project: Project) {
|
||||
let { succeeded, projectOptions, error } = this.configFileToProjectOptions(project.projectFilename);
|
||||
let newRootFiles = projectOptions.files.map((f => this.getCanonicalFileName(f)));
|
||||
let currentRootFiles = project.getRootFiles().map((f => this.getCanonicalFileName(f)));
|
||||
const { succeeded, projectOptions, error } = this.configFileToProjectOptions(project.projectFilename);
|
||||
const newRootFiles = projectOptions.files.map((f => this.getCanonicalFileName(f)));
|
||||
const currentRootFiles = project.getRootFiles().map((f => this.getCanonicalFileName(f)));
|
||||
|
||||
// We check if the project file list has changed. If so, we update the project.
|
||||
if (!arrayIsEqualTo(currentRootFiles && currentRootFiles.sort(), newRootFiles && newRootFiles.sort())) {
|
||||
|
@ -606,13 +606,13 @@ namespace ts.server {
|
|||
|
||||
this.log("Detected newly added tsconfig file: " + fileName);
|
||||
|
||||
let { succeeded, projectOptions, error } = this.configFileToProjectOptions(fileName);
|
||||
let rootFilesInTsconfig = projectOptions.files.map(f => this.getCanonicalFileName(f));
|
||||
let openFileRoots = this.openFileRoots.map(s => this.getCanonicalFileName(s.fileName));
|
||||
const { succeeded, projectOptions, error } = this.configFileToProjectOptions(fileName);
|
||||
const rootFilesInTsconfig = projectOptions.files.map(f => this.getCanonicalFileName(f));
|
||||
const openFileRoots = this.openFileRoots.map(s => this.getCanonicalFileName(s.fileName));
|
||||
|
||||
// We should only care about the new tsconfig file if it contains any
|
||||
// opened root files of existing inferred projects
|
||||
for (let openFileRoot of openFileRoots) {
|
||||
for (const openFileRoot of openFileRoots) {
|
||||
if (rootFilesInTsconfig.indexOf(openFileRoot) >= 0) {
|
||||
this.reloadProjects();
|
||||
return;
|
||||
|
@ -621,7 +621,7 @@ namespace ts.server {
|
|||
}
|
||||
|
||||
getCanonicalFileName(fileName: string) {
|
||||
let name = this.host.useCaseSensitiveFileNames ? fileName : fileName.toLowerCase();
|
||||
const name = this.host.useCaseSensitiveFileNames ? fileName : fileName.toLowerCase();
|
||||
return ts.normalizePath(name);
|
||||
}
|
||||
|
||||
|
@ -737,7 +737,7 @@ namespace ts.server {
|
|||
this.configuredProjects = copyListRemovingItem(project, this.configuredProjects);
|
||||
}
|
||||
else {
|
||||
for (let directory of project.directoriesWatchedForTsconfig) {
|
||||
for (const directory of project.directoriesWatchedForTsconfig) {
|
||||
// if the ref count for this directory watcher drops to 0, it's time to close it
|
||||
if (!(--project.projectService.directoryWatchersRefCount[directory])) {
|
||||
this.log("Close directory watcher for: " + directory);
|
||||
|
@ -748,9 +748,9 @@ namespace ts.server {
|
|||
this.inferredProjects = copyListRemovingItem(project, this.inferredProjects);
|
||||
}
|
||||
|
||||
let fileNames = project.getFileNames();
|
||||
for (let fileName of fileNames) {
|
||||
let info = this.getScriptInfo(fileName);
|
||||
const fileNames = project.getFileNames();
|
||||
for (const fileName of fileNames) {
|
||||
const info = this.getScriptInfo(fileName);
|
||||
if (info.defaultProject == project) {
|
||||
info.defaultProject = undefined;
|
||||
}
|
||||
|
@ -759,7 +759,7 @@ namespace ts.server {
|
|||
|
||||
setConfiguredProjectRoot(info: ScriptInfo) {
|
||||
for (let i = 0, len = this.configuredProjects.length; i < len; i++) {
|
||||
let configuredProject = this.configuredProjects[i];
|
||||
const configuredProject = this.configuredProjects[i];
|
||||
if (configuredProject.isRoot(info)) {
|
||||
info.defaultProject = configuredProject;
|
||||
configuredProject.addOpenRef();
|
||||
|
@ -903,7 +903,7 @@ namespace ts.server {
|
|||
reloadProjects() {
|
||||
this.log("reload projects.");
|
||||
// First check if there is new tsconfig file added for inferred project roots
|
||||
for (let info of this.openFileRoots) {
|
||||
for (const info of this.openFileRoots) {
|
||||
this.openOrUpdateConfiguredProjectForFile(info.fileName);
|
||||
}
|
||||
this.updateProjectStructure();
|
||||
|
@ -918,10 +918,10 @@ namespace ts.server {
|
|||
this.log("updating project structure from ...", "Info");
|
||||
this.printProjects();
|
||||
|
||||
let unattachedOpenFiles: ScriptInfo[] = [];
|
||||
let openFileRootsConfigured: ScriptInfo[] = [];
|
||||
for (let info of this.openFileRootsConfigured) {
|
||||
let project = info.defaultProject;
|
||||
const unattachedOpenFiles: ScriptInfo[] = [];
|
||||
const openFileRootsConfigured: ScriptInfo[] = [];
|
||||
for (const info of this.openFileRootsConfigured) {
|
||||
const project = info.defaultProject;
|
||||
if (!project || !(project.getSourceFile(info))) {
|
||||
info.defaultProject = undefined;
|
||||
unattachedOpenFiles.push(info);
|
||||
|
@ -1016,7 +1016,6 @@ namespace ts.server {
|
|||
}
|
||||
}
|
||||
if (content !== undefined) {
|
||||
let indentSize: number;
|
||||
info = new ScriptInfo(this.host, fileName, content, openedByClient);
|
||||
info.setFormatOptions(this.getFormatCodeOptions());
|
||||
this.filenameToScriptInfo[fileName] = info;
|
||||
|
@ -1071,12 +1070,12 @@ namespace ts.server {
|
|||
* the tsconfig file content and update the project; otherwise we create a new one.
|
||||
*/
|
||||
openOrUpdateConfiguredProjectForFile(fileName: string) {
|
||||
let searchPath = ts.normalizePath(getDirectoryPath(fileName));
|
||||
const searchPath = ts.normalizePath(getDirectoryPath(fileName));
|
||||
this.log("Search path: " + searchPath, "Info");
|
||||
let configFileName = this.findConfigFile(searchPath);
|
||||
const configFileName = this.findConfigFile(searchPath);
|
||||
if (configFileName) {
|
||||
this.log("Config file name: " + configFileName, "Info");
|
||||
let project = this.findConfiguredProjectByConfigFile(configFileName);
|
||||
const project = this.findConfiguredProjectByConfigFile(configFileName);
|
||||
if (!project) {
|
||||
const configResult = this.openConfigFile(configFileName, fileName);
|
||||
if (!configResult.success) {
|
||||
|
@ -1215,15 +1214,15 @@ namespace ts.server {
|
|||
}
|
||||
|
||||
openConfigFile(configFilename: string, clientFileName?: string): ProjectOpenResult {
|
||||
let { succeeded, projectOptions, error } = this.configFileToProjectOptions(configFilename);
|
||||
const { succeeded, projectOptions, error } = this.configFileToProjectOptions(configFilename);
|
||||
if (!succeeded) {
|
||||
return error;
|
||||
}
|
||||
else {
|
||||
let project = this.createProject(configFilename, projectOptions);
|
||||
for (let rootFilename of projectOptions.files) {
|
||||
const project = this.createProject(configFilename, projectOptions);
|
||||
for (const rootFilename of projectOptions.files) {
|
||||
if (this.host.fileExists(rootFilename)) {
|
||||
let info = this.openFile(rootFilename, /*openedByClient*/ clientFileName == rootFilename);
|
||||
const info = this.openFile(rootFilename, /*openedByClient*/ clientFileName == rootFilename);
|
||||
project.addRoot(info);
|
||||
}
|
||||
else {
|
||||
|
@ -1248,24 +1247,24 @@ namespace ts.server {
|
|||
this.removeProject(project);
|
||||
}
|
||||
else {
|
||||
let { succeeded, projectOptions, error } = this.configFileToProjectOptions(project.projectFilename);
|
||||
const { succeeded, projectOptions, error } = this.configFileToProjectOptions(project.projectFilename);
|
||||
if (!succeeded) {
|
||||
return error;
|
||||
}
|
||||
else {
|
||||
let oldFileNames = project.compilerService.host.roots.map(info => info.fileName);
|
||||
let newFileNames = projectOptions.files;
|
||||
let fileNamesToRemove = oldFileNames.filter(f => newFileNames.indexOf(f) < 0);
|
||||
let fileNamesToAdd = newFileNames.filter(f => oldFileNames.indexOf(f) < 0);
|
||||
const oldFileNames = project.compilerService.host.roots.map(info => info.fileName);
|
||||
const newFileNames = projectOptions.files;
|
||||
const fileNamesToRemove = oldFileNames.filter(f => newFileNames.indexOf(f) < 0);
|
||||
const fileNamesToAdd = newFileNames.filter(f => oldFileNames.indexOf(f) < 0);
|
||||
|
||||
for (let fileName of fileNamesToRemove) {
|
||||
let info = this.getScriptInfo(fileName);
|
||||
for (const fileName of fileNamesToRemove) {
|
||||
const info = this.getScriptInfo(fileName);
|
||||
if (info) {
|
||||
project.removeRoot(info);
|
||||
}
|
||||
}
|
||||
|
||||
for (let fileName of fileNamesToAdd) {
|
||||
for (const fileName of fileNamesToAdd) {
|
||||
let info = this.getScriptInfo(fileName);
|
||||
if (!info) {
|
||||
info = this.openFile(fileName, false);
|
||||
|
|
|
@ -157,7 +157,7 @@ namespace ts.server {
|
|||
|
||||
const logger = createLoggerFromEnv();
|
||||
|
||||
let pending: string[] = [];
|
||||
const pending: string[] = [];
|
||||
let canWrite = true;
|
||||
function writeMessage(s: string) {
|
||||
if (!canWrite) {
|
||||
|
|
|
@ -338,25 +338,25 @@ namespace ts.server {
|
|||
|
||||
private getOccurrences(line: number, offset: number, fileName: string): protocol.OccurrencesResponseItem[] {
|
||||
fileName = ts.normalizePath(fileName);
|
||||
let project = this.projectService.getProjectForFile(fileName);
|
||||
const project = this.projectService.getProjectForFile(fileName);
|
||||
|
||||
if (!project) {
|
||||
throw Errors.NoProject;
|
||||
}
|
||||
|
||||
let { compilerService } = project;
|
||||
let position = compilerService.host.lineOffsetToPosition(fileName, line, offset);
|
||||
const { compilerService } = project;
|
||||
const position = compilerService.host.lineOffsetToPosition(fileName, line, offset);
|
||||
|
||||
let occurrences = compilerService.languageService.getOccurrencesAtPosition(fileName, position);
|
||||
const occurrences = compilerService.languageService.getOccurrencesAtPosition(fileName, position);
|
||||
|
||||
if (!occurrences) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return occurrences.map(occurrence => {
|
||||
let { fileName, isWriteAccess, textSpan } = occurrence;
|
||||
let start = compilerService.host.positionToLineOffset(fileName, textSpan.start);
|
||||
let end = compilerService.host.positionToLineOffset(fileName, ts.textSpanEnd(textSpan));
|
||||
const { fileName, isWriteAccess, textSpan } = occurrence;
|
||||
const start = compilerService.host.positionToLineOffset(fileName, textSpan.start);
|
||||
const end = compilerService.host.positionToLineOffset(fileName, ts.textSpanEnd(textSpan));
|
||||
return {
|
||||
start,
|
||||
end,
|
||||
|
@ -368,16 +368,16 @@ namespace ts.server {
|
|||
|
||||
private getDocumentHighlights(line: number, offset: number, fileName: string, filesToSearch: string[]): protocol.DocumentHighlightsItem[] {
|
||||
fileName = ts.normalizePath(fileName);
|
||||
let project = this.projectService.getProjectForFile(fileName);
|
||||
const project = this.projectService.getProjectForFile(fileName);
|
||||
|
||||
if (!project) {
|
||||
throw Errors.NoProject;
|
||||
}
|
||||
|
||||
let { compilerService } = project;
|
||||
let position = compilerService.host.lineOffsetToPosition(fileName, line, offset);
|
||||
const { compilerService } = project;
|
||||
const position = compilerService.host.lineOffsetToPosition(fileName, line, offset);
|
||||
|
||||
let documentHighlights = compilerService.languageService.getDocumentHighlights(fileName, position, filesToSearch);
|
||||
const documentHighlights = compilerService.languageService.getDocumentHighlights(fileName, position, filesToSearch);
|
||||
|
||||
if (!documentHighlights) {
|
||||
return undefined;
|
||||
|
@ -386,7 +386,7 @@ namespace ts.server {
|
|||
return documentHighlights.map(convertToDocumentHighlightsItem);
|
||||
|
||||
function convertToDocumentHighlightsItem(documentHighlights: ts.DocumentHighlights): ts.server.protocol.DocumentHighlightsItem {
|
||||
let { fileName, highlightSpans } = documentHighlights;
|
||||
const { fileName, highlightSpans } = documentHighlights;
|
||||
|
||||
return {
|
||||
file: fileName,
|
||||
|
@ -394,9 +394,9 @@ namespace ts.server {
|
|||
};
|
||||
|
||||
function convertHighlightSpan(highlightSpan: ts.HighlightSpan): ts.server.protocol.HighlightSpan {
|
||||
let { textSpan, kind } = highlightSpan;
|
||||
let start = compilerService.host.positionToLineOffset(fileName, textSpan.start);
|
||||
let end = compilerService.host.positionToLineOffset(fileName, ts.textSpanEnd(textSpan));
|
||||
const { textSpan, kind } = highlightSpan;
|
||||
const start = compilerService.host.positionToLineOffset(fileName, textSpan.start);
|
||||
const end = compilerService.host.positionToLineOffset(fileName, ts.textSpanEnd(textSpan));
|
||||
return { start, end, kind };
|
||||
}
|
||||
}
|
||||
|
@ -404,9 +404,9 @@ namespace ts.server {
|
|||
|
||||
private getProjectInfo(fileName: string, needFileNameList: boolean): protocol.ProjectInfo {
|
||||
fileName = ts.normalizePath(fileName);
|
||||
let project = this.projectService.getProjectForFile(fileName);
|
||||
const project = this.projectService.getProjectForFile(fileName);
|
||||
|
||||
let projectInfo: protocol.ProjectInfo = {
|
||||
const projectInfo: protocol.ProjectInfo = {
|
||||
configFileName: project.projectFilename
|
||||
};
|
||||
|
||||
|
@ -640,7 +640,7 @@ namespace ts.server {
|
|||
}
|
||||
// i points to the first non whitespace character
|
||||
if (preferredIndent !== hasIndent) {
|
||||
let firstNoWhiteSpacePosition = lineInfo.offset + i;
|
||||
const firstNoWhiteSpacePosition = lineInfo.offset + i;
|
||||
edits.push({
|
||||
span: ts.createTextSpanFromBounds(lineInfo.offset, firstNoWhiteSpacePosition),
|
||||
newText: generateIndentString(preferredIndent, editorOptions)
|
||||
|
@ -897,22 +897,22 @@ namespace ts.server {
|
|||
}
|
||||
|
||||
getDiagnosticsForProject(delay: number, fileName: string) {
|
||||
let { configFileName, fileNames: fileNamesInProject } = this.getProjectInfo(fileName, true);
|
||||
const { configFileName, fileNames } = this.getProjectInfo(fileName, true);
|
||||
// No need to analyze lib.d.ts
|
||||
fileNamesInProject = fileNamesInProject.filter((value, index, array) => value.indexOf("lib.d.ts") < 0);
|
||||
let fileNamesInProject = fileNames.filter((value, index, array) => value.indexOf("lib.d.ts") < 0);
|
||||
|
||||
// Sort the file name list to make the recently touched files come first
|
||||
let highPriorityFiles: string[] = [];
|
||||
let mediumPriorityFiles: string[] = [];
|
||||
let lowPriorityFiles: string[] = [];
|
||||
let veryLowPriorityFiles: string[] = [];
|
||||
let normalizedFileName = ts.normalizePath(fileName);
|
||||
let project = this.projectService.getProjectForFile(normalizedFileName);
|
||||
for (let fileNameInProject of fileNamesInProject) {
|
||||
const highPriorityFiles: string[] = [];
|
||||
const mediumPriorityFiles: string[] = [];
|
||||
const lowPriorityFiles: string[] = [];
|
||||
const veryLowPriorityFiles: string[] = [];
|
||||
const normalizedFileName = ts.normalizePath(fileName);
|
||||
const project = this.projectService.getProjectForFile(normalizedFileName);
|
||||
for (const fileNameInProject of fileNamesInProject) {
|
||||
if (this.getCanonicalFileName(fileNameInProject) == this.getCanonicalFileName(fileName))
|
||||
highPriorityFiles.push(fileNameInProject);
|
||||
else {
|
||||
let info = this.projectService.getScriptInfo(fileNameInProject);
|
||||
const info = this.projectService.getScriptInfo(fileNameInProject);
|
||||
if (!info.isOpen) {
|
||||
if (fileNameInProject.indexOf(".d.ts") > 0)
|
||||
veryLowPriorityFiles.push(fileNameInProject);
|
||||
|
@ -927,8 +927,8 @@ namespace ts.server {
|
|||
fileNamesInProject = highPriorityFiles.concat(mediumPriorityFiles).concat(lowPriorityFiles).concat(veryLowPriorityFiles);
|
||||
|
||||
if (fileNamesInProject.length > 0) {
|
||||
let checkList = fileNamesInProject.map<PendingErrorCheck>((fileName: string) => {
|
||||
let normalizedFileName = ts.normalizePath(fileName);
|
||||
const checkList = fileNamesInProject.map<PendingErrorCheck>((fileName: string) => {
|
||||
const normalizedFileName = ts.normalizePath(fileName);
|
||||
return { fileName: normalizedFileName, project };
|
||||
});
|
||||
// Project level error analysis runs on background files too, therefore
|
||||
|
@ -938,7 +938,7 @@ namespace ts.server {
|
|||
}
|
||||
|
||||
getCanonicalFileName(fileName: string) {
|
||||
let name = this.host.useCaseSensitiveFileNames ? fileName : fileName.toLowerCase();
|
||||
const name = this.host.useCaseSensitiveFileNames ? fileName : fileName.toLowerCase();
|
||||
return ts.normalizePath(name);
|
||||
}
|
||||
|
||||
|
@ -1001,7 +1001,7 @@ namespace ts.server {
|
|||
return {response: this.getDiagnostics(geterrArgs.delay, geterrArgs.files), responseRequired: false};
|
||||
},
|
||||
[CommandNames.GeterrForProject]: (request: protocol.Request) => {
|
||||
let { file, delay } = <protocol.GeterrForProjectRequestArgs>request.arguments;
|
||||
const { file, delay } = <protocol.GeterrForProjectRequestArgs>request.arguments;
|
||||
return {response: this.getDiagnosticsForProject(delay, file), responseRequired: false};
|
||||
},
|
||||
[CommandNames.Change]: (request: protocol.Request) => {
|
||||
|
|
|
@ -39,6 +39,7 @@
|
|||
"no-inferrable-types": true,
|
||||
"no-null": true,
|
||||
"boolean-trivia": true,
|
||||
"type-operator-spacing": true
|
||||
"type-operator-spacing": true,
|
||||
"prefer-const": true
|
||||
}
|
||||
}
|
||||
|
|
Loading…
Reference in a new issue