Merge pull request #1467 from Microsoft/postEditInvariants

Resurrect the post edit invariants checking for fourslash.
CyrusNajmabadi 2014-12-12 13:52:19 -08:00
commit dacbd982cf
3 changed files with 151 additions and 149 deletions


@@ -1415,44 +1415,50 @@ module FourSlash {
}
private checkPostEditInvariants() {
return;
if (this.editValidation === IncrementalEditValidation.None) {
return;
}
/// TODO: reimplement this section
//if (this.editValidation === IncrementalEditValidation.None) {
// return;
//}
var incrementalSourceFile = this.languageService.getSourceFile(this.activeFile.fileName);
var incrementalSyntaxDiagnostics = JSON.stringify(Utils.convertDiagnostics(incrementalSourceFile.getSyntacticDiagnostics()));
//// Get syntactic errors (to force a refresh)
//var incrSyntaxErrs = JSON.stringify(this.languageService.getSyntacticDiagnostics(this.activeFile.fileName));
// Check syntactic structure
var snapshot = this.languageServiceShimHost.getScriptSnapshot(this.activeFile.fileName);
var content = snapshot.getText(0, snapshot.getLength());
//// Check syntactic structure
//var snapshot = this.languageServiceShimHost.getScriptSnapshot(this.activeFile.fileName);
//var content = snapshot.getText(0, snapshot.getLength());
//var refSyntaxTree = TypeScript.Parser.parse(this.activeFile.fileName, TypeScript.SimpleText.fromString(content), ts.ScriptTarget.ES5, TypeScript.isDTSFile(this.activeFile.fileName));
//var fullSyntaxErrs = JSON.stringify(refSyntaxTree.diagnostics());
var referenceSourceFile = ts.createLanguageServiceSourceFile(
this.activeFile.fileName, createScriptSnapShot(content), ts.ScriptTarget.Latest, /*version:*/ "0", /*isOpen:*/ false, /*setNodeParents:*/ false);
var referenceSyntaxDiagnostics = JSON.stringify(Utils.convertDiagnostics(referenceSourceFile.getSyntacticDiagnostics()));
//if (incrSyntaxErrs !== fullSyntaxErrs) {
// this.raiseError('Mismatched incremental/full syntactic errors for file ' + this.activeFile.fileName + '.\n=== Incremental errors ===\n' + incrSyntaxErrs + '\n=== Full Errors ===\n' + fullSyntaxErrs);
//}
if (incrementalSyntaxDiagnostics !== referenceSyntaxDiagnostics) {
this.raiseError('Mismatched incremental/reference syntactic diagnostics for file ' + this.activeFile.fileName + '.\n=== Incremental diagnostics ===\n' + incrementalSyntaxDiagnostics + '\n=== Reference Diagnostics ===\n' + referenceSyntaxDiagnostics);
}
// if (this.editValidation !== IncrementalEditValidation.SyntacticOnly) {
// var compiler = new TypeScript.TypeScriptCompiler();
// for (var i = 0; i < this.testData.files.length; i++) {
// snapshot = this.languageServiceShimHost.getScriptSnapshot(this.testData.files[i].fileName);
// compiler.addFile(this.testData.files[i].fileName, TypeScript.ScriptSnapshot.fromString(snapshot.getText(0, snapshot.getLength())), ts.ByteOrderMark.None, 0, true);
// }
var incrementalSourceFileJSON = Utils.sourceFileToJSON(incrementalSourceFile);
var referenceSourceFileJSON = Utils.sourceFileToJSON(referenceSourceFile);
// compiler.addFile('lib.d.ts', TypeScript.ScriptSnapshot.fromString(Harness.Compiler.libTextMinimal), ts.ByteOrderMark.None, 0, true);
if (incrementalSourceFileJSON !== referenceSourceFileJSON) {
this.raiseError('Mismatched incremental/reference ast for file ' + this.activeFile.fileName + '.\n=== Incremental AST ===\n' + incrementalSourceFileJSON + '\n=== Reference AST ===\n' + referenceSourceFileJSON);
}
// for (var i = 0; i < this.testData.files.length; i++) {
// var refSemanticErrs = JSON.stringify(compiler.getSemanticDiagnostics(this.testData.files[i].fileName));
// var incrSemanticErrs = JSON.stringify(this.languageService.getSemanticDiagnostics(this.testData.files[i].fileName));
//if (this.editValidation !== IncrementalEditValidation.SyntacticOnly) {
// var compiler = new TypeScript.TypeScriptCompiler();
// for (var i = 0; i < this.testData.files.length; i++) {
// snapshot = this.languageServiceShimHost.getScriptSnapshot(this.testData.files[i].fileName);
// compiler.addFile(this.testData.files[i].fileName, TypeScript.ScriptSnapshot.fromString(snapshot.getText(0, snapshot.getLength())), ts.ByteOrderMark.None, 0, true);
// }
// if (incrSemanticErrs !== refSemanticErrs) {
// this.raiseError('Mismatched incremental/full semantic errors for file ' + this.testData.files[i].fileName + '\n=== Incremental errors ===\n' + incrSemanticErrs + '\n=== Full Errors ===\n' + refSemanticErrs);
// }
// }
// }
// compiler.addFile('lib.d.ts', TypeScript.ScriptSnapshot.fromString(Harness.Compiler.libTextMinimal), ts.ByteOrderMark.None, 0, true);
// for (var i = 0; i < this.testData.files.length; i++) {
// var refSemanticErrs = JSON.stringify(compiler.getSemanticDiagnostics(this.testData.files[i].fileName));
// var incrSemanticErrs = JSON.stringify(this.languageService.getSemanticDiagnostics(this.testData.files[i].fileName));
// if (incrSemanticErrs !== refSemanticErrs) {
// this.raiseError('Mismatched incremental/full semantic errors for file ' + this.testData.files[i].fileName + '\n=== Incremental errors ===\n' + incrSemanticErrs + '\n=== Full Errors ===\n' + refSemanticErrs);
// }
// }
//}
}
private fixCaretPosition() {
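
For readers outside the harness, here is a minimal standalone sketch of the invariant this hunk re-enables, written against the present-day public compiler API rather than the 2014 language-service internals used above; ts.updateSourceFile, ts.createTextChangeRange, the file name, and the edit positions are all assumptions for illustration.

import * as ts from "typescript";

// Compare an incrementally re-parsed tree against a fresh full parse of the
// same edited text; any difference is an incremental-parser bug.
function checkIncrementalParseInvariant(oldText: string, newText: string, changeStart: number, changeEnd: number): void {
    // Parse the original text once.
    const oldFile = ts.createSourceFile("sample.ts", oldText, ts.ScriptTarget.Latest);

    // Incremental re-parse: describe the edit as a text change range so the
    // parser can reuse unchanged nodes from the old tree.
    const span = ts.createTextSpan(changeStart, changeEnd - changeStart);
    const newLength = (changeEnd - changeStart) + (newText.length - oldText.length);
    const incremental = ts.updateSourceFile(oldFile, newText, ts.createTextChangeRange(span, newLength));

    // Reference: a full parse of the new text from scratch.
    const reference = ts.createSourceFile("sample.ts", newText, ts.ScriptTarget.Latest);

    // The invariant: both trees must have identical shape.
    if (JSON.stringify(flatten(incremental)) !== JSON.stringify(flatten(reference))) {
        throw new Error("Mismatched incremental/reference AST for sample.ts");
    }
}

// Flatten a tree into (kind, pos, end) triples for a cheap structural compare.
function flatten(root: ts.Node): { kind: string; pos: number; end: number }[] {
    const nodes: { kind: string; pos: number; end: number }[] = [];
    const visit = (n: ts.Node) => {
        nodes.push({ kind: ts.SyntaxKind[n.kind], pos: n.pos, end: n.end });
        ts.forEachChild(n, visit);
    };
    visit(root);
    return nodes;
}

// Example: replace "1" with "1 + 2" in a one-line program.
checkIncrementalParseInvariant("var x = 1;", "var x = 1 + 2;", 8, 9);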


@@ -175,6 +175,120 @@ module Utils {
function isNodeOrArray(a: any): boolean {
return a !== undefined && typeof a.pos === "number";
}
export function convertDiagnostics(diagnostics: ts.Diagnostic[]) {
return diagnostics.map(convertDiagnostic);
}
function convertDiagnostic(diagnostic: ts.Diagnostic) {
return {
start: diagnostic.start,
length: diagnostic.length,
messageText: diagnostic.messageText,
category: (<any>ts).DiagnosticCategory[diagnostic.category],
code: diagnostic.code
};
}
export function sourceFileToJSON(file: ts.SourceFile): string {
return JSON.stringify(file,(k, v) => {
return isNodeOrArray(v) ? serializeNode(v) : v;
}, " ");
function getKindName(k: number): string {
return (<any>ts).SyntaxKind[k];
}
function getFlagName(flags: any, f: number): any {
if (f === 0) {
return 0;
}
var result = "";
ts.forEach(Object.getOwnPropertyNames(flags),(v: any) => {
if (isFinite(v)) {
v = +v;
if (f === +v) {
result = flags[v];
return true;
}
else if ((f & v) > 0) {
if (result.length)
result += " | ";
result += flags[v];
return false;
}
}
});
return result;
}
function getNodeFlagName(f: number) { return getFlagName((<any>ts).NodeFlags, f); }
function getParserContextFlagName(f: number) {
// Clear the flags that are produced by aggregating child values. That is ephemeral
// data we don't care about in the dump. We only care what the parser set directly
// on the ast.
return getFlagName((<any>ts).ParserContextFlags, f & ts.ParserContextFlags.ParserGeneratedFlags);
}
function serializeNode(n: ts.Node): any {
var o: any = { kind: getKindName(n.kind) };
o.containsParseError = ts.containsParseError(n);
ts.forEach(Object.getOwnPropertyNames(n), propertyName => {
switch (propertyName) {
case "parent":
case "symbol":
case "locals":
case "localSymbol":
case "kind":
case "semanticDiagnostics":
case "id":
case "nodeCount":
case "symbolCount":
case "identifierCount":
case "scriptSnapshot":
// Blacklist of items we never put in the baseline file.
break;
case "flags":
// Print out flags with their enum names.
o[propertyName] = getNodeFlagName(n.flags);
break;
case "parserContextFlags":
o[propertyName] = getParserContextFlagName(n.parserContextFlags);
break;
case "referenceDiagnostics":
case "parseDiagnostics":
case "grammarDiagnostics":
o[propertyName] = Utils.convertDiagnostics((<any>n)[propertyName]);
break;
case "nextContainer":
if (n.nextContainer) {
o[propertyName] = { kind: n.nextContainer.kind, pos: n.nextContainer.pos, end: n.nextContainer.end };
}
break;
case "text":
// Include 'text' field for identifiers/literals, but not for source files.
if (n.kind !== ts.SyntaxKind.SourceFile) {
o[propertyName] = (<any>n)[propertyName];
}
break;
default:
o[propertyName] = (<any>n)[propertyName];
}
return undefined;
});
return o;
}
}
}
module Harness.Path {
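
A hypothetical usage sketch of the two helpers this hunk adds, assuming the same 2014-era entry points the fourslash hunk above relies on (ts.ScriptSnapshot.fromString and ts.createLanguageServiceSourceFile); the file name and the deliberately broken source text are made up for illustration.

var snapshot = ts.ScriptSnapshot.fromString("var x = ;");
var file = ts.createLanguageServiceSourceFile(
    "sample.ts", snapshot, ts.ScriptTarget.Latest, /*version:*/ "0", /*isOpen:*/ false, /*setNodeParents:*/ false);
// Diagnostics become plain objects with the category spelled out by name
// (e.g. "Error") rather than a bare enum value, so dumps stay readable.
var syntacticErrors = Utils.convertDiagnostics(file.getSyntacticDiagnostics());
// The whole tree serializes with "kind" and flag names expanded; this string is
// what both the fourslash invariant check and the test262 baselines compare.
var astDump = Utils.sourceFileToJSON(file);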


@@ -21,124 +21,6 @@ class Test262BaselineRunner extends RunnerBase {
return Test262BaselineRunner.basePath + "/" + filename;
}
private static serializeSourceFile(file: ts.SourceFile): string {
function getKindName(k: number): string {
return (<any>ts).SyntaxKind[k];
}
function getFlagName(flags: any, f: number): any {
if (f === 0) {
return 0;
}
var result = "";
ts.forEach(Object.getOwnPropertyNames(flags), (v: any) => {
if (isFinite(v)) {
v = +v;
if (f === +v) {
result = flags[v];
return true;
}
else if ((f & v) > 0) {
if (result.length)
result += " | ";
result += flags[v];
return false;
}
}
});
return result;
}
function getNodeFlagName(f: number) { return getFlagName((<any>ts).NodeFlags, f); }
function getParserContextFlagName(f: number) {
// Clear the flags that are produced by aggregating child values. That is ephemeral
// data we don't care about in the dump. We only care what the parser set directly
// on the ast.
return getFlagName((<any>ts).ParserContextFlags, f & ts.ParserContextFlags.ParserGeneratedFlags);
}
function convertDiagnostics(diagnostics: ts.Diagnostic[]) {
return diagnostics.map(convertDiagnostic);
}
function convertDiagnostic(diagnostic: ts.Diagnostic): any {
return {
start: diagnostic.start,
length: diagnostic.length,
messageText: diagnostic.messageText,
category: (<any>ts).DiagnosticCategory[diagnostic.category],
code: diagnostic.code
};
}
function serializeNode(n: ts.Node): any {
var o: any = { kind: getKindName(n.kind) };
if (ts.containsParseError(n)) {
o.containsParseError = true;
}
ts.forEach(Object.getOwnPropertyNames(n), propertyName => {
switch (propertyName) {
case "parent":
case "symbol":
case "locals":
case "localSymbol":
case "kind":
case "semanticDiagnostics":
case "id":
case "nodeCount":
case "symbolCount":
case "identifierCount":
// Blacklist of items we never put in the baseline file.
break;
case "flags":
// Print out flags with their enum names.
o[propertyName] = getNodeFlagName(n.flags);
break;
case "parserContextFlags":
o[propertyName] = getParserContextFlagName(n.parserContextFlags);
break;
case "referenceDiagnostics":
case "parseDiagnostics":
case "grammarDiagnostics":
o[propertyName] = convertDiagnostics((<any>n)[propertyName]);
break;
case "nextContainer":
if (n.nextContainer) {
o[propertyName] = { kind: n.nextContainer.kind, pos: n.nextContainer.pos, end: n.nextContainer.end };
}
break;
case "text":
// Include 'text' field for identifiers/literals, but not for source files.
if (n.kind !== ts.SyntaxKind.SourceFile) {
o[propertyName] = (<any>n)[propertyName];
}
break;
default:
o[propertyName] = (<any>n)[propertyName];
}
return undefined;
});
return o;
}
return JSON.stringify(file, (k, v) => {
return Test262BaselineRunner.isNodeOrArray(v) ? serializeNode(v) : v;
}, " ");
}
private static isNodeOrArray(a: any): boolean {
return a !== undefined && typeof a.pos === "number";
}
private runTest(filePath: string) {
describe('test262 test for ' + filePath, () => {
// Mocha holds onto the closure environment of the describe callback even after the test is done.
@@ -203,7 +85,7 @@ class Test262BaselineRunner extends RunnerBase {
it('has the expected AST',() => {
Harness.Baseline.runBaseline('has the expected AST', testState.filename + '.AST.txt',() => {
var sourceFile = testState.checker.getProgram().getSourceFile(Test262BaselineRunner.getTestFilePath(testState.filename));
return Test262BaselineRunner.serializeSourceFile(sourceFile);
return Utils.sourceFileToJSON(sourceFile);
}, false, Test262BaselineRunner.baselineOptions);
});
});