TypeScript/tests/baselines/reference/parseBigInt.types
Wesley Wigham 78a99241d8
Reuse input type nodes when serializing signature parameter and return types (#37444)
* Accept change

* Accept the huge set of ever so slightly changed baselines

* Update return type logic to only reuse nodes if original nodes share scope with current node, like property types, only reuse nodes if symbols referenced are accessible, reuse nodes for property signatures, too

* Only reuse nodes when a context is provided (otherwise identifier printback may fail)

* Only track symbol if symbol is found and no error is recorded

* Fix type parameter reuse lookup

* Forbid cjs module.exports references in retained nodes

* Adjust check for cjs export references to not include bad module type in output

* Add symbol to all identifiers we see in existing nodes for quickinfo

* Accept fourslash baseline updates

* Accept slightly updated baseline post-merge

* Do not copy original nodes for error types, replace empty type references with any
2020-04-01 19:50:21 -07:00

254 lines
5.8 KiB
Plaintext

=== tests/cases/compiler/parseBigInt.ts ===
// All bases should allow "n" suffix
const bin = 0b101, binBig = 0b101n; // 5, 5n
>bin : 5
>0b101 : 5
>binBig : 5n
>0b101n : 5n
const oct = 0o567, octBig = 0o567n; // 375, 375n
>oct : 375
>0o567 : 375
>octBig : 375n
>0o567n : 375n
const hex = 0xC0B, hexBig = 0xC0Bn; // 3083, 3083n
>hex : 3083
>0xC0B : 3083
>hexBig : 3083n
>0xC0Bn : 3083n
const dec = 123, decBig = 123n;
>dec : 123
>123 : 123
>decBig : 123n
>123n : 123n
// Test literals whose values overflow a 53-bit integer
// These should be represented exactly in the emitted JS
const largeBin = 0b10101010101010101010101010101010101010101010101010101010101n; // 384307168202282325n
>largeBin : 384307168202282325n
>0b10101010101010101010101010101010101010101010101010101010101n : 384307168202282325n
const largeOct = 0o123456712345671234567n; // 1505852261029722487n
>largeOct : 1505852261029722487n
>0o123456712345671234567n : 1505852261029722487n
const largeDec = 12345678091234567890n;
>largeDec : 12345678091234567890n
>12345678091234567890n : 12345678091234567890n
const largeHex = 0x1234567890abcdefn; // 1311768467294899695n
>largeHex : 1311768467294899695n
>0x1234567890abcdefn : 1311768467294899695n
// Test literals with separators
const separatedBin = 0b010_10_1n; // 21n
>separatedBin : 21n
>0b010_10_1n : 21n
const separatedOct = 0o1234_567n; // 342391n
>separatedOct : 342391n
>0o1234_567n : 342391n
const separatedDec = 123_456_789n;
>separatedDec : 123456789n
>123_456_789n : 123456789n
const separatedHex = 0x0_abcdefn; // 11259375n
>separatedHex : 11259375n
>0x0_abcdefn : 11259375n
// Test parsing literals of different bit sizes
// to ensure that parsePseudoBigInt() allocates enough space
const zero = 0b0n;
>zero : 0n
>0b0n : 0n
const oneBit = 0b1n;
>oneBit : 1n
>0b1n : 1n
const twoBit = 0b11n; // 3n
>twoBit : 3n
>0b11n : 3n
const threeBit = 0b111n; // 7n
>threeBit : 7n
>0b111n : 7n
const fourBit = 0b1111n; // 15n
>fourBit : 15n
>0b1111n : 15n
const fiveBit = 0b11111n; // 31n
>fiveBit : 31n
>0b11111n : 31n
const sixBit = 0b111111n; // 63n
>sixBit : 63n
>0b111111n : 63n
const sevenBit = 0b1111111n; // 127n
>sevenBit : 127n
>0b1111111n : 127n
const eightBit = 0b11111111n; // 255n
>eightBit : 255n
>0b11111111n : 255n
const nineBit = 0b111111111n; // 511n
>nineBit : 511n
>0b111111111n : 511n
const tenBit = 0b1111111111n; // 1023n
>tenBit : 1023n
>0b1111111111n : 1023n
const elevenBit = 0b11111111111n; // 2047n
>elevenBit : 2047n
>0b11111111111n : 2047n
const twelveBit = 0b111111111111n; // 4095n
>twelveBit : 4095n
>0b111111111111n : 4095n
const thirteenBit = 0b1111111111111n; // 8191n
>thirteenBit : 8191n
>0b1111111111111n : 8191n
const fourteenBit = 0b11111111111111n; // 16383n
>fourteenBit : 16383n
>0b11111111111111n : 16383n
const fifteenBit = 0b111111111111111n; // 32767n
>fifteenBit : 32767n
>0b111111111111111n : 32767n
const sixteenBit = 0b1111111111111111n; // 65535n
>sixteenBit : 65535n
>0b1111111111111111n : 65535n
const seventeenBit = 0b11111111111111111n; // 131071n
>seventeenBit : 131071n
>0b11111111111111111n : 131071n
// Test negative literals
const neg = -123n;
>neg : -123n
>-123n : -123n
>123n : 123n
const negHex: -16n = -0x10n;
>negHex : -16n
>-16n : -16n
>16n : 16n
>-0x10n : -16n
>0x10n : 16n
// Test normalization of bigints -- all of these should succeed
const negZero: 0n = -0n;
>negZero : 0n
>-0n : 0n
>0n : 0n
const baseChange: 255n = 0xFFn;
>baseChange : 255n
>0xFFn : 255n
const leadingZeros: 0xFFn = 0x000000FFn;
>leadingZeros : 255n
>0x000000FFn : 255n
// Plus not allowed on literals
const unaryPlus = +123n;
>unaryPlus : number
>+123n : number
>123n : 123n
const unaryPlusHex = +0x123n;
>unaryPlusHex : number
>+0x123n : number
>0x123n : 291n
// Parsing errors
// In separate blocks because they each declare an "n" variable
{ const legacyOct = 0123n; }
>legacyOct : 123
>0123 : 123
>n : any
{ const scientific = 1e2n; }
>scientific : 100
>1e2n : 100
{ const decimal = 4.1n; }
>decimal : 4.1
>4.1n : 4.1
{ const leadingDecimal = .1n; }
>leadingDecimal : 0.1
>.1n : 0.1
const emptyBinary = 0bn; // should error but infer 0n
>emptyBinary : 0n
>0bn : 0n
const emptyOct = 0on; // should error but infer 0n
>emptyOct : 0n
>0on : 0n
const emptyHex = 0xn; // should error but infer 0n
>emptyHex : 0n
>0xn : 0n
const leadingSeparator = _123n;
>leadingSeparator : any
>_123n : any
const trailingSeparator = 123_n;
>trailingSeparator : 123n
>123_n : 123n
const doubleSeparator = 123_456__789n;
>doubleSeparator : 123456789n
>123_456__789n : 123456789n
// Using literals as types
const oneTwoOrThree = (x: 1n | 2n | 3n): bigint => x ** 2n;
>oneTwoOrThree : (x: 1n | 2n | 3n) => bigint
>(x: 1n | 2n | 3n): bigint => x ** 2n : (x: 1n | 2n | 3n) => bigint
>x : 1n | 3n | 2n
>x ** 2n : bigint
>x : 1n | 3n | 2n
>2n : 2n
oneTwoOrThree(0n); oneTwoOrThree(1n); oneTwoOrThree(2n); oneTwoOrThree(3n);
>oneTwoOrThree(0n) : bigint
>oneTwoOrThree : (x: 1n | 3n | 2n) => bigint
>0n : 0n
>oneTwoOrThree(1n) : bigint
>oneTwoOrThree : (x: 1n | 3n | 2n) => bigint
>1n : 1n
>oneTwoOrThree(2n) : bigint
>oneTwoOrThree : (x: 1n | 3n | 2n) => bigint
>2n : 2n
>oneTwoOrThree(3n) : bigint
>oneTwoOrThree : (x: 1n | 3n | 2n) => bigint
>3n : 3n
oneTwoOrThree(0); oneTwoOrThree(1); oneTwoOrThree(2); oneTwoOrThree(3);
>oneTwoOrThree(0) : bigint
>oneTwoOrThree : (x: 1n | 3n | 2n) => bigint
>0 : 0
>oneTwoOrThree(1) : bigint
>oneTwoOrThree : (x: 1n | 3n | 2n) => bigint
>1 : 1
>oneTwoOrThree(2) : bigint
>oneTwoOrThree : (x: 1n | 3n | 2n) => bigint
>2 : 2
>oneTwoOrThree(3) : bigint
>oneTwoOrThree : (x: 1n | 3n | 2n) => bigint
>3 : 3