@zzzen/pyright-internal 1.2.0-dev.20230514 → 1.2.0-dev.20230521
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/analyzer/analyzerFileInfo.d.ts +1 -0
- package/dist/analyzer/analyzerFileInfo.js +4 -3
- package/dist/analyzer/analyzerFileInfo.js.map +1 -1
- package/dist/analyzer/backgroundAnalysisProgram.d.ts +17 -15
- package/dist/analyzer/backgroundAnalysisProgram.js +43 -53
- package/dist/analyzer/backgroundAnalysisProgram.js.map +1 -1
- package/dist/analyzer/binder.d.ts +0 -2
- package/dist/analyzer/binder.js +2 -20
- package/dist/analyzer/binder.js.map +1 -1
- package/dist/analyzer/checker.d.ts +1 -1
- package/dist/analyzer/checker.js +61 -28
- package/dist/analyzer/checker.js.map +1 -1
- package/dist/analyzer/constructorTransform.js +5 -1
- package/dist/analyzer/constructorTransform.js.map +1 -1
- package/dist/analyzer/constructors.js +14 -12
- package/dist/analyzer/constructors.js.map +1 -1
- package/dist/analyzer/dataClasses.js +3 -0
- package/dist/analyzer/dataClasses.js.map +1 -1
- package/dist/analyzer/declarationUtils.js +1 -0
- package/dist/analyzer/declarationUtils.js.map +1 -1
- package/dist/analyzer/importResolver.d.ts +4 -4
- package/dist/analyzer/importResolver.js +46 -40
- package/dist/analyzer/importResolver.js.map +1 -1
- package/dist/analyzer/importResult.d.ts +2 -2
- package/dist/analyzer/importStatementUtils.js +2 -2
- package/dist/analyzer/importStatementUtils.js.map +1 -1
- package/dist/analyzer/packageTypeVerifier.js +1 -1
- package/dist/analyzer/packageTypeVerifier.js.map +1 -1
- package/dist/analyzer/parseTreeUtils.js +2 -34
- package/dist/analyzer/parseTreeUtils.js.map +1 -1
- package/dist/analyzer/parseTreeWalker.js +2 -2
- package/dist/analyzer/parseTreeWalker.js.map +1 -1
- package/dist/analyzer/patternMatching.js +1 -0
- package/dist/analyzer/patternMatching.js.map +1 -1
- package/dist/analyzer/program.d.ts +12 -18
- package/dist/analyzer/program.js +25 -12
- package/dist/analyzer/program.js.map +1 -1
- package/dist/analyzer/protocols.js +3 -1
- package/dist/analyzer/protocols.js.map +1 -1
- package/dist/analyzer/service.d.ts +4 -4
- package/dist/analyzer/service.js +21 -24
- package/dist/analyzer/service.js.map +1 -1
- package/dist/analyzer/sourceFile.d.ts +1 -6
- package/dist/analyzer/sourceFile.js +7 -26
- package/dist/analyzer/sourceFile.js.map +1 -1
- package/dist/analyzer/sourceMapper.js +1 -1
- package/dist/analyzer/sourceMapper.js.map +1 -1
- package/dist/analyzer/typeDocStringUtils.js +1 -1
- package/dist/analyzer/typeDocStringUtils.js.map +1 -1
- package/dist/analyzer/typeEvaluator.d.ts +2 -2
- package/dist/analyzer/typeEvaluator.js +134 -55
- package/dist/analyzer/typeEvaluator.js.map +1 -1
- package/dist/analyzer/typeEvaluatorTypes.d.ts +3 -2
- package/dist/analyzer/typeEvaluatorTypes.js.map +1 -1
- package/dist/analyzer/typeGuards.js +1 -0
- package/dist/analyzer/typeGuards.js.map +1 -1
- package/dist/analyzer/typePrinter.js +8 -1
- package/dist/analyzer/typePrinter.js.map +1 -1
- package/dist/analyzer/typeUtils.js +1 -1
- package/dist/analyzer/typeUtils.js.map +1 -1
- package/dist/analyzer/typeVarContext.js +2 -2
- package/dist/analyzer/typeVarContext.js.map +1 -1
- package/dist/analyzer/typedDicts.js +138 -41
- package/dist/analyzer/typedDicts.js.map +1 -1
- package/dist/analyzer/types.d.ts +7 -1
- package/dist/analyzer/types.js +19 -6
- package/dist/analyzer/types.js.map +1 -1
- package/dist/backgroundAnalysisBase.d.ts +25 -19
- package/dist/backgroundAnalysisBase.js +145 -115
- package/dist/backgroundAnalysisBase.js.map +1 -1
- package/dist/backgroundThreadBase.d.ts +1 -1
- package/dist/backgroundThreadBase.js +1 -1
- package/dist/backgroundThreadBase.js.map +1 -1
- package/dist/commands/dumpFileDebugInfoCommand.js +3 -3
- package/dist/commands/dumpFileDebugInfoCommand.js.map +1 -1
- package/dist/common/configOptions.js +1 -1
- package/dist/common/configOptions.js.map +1 -1
- package/dist/common/extensibility.d.ts +2 -4
- package/dist/common/extensibility.js.map +1 -1
- package/dist/common/textRange.js +1 -1
- package/dist/common/textRange.js.map +1 -1
- package/dist/common/workspaceEditUtils.d.ts +1 -1
- package/dist/common/workspaceEditUtils.js +5 -7
- package/dist/common/workspaceEditUtils.js.map +1 -1
- package/dist/languageServerBase.js +2 -2
- package/dist/languageServerBase.js.map +1 -1
- package/dist/languageService/autoImporter.d.ts +3 -3
- package/dist/languageService/autoImporter.js +3 -3
- package/dist/languageService/autoImporter.js.map +1 -1
- package/dist/languageService/callHierarchyProvider.js +3 -2
- package/dist/languageService/callHierarchyProvider.js.map +1 -1
- package/dist/languageService/completionProvider.d.ts +2 -2
- package/dist/languageService/completionProvider.js +37 -30
- package/dist/languageService/completionProvider.js.map +1 -1
- package/dist/languageService/documentSymbolCollector.js +9 -9
- package/dist/languageService/documentSymbolCollector.js.map +1 -1
- package/dist/languageService/referencesProvider.js +1 -1
- package/dist/languageService/referencesProvider.js.map +1 -1
- package/dist/languageService/signatureHelpProvider.js +1 -1
- package/dist/languageService/signatureHelpProvider.js.map +1 -1
- package/dist/languageService/symbolIndexer.d.ts +0 -8
- package/dist/languageService/symbolIndexer.js.map +1 -1
- package/dist/localization/localize.d.ts +22 -3
- package/dist/localization/localize.js +12 -3
- package/dist/localization/localize.js.map +1 -1
- package/dist/localization/package.nls.en-us.json +14 -5
- package/dist/parser/parseNodes.d.ts +8 -8
- package/dist/parser/parseNodes.js +20 -10
- package/dist/parser/parseNodes.js.map +1 -1
- package/dist/parser/parser.d.ts +3 -3
- package/dist/parser/parser.js +133 -159
- package/dist/parser/parser.js.map +1 -1
- package/dist/parser/stringTokenUtils.d.ts +3 -13
- package/dist/parser/stringTokenUtils.js +8 -181
- package/dist/parser/stringTokenUtils.js.map +1 -1
- package/dist/parser/tokenizer.d.ts +3 -0
- package/dist/parser/tokenizer.js +211 -24
- package/dist/parser/tokenizer.js.map +1 -1
- package/dist/parser/tokenizerTypes.d.ts +31 -1
- package/dist/parser/tokenizerTypes.js +51 -1
- package/dist/parser/tokenizerTypes.js.map +1 -1
- package/dist/readonlyAugmentedFileSystem.js +1 -1
- package/dist/readonlyAugmentedFileSystem.js.map +1 -1
- package/dist/tests/harness/fourslash/testLanguageService.js +1 -1
- package/dist/tests/harness/fourslash/testLanguageService.js.map +1 -1
- package/dist/tests/harness/fourslash/testState.js +2 -1
- package/dist/tests/harness/fourslash/testState.js.map +1 -1
- package/dist/tests/importResolver.test.js +3 -3
- package/dist/tests/importResolver.test.js.map +1 -1
- package/dist/tests/textRange.test.d.ts +1 -0
- package/dist/tests/textRange.test.js +45 -0
- package/dist/tests/textRange.test.js.map +1 -0
- package/dist/tests/tokenizer.test.js +272 -58
- package/dist/tests/tokenizer.test.js.map +1 -1
- package/dist/tests/typeEvaluator2.test.js +4 -0
- package/dist/tests/typeEvaluator2.test.js.map +1 -1
- package/dist/tests/typeEvaluator3.test.js +10 -0
- package/dist/tests/typeEvaluator3.test.js.map +1 -1
- package/dist/tests/typeEvaluator4.test.js +7 -2
- package/dist/tests/typeEvaluator4.test.js.map +1 -1
- package/dist/tests/typeEvaluator5.test.js +8 -0
- package/dist/tests/typeEvaluator5.test.js.map +1 -1
- package/dist/workspaceFactory.js +3 -5
- package/dist/workspaceFactory.js.map +1 -1
- package/package.json +1 -1
package/dist/tests/textRange.test.js
@@ -0,0 +1,45 @@
+"use strict";
+/*
+ * textRange.test.ts
+ * Copyright (c) Microsoft Corporation.
+ * Licensed under the MIT license.
+ */
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const assert = __importStar(require("assert"));
+const textRange_1 = require("../common/textRange");
+test('textRange combine', () => {
+    const range1 = textRange_1.TextRange.create(10, 2);
+    const range2 = textRange_1.TextRange.create(12, 2);
+    const range3 = textRange_1.TextRange.create(8, 2);
+    const combined = textRange_1.TextRange.combine([range1, range2, range3]);
+    assert.ok(combined);
+    assert.equal(combined.start, 8);
+    assert.equal(combined.length, 6);
+    // Ensure input ranges are unchanged
+    assert.equal(range1.start, 10);
+    assert.equal(range1.length, 2);
+    assert.equal(range2.start, 12);
+    assert.equal(range2.length, 2);
+    assert.equal(range3.start, 8);
+    assert.equal(range3.length, 2);
+});
+//# sourceMappingURL=textRange.test.js.map
package/dist/tests/textRange.test.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"textRange.test.js","sourceRoot":"","sources":["../../../src/tests/textRange.test.ts"],"names":[],"mappings":";AAAA;;;;GAIG;;;;;;;;;;;;;;;;;;;;;AAEH,+CAAiC;AAEjC,mDAAgD;AAEhD,IAAI,CAAC,mBAAmB,EAAE,GAAG,EAAE;IAC3B,MAAM,MAAM,GAAG,qBAAS,CAAC,MAAM,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC;IACvC,MAAM,MAAM,GAAG,qBAAS,CAAC,MAAM,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC;IACvC,MAAM,MAAM,GAAG,qBAAS,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;IAEtC,MAAM,QAAQ,GAAG,qBAAS,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC,CAAC;IAE7D,MAAM,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC;IACpB,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;IAChC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;IAEjC,oCAAoC;IACpC,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC;IAC/B,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;IAC/B,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC;IAC/B,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;IAC/B,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;IAC9B,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC;AACnC,CAAC,CAAC,CAAC"}
package/dist/tests/tokenizer.test.js
@@ -449,90 +449,304 @@ test('Strings: triplicate double quote escape', () => {
 test('Strings: single quoted f-string', () => {
     const t = new tokenizer_1.Tokenizer();
     const results = t.tokenize("a+f'quoted'");
-    assert_1.default.equal(results.tokens.count,
+    assert_1.default.equal(results.tokens.count, 5 + _implicitTokenCount);
     assert_1.default.equal(results.tokens.getItemAt(0).type, 7 /* Identifier */);
     assert_1.default.equal(results.tokens.getItemAt(1).type, 9 /* Operator */);
-    const
-    assert_1.default.equal(
-    assert_1.default.equal(
-    assert_1.default.equal(
-
+    const fStringStartToken = results.tokens.getItemAt(2);
+    assert_1.default.equal(fStringStartToken.type, 24 /* FStringStart */);
+    assert_1.default.equal(fStringStartToken.flags, 1 /* SingleQuote */ | 64 /* Format */);
+    assert_1.default.equal(fStringStartToken.length, 2);
+    const fStringMiddleToken = results.tokens.getItemAt(3);
+    assert_1.default.equal(fStringMiddleToken.type, 25 /* FStringMiddle */);
+    assert_1.default.equal(fStringMiddleToken.flags, 1 /* SingleQuote */ | 64 /* Format */);
+    assert_1.default.equal(fStringMiddleToken.length, 6);
+    assert_1.default.equal(fStringMiddleToken.escapedValue, 'quoted');
+    const fStringEndToken = results.tokens.getItemAt(4);
+    assert_1.default.equal(fStringEndToken.type, 26 /* FStringEnd */);
+    assert_1.default.equal(fStringEndToken.flags, 1 /* SingleQuote */ | 64 /* Format */);
+    assert_1.default.equal(fStringEndToken.length, 1);
 });
 test('Strings: double quoted f-string', () => {
     const t = new tokenizer_1.Tokenizer();
     const results = t.tokenize('x(1,f"quoted")');
-    assert_1.default.equal(results.tokens.count,
+    assert_1.default.equal(results.tokens.count, 8 + _implicitTokenCount);
     assert_1.default.equal(results.tokens.getItemAt(0).type, 7 /* Identifier */);
     assert_1.default.equal(results.tokens.getItemAt(1).type, 13 /* OpenParenthesis */);
     assert_1.default.equal(results.tokens.getItemAt(2).type, 6 /* Number */);
     assert_1.default.equal(results.tokens.getItemAt(3).type, 12 /* Comma */);
-    assert_1.default.equal(results.tokens.getItemAt(
-    const
-    assert_1.default.equal(
-    assert_1.default.equal(
-    assert_1.default.equal(
-
+    assert_1.default.equal(results.tokens.getItemAt(7).type, 14 /* CloseParenthesis */);
+    const fStringStartToken = results.tokens.getItemAt(4);
+    assert_1.default.equal(fStringStartToken.type, 24 /* FStringStart */);
+    assert_1.default.equal(fStringStartToken.flags, 2 /* DoubleQuote */ | 64 /* Format */);
+    assert_1.default.equal(fStringStartToken.length, 2);
+    const fStringMiddleToken = results.tokens.getItemAt(5);
+    assert_1.default.equal(fStringMiddleToken.type, 25 /* FStringMiddle */);
+    assert_1.default.equal(fStringMiddleToken.flags, 2 /* DoubleQuote */ | 64 /* Format */);
+    assert_1.default.equal(fStringMiddleToken.length, 6);
+    assert_1.default.equal(fStringMiddleToken.escapedValue, 'quoted');
+    const fStringEndToken = results.tokens.getItemAt(6);
+    assert_1.default.equal(fStringEndToken.type, 26 /* FStringEnd */);
+    assert_1.default.equal(fStringEndToken.flags, 2 /* DoubleQuote */ | 64 /* Format */);
+    assert_1.default.equal(fStringEndToken.length, 1);
 });
 test('Strings: single quoted multiline f-string', () => {
     const t = new tokenizer_1.Tokenizer();
     const results = t.tokenize("f'''quoted'''");
-    assert_1.default.equal(results.tokens.count,
-    const
-    assert_1.default.equal(
-    assert_1.default.equal(
-    assert_1.default.equal(
-
+    assert_1.default.equal(results.tokens.count, 3 + _implicitTokenCount);
+    const fStringStartToken = results.tokens.getItemAt(0);
+    assert_1.default.equal(fStringStartToken.type, 24 /* FStringStart */);
+    assert_1.default.equal(fStringStartToken.flags, 1 /* SingleQuote */ | 4 /* Triplicate */ | 64 /* Format */);
+    assert_1.default.equal(fStringStartToken.length, 4);
+    const fStringMiddleToken = results.tokens.getItemAt(1);
+    assert_1.default.equal(fStringMiddleToken.type, 25 /* FStringMiddle */);
+    assert_1.default.equal(fStringMiddleToken.flags, 1 /* SingleQuote */ | 4 /* Triplicate */ | 64 /* Format */);
+    assert_1.default.equal(fStringMiddleToken.length, 6);
+    assert_1.default.equal(fStringMiddleToken.escapedValue, 'quoted');
+    const fStringEndToken = results.tokens.getItemAt(2);
+    assert_1.default.equal(fStringEndToken.type, 26 /* FStringEnd */);
+    assert_1.default.equal(fStringEndToken.flags, 1 /* SingleQuote */ | 4 /* Triplicate */ | 64 /* Format */);
+    assert_1.default.equal(fStringEndToken.length, 3);
 });
 test('Strings: double quoted multiline f-string', () => {
     const t = new tokenizer_1.Tokenizer();
     const results = t.tokenize('f"""quoted """');
-    assert_1.default.equal(results.tokens.count,
-    const
-    assert_1.default.equal(
-    assert_1.default.equal(
-    assert_1.default.equal(
-
+    assert_1.default.equal(results.tokens.count, 3 + _implicitTokenCount);
+    const fStringStartToken = results.tokens.getItemAt(0);
+    assert_1.default.equal(fStringStartToken.type, 24 /* FStringStart */);
+    assert_1.default.equal(fStringStartToken.flags, 2 /* DoubleQuote */ | 4 /* Triplicate */ | 64 /* Format */);
+    assert_1.default.equal(fStringStartToken.length, 4);
+    const fStringMiddleToken = results.tokens.getItemAt(1);
+    assert_1.default.equal(fStringMiddleToken.type, 25 /* FStringMiddle */);
+    assert_1.default.equal(fStringMiddleToken.flags, 2 /* DoubleQuote */ | 4 /* Triplicate */ | 64 /* Format */);
+    assert_1.default.equal(fStringMiddleToken.length, 7);
+    assert_1.default.equal(fStringMiddleToken.escapedValue, 'quoted ');
+    const fStringEndToken = results.tokens.getItemAt(2);
+    assert_1.default.equal(fStringEndToken.type, 26 /* FStringEnd */);
+    assert_1.default.equal(fStringEndToken.flags, 2 /* DoubleQuote */ | 4 /* Triplicate */ | 64 /* Format */);
+    assert_1.default.equal(fStringEndToken.length, 3);
 });
 test('Strings: f-string with single right brace', () => {
     const t = new tokenizer_1.Tokenizer();
     const results = t.tokenize("f'hello}'");
-    assert_1.default.equal(results.tokens.count,
-    const
-
-    assert_1.default.equal(
-    assert_1.default.equal(
-
-    assert_1.default.equal(
-    assert_1.default.equal(
-    assert_1.default.equal(
-
+    assert_1.default.equal(results.tokens.count, 4 + _implicitTokenCount);
+    const fStringStartToken = results.tokens.getItemAt(0);
+    assert_1.default.equal(fStringStartToken.type, 24 /* FStringStart */);
+    assert_1.default.equal(fStringStartToken.length, 2);
+    assert_1.default.equal(fStringStartToken.flags, 1 /* SingleQuote */ | 64 /* Format */);
+    const fStringMiddleToken = results.tokens.getItemAt(1);
+    assert_1.default.equal(fStringMiddleToken.type, 25 /* FStringMiddle */);
+    assert_1.default.equal(fStringMiddleToken.length, 5);
+    assert_1.default.equal(fStringMiddleToken.flags, 1 /* SingleQuote */ | 64 /* Format */ | 256 /* ReplacementFieldEnd */);
+    const braceToken = results.tokens.getItemAt(2).type;
+    assert_1.default.equal(braceToken, 18 /* CloseCurlyBrace */);
+    const fStringEndToken = results.tokens.getItemAt(3);
+    assert_1.default.equal(fStringEndToken.type, 26 /* FStringEnd */);
+    assert_1.default.equal(fStringEndToken.flags, 1 /* SingleQuote */ | 64 /* Format */);
+    assert_1.default.equal(fStringEndToken.length, 1);
 });
 test('Strings: f-string with escape in expression', () => {
     const t = new tokenizer_1.Tokenizer();
-    const results = t.tokenize(
-    assert_1.default.equal(results.tokens.count,
-
-
-    assert_1.default.equal(
-    assert_1.default.equal(
-    assert_1.default.equal(
-    assert_1.default.equal(
-
-
-
+    const results = t.tokenize(`f'hello { "\\t" }'`);
+    assert_1.default.equal(results.tokens.count, 6 + _implicitTokenCount);
+    assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
+    assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(3).type, 5 /* String */);
+    assert_1.default.equal(results.tokens.getItemAt(4).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(5).type, 26 /* FStringEnd */);
+});
+test('Strings: f-string with escape in format string 1', () => {
+    const t = new tokenizer_1.Tokenizer();
+    const results = t.tokenize("f'he\\{ 1 }lo'");
+    assert_1.default.equal(results.tokens.count, 7 + _implicitTokenCount);
+    assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
+    const middleFString = results.tokens.getItemAt(1);
+    assert_1.default.equal(middleFString.type, 25 /* FStringMiddle */);
+    assert_1.default.equal(middleFString.escapedValue.length, 3);
+    assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(3).type, 6 /* Number */);
+    assert_1.default.equal(results.tokens.getItemAt(4).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(5).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(6).type, 26 /* FStringEnd */);
+});
+test('Strings: f-string with escape in format string 2', () => {
+    const t = new tokenizer_1.Tokenizer();
+    const results = t.tokenize(`f"'{{\\"{0}\\": {0}}}'"`);
+    assert_1.default.equal(results.tokens.count, 11 + _implicitTokenCount);
+    assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
+    const middleFString = results.tokens.getItemAt(1);
+    assert_1.default.equal(middleFString.type, 25 /* FStringMiddle */);
+    assert_1.default.equal(middleFString.escapedValue.length, 5);
+    assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(3).type, 6 /* Number */);
+    assert_1.default.equal(results.tokens.getItemAt(4).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(5).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(6).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(7).type, 6 /* Number */);
+    assert_1.default.equal(results.tokens.getItemAt(8).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(9).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(10).type, 26 /* FStringEnd */);
+});
+test('Strings: f-string with double brace', () => {
+    const t = new tokenizer_1.Tokenizer();
+    const results = t.tokenize(`f"hello {{{0==0}}}"`);
+    assert_1.default.equal(results.tokens.count, 9 + _implicitTokenCount);
+    assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
+    assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(3).type, 6 /* Number */);
+    assert_1.default.equal(results.tokens.getItemAt(4).type, 9 /* Operator */);
+    assert_1.default.equal(results.tokens.getItemAt(5).type, 6 /* Number */);
+    assert_1.default.equal(results.tokens.getItemAt(6).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(7).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(8).type, 26 /* FStringEnd */);
+});
+test('Strings: f-string with walrus operator', () => {
+    const t = new tokenizer_1.Tokenizer();
+    const results = t.tokenize(`f"{(x:=0)}"`);
+    assert_1.default.equal(results.tokens.count, 9 + _implicitTokenCount);
+    assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
+    assert_1.default.equal(results.tokens.getItemAt(1).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(2).type, 13 /* OpenParenthesis */);
+    assert_1.default.equal(results.tokens.getItemAt(3).type, 7 /* Identifier */);
+    assert_1.default.equal(results.tokens.getItemAt(4).type, 9 /* Operator */);
+    assert_1.default.equal(results.tokens.getItemAt(5).type, 6 /* Number */);
+    assert_1.default.equal(results.tokens.getItemAt(6).type, 14 /* CloseParenthesis */);
+    assert_1.default.equal(results.tokens.getItemAt(7).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(8).type, 26 /* FStringEnd */);
+});
+test('Strings: f-string with single right brace', () => {
+    const t = new tokenizer_1.Tokenizer();
+    const results = t.tokenize(`f"}"`);
+    assert_1.default.equal(results.tokens.count, 3 + _implicitTokenCount);
+    assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
+    assert_1.default.equal(results.tokens.getItemAt(1).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(2).type, 26 /* FStringEnd */);
+});
+test('Strings: f-string with comment', () => {
+    const t = new tokenizer_1.Tokenizer();
+    const results = t.tokenize(`f'''hello{\nx # comment\n}'''`);
+    assert_1.default.equal(results.tokens.count, 6 + _implicitTokenCount);
+    assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
+    assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(3).type, 7 /* Identifier */);
+    const closeBraceToken = results.tokens.getItemAt(4);
+    assert_1.default.equal(closeBraceToken.type, 18 /* CloseCurlyBrace */);
+    assert_1.default.deepEqual(closeBraceToken.comments, [
+        { type: 0 /* Regular */, value: ' comment', start: 14, length: 8 },
+    ]);
+    assert_1.default.equal(results.tokens.getItemAt(5).type, 26 /* FStringEnd */);
 });
 test('Strings: f-string with unterminated expression', () => {
     const t = new tokenizer_1.Tokenizer();
-    const results = t.tokenize("f'hello { a
-    assert_1.default.equal(results.tokens.count,
-
-
-    assert_1.default.equal(
-    assert_1.default.equal(
-
-    assert_1.default.equal(
-    assert_1.default.equal(
-
+    const results = t.tokenize("f'hello { a'");
+    assert_1.default.equal(results.tokens.count, 5 + _implicitTokenCount);
+    assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
+    assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(3).type, 7 /* Identifier */);
+    const fStringEnd = results.tokens.getItemAt(4);
+    assert_1.default.equal(fStringEnd.type, 26 /* FStringEnd */);
+    assert_1.default.equal(fStringEnd.flags, 64 /* Format */ | 1 /* SingleQuote */);
+});
+test('Strings: f-string with replacement field', () => {
+    const t = new tokenizer_1.Tokenizer();
+    const results = t.tokenize("f'hello { a + b}'");
+    assert_1.default.equal(results.tokens.count, 8 + _implicitTokenCount);
+    assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
+    assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(3).type, 7 /* Identifier */);
+    assert_1.default.equal(results.tokens.getItemAt(4).type, 9 /* Operator */);
+    assert_1.default.equal(results.tokens.getItemAt(5).type, 7 /* Identifier */);
+    assert_1.default.equal(results.tokens.getItemAt(6).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(7).type, 26 /* FStringEnd */);
+});
+test('Strings: f-string with format specifier', () => {
+    const t = new tokenizer_1.Tokenizer();
+    const results = t.tokenize("f'hello { a ! b}'");
+    assert_1.default.equal(results.tokens.count, 8 + _implicitTokenCount);
+    assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
+    assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(3).type, 7 /* Identifier */);
+    assert_1.default.equal(results.tokens.getItemAt(4).type, 23 /* ExclamationMark */);
+    assert_1.default.equal(results.tokens.getItemAt(5).type, 7 /* Identifier */);
+    assert_1.default.equal(results.tokens.getItemAt(6).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(7).type, 26 /* FStringEnd */);
+});
+test('Strings: f-string with debug format specifier', () => {
+    const t = new tokenizer_1.Tokenizer();
+    const results = t.tokenize("f'hello { a =}'");
+    assert_1.default.equal(results.tokens.count, 7 + _implicitTokenCount);
+    assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
+    assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(3).type, 7 /* Identifier */);
+    assert_1.default.equal(results.tokens.getItemAt(4).type, 9 /* Operator */);
+    assert_1.default.equal(results.tokens.getItemAt(5).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(6).type, 26 /* FStringEnd */);
+});
+test('Strings: nested f-string', () => {
+    const t = new tokenizer_1.Tokenizer();
+    const results = t.tokenize("f'{f'{a}'}'");
+    assert_1.default.equal(results.tokens.count, 9 + _implicitTokenCount);
+    assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
+    assert_1.default.equal(results.tokens.getItemAt(1).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(2).type, 24 /* FStringStart */);
+    assert_1.default.equal(results.tokens.getItemAt(3).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(4).type, 7 /* Identifier */);
+    assert_1.default.equal(results.tokens.getItemAt(5).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(6).type, 26 /* FStringEnd */);
+    assert_1.default.equal(results.tokens.getItemAt(7).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(8).type, 26 /* FStringEnd */);
+});
+test('Strings: nested f-string formats 1', () => {
+    const t = new tokenizer_1.Tokenizer();
+    const results = t.tokenize("f'{a:x{{b}+:x{c}+}}'");
+    assert_1.default.equal(results.tokens.count, 19 + _implicitTokenCount);
+    assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
+    assert_1.default.equal(results.tokens.getItemAt(1).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(2).type, 7 /* Identifier */);
+    assert_1.default.equal(results.tokens.getItemAt(3).type, 10 /* Colon */);
+    assert_1.default.equal(results.tokens.getItemAt(4).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(5).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(6).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(7).type, 7 /* Identifier */);
+    assert_1.default.equal(results.tokens.getItemAt(8).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(9).type, 9 /* Operator */);
+    assert_1.default.equal(results.tokens.getItemAt(10).type, 10 /* Colon */);
+    assert_1.default.equal(results.tokens.getItemAt(11).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(12).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(13).type, 7 /* Identifier */);
+    assert_1.default.equal(results.tokens.getItemAt(14).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(15).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(16).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(17).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(18).type, 26 /* FStringEnd */);
+});
+test('Strings: nested f-string formats 2', () => {
+    const t = new tokenizer_1.Tokenizer();
+    const results = t.tokenize("f'hi{'x':*^{8:{'':}}0}'");
+    assert_1.default.equal(results.tokens.count, 17 + _implicitTokenCount);
+    assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
+    assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(3).type, 5 /* String */);
+    assert_1.default.equal(results.tokens.getItemAt(4).type, 10 /* Colon */);
+    assert_1.default.equal(results.tokens.getItemAt(5).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(6).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(7).type, 6 /* Number */);
+    assert_1.default.equal(results.tokens.getItemAt(8).type, 10 /* Colon */);
+    assert_1.default.equal(results.tokens.getItemAt(9).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(10).type, 5 /* String */);
+    assert_1.default.equal(results.tokens.getItemAt(11).type, 10 /* Colon */);
+    assert_1.default.equal(results.tokens.getItemAt(12).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(13).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(14).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(15).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(16).type, 26 /* FStringEnd */);
 });
 test('Strings: escape at the end of single quoted string', () => {
     const t = new tokenizer_1.Tokenizer();
@@ -782,7 +996,7 @@ test('Strings: bad name escapes', () => {
 });
 test('Comments', () => {
     const t = new tokenizer_1.Tokenizer();
-    const results = t.tokenize(' #co"""mment1\n\t\n#
+    const results = t.tokenize(' #co"""mment1\n\t\n#x\'y2 ');
     assert_1.default.equal(results.tokens.count, 1 + _implicitTokenCountNoImplicitNewLine);
     assert_1.default.equal(results.tokens.getItemAt(0).type, 2 /* NewLine */);
 });
@@ -1226,7 +1440,7 @@ test('TypeIgnoreAll2', () => {
 });
 test('TypeIgnoreAll3', () => {
     const t = new tokenizer_1.Tokenizer();
-    const results = t.tokenize('\n# type:
+    const results = t.tokenize('\n# type: ignoreSsss\n');
     (0, assert_1.default)(!results.typeIgnoreAll);
 });
 test('TypeIgnoreAll3', () => {