@zzzen/pyright-internal 1.2.0-dev.20230514 → 1.2.0-dev.20230528
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/analyzer/analyzerFileInfo.d.ts +1 -0
- package/dist/analyzer/analyzerFileInfo.js +4 -3
- package/dist/analyzer/analyzerFileInfo.js.map +1 -1
- package/dist/analyzer/backgroundAnalysisProgram.d.ts +17 -15
- package/dist/analyzer/backgroundAnalysisProgram.js +43 -53
- package/dist/analyzer/backgroundAnalysisProgram.js.map +1 -1
- package/dist/analyzer/binder.d.ts +0 -2
- package/dist/analyzer/binder.js +2 -20
- package/dist/analyzer/binder.js.map +1 -1
- package/dist/analyzer/checker.d.ts +1 -1
- package/dist/analyzer/checker.js +65 -32
- package/dist/analyzer/checker.js.map +1 -1
- package/dist/analyzer/commentUtils.js +15 -16
- package/dist/analyzer/commentUtils.js.map +1 -1
- package/dist/analyzer/constraintSolver.js +6 -2
- package/dist/analyzer/constraintSolver.js.map +1 -1
- package/dist/analyzer/constructorTransform.js +8 -4
- package/dist/analyzer/constructorTransform.js.map +1 -1
- package/dist/analyzer/constructors.js +128 -130
- package/dist/analyzer/constructors.js.map +1 -1
- package/dist/analyzer/dataClasses.js +4 -1
- package/dist/analyzer/dataClasses.js.map +1 -1
- package/dist/analyzer/declarationUtils.js +1 -0
- package/dist/analyzer/declarationUtils.js.map +1 -1
- package/dist/analyzer/docStringConversion.js +1 -1
- package/dist/analyzer/enums.js +1 -1
- package/dist/analyzer/enums.js.map +1 -1
- package/dist/analyzer/importResolver.d.ts +4 -4
- package/dist/analyzer/importResolver.js +46 -40
- package/dist/analyzer/importResolver.js.map +1 -1
- package/dist/analyzer/importResult.d.ts +2 -2
- package/dist/analyzer/importStatementUtils.js +2 -2
- package/dist/analyzer/importStatementUtils.js.map +1 -1
- package/dist/analyzer/packageTypeVerifier.js +1 -1
- package/dist/analyzer/packageTypeVerifier.js.map +1 -1
- package/dist/analyzer/parameterUtils.js +7 -7
- package/dist/analyzer/parameterUtils.js.map +1 -1
- package/dist/analyzer/parseTreeUtils.d.ts +1 -0
- package/dist/analyzer/parseTreeUtils.js +23 -40
- package/dist/analyzer/parseTreeUtils.js.map +1 -1
- package/dist/analyzer/parseTreeWalker.js +2 -2
- package/dist/analyzer/parseTreeWalker.js.map +1 -1
- package/dist/analyzer/patternMatching.js +1 -0
- package/dist/analyzer/patternMatching.js.map +1 -1
- package/dist/analyzer/program.d.ts +12 -18
- package/dist/analyzer/program.js +25 -12
- package/dist/analyzer/program.js.map +1 -1
- package/dist/analyzer/protocols.js +3 -1
- package/dist/analyzer/protocols.js.map +1 -1
- package/dist/analyzer/service.d.ts +4 -4
- package/dist/analyzer/service.js +29 -24
- package/dist/analyzer/service.js.map +1 -1
- package/dist/analyzer/sourceFile.d.ts +1 -6
- package/dist/analyzer/sourceFile.js +12 -27
- package/dist/analyzer/sourceFile.js.map +1 -1
- package/dist/analyzer/sourceMapper.js +1 -1
- package/dist/analyzer/sourceMapper.js.map +1 -1
- package/dist/analyzer/typeCacheUtils.d.ts +9 -2
- package/dist/analyzer/typeCacheUtils.js +52 -9
- package/dist/analyzer/typeCacheUtils.js.map +1 -1
- package/dist/analyzer/typeDocStringUtils.js +1 -1
- package/dist/analyzer/typeDocStringUtils.js.map +1 -1
- package/dist/analyzer/typeEvaluator.d.ts +2 -2
- package/dist/analyzer/typeEvaluator.js +257 -143
- package/dist/analyzer/typeEvaluator.js.map +1 -1
- package/dist/analyzer/typeEvaluatorTypes.d.ts +3 -2
- package/dist/analyzer/typeEvaluatorTypes.js.map +1 -1
- package/dist/analyzer/typeGuards.js +3 -2
- package/dist/analyzer/typeGuards.js.map +1 -1
- package/dist/analyzer/typePrinter.js +17 -10
- package/dist/analyzer/typePrinter.js.map +1 -1
- package/dist/analyzer/typeStubWriter.js +2 -2
- package/dist/analyzer/typeStubWriter.js.map +1 -1
- package/dist/analyzer/typeUtils.d.ts +1 -0
- package/dist/analyzer/typeUtils.js +20 -18
- package/dist/analyzer/typeUtils.js.map +1 -1
- package/dist/analyzer/typeVarContext.js +2 -2
- package/dist/analyzer/typeVarContext.js.map +1 -1
- package/dist/analyzer/typedDicts.js +140 -43
- package/dist/analyzer/typedDicts.js.map +1 -1
- package/dist/analyzer/types.d.ts +9 -1
- package/dist/analyzer/types.js +39 -16
- package/dist/analyzer/types.js.map +1 -1
- package/dist/backgroundAnalysisBase.d.ts +25 -19
- package/dist/backgroundAnalysisBase.js +145 -115
- package/dist/backgroundAnalysisBase.js.map +1 -1
- package/dist/backgroundThreadBase.d.ts +1 -1
- package/dist/backgroundThreadBase.js +1 -1
- package/dist/backgroundThreadBase.js.map +1 -1
- package/dist/commands/dumpFileDebugInfoCommand.js +5 -5
- package/dist/commands/dumpFileDebugInfoCommand.js.map +1 -1
- package/dist/common/commandLineOptions.d.ts +1 -0
- package/dist/common/commandLineOptions.js.map +1 -1
- package/dist/common/configOptions.js +1 -1
- package/dist/common/configOptions.js.map +1 -1
- package/dist/common/extensibility.d.ts +2 -4
- package/dist/common/extensibility.js.map +1 -1
- package/dist/common/textRange.d.ts +3 -3
- package/dist/common/textRange.js +17 -8
- package/dist/common/textRange.js.map +1 -1
- package/dist/common/uriParser.d.ts +1 -0
- package/dist/common/uriParser.js +9 -0
- package/dist/common/uriParser.js.map +1 -1
- package/dist/common/workspaceEditUtils.d.ts +1 -1
- package/dist/common/workspaceEditUtils.js +5 -7
- package/dist/common/workspaceEditUtils.js.map +1 -1
- package/dist/languageServerBase.js +6 -4
- package/dist/languageServerBase.js.map +1 -1
- package/dist/languageService/analyzerServiceExecutor.js +1 -0
- package/dist/languageService/analyzerServiceExecutor.js.map +1 -1
- package/dist/languageService/autoImporter.d.ts +3 -3
- package/dist/languageService/autoImporter.js +3 -3
- package/dist/languageService/autoImporter.js.map +1 -1
- package/dist/languageService/callHierarchyProvider.js +3 -2
- package/dist/languageService/callHierarchyProvider.js.map +1 -1
- package/dist/languageService/completionProvider.d.ts +2 -2
- package/dist/languageService/completionProvider.js +42 -35
- package/dist/languageService/completionProvider.js.map +1 -1
- package/dist/languageService/documentSymbolCollector.js +9 -9
- package/dist/languageService/documentSymbolCollector.js.map +1 -1
- package/dist/languageService/referencesProvider.js +1 -1
- package/dist/languageService/referencesProvider.js.map +1 -1
- package/dist/languageService/renameProvider.d.ts +3 -3
- package/dist/languageService/renameProvider.js +6 -6
- package/dist/languageService/renameProvider.js.map +1 -1
- package/dist/languageService/signatureHelpProvider.js +1 -1
- package/dist/languageService/signatureHelpProvider.js.map +1 -1
- package/dist/languageService/symbolIndexer.d.ts +0 -8
- package/dist/languageService/symbolIndexer.js.map +1 -1
- package/dist/localization/localize.d.ts +23 -3
- package/dist/localization/localize.js +13 -3
- package/dist/localization/localize.js.map +1 -1
- package/dist/localization/package.nls.en-us.json +15 -5
- package/dist/parser/parseNodes.d.ts +19 -11
- package/dist/parser/parseNodes.js +27 -19
- package/dist/parser/parseNodes.js.map +1 -1
- package/dist/parser/parser.d.ts +3 -3
- package/dist/parser/parser.js +146 -169
- package/dist/parser/parser.js.map +1 -1
- package/dist/parser/stringTokenUtils.d.ts +3 -13
- package/dist/parser/stringTokenUtils.js +8 -181
- package/dist/parser/stringTokenUtils.js.map +1 -1
- package/dist/parser/tokenizer.d.ts +3 -0
- package/dist/parser/tokenizer.js +212 -24
- package/dist/parser/tokenizer.js.map +1 -1
- package/dist/parser/tokenizerTypes.d.ts +31 -1
- package/dist/parser/tokenizerTypes.js +51 -1
- package/dist/parser/tokenizerTypes.js.map +1 -1
- package/dist/readonlyAugmentedFileSystem.js +1 -1
- package/dist/readonlyAugmentedFileSystem.js.map +1 -1
- package/dist/tests/docStringConversion.test.js +44 -0
- package/dist/tests/docStringConversion.test.js.map +1 -1
- package/dist/tests/fourslash/fourslash.d.ts +1 -1
- package/dist/tests/fourslash/rename.function.untitledFile.fourslash.d.ts +1 -0
- package/dist/tests/fourslash/rename.function.untitledFile.fourslash.js +17 -0
- package/dist/tests/fourslash/rename.function.untitledFile.fourslash.js.map +1 -0
- package/dist/tests/harness/fourslash/testLanguageService.js +1 -1
- package/dist/tests/harness/fourslash/testLanguageService.js.map +1 -1
- package/dist/tests/harness/fourslash/testState.d.ts +1 -1
- package/dist/tests/harness/fourslash/testState.js +4 -3
- package/dist/tests/harness/fourslash/testState.js.map +1 -1
- package/dist/tests/importResolver.test.js +3 -3
- package/dist/tests/importResolver.test.js.map +1 -1
- package/dist/tests/textRange.test.d.ts +1 -0
- package/dist/tests/textRange.test.js +45 -0
- package/dist/tests/textRange.test.js.map +1 -0
- package/dist/tests/tokenizer.test.js +280 -58
- package/dist/tests/tokenizer.test.js.map +1 -1
- package/dist/tests/typeEvaluator1.test.js +5 -1
- package/dist/tests/typeEvaluator1.test.js.map +1 -1
- package/dist/tests/typeEvaluator2.test.js +9 -1
- package/dist/tests/typeEvaluator2.test.js.map +1 -1
- package/dist/tests/typeEvaluator3.test.js +11 -1
- package/dist/tests/typeEvaluator3.test.js.map +1 -1
- package/dist/tests/typeEvaluator4.test.js +7 -2
- package/dist/tests/typeEvaluator4.test.js.map +1 -1
- package/dist/tests/typeEvaluator5.test.js +8 -0
- package/dist/tests/typeEvaluator5.test.js.map +1 -1
- package/dist/tests/typePrinter.test.js +3 -3
- package/dist/tests/typePrinter.test.js.map +1 -1
- package/dist/workspaceFactory.js +7 -9
- package/dist/workspaceFactory.js.map +1 -1
- package/package.json +1 -1
package/dist/tests/tokenizer.test.js
@@ -449,90 +449,312 @@ test('Strings: triplicate double quote escape', () => {
 test('Strings: single quoted f-string', () => {
     const t = new tokenizer_1.Tokenizer();
     const results = t.tokenize("a+f'quoted'");
-    assert_1.default.equal(results.tokens.count,
+    assert_1.default.equal(results.tokens.count, 5 + _implicitTokenCount);
     assert_1.default.equal(results.tokens.getItemAt(0).type, 7 /* Identifier */);
     assert_1.default.equal(results.tokens.getItemAt(1).type, 9 /* Operator */);
-    const
-    assert_1.default.equal(
-    assert_1.default.equal(
-    assert_1.default.equal(
-
+    const fStringStartToken = results.tokens.getItemAt(2);
+    assert_1.default.equal(fStringStartToken.type, 24 /* FStringStart */);
+    assert_1.default.equal(fStringStartToken.flags, 1 /* SingleQuote */ | 64 /* Format */);
+    assert_1.default.equal(fStringStartToken.length, 2);
+    const fStringMiddleToken = results.tokens.getItemAt(3);
+    assert_1.default.equal(fStringMiddleToken.type, 25 /* FStringMiddle */);
+    assert_1.default.equal(fStringMiddleToken.flags, 1 /* SingleQuote */ | 64 /* Format */);
+    assert_1.default.equal(fStringMiddleToken.length, 6);
+    assert_1.default.equal(fStringMiddleToken.escapedValue, 'quoted');
+    const fStringEndToken = results.tokens.getItemAt(4);
+    assert_1.default.equal(fStringEndToken.type, 26 /* FStringEnd */);
+    assert_1.default.equal(fStringEndToken.flags, 1 /* SingleQuote */ | 64 /* Format */);
+    assert_1.default.equal(fStringEndToken.length, 1);
 });
 test('Strings: double quoted f-string', () => {
     const t = new tokenizer_1.Tokenizer();
     const results = t.tokenize('x(1,f"quoted")');
-    assert_1.default.equal(results.tokens.count,
+    assert_1.default.equal(results.tokens.count, 8 + _implicitTokenCount);
     assert_1.default.equal(results.tokens.getItemAt(0).type, 7 /* Identifier */);
     assert_1.default.equal(results.tokens.getItemAt(1).type, 13 /* OpenParenthesis */);
     assert_1.default.equal(results.tokens.getItemAt(2).type, 6 /* Number */);
     assert_1.default.equal(results.tokens.getItemAt(3).type, 12 /* Comma */);
-    assert_1.default.equal(results.tokens.getItemAt(
-    const
-    assert_1.default.equal(
-    assert_1.default.equal(
-    assert_1.default.equal(
-
+    assert_1.default.equal(results.tokens.getItemAt(7).type, 14 /* CloseParenthesis */);
+    const fStringStartToken = results.tokens.getItemAt(4);
+    assert_1.default.equal(fStringStartToken.type, 24 /* FStringStart */);
+    assert_1.default.equal(fStringStartToken.flags, 2 /* DoubleQuote */ | 64 /* Format */);
+    assert_1.default.equal(fStringStartToken.length, 2);
+    const fStringMiddleToken = results.tokens.getItemAt(5);
+    assert_1.default.equal(fStringMiddleToken.type, 25 /* FStringMiddle */);
+    assert_1.default.equal(fStringMiddleToken.flags, 2 /* DoubleQuote */ | 64 /* Format */);
+    assert_1.default.equal(fStringMiddleToken.length, 6);
+    assert_1.default.equal(fStringMiddleToken.escapedValue, 'quoted');
+    const fStringEndToken = results.tokens.getItemAt(6);
+    assert_1.default.equal(fStringEndToken.type, 26 /* FStringEnd */);
+    assert_1.default.equal(fStringEndToken.flags, 2 /* DoubleQuote */ | 64 /* Format */);
+    assert_1.default.equal(fStringEndToken.length, 1);
 });
 test('Strings: single quoted multiline f-string', () => {
     const t = new tokenizer_1.Tokenizer();
     const results = t.tokenize("f'''quoted'''");
-    assert_1.default.equal(results.tokens.count,
-    const
-    assert_1.default.equal(
-    assert_1.default.equal(
-    assert_1.default.equal(
-
+    assert_1.default.equal(results.tokens.count, 3 + _implicitTokenCount);
+    const fStringStartToken = results.tokens.getItemAt(0);
+    assert_1.default.equal(fStringStartToken.type, 24 /* FStringStart */);
+    assert_1.default.equal(fStringStartToken.flags, 1 /* SingleQuote */ | 4 /* Triplicate */ | 64 /* Format */);
+    assert_1.default.equal(fStringStartToken.length, 4);
+    const fStringMiddleToken = results.tokens.getItemAt(1);
+    assert_1.default.equal(fStringMiddleToken.type, 25 /* FStringMiddle */);
+    assert_1.default.equal(fStringMiddleToken.flags, 1 /* SingleQuote */ | 4 /* Triplicate */ | 64 /* Format */);
+    assert_1.default.equal(fStringMiddleToken.length, 6);
+    assert_1.default.equal(fStringMiddleToken.escapedValue, 'quoted');
+    const fStringEndToken = results.tokens.getItemAt(2);
+    assert_1.default.equal(fStringEndToken.type, 26 /* FStringEnd */);
+    assert_1.default.equal(fStringEndToken.flags, 1 /* SingleQuote */ | 4 /* Triplicate */ | 64 /* Format */);
+    assert_1.default.equal(fStringEndToken.length, 3);
 });
 test('Strings: double quoted multiline f-string', () => {
     const t = new tokenizer_1.Tokenizer();
     const results = t.tokenize('f"""quoted """');
-    assert_1.default.equal(results.tokens.count,
-    const
-    assert_1.default.equal(
-    assert_1.default.equal(
-    assert_1.default.equal(
-
+    assert_1.default.equal(results.tokens.count, 3 + _implicitTokenCount);
+    const fStringStartToken = results.tokens.getItemAt(0);
+    assert_1.default.equal(fStringStartToken.type, 24 /* FStringStart */);
+    assert_1.default.equal(fStringStartToken.flags, 2 /* DoubleQuote */ | 4 /* Triplicate */ | 64 /* Format */);
+    assert_1.default.equal(fStringStartToken.length, 4);
+    const fStringMiddleToken = results.tokens.getItemAt(1);
+    assert_1.default.equal(fStringMiddleToken.type, 25 /* FStringMiddle */);
+    assert_1.default.equal(fStringMiddleToken.flags, 2 /* DoubleQuote */ | 4 /* Triplicate */ | 64 /* Format */);
+    assert_1.default.equal(fStringMiddleToken.length, 7);
+    assert_1.default.equal(fStringMiddleToken.escapedValue, 'quoted ');
+    const fStringEndToken = results.tokens.getItemAt(2);
+    assert_1.default.equal(fStringEndToken.type, 26 /* FStringEnd */);
+    assert_1.default.equal(fStringEndToken.flags, 2 /* DoubleQuote */ | 4 /* Triplicate */ | 64 /* Format */);
+    assert_1.default.equal(fStringEndToken.length, 3);
 });
 test('Strings: f-string with single right brace', () => {
     const t = new tokenizer_1.Tokenizer();
     const results = t.tokenize("f'hello}'");
-    assert_1.default.equal(results.tokens.count,
-    const
-
-    assert_1.default.equal(
-    assert_1.default.equal(
-
-    assert_1.default.equal(
-    assert_1.default.equal(
-    assert_1.default.equal(
-
+    assert_1.default.equal(results.tokens.count, 4 + _implicitTokenCount);
+    const fStringStartToken = results.tokens.getItemAt(0);
+    assert_1.default.equal(fStringStartToken.type, 24 /* FStringStart */);
+    assert_1.default.equal(fStringStartToken.length, 2);
+    assert_1.default.equal(fStringStartToken.flags, 1 /* SingleQuote */ | 64 /* Format */);
+    const fStringMiddleToken = results.tokens.getItemAt(1);
+    assert_1.default.equal(fStringMiddleToken.type, 25 /* FStringMiddle */);
+    assert_1.default.equal(fStringMiddleToken.length, 5);
+    assert_1.default.equal(fStringMiddleToken.flags, 1 /* SingleQuote */ | 64 /* Format */ | 256 /* ReplacementFieldEnd */);
+    const braceToken = results.tokens.getItemAt(2).type;
+    assert_1.default.equal(braceToken, 18 /* CloseCurlyBrace */);
+    const fStringEndToken = results.tokens.getItemAt(3);
+    assert_1.default.equal(fStringEndToken.type, 26 /* FStringEnd */);
+    assert_1.default.equal(fStringEndToken.flags, 1 /* SingleQuote */ | 64 /* Format */);
+    assert_1.default.equal(fStringEndToken.length, 1);
+});
+test('Strings: f-string with new line escape', () => {
+    const t = new tokenizer_1.Tokenizer();
+    const results = t.tokenize(`f'x \\\ny'`);
+    assert_1.default.equal(results.tokens.count, 3 + _implicitTokenCount);
+    assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
+    assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(2).type, 26 /* FStringEnd */);
 });
 test('Strings: f-string with escape in expression', () => {
     const t = new tokenizer_1.Tokenizer();
-    const results = t.tokenize(
-    assert_1.default.equal(results.tokens.count,
-
-
-    assert_1.default.equal(
-    assert_1.default.equal(
-    assert_1.default.equal(
-    assert_1.default.equal(
-
-
-
+    const results = t.tokenize(`f'hello { "\\t" }'`);
+    assert_1.default.equal(results.tokens.count, 6 + _implicitTokenCount);
+    assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
+    assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(3).type, 5 /* String */);
+    assert_1.default.equal(results.tokens.getItemAt(4).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(5).type, 26 /* FStringEnd */);
+});
+test('Strings: f-string with escape in format string 1', () => {
+    const t = new tokenizer_1.Tokenizer();
+    const results = t.tokenize("f'he\\{ 1 }lo'");
+    assert_1.default.equal(results.tokens.count, 7 + _implicitTokenCount);
+    assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
+    const middleFString = results.tokens.getItemAt(1);
+    assert_1.default.equal(middleFString.type, 25 /* FStringMiddle */);
+    assert_1.default.equal(middleFString.escapedValue.length, 3);
+    assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(3).type, 6 /* Number */);
+    assert_1.default.equal(results.tokens.getItemAt(4).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(5).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(6).type, 26 /* FStringEnd */);
+});
+test('Strings: f-string with escape in format string 2', () => {
+    const t = new tokenizer_1.Tokenizer();
+    const results = t.tokenize(`f"'{{\\"{0}\\": {0}}}'"`);
+    assert_1.default.equal(results.tokens.count, 11 + _implicitTokenCount);
+    assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
+    const middleFString = results.tokens.getItemAt(1);
+    assert_1.default.equal(middleFString.type, 25 /* FStringMiddle */);
+    assert_1.default.equal(middleFString.escapedValue.length, 5);
+    assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(3).type, 6 /* Number */);
+    assert_1.default.equal(results.tokens.getItemAt(4).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(5).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(6).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(7).type, 6 /* Number */);
+    assert_1.default.equal(results.tokens.getItemAt(8).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(9).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(10).type, 26 /* FStringEnd */);
+});
+test('Strings: f-string with double brace', () => {
+    const t = new tokenizer_1.Tokenizer();
+    const results = t.tokenize(`f"hello {{{0==0}}}"`);
+    assert_1.default.equal(results.tokens.count, 9 + _implicitTokenCount);
+    assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
+    assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(3).type, 6 /* Number */);
+    assert_1.default.equal(results.tokens.getItemAt(4).type, 9 /* Operator */);
+    assert_1.default.equal(results.tokens.getItemAt(5).type, 6 /* Number */);
+    assert_1.default.equal(results.tokens.getItemAt(6).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(7).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(8).type, 26 /* FStringEnd */);
+});
+test('Strings: f-string with walrus operator', () => {
+    const t = new tokenizer_1.Tokenizer();
+    const results = t.tokenize(`f"{(x:=0)}"`);
+    assert_1.default.equal(results.tokens.count, 9 + _implicitTokenCount);
+    assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
+    assert_1.default.equal(results.tokens.getItemAt(1).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(2).type, 13 /* OpenParenthesis */);
+    assert_1.default.equal(results.tokens.getItemAt(3).type, 7 /* Identifier */);
+    assert_1.default.equal(results.tokens.getItemAt(4).type, 9 /* Operator */);
+    assert_1.default.equal(results.tokens.getItemAt(5).type, 6 /* Number */);
+    assert_1.default.equal(results.tokens.getItemAt(6).type, 14 /* CloseParenthesis */);
+    assert_1.default.equal(results.tokens.getItemAt(7).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(8).type, 26 /* FStringEnd */);
+});
+test('Strings: f-string with single right brace', () => {
+    const t = new tokenizer_1.Tokenizer();
+    const results = t.tokenize(`f"}"`);
+    assert_1.default.equal(results.tokens.count, 3 + _implicitTokenCount);
+    assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
+    assert_1.default.equal(results.tokens.getItemAt(1).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(2).type, 26 /* FStringEnd */);
+});
+test('Strings: f-string with comment', () => {
+    const t = new tokenizer_1.Tokenizer();
+    const results = t.tokenize(`f'''hello{\nx # comment\n}'''`);
+    assert_1.default.equal(results.tokens.count, 6 + _implicitTokenCount);
+    assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
+    assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(3).type, 7 /* Identifier */);
+    const closeBraceToken = results.tokens.getItemAt(4);
+    assert_1.default.equal(closeBraceToken.type, 18 /* CloseCurlyBrace */);
+    assert_1.default.deepEqual(closeBraceToken.comments, [
+        { type: 0 /* Regular */, value: ' comment', start: 14, length: 8 },
+    ]);
+    assert_1.default.equal(results.tokens.getItemAt(5).type, 26 /* FStringEnd */);
 });
 test('Strings: f-string with unterminated expression', () => {
     const t = new tokenizer_1.Tokenizer();
-    const results = t.tokenize("f'hello { a
-    assert_1.default.equal(results.tokens.count,
-
-
-    assert_1.default.equal(
-    assert_1.default.equal(
-
-    assert_1.default.equal(
-    assert_1.default.equal(
-
+    const results = t.tokenize("f'hello { a'");
+    assert_1.default.equal(results.tokens.count, 5 + _implicitTokenCount);
+    assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
+    assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(3).type, 7 /* Identifier */);
+    const fStringEnd = results.tokens.getItemAt(4);
+    assert_1.default.equal(fStringEnd.type, 26 /* FStringEnd */);
+    assert_1.default.equal(fStringEnd.flags, 64 /* Format */ | 1 /* SingleQuote */);
+});
+test('Strings: f-string with replacement field', () => {
+    const t = new tokenizer_1.Tokenizer();
+    const results = t.tokenize("f'hello { a + b}'");
+    assert_1.default.equal(results.tokens.count, 8 + _implicitTokenCount);
+    assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
+    assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(3).type, 7 /* Identifier */);
+    assert_1.default.equal(results.tokens.getItemAt(4).type, 9 /* Operator */);
+    assert_1.default.equal(results.tokens.getItemAt(5).type, 7 /* Identifier */);
+    assert_1.default.equal(results.tokens.getItemAt(6).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(7).type, 26 /* FStringEnd */);
+});
+test('Strings: f-string with format specifier', () => {
+    const t = new tokenizer_1.Tokenizer();
+    const results = t.tokenize("f'hello { a ! b}'");
+    assert_1.default.equal(results.tokens.count, 8 + _implicitTokenCount);
+    assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
+    assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(3).type, 7 /* Identifier */);
+    assert_1.default.equal(results.tokens.getItemAt(4).type, 23 /* ExclamationMark */);
+    assert_1.default.equal(results.tokens.getItemAt(5).type, 7 /* Identifier */);
+    assert_1.default.equal(results.tokens.getItemAt(6).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(7).type, 26 /* FStringEnd */);
+});
+test('Strings: f-string with debug format specifier', () => {
+    const t = new tokenizer_1.Tokenizer();
+    const results = t.tokenize("f'hello { a =}'");
+    assert_1.default.equal(results.tokens.count, 7 + _implicitTokenCount);
+    assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
+    assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(3).type, 7 /* Identifier */);
+    assert_1.default.equal(results.tokens.getItemAt(4).type, 9 /* Operator */);
+    assert_1.default.equal(results.tokens.getItemAt(5).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(6).type, 26 /* FStringEnd */);
+});
+test('Strings: nested f-string', () => {
+    const t = new tokenizer_1.Tokenizer();
+    const results = t.tokenize("f'{f'{a}'}'");
+    assert_1.default.equal(results.tokens.count, 9 + _implicitTokenCount);
+    assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
+    assert_1.default.equal(results.tokens.getItemAt(1).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(2).type, 24 /* FStringStart */);
+    assert_1.default.equal(results.tokens.getItemAt(3).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(4).type, 7 /* Identifier */);
+    assert_1.default.equal(results.tokens.getItemAt(5).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(6).type, 26 /* FStringEnd */);
+    assert_1.default.equal(results.tokens.getItemAt(7).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(8).type, 26 /* FStringEnd */);
+});
+test('Strings: nested f-string formats 1', () => {
+    const t = new tokenizer_1.Tokenizer();
+    const results = t.tokenize("f'{a:x{{b}+:x{c}+}}'");
+    assert_1.default.equal(results.tokens.count, 19 + _implicitTokenCount);
+    assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
+    assert_1.default.equal(results.tokens.getItemAt(1).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(2).type, 7 /* Identifier */);
+    assert_1.default.equal(results.tokens.getItemAt(3).type, 10 /* Colon */);
+    assert_1.default.equal(results.tokens.getItemAt(4).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(5).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(6).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(7).type, 7 /* Identifier */);
+    assert_1.default.equal(results.tokens.getItemAt(8).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(9).type, 9 /* Operator */);
+    assert_1.default.equal(results.tokens.getItemAt(10).type, 10 /* Colon */);
+    assert_1.default.equal(results.tokens.getItemAt(11).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(12).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(13).type, 7 /* Identifier */);
+    assert_1.default.equal(results.tokens.getItemAt(14).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(15).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(16).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(17).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(18).type, 26 /* FStringEnd */);
+});
+test('Strings: nested f-string formats 2', () => {
+    const t = new tokenizer_1.Tokenizer();
+    const results = t.tokenize("f'hi{'x':*^{8:{'':}}0}'");
+    assert_1.default.equal(results.tokens.count, 17 + _implicitTokenCount);
+    assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
+    assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(3).type, 5 /* String */);
+    assert_1.default.equal(results.tokens.getItemAt(4).type, 10 /* Colon */);
+    assert_1.default.equal(results.tokens.getItemAt(5).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(6).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(7).type, 6 /* Number */);
+    assert_1.default.equal(results.tokens.getItemAt(8).type, 10 /* Colon */);
+    assert_1.default.equal(results.tokens.getItemAt(9).type, 17 /* OpenCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(10).type, 5 /* String */);
+    assert_1.default.equal(results.tokens.getItemAt(11).type, 10 /* Colon */);
+    assert_1.default.equal(results.tokens.getItemAt(12).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(13).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(14).type, 25 /* FStringMiddle */);
+    assert_1.default.equal(results.tokens.getItemAt(15).type, 18 /* CloseCurlyBrace */);
+    assert_1.default.equal(results.tokens.getItemAt(16).type, 26 /* FStringEnd */);
 });
 test('Strings: escape at the end of single quoted string', () => {
     const t = new tokenizer_1.Tokenizer();
@@ -782,7 +1004,7 @@ test('Strings: bad name escapes', () => {
 });
 test('Comments', () => {
     const t = new tokenizer_1.Tokenizer();
-    const results = t.tokenize(' #co"""mment1\n\t\n#
+    const results = t.tokenize(' #co"""mment1\n\t\n#x\'y2 ');
     assert_1.default.equal(results.tokens.count, 1 + _implicitTokenCountNoImplicitNewLine);
     assert_1.default.equal(results.tokens.getItemAt(0).type, 2 /* NewLine */);
 });
@@ -1226,7 +1448,7 @@ test('TypeIgnoreAll2', () => {
 });
 test('TypeIgnoreAll3', () => {
     const t = new tokenizer_1.Tokenizer();
-    const results = t.tokenize('\n# type:
+    const results = t.tokenize('\n# type: ignoreSsss\n');
     (0, assert_1.default)(!results.typeIgnoreAll);
 });
 test('TypeIgnoreAll3', () => {