brighterscript 1.0.0-alpha.24 → 1.0.0-alpha.26
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these versions as they appear in their respective public registries.
- package/CHANGELOG.md +521 -233
- package/README.md +45 -139
- package/bsconfig.schema.json +46 -0
- package/dist/ActionPipeline.d.ts +10 -0
- package/dist/ActionPipeline.js +40 -0
- package/dist/ActionPipeline.js.map +1 -0
- package/dist/AstValidationSegmenter.d.ts +25 -0
- package/dist/AstValidationSegmenter.js +152 -0
- package/dist/AstValidationSegmenter.js.map +1 -0
- package/dist/BsConfig.d.ts +40 -4
- package/dist/BusyStatusTracker.d.ts +31 -0
- package/dist/BusyStatusTracker.js +83 -0
- package/dist/BusyStatusTracker.js.map +1 -0
- package/dist/Cache.js +3 -3
- package/dist/Cache.js.map +1 -1
- package/dist/CacheVerifier.d.ts +7 -0
- package/dist/CacheVerifier.js +20 -0
- package/dist/CacheVerifier.js.map +1 -0
- package/dist/CodeActionUtil.d.ts +3 -3
- package/dist/CodeActionUtil.js.map +1 -1
- package/dist/CommentFlagProcessor.d.ts +3 -2
- package/dist/CommentFlagProcessor.js +5 -4
- package/dist/CommentFlagProcessor.js.map +1 -1
- package/dist/DependencyGraph.d.ts +3 -2
- package/dist/DependencyGraph.js +11 -10
- package/dist/DependencyGraph.js.map +1 -1
- package/dist/DiagnosticCollection.js +9 -5
- package/dist/DiagnosticCollection.js.map +1 -1
- package/dist/DiagnosticFilterer.d.ts +1 -0
- package/dist/DiagnosticFilterer.js +5 -3
- package/dist/DiagnosticFilterer.js.map +1 -1
- package/dist/DiagnosticMessages.d.ts +61 -13
- package/dist/DiagnosticMessages.js +116 -19
- package/dist/DiagnosticMessages.js.map +1 -1
- package/dist/DiagnosticSeverityAdjuster.d.ts +7 -0
- package/dist/DiagnosticSeverityAdjuster.js +41 -0
- package/dist/DiagnosticSeverityAdjuster.js.map +1 -0
- package/dist/FunctionScope.d.ts +28 -0
- package/dist/FunctionScope.js +52 -0
- package/dist/FunctionScope.js.map +1 -0
- package/dist/KeyedThrottler.d.ts +3 -3
- package/dist/KeyedThrottler.js +3 -3
- package/dist/KeyedThrottler.js.map +1 -1
- package/dist/LanguageServer.d.ts +23 -11
- package/dist/LanguageServer.js +150 -69
- package/dist/LanguageServer.js.map +1 -1
- package/dist/Logger.d.ts +3 -2
- package/dist/Logger.js +11 -3
- package/dist/Logger.js.map +1 -1
- package/dist/PluginInterface.d.ts +21 -3
- package/dist/PluginInterface.js +74 -6
- package/dist/PluginInterface.js.map +1 -1
- package/dist/Program.d.ts +158 -79
- package/dist/Program.js +841 -706
- package/dist/Program.js.map +1 -1
- package/dist/ProgramBuilder.d.ts +22 -12
- package/dist/ProgramBuilder.js +130 -103
- package/dist/ProgramBuilder.js.map +1 -1
- package/dist/Scope.d.ts +86 -137
- package/dist/Scope.js +453 -519
- package/dist/Scope.js.map +1 -1
- package/dist/Stopwatch.js +1 -1
- package/dist/Stopwatch.js.map +1 -1
- package/dist/SymbolTable.d.ts +89 -34
- package/dist/SymbolTable.js +239 -114
- package/dist/SymbolTable.js.map +1 -1
- package/dist/Throttler.d.ts +12 -0
- package/dist/Throttler.js +39 -0
- package/dist/Throttler.js.map +1 -1
- package/dist/Watcher.d.ts +0 -3
- package/dist/Watcher.js +0 -3
- package/dist/Watcher.js.map +1 -1
- package/dist/XmlScope.d.ts +4 -11
- package/dist/XmlScope.js +75 -88
- package/dist/XmlScope.js.map +1 -1
- package/dist/astUtils/CachedLookups.d.ts +48 -0
- package/dist/astUtils/CachedLookups.js +323 -0
- package/dist/astUtils/CachedLookups.js.map +1 -0
- package/dist/astUtils/{AstEditor.d.ts → Editor.d.ts} +9 -5
- package/dist/astUtils/{AstEditor.js → Editor.js} +10 -4
- package/dist/astUtils/Editor.js.map +1 -0
- package/dist/astUtils/{AstEditor.spec.js → Editor.spec.js} +69 -65
- package/dist/astUtils/Editor.spec.js.map +1 -0
- package/dist/astUtils/creators.d.ts +10 -10
- package/dist/astUtils/creators.js +54 -24
- package/dist/astUtils/creators.js.map +1 -1
- package/dist/astUtils/creators.spec.js +5 -5
- package/dist/astUtils/creators.spec.js.map +1 -1
- package/dist/astUtils/reflection.d.ts +132 -104
- package/dist/astUtils/reflection.js +220 -174
- package/dist/astUtils/reflection.js.map +1 -1
- package/dist/astUtils/reflection.spec.js +256 -157
- package/dist/astUtils/reflection.spec.js.map +1 -1
- package/dist/astUtils/stackedVisitor.spec.js +12 -12
- package/dist/astUtils/stackedVisitor.spec.js.map +1 -1
- package/dist/astUtils/visitors.d.ts +53 -35
- package/dist/astUtils/visitors.js +29 -3
- package/dist/astUtils/visitors.js.map +1 -1
- package/dist/astUtils/visitors.spec.js +208 -52
- package/dist/astUtils/visitors.spec.js.map +1 -1
- package/dist/astUtils/xml.d.ts +9 -9
- package/dist/astUtils/xml.js +9 -9
- package/dist/astUtils/xml.js.map +1 -1
- package/dist/bscPlugin/BscPlugin.d.ts +11 -2
- package/dist/bscPlugin/BscPlugin.js +37 -3
- package/dist/bscPlugin/BscPlugin.js.map +1 -1
- package/dist/bscPlugin/CallExpressionInfo.d.ts +36 -0
- package/dist/bscPlugin/CallExpressionInfo.js +131 -0
- package/dist/bscPlugin/CallExpressionInfo.js.map +1 -0
- package/dist/bscPlugin/FileWriter.d.ts +6 -0
- package/dist/bscPlugin/FileWriter.js +24 -0
- package/dist/bscPlugin/FileWriter.js.map +1 -0
- package/dist/bscPlugin/SignatureHelpUtil.d.ts +10 -0
- package/dist/bscPlugin/SignatureHelpUtil.js +136 -0
- package/dist/bscPlugin/SignatureHelpUtil.js.map +1 -0
- package/dist/bscPlugin/codeActions/CodeActionsProcessor.js +16 -13
- package/dist/bscPlugin/codeActions/CodeActionsProcessor.js.map +1 -1
- package/dist/bscPlugin/codeActions/CodeActionsProcessor.spec.js +16 -16
- package/dist/bscPlugin/codeActions/CodeActionsProcessor.spec.js.map +1 -1
- package/dist/bscPlugin/completions/CompletionsProcessor.d.ts +52 -1
- package/dist/bscPlugin/completions/CompletionsProcessor.js +517 -26
- package/dist/bscPlugin/completions/CompletionsProcessor.js.map +1 -1
- package/dist/bscPlugin/completions/CompletionsProcessor.spec.js +1909 -0
- package/dist/bscPlugin/completions/CompletionsProcessor.spec.js.map +1 -0
- package/dist/bscPlugin/definition/DefinitionProvider.d.ts +13 -0
- package/dist/bscPlugin/definition/DefinitionProvider.js +210 -0
- package/dist/bscPlugin/definition/DefinitionProvider.js.map +1 -0
- package/dist/bscPlugin/definition/DefinitionProvider.spec.js +88 -0
- package/dist/bscPlugin/definition/DefinitionProvider.spec.js.map +1 -0
- package/dist/bscPlugin/fileProviders/FileProvider.d.ts +9 -0
- package/dist/bscPlugin/fileProviders/FileProvider.js +51 -0
- package/dist/bscPlugin/fileProviders/FileProvider.js.map +1 -0
- package/dist/bscPlugin/hover/HoverProcessor.d.ts +7 -7
- package/dist/bscPlugin/hover/HoverProcessor.js +123 -125
- package/dist/bscPlugin/hover/HoverProcessor.js.map +1 -1
- package/dist/bscPlugin/hover/HoverProcessor.spec.js +371 -53
- package/dist/bscPlugin/hover/HoverProcessor.spec.js.map +1 -1
- package/dist/bscPlugin/semanticTokens/BrsFileSemanticTokensProcessor.d.ts +2 -1
- package/dist/bscPlugin/semanticTokens/BrsFileSemanticTokensProcessor.js +83 -23
- package/dist/bscPlugin/semanticTokens/BrsFileSemanticTokensProcessor.js.map +1 -1
- package/dist/bscPlugin/semanticTokens/BrsFileSemanticTokensProcessor.spec.js +83 -6
- package/dist/bscPlugin/semanticTokens/BrsFileSemanticTokensProcessor.spec.js.map +1 -1
- package/dist/bscPlugin/serialize/BslibInjector.spec.d.ts +1 -0
- package/dist/bscPlugin/serialize/BslibInjector.spec.js +19 -0
- package/dist/bscPlugin/serialize/BslibInjector.spec.js.map +1 -0
- package/dist/bscPlugin/serialize/BslibManager.d.ts +9 -0
- package/dist/bscPlugin/serialize/BslibManager.js +40 -0
- package/dist/bscPlugin/serialize/BslibManager.js.map +1 -0
- package/dist/bscPlugin/serialize/FileSerializer.d.ts +9 -0
- package/dist/bscPlugin/serialize/FileSerializer.js +72 -0
- package/dist/bscPlugin/serialize/FileSerializer.js.map +1 -0
- package/dist/bscPlugin/transpile/{BrsFilePreTranspileProcessor.d.ts → BrsFileTranspileProcessor.d.ts} +4 -2
- package/dist/bscPlugin/transpile/{BrsFilePreTranspileProcessor.js → BrsFileTranspileProcessor.js} +38 -12
- package/dist/bscPlugin/transpile/BrsFileTranspileProcessor.js.map +1 -0
- package/dist/bscPlugin/transpile/BrsFileTranspileProcessor.spec.d.ts +1 -0
- package/dist/bscPlugin/transpile/BrsFileTranspileProcessor.spec.js +41 -0
- package/dist/bscPlugin/transpile/BrsFileTranspileProcessor.spec.js.map +1 -0
- package/dist/bscPlugin/transpile/XmlFilePreTranspileProcessor.d.ts +12 -0
- package/dist/bscPlugin/transpile/XmlFilePreTranspileProcessor.js +99 -0
- package/dist/bscPlugin/transpile/XmlFilePreTranspileProcessor.js.map +1 -0
- package/dist/bscPlugin/validation/BrsFileValidator.d.ts +13 -5
- package/dist/bscPlugin/validation/BrsFileValidator.js +262 -52
- package/dist/bscPlugin/validation/BrsFileValidator.js.map +1 -1
- package/dist/bscPlugin/validation/BrsFileValidator.spec.js +230 -14
- package/dist/bscPlugin/validation/BrsFileValidator.spec.js.map +1 -1
- package/dist/bscPlugin/validation/ProgramValidator.d.ts +10 -0
- package/dist/bscPlugin/validation/ProgramValidator.js +32 -0
- package/dist/bscPlugin/validation/ProgramValidator.js.map +1 -0
- package/dist/bscPlugin/validation/ScopeValidator.d.ts +58 -27
- package/dist/bscPlugin/validation/ScopeValidator.js +514 -286
- package/dist/bscPlugin/validation/ScopeValidator.js.map +1 -1
- package/dist/bscPlugin/validation/ScopeValidator.spec.d.ts +1 -0
- package/dist/bscPlugin/validation/ScopeValidator.spec.js +2527 -0
- package/dist/bscPlugin/validation/ScopeValidator.spec.js.map +1 -0
- package/dist/bscPlugin/validation/XmlFileValidator.d.ts +8 -0
- package/dist/bscPlugin/validation/XmlFileValidator.js +44 -0
- package/dist/bscPlugin/validation/XmlFileValidator.js.map +1 -0
- package/dist/cli.js +104 -13
- package/dist/cli.js.map +1 -1
- package/dist/deferred.d.ts +3 -3
- package/dist/deferred.js.map +1 -1
- package/dist/diagnosticUtils.d.ts +8 -2
- package/dist/diagnosticUtils.js +47 -17
- package/dist/diagnosticUtils.js.map +1 -1
- package/dist/examples/plugins/removePrint.js +8 -10
- package/dist/examples/plugins/removePrint.js.map +1 -1
- package/dist/files/AssetFile.d.ts +26 -0
- package/dist/files/AssetFile.js +26 -0
- package/dist/files/AssetFile.js.map +1 -0
- package/dist/files/BrsFile.Class.spec.js +523 -493
- package/dist/files/BrsFile.Class.spec.js.map +1 -1
- package/dist/files/BrsFile.d.ts +111 -117
- package/dist/files/BrsFile.js +684 -1142
- package/dist/files/BrsFile.js.map +1 -1
- package/dist/files/BrsFile.spec.js +1783 -1233
- package/dist/files/BrsFile.spec.js.map +1 -1
- package/dist/files/BscFile.d.ts +104 -0
- package/dist/files/BscFile.js +16 -0
- package/dist/files/BscFile.js.map +1 -0
- package/dist/files/Factory.d.ts +25 -0
- package/dist/files/Factory.js +22 -0
- package/dist/files/Factory.js.map +1 -0
- package/dist/files/LazyFileData.d.ts +20 -0
- package/dist/files/LazyFileData.js +54 -0
- package/dist/files/LazyFileData.js.map +1 -0
- package/dist/files/LazyFileData.spec.d.ts +1 -0
- package/dist/files/LazyFileData.spec.js +27 -0
- package/dist/files/LazyFileData.spec.js.map +1 -0
- package/dist/files/XmlFile.d.ts +70 -32
- package/dist/files/XmlFile.js +106 -118
- package/dist/files/XmlFile.js.map +1 -1
- package/dist/files/XmlFile.spec.js +325 -262
- package/dist/files/XmlFile.spec.js.map +1 -1
- package/dist/files/tests/imports.spec.js +48 -40
- package/dist/files/tests/imports.spec.js.map +1 -1
- package/dist/files/tests/optionalChaning.spec.js +84 -24
- package/dist/files/tests/optionalChaning.spec.js.map +1 -1
- package/dist/globalCallables.js +16 -21
- package/dist/globalCallables.js.map +1 -1
- package/dist/index.d.ts +12 -1
- package/dist/index.js +12 -1
- package/dist/index.js.map +1 -1
- package/dist/interfaces.d.ts +421 -162
- package/dist/interfaces.js +27 -0
- package/dist/interfaces.js.map +1 -1
- package/dist/lexer/Character.spec.js +5 -5
- package/dist/lexer/Character.spec.js.map +1 -1
- package/dist/lexer/Lexer.d.ts +12 -5
- package/dist/lexer/Lexer.js +28 -13
- package/dist/lexer/Lexer.js.map +1 -1
- package/dist/lexer/Lexer.spec.js +181 -135
- package/dist/lexer/Lexer.spec.js.map +1 -1
- package/dist/lexer/Token.d.ts +9 -1
- package/dist/lexer/Token.js +9 -1
- package/dist/lexer/Token.js.map +1 -1
- package/dist/lexer/TokenKind.d.ts +8 -0
- package/dist/lexer/TokenKind.js +24 -4
- package/dist/lexer/TokenKind.js.map +1 -1
- package/dist/parser/AstNode.d.ts +162 -0
- package/dist/parser/AstNode.js +225 -0
- package/dist/parser/AstNode.js.map +1 -0
- package/dist/parser/AstNode.spec.d.ts +1 -0
- package/dist/parser/AstNode.spec.js +165 -0
- package/dist/parser/AstNode.spec.js.map +1 -0
- package/dist/parser/BrsTranspileState.d.ts +4 -7
- package/dist/parser/BrsTranspileState.js +4 -12
- package/dist/parser/BrsTranspileState.js.map +1 -1
- package/dist/parser/Expression.d.ts +376 -283
- package/dist/parser/Expression.js +742 -585
- package/dist/parser/Expression.js.map +1 -1
- package/dist/parser/Parser.Class.spec.js +151 -145
- package/dist/parser/Parser.Class.spec.js.map +1 -1
- package/dist/parser/Parser.d.ts +48 -201
- package/dist/parser/Parser.js +705 -1026
- package/dist/parser/Parser.js.map +1 -1
- package/dist/parser/Parser.spec.d.ts +3 -1
- package/dist/parser/Parser.spec.js +861 -848
- package/dist/parser/Parser.spec.js.map +1 -1
- package/dist/parser/SGParser.d.ts +9 -8
- package/dist/parser/SGParser.js +10 -8
- package/dist/parser/SGParser.js.map +1 -1
- package/dist/parser/SGParser.spec.js +27 -38
- package/dist/parser/SGParser.spec.js.map +1 -1
- package/dist/parser/SGTypes.d.ts +98 -35
- package/dist/parser/SGTypes.js +169 -99
- package/dist/parser/SGTypes.js.map +1 -1
- package/dist/parser/Statement.d.ts +468 -272
- package/dist/parser/Statement.js +904 -631
- package/dist/parser/Statement.js.map +1 -1
- package/dist/parser/Statement.spec.js +47 -23
- package/dist/parser/Statement.spec.js.map +1 -1
- package/dist/parser/TranspileState.d.ts +1 -1
- package/dist/parser/TranspileState.js +7 -12
- package/dist/parser/TranspileState.js.map +1 -1
- package/dist/parser/tests/Parser.spec.js +3 -2
- package/dist/parser/tests/Parser.spec.js.map +1 -1
- package/dist/parser/tests/controlFlow/For.spec.js +33 -23
- package/dist/parser/tests/controlFlow/For.spec.js.map +1 -1
- package/dist/parser/tests/controlFlow/ForEach.spec.js +25 -20
- package/dist/parser/tests/controlFlow/ForEach.spec.js.map +1 -1
- package/dist/parser/tests/controlFlow/If.spec.js +96 -94
- package/dist/parser/tests/controlFlow/If.spec.js.map +1 -1
- package/dist/parser/tests/controlFlow/While.spec.js +22 -16
- package/dist/parser/tests/controlFlow/While.spec.js.map +1 -1
- package/dist/parser/tests/expression/Additive.spec.js +8 -8
- package/dist/parser/tests/expression/Additive.spec.js.map +1 -1
- package/dist/parser/tests/expression/ArrayLiterals.spec.js +58 -21
- package/dist/parser/tests/expression/ArrayLiterals.spec.js.map +1 -1
- package/dist/parser/tests/expression/AssociativeArrayLiterals.spec.js +62 -21
- package/dist/parser/tests/expression/AssociativeArrayLiterals.spec.js.map +1 -1
- package/dist/parser/tests/expression/Boolean.spec.js +8 -8
- package/dist/parser/tests/expression/Boolean.spec.js.map +1 -1
- package/dist/parser/tests/expression/Call.spec.js +129 -21
- package/dist/parser/tests/expression/Call.spec.js.map +1 -1
- package/dist/parser/tests/expression/Exponential.spec.js +5 -5
- package/dist/parser/tests/expression/Exponential.spec.js.map +1 -1
- package/dist/parser/tests/expression/Function.spec.js +36 -36
- package/dist/parser/tests/expression/Function.spec.js.map +1 -1
- package/dist/parser/tests/expression/Indexing.spec.js +92 -22
- package/dist/parser/tests/expression/Indexing.spec.js.map +1 -1
- package/dist/parser/tests/expression/Multiplicative.spec.js +9 -9
- package/dist/parser/tests/expression/Multiplicative.spec.js.map +1 -1
- package/dist/parser/tests/expression/NullCoalescenceExpression.spec.js +59 -59
- package/dist/parser/tests/expression/NullCoalescenceExpression.spec.js.map +1 -1
- package/dist/parser/tests/expression/PrefixUnary.spec.js +12 -12
- package/dist/parser/tests/expression/PrefixUnary.spec.js.map +1 -1
- package/dist/parser/tests/expression/Primary.spec.js +12 -12
- package/dist/parser/tests/expression/Primary.spec.js.map +1 -1
- package/dist/parser/tests/expression/RegexLiteralExpression.spec.js +10 -10
- package/dist/parser/tests/expression/RegexLiteralExpression.spec.js.map +1 -1
- package/dist/parser/tests/expression/Relational.spec.js +13 -13
- package/dist/parser/tests/expression/Relational.spec.js.map +1 -1
- package/dist/parser/tests/expression/SourceLiteralExpression.spec.js +24 -24
- package/dist/parser/tests/expression/SourceLiteralExpression.spec.js.map +1 -1
- package/dist/parser/tests/expression/TemplateStringExpression.spec.js +96 -57
- package/dist/parser/tests/expression/TemplateStringExpression.spec.js.map +1 -1
- package/dist/parser/tests/expression/TernaryExpression.spec.js +89 -89
- package/dist/parser/tests/expression/TernaryExpression.spec.js.map +1 -1
- package/dist/parser/tests/expression/TypeExpression.spec.d.ts +1 -0
- package/dist/parser/tests/expression/TypeExpression.spec.js +127 -0
- package/dist/parser/tests/expression/TypeExpression.spec.js.map +1 -0
- package/dist/parser/tests/expression/UnaryExpression.spec.d.ts +1 -0
- package/dist/parser/tests/expression/UnaryExpression.spec.js +52 -0
- package/dist/parser/tests/expression/UnaryExpression.spec.js.map +1 -0
- package/dist/parser/tests/statement/AssignmentOperators.spec.js +15 -15
- package/dist/parser/tests/statement/AssignmentOperators.spec.js.map +1 -1
- package/dist/parser/tests/statement/ConstStatement.spec.js +82 -33
- package/dist/parser/tests/statement/ConstStatement.spec.js.map +1 -1
- package/dist/parser/tests/statement/Continue.spec.d.ts +1 -0
- package/dist/parser/tests/statement/Continue.spec.js +119 -0
- package/dist/parser/tests/statement/Continue.spec.js.map +1 -0
- package/dist/parser/tests/statement/Declaration.spec.js +19 -19
- package/dist/parser/tests/statement/Declaration.spec.js.map +1 -1
- package/dist/parser/tests/statement/Dim.spec.js +22 -22
- package/dist/parser/tests/statement/Dim.spec.js.map +1 -1
- package/dist/parser/tests/statement/Enum.spec.js +98 -302
- package/dist/parser/tests/statement/Enum.spec.js.map +1 -1
- package/dist/parser/tests/statement/For.spec.js +9 -10
- package/dist/parser/tests/statement/For.spec.js.map +1 -1
- package/dist/parser/tests/statement/ForEach.spec.js +8 -9
- package/dist/parser/tests/statement/ForEach.spec.js.map +1 -1
- package/dist/parser/tests/statement/Function.spec.js +44 -35
- package/dist/parser/tests/statement/Function.spec.js.map +1 -1
- package/dist/parser/tests/statement/Goto.spec.js +5 -5
- package/dist/parser/tests/statement/Goto.spec.js.map +1 -1
- package/dist/parser/tests/statement/Increment.spec.js +20 -20
- package/dist/parser/tests/statement/Increment.spec.js.map +1 -1
- package/dist/parser/tests/statement/InterfaceStatement.spec.js +30 -196
- package/dist/parser/tests/statement/InterfaceStatement.spec.js.map +1 -1
- package/dist/parser/tests/statement/LibraryStatement.spec.js +11 -11
- package/dist/parser/tests/statement/LibraryStatement.spec.js.map +1 -1
- package/dist/parser/tests/statement/Misc.spec.js +16 -78
- package/dist/parser/tests/statement/Misc.spec.js.map +1 -1
- package/dist/parser/tests/statement/PrintStatement.spec.js +36 -34
- package/dist/parser/tests/statement/PrintStatement.spec.js.map +1 -1
- package/dist/parser/tests/statement/ReturnStatement.spec.js +14 -12
- package/dist/parser/tests/statement/ReturnStatement.spec.js.map +1 -1
- package/dist/parser/tests/statement/Set.spec.js +48 -35
- package/dist/parser/tests/statement/Set.spec.js.map +1 -1
- package/dist/parser/tests/statement/Stop.spec.js +6 -6
- package/dist/parser/tests/statement/Stop.spec.js.map +1 -1
- package/dist/parser/tests/statement/Throw.spec.js +6 -6
- package/dist/parser/tests/statement/Throw.spec.js.map +1 -1
- package/dist/parser/tests/statement/TryCatch.spec.js +18 -16
- package/dist/parser/tests/statement/TryCatch.spec.js.map +1 -1
- package/dist/preprocessor/Manifest.d.ts +1 -1
- package/dist/preprocessor/Manifest.js +2 -2
- package/dist/preprocessor/Manifest.js.map +1 -1
- package/dist/preprocessor/Manifest.spec.js +8 -8
- package/dist/preprocessor/Manifest.spec.js.map +1 -1
- package/dist/preprocessor/Preprocessor.d.ts +5 -6
- package/dist/preprocessor/Preprocessor.js +5 -5
- package/dist/preprocessor/Preprocessor.js.map +1 -1
- package/dist/preprocessor/Preprocessor.spec.js +25 -25
- package/dist/preprocessor/Preprocessor.spec.js.map +1 -1
- package/dist/preprocessor/PreprocessorParser.d.ts +1 -1
- package/dist/preprocessor/PreprocessorParser.js +7 -1
- package/dist/preprocessor/PreprocessorParser.js.map +1 -1
- package/dist/preprocessor/PreprocessorParser.spec.js +13 -13
- package/dist/preprocessor/PreprocessorParser.spec.js.map +1 -1
- package/dist/roku-types/data.json +5892 -10081
- package/dist/roku-types/index.d.ts +622 -1719
- package/dist/types/ArrayType.d.ts +10 -9
- package/dist/types/ArrayType.js +65 -60
- package/dist/types/ArrayType.js.map +1 -1
- package/dist/types/ArrayType.spec.js +36 -68
- package/dist/types/ArrayType.spec.js.map +1 -1
- package/dist/types/AssociativeArrayType.d.ts +11 -0
- package/dist/types/AssociativeArrayType.js +52 -0
- package/dist/types/AssociativeArrayType.js.map +1 -0
- package/dist/types/BaseFunctionType.d.ts +9 -0
- package/dist/types/BaseFunctionType.js +25 -0
- package/dist/types/BaseFunctionType.js.map +1 -0
- package/dist/types/BooleanType.d.ts +8 -5
- package/dist/types/BooleanType.js +14 -7
- package/dist/types/BooleanType.js.map +1 -1
- package/dist/types/BooleanType.spec.js +10 -6
- package/dist/types/BooleanType.spec.js.map +1 -1
- package/dist/types/BscType.d.ts +32 -21
- package/dist/types/BscType.js +118 -21
- package/dist/types/BscType.js.map +1 -1
- package/dist/types/BscTypeKind.d.ts +25 -0
- package/dist/types/BscTypeKind.js +30 -0
- package/dist/types/BscTypeKind.js.map +1 -0
- package/dist/types/BuiltInInterfaceAdder.d.ts +23 -0
- package/dist/types/BuiltInInterfaceAdder.js +171 -0
- package/dist/types/BuiltInInterfaceAdder.js.map +1 -0
- package/dist/types/BuiltInInterfaceAdder.spec.d.ts +1 -0
- package/dist/types/BuiltInInterfaceAdder.spec.js +116 -0
- package/dist/types/BuiltInInterfaceAdder.spec.js.map +1 -0
- package/dist/types/ClassType.d.ts +17 -0
- package/dist/types/ClassType.js +58 -0
- package/dist/types/ClassType.js.map +1 -0
- package/dist/types/ClassType.spec.d.ts +1 -0
- package/dist/types/ClassType.spec.js +77 -0
- package/dist/types/ClassType.spec.js.map +1 -0
- package/dist/types/ComponentType.d.ts +26 -0
- package/dist/types/ComponentType.js +83 -0
- package/dist/types/ComponentType.js.map +1 -0
- package/dist/types/DoubleType.d.ts +8 -5
- package/dist/types/DoubleType.js +18 -16
- package/dist/types/DoubleType.js.map +1 -1
- package/dist/types/DoubleType.spec.js +12 -6
- package/dist/types/DoubleType.spec.js.map +1 -1
- package/dist/types/DynamicType.d.ts +9 -5
- package/dist/types/DynamicType.js +15 -4
- package/dist/types/DynamicType.js.map +1 -1
- package/dist/types/DynamicType.spec.js +16 -5
- package/dist/types/DynamicType.spec.js.map +1 -1
- package/dist/types/EnumType.d.ts +30 -12
- package/dist/types/EnumType.js +43 -17
- package/dist/types/EnumType.js.map +1 -1
- package/dist/types/EnumType.spec.d.ts +1 -0
- package/dist/types/EnumType.spec.js +33 -0
- package/dist/types/EnumType.spec.js.map +1 -0
- package/dist/types/FloatType.d.ts +8 -5
- package/dist/types/FloatType.js +18 -16
- package/dist/types/FloatType.js.map +1 -1
- package/dist/types/FloatType.spec.js +4 -6
- package/dist/types/FloatType.spec.js.map +1 -1
- package/dist/types/FunctionType.d.ts +13 -8
- package/dist/types/FunctionType.js +30 -14
- package/dist/types/FunctionType.js.map +1 -1
- package/dist/types/InheritableType.d.ts +28 -0
- package/dist/types/InheritableType.js +152 -0
- package/dist/types/InheritableType.js.map +1 -0
- package/dist/types/IntegerType.d.ts +8 -5
- package/dist/types/IntegerType.js +18 -16
- package/dist/types/IntegerType.js.map +1 -1
- package/dist/types/IntegerType.spec.js +8 -6
- package/dist/types/IntegerType.spec.js.map +1 -1
- package/dist/types/InterfaceType.d.ts +12 -13
- package/dist/types/InterfaceType.js +20 -48
- package/dist/types/InterfaceType.js.map +1 -1
- package/dist/types/InterfaceType.spec.js +90 -56
- package/dist/types/InterfaceType.spec.js.map +1 -1
- package/dist/types/InvalidType.d.ts +7 -5
- package/dist/types/InvalidType.js +13 -7
- package/dist/types/InvalidType.js.map +1 -1
- package/dist/types/InvalidType.spec.js +8 -6
- package/dist/types/InvalidType.spec.js.map +1 -1
- package/dist/types/LongIntegerType.d.ts +8 -5
- package/dist/types/LongIntegerType.js +17 -15
- package/dist/types/LongIntegerType.js.map +1 -1
- package/dist/types/LongIntegerType.spec.js +10 -6
- package/dist/types/LongIntegerType.spec.js.map +1 -1
- package/dist/types/NamespaceType.d.ts +12 -0
- package/dist/types/NamespaceType.js +28 -0
- package/dist/types/NamespaceType.js.map +1 -0
- package/dist/types/ObjectType.d.ts +9 -8
- package/dist/types/ObjectType.js +21 -11
- package/dist/types/ObjectType.js.map +1 -1
- package/dist/types/ObjectType.spec.js +3 -3
- package/dist/types/ObjectType.spec.js.map +1 -1
- package/dist/types/ReferenceType.d.ts +63 -0
- package/dist/types/ReferenceType.js +423 -0
- package/dist/types/ReferenceType.js.map +1 -0
- package/dist/types/ReferenceType.spec.d.ts +1 -0
- package/dist/types/ReferenceType.spec.js +137 -0
- package/dist/types/ReferenceType.spec.js.map +1 -0
- package/dist/types/StringType.d.ts +11 -5
- package/dist/types/StringType.js +18 -7
- package/dist/types/StringType.js.map +1 -1
- package/dist/types/StringType.spec.js +3 -5
- package/dist/types/StringType.spec.js.map +1 -1
- package/dist/types/TypedFunctionType.d.ts +22 -17
- package/dist/types/TypedFunctionType.js +78 -60
- package/dist/types/TypedFunctionType.js.map +1 -1
- package/dist/types/TypedFunctionType.spec.js +105 -20
- package/dist/types/TypedFunctionType.spec.js.map +1 -1
- package/dist/types/UninitializedType.d.ts +8 -6
- package/dist/types/UninitializedType.js +13 -7
- package/dist/types/UninitializedType.js.map +1 -1
- package/dist/types/UnionType.d.ts +20 -0
- package/dist/types/UnionType.js +123 -0
- package/dist/types/UnionType.js.map +1 -0
- package/dist/types/UnionType.spec.d.ts +1 -0
- package/dist/types/UnionType.spec.js +130 -0
- package/dist/types/UnionType.spec.js.map +1 -0
- package/dist/types/VoidType.d.ts +8 -5
- package/dist/types/VoidType.js +14 -7
- package/dist/types/VoidType.js.map +1 -1
- package/dist/types/VoidType.spec.js +3 -3
- package/dist/types/VoidType.spec.js.map +1 -1
- package/dist/types/helper.spec.d.ts +1 -0
- package/dist/types/helper.spec.js +145 -0
- package/dist/types/helper.spec.js.map +1 -0
- package/dist/types/helpers.d.ts +19 -37
- package/dist/types/helpers.js +159 -99
- package/dist/types/helpers.js.map +1 -1
- package/dist/types/index.d.ts +22 -0
- package/dist/types/index.js +39 -0
- package/dist/types/index.js.map +1 -0
- package/dist/util.d.ts +143 -139
- package/dist/util.js +864 -385
- package/dist/util.js.map +1 -1
- package/dist/validators/ClassValidator.d.ts +8 -25
- package/dist/validators/ClassValidator.js +99 -179
- package/dist/validators/ClassValidator.js.map +1 -1
- package/package.json +165 -152
- package/dist/astUtils/AstEditor.js.map +0 -1
- package/dist/astUtils/AstEditor.spec.js.map +0 -1
- package/dist/bscPlugin/transpile/BrsFilePreTranspileProcessor.js.map +0 -1
- package/dist/bscPlugin/transpile/BrsFilePreTranspileProcessor.spec.js +0 -32
- package/dist/bscPlugin/transpile/BrsFilePreTranspileProcessor.spec.js.map +0 -1
- package/dist/parser/SGTypes.spec.js +0 -351
- package/dist/parser/SGTypes.spec.js.map +0 -1
- package/dist/types/CustomType.d.ts +0 -12
- package/dist/types/CustomType.js +0 -44
- package/dist/types/CustomType.js.map +0 -1
- package/dist/types/LazyType.d.ts +0 -16
- package/dist/types/LazyType.js +0 -44
- package/dist/types/LazyType.js.map +0 -1
- /package/dist/astUtils/{AstEditor.spec.d.ts → Editor.spec.d.ts} +0 -0
- /package/dist/bscPlugin/{transpile/BrsFilePreTranspileProcessor.spec.d.ts → completions/CompletionsProcessor.spec.d.ts} +0 -0
- /package/dist/{parser/SGTypes.spec.d.ts → bscPlugin/definition/DefinitionProvider.spec.d.ts} +0 -0
package/dist/lexer/Lexer.spec.js
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
3
|
/* eslint no-template-curly-in-string: 0 */
|
|
4
|
-
const
|
|
4
|
+
const chai_config_spec_1 = require("../chai-config.spec");
|
|
5
5
|
const TokenKind_1 = require("./TokenKind");
|
|
6
6
|
const Lexer_1 = require("./Lexer");
|
|
7
7
|
const Token_1 = require("./Token");
|
|
@@ -11,14 +11,14 @@ const util_1 = require("../util");
|
|
|
11
11
|
describe('lexer', () => {
|
|
12
12
|
it('recognizes the `const` keyword', () => {
|
|
13
13
|
let { tokens } = Lexer_1.Lexer.scan('const');
|
|
14
|
-
(0,
|
|
14
|
+
(0, chai_config_spec_1.expect)(tokens.map(x => x.kind)).to.eql([
|
|
15
15
|
TokenKind_1.TokenKind.Const,
|
|
16
16
|
TokenKind_1.TokenKind.Eof
|
|
17
17
|
]);
|
|
18
18
|
});
|
|
19
19
|
it('recognizes namespace keywords', () => {
|
|
20
20
|
let { tokens } = Lexer_1.Lexer.scan('namespace end namespace endnamespace end namespace');
|
|
21
|
-
(0,
|
|
21
|
+
(0, chai_config_spec_1.expect)(tokens.map(x => x.kind)).to.eql([
|
|
22
22
|
TokenKind_1.TokenKind.Namespace,
|
|
23
23
|
TokenKind_1.TokenKind.EndNamespace,
|
|
24
24
|
TokenKind_1.TokenKind.EndNamespace,
|
|
@@ -84,39 +84,39 @@ describe('lexer', () => {
|
|
|
84
84
|
});
|
|
85
85
|
it('recognizes the callfunc operator', () => {
|
|
86
86
|
let { tokens } = Lexer_1.Lexer.scan('@.');
|
|
87
|
-
(0,
|
|
87
|
+
(0, chai_config_spec_1.expect)(tokens[0].kind).to.equal(TokenKind_1.TokenKind.Callfunc);
|
|
88
88
|
});
|
|
89
89
|
it('recognizes the import token', () => {
|
|
90
90
|
let { tokens } = Lexer_1.Lexer.scan('import');
|
|
91
|
-
(0,
|
|
91
|
+
(0, chai_config_spec_1.expect)(tokens[0].kind).to.eql(TokenKind_1.TokenKind.Import);
|
|
92
92
|
});
|
|
93
93
|
it('recognizes library token', () => {
|
|
94
94
|
let { tokens } = Lexer_1.Lexer.scan('library');
|
|
95
|
-
(0,
|
|
95
|
+
(0, chai_config_spec_1.expect)(tokens[0].kind).to.eql(TokenKind_1.TokenKind.Library);
|
|
96
96
|
});
|
|
97
97
|
it('produces an at symbol token', () => {
|
|
98
98
|
let { tokens } = Lexer_1.Lexer.scan('@');
|
|
99
|
-
(0,
|
|
99
|
+
(0, chai_config_spec_1.expect)(tokens[0].kind).to.equal(TokenKind_1.TokenKind.At);
|
|
100
100
|
});
|
|
101
101
|
it('produces a semicolon token', () => {
|
|
102
102
|
let { tokens } = Lexer_1.Lexer.scan(';');
|
|
103
|
-
(0,
|
|
103
|
+
(0, chai_config_spec_1.expect)(tokens[0].kind).to.equal(TokenKind_1.TokenKind.Semicolon);
|
|
104
104
|
});
|
|
105
105
|
it('emits error on unknown character type', () => {
|
|
106
106
|
let { diagnostics } = Lexer_1.Lexer.scan('\0');
|
|
107
|
-
(0,
|
|
107
|
+
(0, chai_config_spec_1.expect)(diagnostics).to.be.lengthOf(1);
|
|
108
108
|
});
|
|
109
109
|
it('includes an end-of-file marker', () => {
|
|
110
110
|
let { tokens } = Lexer_1.Lexer.scan('');
|
|
111
|
-
(0,
|
|
111
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Eof]);
|
|
112
112
|
});
|
|
113
113
|
it('ignores tabs and spaces', () => {
|
|
114
114
|
let { tokens } = Lexer_1.Lexer.scan('\t\t \t \t');
|
|
115
|
-
(0,
|
|
115
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Eof]);
|
|
116
116
|
});
|
|
117
117
|
it('retains every single newline', () => {
|
|
118
118
|
let { tokens } = Lexer_1.Lexer.scan('\n\n\'foo\n\n\nprint 2\n\n');
|
|
119
|
-
(0,
|
|
119
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
|
|
120
120
|
TokenKind_1.TokenKind.Newline,
|
|
121
121
|
TokenKind_1.TokenKind.Newline,
|
|
122
122
|
TokenKind_1.TokenKind.Comment,
|
|
@@ -138,7 +138,7 @@ describe('lexer', () => {
|
|
|
138
138
|
' print 0\r\n' +
|
|
139
139
|
' end if\r\n' +
|
|
140
140
|
'end function\r\n').tokens.map(x => x.kind);
|
|
141
|
-
(0,
|
|
141
|
+
(0, chai_config_spec_1.expect)(kinds).to.eql([
|
|
142
142
|
TokenKind_1.TokenKind.Function, TokenKind_1.TokenKind.Identifier, TokenKind_1.TokenKind.LeftParen, TokenKind_1.TokenKind.RightParen, TokenKind_1.TokenKind.As, TokenKind_1.TokenKind.String, TokenKind_1.TokenKind.Newline,
|
|
143
143
|
TokenKind_1.TokenKind.If, TokenKind_1.TokenKind.True, TokenKind_1.TokenKind.Then, TokenKind_1.TokenKind.Newline,
|
|
144
144
|
TokenKind_1.TokenKind.Print, TokenKind_1.TokenKind.IntegerLiteral, TokenKind_1.TokenKind.Newline,
|
|
@@ -171,20 +171,20 @@ describe('lexer', () => {
|
|
|
171
171
|
[2, 7, 2, 8] //Eof
|
|
172
172
|
];
|
|
173
173
|
/*eslint-enable*/
|
|
174
|
-
(0,
|
|
175
|
-
(0,
|
|
174
|
+
(0, chai_config_spec_1.expect)(withoutWhitespace, 'Without whitespace').to.eql(expectedLocations);
|
|
175
|
+
(0, chai_config_spec_1.expect)(withWhitespace, 'With whitespace').to.eql(expectedLocations);
|
|
176
176
|
});
|
|
177
177
|
it('retains original line endings', () => {
|
|
178
178
|
let { tokens } = Lexer_1.Lexer.scan('print "hello"\r\nprint "world"\n');
|
|
179
|
-
(0,
|
|
179
|
+
(0, chai_config_spec_1.expect)([
|
|
180
180
|
tokens[2].text.charCodeAt(0),
|
|
181
181
|
tokens[2].text.charCodeAt(1)
|
|
182
182
|
], 'should contain \\r\\n').to.eql([13, 10]);
|
|
183
|
-
(0,
|
|
183
|
+
(0, chai_config_spec_1.expect)(tokens[5].text.charCodeAt(0), 'should contain \\r\\n').to.eql(10);
|
|
184
184
|
});
|
|
185
185
|
it('correctly splits the elseif token', () => {
|
|
186
186
|
let { tokens } = Lexer_1.Lexer.scan('else if elseif else if');
|
|
187
|
-
(0,
|
|
187
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
|
|
188
188
|
TokenKind_1.TokenKind.Else,
|
|
189
189
|
TokenKind_1.TokenKind.If,
|
|
190
190
|
TokenKind_1.TokenKind.Else,
|
|
@@ -196,20 +196,20 @@ describe('lexer', () => {
|
|
|
196
196
|
});
|
|
197
197
|
it('gives the `as` keyword its own TokenKind', () => {
|
|
198
198
|
let { tokens } = Lexer_1.Lexer.scan('as');
|
|
199
|
-
(0,
|
|
199
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.As, TokenKind_1.TokenKind.Eof]);
|
|
200
200
|
});
|
|
201
201
|
it('gives the `stop` keyword its own TokenKind', () => {
|
|
202
202
|
let { tokens } = Lexer_1.Lexer.scan('stop');
|
|
203
|
-
(0,
|
|
203
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Stop, TokenKind_1.TokenKind.Eof]);
|
|
204
204
|
});
|
|
205
205
|
it('does not alias \'?\' to \'print\' - the parser will do that', () => {
|
|
206
206
|
let { tokens } = Lexer_1.Lexer.scan('?2');
|
|
207
|
-
(0,
|
|
207
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Question, TokenKind_1.TokenKind.IntegerLiteral, TokenKind_1.TokenKind.Eof]);
|
|
208
208
|
});
|
|
209
209
|
describe('comments', () => {
|
|
210
210
|
it('does not include carriage return character', () => {
|
|
211
211
|
let tokens = Lexer_1.Lexer.scan(`'someComment\r\nprint "hello"`).tokens;
|
|
212
|
-
(0,
|
|
212
|
+
(0, chai_config_spec_1.expect)(tokens[0].text).to.equal(`'someComment`);
|
|
213
213
|
});
|
|
214
214
|
it('includes the comment characters in the text', () => {
|
|
215
215
|
let text = Lexer_1.Lexer.scan(`
|
|
@@ -218,7 +218,7 @@ describe('lexer', () => {
|
|
|
218
218
|
`).tokens
|
|
219
219
|
.filter(x => ![TokenKind_1.TokenKind.Newline, TokenKind_1.TokenKind.Eof].includes(x.kind))
|
|
220
220
|
.map(x => x.text);
|
|
221
|
-
(0,
|
|
221
|
+
(0, chai_config_spec_1.expect)(text).to.eql([
|
|
222
222
|
`'comment`,
|
|
223
223
|
'REM some comment'
|
|
224
224
|
]);
|
|
@@ -232,7 +232,7 @@ describe('lexer', () => {
|
|
|
232
232
|
`, {
|
|
233
233
|
includeWhitespace: true
|
|
234
234
|
}).tokens.map(x => [...(0, Parser_spec_1.rangeToArray)(x.range), x.text]);
|
|
235
|
-
(0,
|
|
235
|
+
(0, chai_config_spec_1.expect)(tokens).to.eql([
|
|
236
236
|
[0, 0, 0, 1, '\n'],
|
|
237
237
|
[1, 0, 1, 16, ' '],
|
|
238
238
|
[1, 16, 1, 19, 'sub'],
|
|
@@ -267,14 +267,14 @@ describe('lexer', () => {
|
|
|
267
267
|
'comment
|
|
268
268
|
REM some comment
|
|
269
269
|
`).tokens.filter(x => ![TokenKind_1.TokenKind.Newline, TokenKind_1.TokenKind.Eof].includes(x.kind));
|
|
270
|
-
(0,
|
|
271
|
-
(0,
|
|
270
|
+
(0, chai_config_spec_1.expect)(tokens[0].range).to.eql(vscode_languageserver_1.Range.create(1, 16, 1, 24));
|
|
271
|
+
(0, chai_config_spec_1.expect)(tokens[1].range).to.eql(vscode_languageserver_1.Range.create(2, 16, 2, 32));
|
|
272
272
|
});
|
|
273
273
|
it('finds correct location for newlines', () => {
|
|
274
274
|
let tokens = Lexer_1.Lexer.scan('sub\nsub\r\nsub\n\n').tokens
|
|
275
275
|
//ignore the Eof token
|
|
276
276
|
.filter(x => x.kind !== TokenKind_1.TokenKind.Eof);
|
|
277
|
-
(0,
|
|
277
|
+
(0, chai_config_spec_1.expect)(tokens.map(x => x.range)).to.eql([
|
|
278
278
|
vscode_languageserver_1.Range.create(0, 0, 0, 3),
|
|
279
279
|
vscode_languageserver_1.Range.create(0, 3, 0, 4),
|
|
280
280
|
vscode_languageserver_1.Range.create(1, 0, 1, 3),
|
|
@@ -297,26 +297,26 @@ describe('lexer', () => {
|
|
|
297
297
|
end sub
|
|
298
298
|
`);
|
|
299
299
|
let comments = tokens.filter(x => x.kind === TokenKind_1.TokenKind.Comment);
|
|
300
|
-
(0,
|
|
301
|
-
(0,
|
|
300
|
+
(0, chai_config_spec_1.expect)(comments).to.be.lengthOf(1);
|
|
301
|
+
(0, chai_config_spec_1.expect)(comments[0].range).to.eql(vscode_languageserver_1.Range.create(8, 27, 8, 35));
|
|
302
302
|
});
|
|
303
303
|
it('ignores everything after `\'`', () => {
|
|
304
304
|
let { tokens } = Lexer_1.Lexer.scan('= \' (');
|
|
305
|
-
(0,
|
|
305
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Equal, TokenKind_1.TokenKind.Comment, TokenKind_1.TokenKind.Eof]);
|
|
306
306
|
});
|
|
307
307
|
it('ignores everything after `REM`', () => {
|
|
308
308
|
let { tokens } = Lexer_1.Lexer.scan('= REM (');
|
|
309
|
-
(0,
|
|
309
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Equal, TokenKind_1.TokenKind.Comment, TokenKind_1.TokenKind.Eof]);
|
|
310
310
|
});
|
|
311
311
|
it('ignores everything after `rem`', () => {
|
|
312
312
|
let { tokens } = Lexer_1.Lexer.scan('= rem (');
|
|
313
|
-
(0,
|
|
313
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Equal, TokenKind_1.TokenKind.Comment, TokenKind_1.TokenKind.Eof]);
|
|
314
314
|
});
|
|
315
315
|
}); // comments
|
|
316
316
|
describe('non-literals', () => {
|
|
317
317
|
it('reads parens & braces', () => {
|
|
318
318
|
let { tokens } = Lexer_1.Lexer.scan('(){}');
|
|
319
|
-
(0,
|
|
319
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
|
|
320
320
|
TokenKind_1.TokenKind.LeftParen,
|
|
321
321
|
TokenKind_1.TokenKind.RightParen,
|
|
322
322
|
TokenKind_1.TokenKind.LeftCurlyBrace,
|
|
@@ -326,7 +326,7 @@ describe('lexer', () => {
|
|
|
326
326
|
});
|
|
327
327
|
it('reads operators', () => {
|
|
328
328
|
let { tokens } = Lexer_1.Lexer.scan('^ - + * MOD / \\ -- ++');
|
|
329
|
-
(0,
|
|
329
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
|
|
330
330
|
TokenKind_1.TokenKind.Caret,
|
|
331
331
|
TokenKind_1.TokenKind.Minus,
|
|
332
332
|
TokenKind_1.TokenKind.Plus,
|
|
@@ -341,7 +341,7 @@ describe('lexer', () => {
|
|
|
341
341
|
});
|
|
342
342
|
it('reads bitshift operators', () => {
|
|
343
343
|
let { tokens } = Lexer_1.Lexer.scan('<< >> <<');
|
|
344
|
-
(0,
|
|
344
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
|
|
345
345
|
TokenKind_1.TokenKind.LeftShift,
|
|
346
346
|
TokenKind_1.TokenKind.RightShift,
|
|
347
347
|
TokenKind_1.TokenKind.LeftShift,
|
|
@@ -350,7 +350,7 @@ describe('lexer', () => {
|
|
|
350
350
|
});
|
|
351
351
|
it('reads bitshift assignment operators', () => {
|
|
352
352
|
let { tokens } = Lexer_1.Lexer.scan('<<= >>=');
|
|
353
|
-
(0,
|
|
353
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
|
|
354
354
|
TokenKind_1.TokenKind.LeftShiftEqual,
|
|
355
355
|
TokenKind_1.TokenKind.RightShiftEqual,
|
|
356
356
|
TokenKind_1.TokenKind.Eof
|
|
@@ -358,7 +358,7 @@ describe('lexer', () => {
|
|
|
358
358
|
});
|
|
359
359
|
it('reads comparators', () => {
|
|
360
360
|
let { tokens } = Lexer_1.Lexer.scan('< <= > >= = <>');
|
|
361
|
-
(0,
|
|
361
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
|
|
362
362
|
TokenKind_1.TokenKind.Less,
|
|
363
363
|
TokenKind_1.TokenKind.LessEqual,
|
|
364
364
|
TokenKind_1.TokenKind.Greater,
|
|
@@ -372,23 +372,23 @@ describe('lexer', () => {
|
|
|
372
372
|
describe('string literals', () => {
|
|
373
373
|
it('produces string literal tokens', () => {
|
|
374
374
|
let { tokens } = Lexer_1.Lexer.scan(`"hello world"`);
|
|
375
|
-
(0,
|
|
375
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.StringLiteral, TokenKind_1.TokenKind.Eof]);
|
|
376
376
|
});
|
|
377
377
|
it(`safely escapes " literals`, () => {
|
|
378
378
|
let { tokens } = Lexer_1.Lexer.scan(`"the cat says ""meow"""`);
|
|
379
|
-
(0,
|
|
379
|
+
(0, chai_config_spec_1.expect)(tokens[0].kind).to.equal(TokenKind_1.TokenKind.StringLiteral);
|
|
380
380
|
});
|
|
381
381
|
it('captures text to end of line for unterminated strings with LF', () => {
|
|
382
382
|
let { tokens } = Lexer_1.Lexer.scan(`"unterminated!\n`);
|
|
383
|
-
(0,
|
|
383
|
+
(0, chai_config_spec_1.expect)(tokens[0].kind).to.eql(TokenKind_1.TokenKind.StringLiteral);
|
|
384
384
|
});
|
|
385
385
|
it('captures text to end of line for unterminated strings with CRLF', () => {
|
|
386
386
|
let { tokens } = Lexer_1.Lexer.scan(`"unterminated!\r\n`);
|
|
387
|
-
(0,
|
|
387
|
+
(0, chai_config_spec_1.expect)(tokens[0].text).to.equal('"unterminated!');
|
|
388
388
|
});
|
|
389
389
|
it('disallows multiline strings', () => {
|
|
390
390
|
let { diagnostics } = Lexer_1.Lexer.scan(`"multi-line\n\n`);
|
|
391
|
-
(0,
|
|
391
|
+
(0, chai_config_spec_1.expect)(diagnostics.map(err => err.message)).to.deep.equal([
|
|
392
392
|
'Unterminated string at end of line'
|
|
393
393
|
]);
|
|
394
394
|
});
|
|
@@ -397,7 +397,7 @@ describe('lexer', () => {
|
|
|
397
397
|
describe('template string literals', () => {
|
|
398
398
|
it('supports escaped chars', () => {
|
|
399
399
|
let { tokens } = Lexer_1.Lexer.scan('`\\n\\`\\r\\n`');
|
|
400
|
-
(0,
|
|
400
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
|
|
401
401
|
TokenKind_1.TokenKind.BackTick,
|
|
402
402
|
TokenKind_1.TokenKind.TemplateStringQuasi,
|
|
403
403
|
TokenKind_1.TokenKind.EscapedCharCodeLiteral,
|
|
@@ -411,7 +411,7 @@ describe('lexer', () => {
|
|
|
411
411
|
TokenKind_1.TokenKind.BackTick,
|
|
412
412
|
TokenKind_1.TokenKind.Eof
|
|
413
413
|
]);
|
|
414
|
-
(0,
|
|
414
|
+
(0, chai_config_spec_1.expect)(tokens.map(x => x.charCode).filter(x => !!x)).to.eql([
|
|
415
415
|
10,
|
|
416
416
|
96,
|
|
417
417
|
13,
|
|
@@ -420,7 +420,7 @@ describe('lexer', () => {
|
|
|
420
420
|
});
|
|
421
421
|
it('prevents expressions when escaping the dollar sign', () => {
|
|
422
422
|
let { tokens } = Lexer_1.Lexer.scan('`\\${just text}`');
|
|
423
|
-
(0,
|
|
423
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
|
|
424
424
|
TokenKind_1.TokenKind.BackTick,
|
|
425
425
|
TokenKind_1.TokenKind.TemplateStringQuasi,
|
|
426
426
|
TokenKind_1.TokenKind.EscapedCharCodeLiteral,
|
|
@@ -431,7 +431,7 @@ describe('lexer', () => {
|
|
|
431
431
|
});
|
|
432
432
|
it('supports escaping unicode char codes', () => {
|
|
433
433
|
let { tokens } = Lexer_1.Lexer.scan('`\\c1\\c12\\c123`');
|
|
434
|
-
(0,
|
|
434
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
|
|
435
435
|
TokenKind_1.TokenKind.BackTick,
|
|
436
436
|
TokenKind_1.TokenKind.TemplateStringQuasi,
|
|
437
437
|
TokenKind_1.TokenKind.EscapedCharCodeLiteral,
|
|
@@ -443,7 +443,7 @@ describe('lexer', () => {
|
|
|
443
443
|
TokenKind_1.TokenKind.BackTick,
|
|
444
444
|
TokenKind_1.TokenKind.Eof
|
|
445
445
|
]);
|
|
446
|
-
(0,
|
|
446
|
+
(0, chai_config_spec_1.expect)(tokens.map(x => x.charCode).filter(x => !!x)).to.eql([
|
|
447
447
|
1,
|
|
448
448
|
12,
|
|
449
449
|
123
|
|
@@ -451,7 +451,7 @@ describe('lexer', () => {
|
|
|
451
451
|
});
|
|
452
452
|
it('converts doublequote to EscapedCharCodeLiteral', () => {
|
|
453
453
|
let { tokens } = Lexer_1.Lexer.scan('`"`');
|
|
454
|
-
(0,
|
|
454
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
|
|
455
455
|
TokenKind_1.TokenKind.BackTick,
|
|
456
456
|
TokenKind_1.TokenKind.TemplateStringQuasi,
|
|
457
457
|
TokenKind_1.TokenKind.EscapedCharCodeLiteral,
|
|
@@ -459,11 +459,11 @@ describe('lexer', () => {
|
|
|
459
459
|
TokenKind_1.TokenKind.BackTick,
|
|
460
460
|
TokenKind_1.TokenKind.Eof
|
|
461
461
|
]);
|
|
462
|
-
(0,
|
|
462
|
+
(0, chai_config_spec_1.expect)(tokens[2].charCode).to.equal(34);
|
|
463
463
|
});
|
|
464
464
|
it(`safely escapes \` literals`, () => {
|
|
465
465
|
let { tokens } = Lexer_1.Lexer.scan('`the cat says \\`meow\\` a lot`');
|
|
466
|
-
(0,
|
|
466
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
|
|
467
467
|
TokenKind_1.TokenKind.BackTick,
|
|
468
468
|
TokenKind_1.TokenKind.TemplateStringQuasi,
|
|
469
469
|
TokenKind_1.TokenKind.EscapedCharCodeLiteral,
|
|
@@ -473,7 +473,7 @@ describe('lexer', () => {
|
|
|
473
473
|
TokenKind_1.TokenKind.BackTick,
|
|
474
474
|
TokenKind_1.TokenKind.Eof
|
|
475
475
|
]);
|
|
476
|
-
(0,
|
|
476
|
+
(0, chai_config_spec_1.expect)(tokens.map(x => x.text)).to.eql([
|
|
477
477
|
'`',
|
|
478
478
|
'the cat says ',
|
|
479
479
|
'\\`',
|
|
@@ -486,17 +486,17 @@ describe('lexer', () => {
|
|
|
486
486
|
});
|
|
487
487
|
it('produces template string literal tokens', () => {
|
|
488
488
|
let { tokens } = Lexer_1.Lexer.scan('`hello world`');
|
|
489
|
-
(0,
|
|
489
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
|
|
490
490
|
TokenKind_1.TokenKind.BackTick,
|
|
491
491
|
TokenKind_1.TokenKind.TemplateStringQuasi,
|
|
492
492
|
TokenKind_1.TokenKind.BackTick,
|
|
493
493
|
TokenKind_1.TokenKind.Eof
|
|
494
494
|
]);
|
|
495
|
-
(0,
|
|
495
|
+
(0, chai_config_spec_1.expect)(tokens[1].text).to.deep.equal('hello world');
|
|
496
496
|
});
|
|
497
497
|
it('collects quasis outside and expressions inside of template strings', () => {
|
|
498
498
|
let { tokens } = Lexer_1.Lexer.scan('`hello ${"world"}!`');
|
|
499
|
-
(0,
|
|
499
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
|
|
500
500
|
TokenKind_1.TokenKind.BackTick,
|
|
501
501
|
TokenKind_1.TokenKind.TemplateStringQuasi,
|
|
502
502
|
TokenKind_1.TokenKind.TemplateStringExpressionBegin,
|
|
@@ -506,7 +506,7 @@ describe('lexer', () => {
|
|
|
506
506
|
TokenKind_1.TokenKind.BackTick,
|
|
507
507
|
TokenKind_1.TokenKind.Eof
|
|
508
508
|
]);
|
|
509
|
-
(0,
|
|
509
|
+
(0, chai_config_spec_1.expect)(tokens[1].text).to.deep.equal(`hello `);
|
|
510
510
|
});
|
|
511
511
|
it('real example, which is causing issues in the formatter', () => {
|
|
512
512
|
let { tokens } = Lexer_1.Lexer.scan(`
|
|
@@ -525,7 +525,7 @@ describe('lexer', () => {
|
|
|
525
525
|
\`
|
|
526
526
|
end function
|
|
527
527
|
`);
|
|
528
|
-
(0,
|
|
528
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
|
|
529
529
|
TokenKind_1.TokenKind.Newline,
|
|
530
530
|
TokenKind_1.TokenKind.Function,
|
|
531
531
|
TokenKind_1.TokenKind.Identifier,
|
|
@@ -603,7 +603,7 @@ describe('lexer', () => {
|
|
|
603
603
|
});
|
|
604
604
|
it('complicated example', () => {
|
|
605
605
|
let { tokens } = Lexer_1.Lexer.scan('`hello ${"world"}!I am a ${"template" + "string"} and I am very ${["pleased"][0]} to meet you ${m.top.getChildCount()}.The end`');
|
|
606
|
-
(0,
|
|
606
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.eql([
|
|
607
607
|
TokenKind_1.TokenKind.BackTick,
|
|
608
608
|
TokenKind_1.TokenKind.TemplateStringQuasi,
|
|
609
609
|
TokenKind_1.TokenKind.TemplateStringExpressionBegin,
|
|
@@ -641,7 +641,7 @@ describe('lexer', () => {
|
|
|
641
641
|
});
|
|
642
642
|
it('allows multiline strings', () => {
|
|
643
643
|
let { tokens } = Lexer_1.Lexer.scan('`multi-line\n\n`');
|
|
644
|
-
(0,
|
|
644
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
|
|
645
645
|
TokenKind_1.TokenKind.BackTick,
|
|
646
646
|
TokenKind_1.TokenKind.TemplateStringQuasi,
|
|
647
647
|
TokenKind_1.TokenKind.EscapedCharCodeLiteral,
|
|
@@ -651,7 +651,7 @@ describe('lexer', () => {
|
|
|
651
651
|
TokenKind_1.TokenKind.BackTick,
|
|
652
652
|
TokenKind_1.TokenKind.Eof
|
|
653
653
|
]);
|
|
654
|
-
(0,
|
|
654
|
+
(0, chai_config_spec_1.expect)(tokens.map(x => x.text)).to.eql([
|
|
655
655
|
'`',
|
|
656
656
|
'multi-line',
|
|
657
657
|
'\n',
|
|
@@ -664,7 +664,7 @@ describe('lexer', () => {
|
|
|
664
664
|
});
|
|
665
665
|
it('maintains proper line/column locations for multiline strings', () => {
|
|
666
666
|
let { tokens } = Lexer_1.Lexer.scan('123 `multi\nline\r\nstrings` true\nfalse');
|
|
667
|
-
(0,
|
|
667
|
+
(0, chai_config_spec_1.expect)(tokens.map(x => {
|
|
668
668
|
return {
|
|
669
669
|
range: x.range,
|
|
670
670
|
kind: x.kind
|
|
@@ -687,7 +687,7 @@ describe('lexer', () => {
|
|
|
687
687
|
});
|
|
688
688
|
it('Example that tripped up the expression tests', () => {
|
|
689
689
|
let { tokens } = Lexer_1.Lexer.scan('`I am a complex example\n${a.isRunning(["a","b","c"])}\nmore ${m.finish(true)}`');
|
|
690
|
-
(0,
|
|
690
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
|
|
691
691
|
TokenKind_1.TokenKind.BackTick,
|
|
692
692
|
TokenKind_1.TokenKind.TemplateStringQuasi,
|
|
693
693
|
TokenKind_1.TokenKind.EscapedCharCodeLiteral,
|
|
@@ -726,111 +726,111 @@ describe('lexer', () => {
|
|
|
726
726
|
describe('double literals', () => {
|
|
727
727
|
it('respects \'#\' suffix', () => {
|
|
728
728
|
let d = Lexer_1.Lexer.scan('123#').tokens[0];
|
|
729
|
-
(0,
|
|
730
|
-
(0,
|
|
729
|
+
(0, chai_config_spec_1.expect)(d.kind).to.equal(TokenKind_1.TokenKind.DoubleLiteral);
|
|
730
|
+
(0, chai_config_spec_1.expect)(d.text).to.eql('123#');
|
|
731
731
|
});
|
|
732
732
|
it('forces literals >= 10 digits into doubles', () => {
|
|
733
733
|
let d = Lexer_1.Lexer.scan('0000000005').tokens[0];
|
|
734
|
-
(0,
|
|
735
|
-
(0,
|
|
734
|
+
(0, chai_config_spec_1.expect)(d.kind).to.equal(TokenKind_1.TokenKind.DoubleLiteral);
|
|
735
|
+
(0, chai_config_spec_1.expect)(d.text).to.eql('0000000005');
|
|
736
736
|
});
|
|
737
737
|
it('forces literals with \'D\' in exponent into doubles', () => {
|
|
738
738
|
let d = Lexer_1.Lexer.scan('2.5d3').tokens[0];
|
|
739
|
-
(0,
|
|
740
|
-
(0,
|
|
739
|
+
(0, chai_config_spec_1.expect)(d.kind).to.equal(TokenKind_1.TokenKind.DoubleLiteral);
|
|
740
|
+
(0, chai_config_spec_1.expect)(d.text).to.eql('2.5d3');
|
|
741
741
|
});
|
|
742
742
|
it('allows digits before `.` to be elided', () => {
|
|
743
743
|
let f = Lexer_1.Lexer.scan('.123#').tokens[0];
|
|
744
|
-
(0,
|
|
745
|
-
(0,
|
|
744
|
+
(0, chai_config_spec_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.DoubleLiteral);
|
|
745
|
+
(0, chai_config_spec_1.expect)(f.text).to.eql('.123#');
|
|
746
746
|
});
|
|
747
747
|
it('allows digits after `.` to be elided', () => {
|
|
748
748
|
let f = Lexer_1.Lexer.scan('12.#').tokens[0];
|
|
749
|
-
(0,
|
|
750
|
-
(0,
|
|
749
|
+
(0, chai_config_spec_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.DoubleLiteral);
|
|
750
|
+
(0, chai_config_spec_1.expect)(f.text).to.eql('12.#');
|
|
751
751
|
});
|
|
752
752
|
});
|
|
753
753
|
describe('float literals', () => {
|
|
754
754
|
it('respects \'!\' suffix', () => {
|
|
755
755
|
let f = Lexer_1.Lexer.scan('0.00000008!').tokens[0];
|
|
756
|
-
(0,
|
|
756
|
+
(0, chai_config_spec_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
|
|
757
757
|
// Floating precision will make this *not* equal
|
|
758
|
-
(0,
|
|
759
|
-
(0,
|
|
758
|
+
(0, chai_config_spec_1.expect)(f.text).not.to.equal(8e-8);
|
|
759
|
+
(0, chai_config_spec_1.expect)(f.text).to.eql('0.00000008!');
|
|
760
760
|
});
|
|
761
761
|
it('forces literals with a decimal into floats', () => {
|
|
762
762
|
let f = Lexer_1.Lexer.scan('1.0').tokens[0];
|
|
763
|
-
(0,
|
|
764
|
-
(0,
|
|
763
|
+
(0, chai_config_spec_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
|
|
764
|
+
(0, chai_config_spec_1.expect)(f.text).to.equal('1.0');
|
|
765
765
|
});
|
|
766
766
|
it('forces literals with \'E\' in exponent into floats', () => {
|
|
767
767
|
let f = Lexer_1.Lexer.scan('2.5e3').tokens[0];
|
|
768
|
-
(0,
|
|
769
|
-
(0,
|
|
768
|
+
(0, chai_config_spec_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
|
|
769
|
+
(0, chai_config_spec_1.expect)(f.text).to.eql('2.5e3');
|
|
770
770
|
});
|
|
771
771
|
 it('supports larger-than-supported-precision floats to be defined with exponents', () => {
 let f = Lexer_1.Lexer.scan('2.3659475627512424e-38').tokens[0];
-(0,
-(0,
+(0, chai_config_spec_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
+(0, chai_config_spec_1.expect)(f.text).to.eql('2.3659475627512424e-38');
 });
 it('allows digits before `.` to be elided', () => {
 let f = Lexer_1.Lexer.scan('.123').tokens[0];
-(0,
-(0,
+(0, chai_config_spec_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
+(0, chai_config_spec_1.expect)(f.text).to.equal('.123');
 });
 it('allows digits after `.` to be elided', () => {
 let f = Lexer_1.Lexer.scan('12.').tokens[0];
-(0,
-(0,
+(0, chai_config_spec_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
+(0, chai_config_spec_1.expect)(f.text).to.equal('12.');
 });
 });
 describe('long integer literals', () => {
 it('respects \'&\' suffix', () => {
 let f = Lexer_1.Lexer.scan('1&').tokens[0];
-(0,
-(0,
+(0, chai_config_spec_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.LongIntegerLiteral);
+(0, chai_config_spec_1.expect)(f.text).to.eql('1&');
 });
 it('supports hexadecimal literals', () => {
 let i = Lexer_1.Lexer.scan('&hf00d&').tokens[0];
-(0,
-(0,
+(0, chai_config_spec_1.expect)(i.kind).to.equal(TokenKind_1.TokenKind.LongIntegerLiteral);
+(0, chai_config_spec_1.expect)(i.text).to.equal('&hf00d&');
 });
 it('allows very long Int64 literals', () => {
 let li = Lexer_1.Lexer.scan('9876543210&').tokens[0];
-(0,
-(0,
+(0, chai_config_spec_1.expect)(li.kind).to.equal(TokenKind_1.TokenKind.LongIntegerLiteral);
+(0, chai_config_spec_1.expect)(li.text).to.equal('9876543210&');
 });
 it('forces literals with \'&\' suffix into Int64s', () => {
 let li = Lexer_1.Lexer.scan('123&').tokens[0];
-(0,
-(0,
+(0, chai_config_spec_1.expect)(li.kind).to.equal(TokenKind_1.TokenKind.LongIntegerLiteral);
+(0, chai_config_spec_1.expect)(li.text).to.deep.equal('123&');
 });
 });
 describe('integer literals', () => {
 it('respects \'%\' suffix', () => {
 let f = Lexer_1.Lexer.scan('1%').tokens[0];
-(0,
-(0,
+(0, chai_config_spec_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.IntegerLiteral);
+(0, chai_config_spec_1.expect)(f.text).to.eql('1%');
 });
 it('does not allow decimal numbers to end with %', () => {
 let f = Lexer_1.Lexer.scan('1.2%').tokens[0];
-(0,
-(0,
+(0, chai_config_spec_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
+(0, chai_config_spec_1.expect)(f.text).to.eql('1.2');
 });
 it('supports hexadecimal literals', () => {
 let i = Lexer_1.Lexer.scan('&hFf').tokens[0];
-(0,
-(0,
+(0, chai_config_spec_1.expect)(i.kind).to.equal(TokenKind_1.TokenKind.IntegerLiteral);
+(0, chai_config_spec_1.expect)(i.text).to.deep.equal('&hFf');
 });
 it('falls back to a regular integer', () => {
 let i = Lexer_1.Lexer.scan('123').tokens[0];
-(0,
-(0,
+(0, chai_config_spec_1.expect)(i.kind).to.equal(TokenKind_1.TokenKind.IntegerLiteral);
+(0, chai_config_spec_1.expect)(i.text).to.deep.equal('123');
 });
 });
 describe('types', () => {
 it('captures type tokens', () => {
-(0,
+(0, chai_config_spec_1.expect)(Lexer_1.Lexer.scan(`
 void boolean integer longinteger float double string object interface invalid dynamic
 `.trim()).tokens.map(x => x.kind)).to.eql([
 TokenKind_1.TokenKind.Void,
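The hunks above only reformat the compiled expect() calls; the literal-suffix behavior they assert is unchanged. A minimal TypeScript sketch of those suffix rules, assuming `Lexer` and `TokenKind` are exported from the package root:

// Sketch only: '&' forces a long-integer literal, '%' an integer literal, and a
// bare trailing '.' still lexes as a float, per the tests above.
import { Lexer, TokenKind } from 'brighterscript';

const cases: Array<[string, TokenKind]> = [
    ['123&', TokenKind.LongIntegerLiteral],
    ['1%', TokenKind.IntegerLiteral],
    ['12.', TokenKind.FloatLiteral]
];
for (const [source, expected] of cases) {
    const token = Lexer.scan(source).tokens[0];
    console.log(source, token.kind === expected); //prints true for each case
}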
@@ -853,7 +853,7 @@ describe('lexer', () => {
 // test just a sample of single-word reserved words for now.
 // if we find any that we've missed
 let { tokens } = Lexer_1.Lexer.scan('and then or if else endif return true false line_num');
-(0,
+(0, chai_config_spec_1.expect)(tokens.map(w => w.kind)).to.deep.equal([
 TokenKind_1.TokenKind.And,
 TokenKind_1.TokenKind.Then,
 TokenKind_1.TokenKind.Or,
@@ -869,7 +869,7 @@ describe('lexer', () => {
 });
 it('matches multi-word keywords', () => {
 let { tokens } = Lexer_1.Lexer.scan('end if end while End Sub end Function Exit wHILe');
-(0,
+(0, chai_config_spec_1.expect)(tokens.map(w => w.kind)).to.deep.equal([
 TokenKind_1.TokenKind.EndIf,
 TokenKind_1.TokenKind.EndWhile,
 TokenKind_1.TokenKind.EndSub,
@@ -880,7 +880,7 @@ describe('lexer', () => {
 });
 it('accepts \'exit for\' but not \'exitfor\'', () => {
 let { tokens } = Lexer_1.Lexer.scan('exit for exitfor');
-(0,
+(0, chai_config_spec_1.expect)(tokens.map(w => w.kind)).to.deep.equal([
 TokenKind_1.TokenKind.ExitFor,
 TokenKind_1.TokenKind.Identifier,
 TokenKind_1.TokenKind.Eof
@@ -888,7 +888,7 @@ describe('lexer', () => {
 });
 it('matches keywords with silly capitalization', () => {
 let { tokens } = Lexer_1.Lexer.scan('iF ELSE eNDIf FUncTioN');
-(0,
+(0, chai_config_spec_1.expect)(tokens.map(w => w.kind)).to.deep.equal([
 TokenKind_1.TokenKind.If,
 TokenKind_1.TokenKind.Else,
 TokenKind_1.TokenKind.EndIf,
@@ -898,14 +898,14 @@ describe('lexer', () => {
 });
 it('allows alpha-numeric (plus \'_\') identifiers', () => {
 let identifier = Lexer_1.Lexer.scan('_abc_123_').tokens[0];
-(0,
-(0,
+(0, chai_config_spec_1.expect)(identifier.kind).to.equal(TokenKind_1.TokenKind.Identifier);
+(0, chai_config_spec_1.expect)(identifier.text).to.equal('_abc_123_');
 });
 it('allows identifiers with trailing type designators', () => {
 let { tokens } = Lexer_1.Lexer.scan('lorem$ ipsum% dolor! sit# amet&');
 let identifiers = tokens.filter(t => t.kind !== TokenKind_1.TokenKind.Eof);
-(0,
-(0,
+(0, chai_config_spec_1.expect)(identifiers.every(t => t.kind === TokenKind_1.TokenKind.Identifier));
+(0, chai_config_spec_1.expect)(identifiers.map(t => t.text)).to.deep.equal([
 'lorem$',
 'ipsum%',
 'dolor!',
@@ -917,7 +917,7 @@ describe('lexer', () => {
 describe('conditional compilation', () => {
 it('reads constant declarations', () => {
 let { tokens } = Lexer_1.Lexer.scan('#const foo true');
-(0,
+(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
 TokenKind_1.TokenKind.HashConst,
 TokenKind_1.TokenKind.Identifier,
 TokenKind_1.TokenKind.True,
@@ -926,7 +926,7 @@ describe('lexer', () => {
 });
 it('reads constant aliases', () => {
 let { tokens } = Lexer_1.Lexer.scan('#const bar foo');
-(0,
+(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
 TokenKind_1.TokenKind.HashConst,
 TokenKind_1.TokenKind.Identifier,
 TokenKind_1.TokenKind.Identifier,
@@ -944,7 +944,7 @@ describe('lexer', () => {
 `, {
 includeWhitespace: false
 });
-(0,
+(0, chai_config_spec_1.expect)(tokens.map(t => t.kind).filter(x => x !== TokenKind_1.TokenKind.Newline)).to.deep.equal([
 TokenKind_1.TokenKind.HashIf,
 TokenKind_1.TokenKind.HashElseIf,
 TokenKind_1.TokenKind.HashElseIf,
@@ -956,7 +956,7 @@ describe('lexer', () => {
 });
 it('treats text "constructor" as an identifier', () => {
 let lexer = Lexer_1.Lexer.scan(`function constructor()\nend function`);
-(0,
+(0, chai_config_spec_1.expect)(lexer.tokens[1].kind).to.equal(TokenKind_1.TokenKind.Identifier);
 });
 it('reads upper case conditional directives', () => {
 let { tokens } = Lexer_1.Lexer.scan(`
@@ -969,7 +969,7 @@ describe('lexer', () => {
 `, {
 includeWhitespace: false
 });
-(0,
+(0, chai_config_spec_1.expect)(tokens.map(t => t.kind).filter(x => x !== TokenKind_1.TokenKind.Newline)).to.deep.equal([
 TokenKind_1.TokenKind.HashIf,
 TokenKind_1.TokenKind.HashElseIf,
 TokenKind_1.TokenKind.HashElseIf,
@@ -981,7 +981,7 @@ describe('lexer', () => {
 });
 it('supports various spacings between #endif', () => {
 let { tokens } = Lexer_1.Lexer.scan('#endif #end if #end\tif #end if #end\t\t if');
-(0,
+(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
 TokenKind_1.TokenKind.HashEndIf,
 TokenKind_1.TokenKind.HashEndIf,
 TokenKind_1.TokenKind.HashEndIf,
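Every conditional-compilation hunk above follows the same scan-and-compare-kinds pattern. A small sketch of that pattern (the constant name `DEBUG` is illustrative, not from the diff):

// Sketch: '#' directives get dedicated token kinds rather than generic identifiers.
import { Lexer } from 'brighterscript';

const { tokens } = Lexer.scan('#const DEBUG true'); //'DEBUG' is a hypothetical constant name
//per the tests above this yields HashConst, Identifier, True (plus a trailing Eof)
console.log(tokens.map(t => t.kind));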
@@ -994,20 +994,20 @@ describe('lexer', () => {
 let { tokens } = Lexer_1.Lexer.scan('#error a message goes here\n', {
 includeWhitespace: true
 });
-(0,
+(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
 TokenKind_1.TokenKind.HashError,
 TokenKind_1.TokenKind.Whitespace,
 TokenKind_1.TokenKind.HashErrorMessage,
 TokenKind_1.TokenKind.Newline,
 TokenKind_1.TokenKind.Eof
 ]);
-(0,
+(0, chai_config_spec_1.expect)(tokens[2].text).to.equal('a message goes here');
 });
 });
 describe('location tracking', () => {
 it('tracks starting and ending locations including whitespace', () => {
 let { tokens } = Lexer_1.Lexer.scan(`sub foo()\n print "bar"\r\nend sub`, { includeWhitespace: true });
-(0,
+(0, chai_config_spec_1.expect)(tokens.map(t => t.range)).to.eql([
 vscode_languageserver_1.Range.create(0, 0, 0, 3),
 vscode_languageserver_1.Range.create(0, 3, 0, 4),
 vscode_languageserver_1.Range.create(0, 4, 0, 7),
@@ -1025,7 +1025,7 @@ describe('lexer', () => {
 });
 it('tracks starting and ending locations excluding whitespace', () => {
 let { tokens } = Lexer_1.Lexer.scan(`sub foo()\n print "bar"\r\nend sub`, { includeWhitespace: false });
-(0,
+(0, chai_config_spec_1.expect)(tokens.map(t => t.range)).to.eql([
 vscode_languageserver_1.Range.create(0, 0, 0, 3),
 vscode_languageserver_1.Range.create(0, 4, 0, 7),
 vscode_languageserver_1.Range.create(0, 7, 0, 8),
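The two location-tracking hunks differ only in whether whitespace tokens appear in the stream; either way each token carries a zero-based `range`. A sketch of reading those ranges, under the same root-export assumption as above:

// Sketch: token.range is a vscode-languageserver Range with zero-based offsets.
import { Lexer } from 'brighterscript';

const { tokens } = Lexer.scan('sub foo()', { includeWhitespace: false });
for (const token of tokens) {
    const { start, end } = token.range;
    console.log(`'${token.text}' @ ${start.line}:${start.character}-${end.line}:${end.character}`);
}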
@@ -1042,7 +1042,7 @@ describe('lexer', () => {
 describe('two word keywords', () => {
 it('supports various spacing between for each', () => {
 let { tokens } = Lexer_1.Lexer.scan('for each for each for each for\teach for\t each for \teach for \t each');
-(0,
+(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
 TokenKind_1.TokenKind.ForEach,
 TokenKind_1.TokenKind.ForEach,
 TokenKind_1.TokenKind.ForEach,
@@ -1056,7 +1056,7 @@ describe('lexer', () => {
 });
 it('detects rem when used as keyword', () => {
 let { tokens } = Lexer_1.Lexer.scan('person.rem=true');
-(0,
+(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.eql([
 TokenKind_1.TokenKind.Identifier,
 TokenKind_1.TokenKind.Dot,
 TokenKind_1.TokenKind.Identifier,
@@ -1065,7 +1065,7 @@ describe('lexer', () => {
 TokenKind_1.TokenKind.Eof
 ]);
 //verify the location of `rem`
-(0,
+(0, chai_config_spec_1.expect)(tokens.map(t => [t.range.start.character, t.range.end.character])).to.eql([
 [0, 6],
 [6, 7],
 [7, 10],
@@ -1077,12 +1077,12 @@ describe('lexer', () => {
 describe('isToken', () => {
 it('works', () => {
 let range = vscode_languageserver_1.Range.create(0, 0, 0, 2);
-(0,
-(0,
+(0, chai_config_spec_1.expect)((0, Token_1.isToken)({ kind: TokenKind_1.TokenKind.And, text: 'and', range: range })).is.true;
+(0, chai_config_spec_1.expect)((0, Token_1.isToken)({ text: 'and', range: range })).is.false;
 });
 });
 it('recognizes enum-related keywords', () => {
-(0,
+(0, chai_config_spec_1.expect)(Lexer_1.Lexer.scan('enum end enum endenum').tokens.map(x => x.kind)).to.eql([
 TokenKind_1.TokenKind.Enum,
 TokenKind_1.TokenKind.EndEnum,
 TokenKind_1.TokenKind.EndEnum,
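The `isToken` test above reads as a structural guard: presence of `kind` decides tokenhood. A sketch, assuming `isToken` is re-exported from the package root (only the compiled `Token_1.isToken` reference is visible in this diff):

// Sketch: isToken checks the token shape; `kind` is the discriminating field.
import { isToken, TokenKind } from 'brighterscript';
import { Range } from 'vscode-languageserver';

const range = Range.create(0, 0, 0, 3);
console.log(isToken({ kind: TokenKind.And, text: 'and', range })); //true
console.log(isToken({ text: 'and', range })); //false: `kind` is missing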
@@ -1090,7 +1090,7 @@ describe('lexer', () => {
 ]);
 });
 it('recognizes class-related keywords', () => {
-(0,
+(0, chai_config_spec_1.expect)(Lexer_1.Lexer.scan('class public protected private end class endclass new override').tokens.map(x => x.kind)).to.eql([
 TokenKind_1.TokenKind.Class,
 TokenKind_1.TokenKind.Public,
 TokenKind_1.TokenKind.Protected,
@@ -1105,14 +1105,14 @@ describe('lexer', () => {
 describe('whitespace', () => {
 it('preserves the exact number of whitespace characters', () => {
 let { tokens } = Lexer_1.Lexer.scan(' ', { includeWhitespace: true });
-(0,
+(0, chai_config_spec_1.expect)(tokens[0]).to.include({
 kind: TokenKind_1.TokenKind.Whitespace,
 text: ' '
 });
 });
 it('tokenizes whitespace between things', () => {
 let { tokens } = Lexer_1.Lexer.scan('sub main ( ) \n end sub', { includeWhitespace: true });
-(0,
+(0, chai_config_spec_1.expect)(tokens.map(x => x.kind)).to.eql([
 TokenKind_1.TokenKind.Sub,
 TokenKind_1.TokenKind.Whitespace,
 TokenKind_1.TokenKind.Identifier,
@@ -1130,7 +1130,7 @@ describe('lexer', () => {
 });
 it('identifies brighterscript source literals', () => {
 let { tokens } = Lexer_1.Lexer.scan('LINE_NUM SOURCE_FILE_PATH SOURCE_LINE_NUM FUNCTION_NAME SOURCE_FUNCTION_NAME SOURCE_LOCATION PKG_PATH PKG_LOCATION');
-(0,
+(0, chai_config_spec_1.expect)(tokens.map(x => x.kind)).to.eql([
 TokenKind_1.TokenKind.LineNumLiteral,
 TokenKind_1.TokenKind.SourceFilePathLiteral,
 TokenKind_1.TokenKind.SourceLineNumLiteral,
@@ -1151,11 +1151,11 @@ describe('lexer', () => {
 end sub
 `;
 const { tokens } = Lexer_1.Lexer.scan(text, { includeWhitespace: false });
-(0,
+(0, chai_config_spec_1.expect)(util_1.default.tokensToString(tokens)).to.equal(text);
 });
 it('properly detects try/catch tokens', () => {
 const { tokens } = Lexer_1.Lexer.scan(`try catch endtry end try throw`, { includeWhitespace: false });
-(0,
+(0, chai_config_spec_1.expect)(tokens.map(x => x.kind)).to.eql([
 TokenKind_1.TokenKind.Try,
 TokenKind_1.TokenKind.Catch,
 TokenKind_1.TokenKind.EndTry,
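The first hunk above asserts a round-trip property: serializing the token stream reproduces the scanned text exactly. Sketched against the public API, assuming the exported `util` singleton is what `util_1.default` compiles from:

// Sketch: scan-then-serialize round trip, mirroring the test above.
import { Lexer, util } from 'brighterscript';

const text = 'sub main()\n    print "hi"\nend sub';
const { tokens } = Lexer.scan(text, { includeWhitespace: false });
console.log(util.tokensToString(tokens) === text); //true per the round-trip test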
@@ -1172,7 +1172,7 @@ describe('lexer', () => {
 const { tokens } = Lexer_1.Lexer.scan(regexp);
 results.push(tokens[0].text);
 }
-(0,
+(0, chai_config_spec_1.expect)(results).to.eql(regexps);
 }
 it('recognizes regex literals', () => {
 testRegex(/simple/, /SimpleWithValidFlags/g, /UnknownFlags/gi, /with spaces/s, /with(parens)and[squarebraces]/,
@@ -1185,7 +1185,7 @@ describe('lexer', () => {
 const { tokens } = Lexer_1.Lexer.scan(`one = 1/2 + 1/4 + 1/4`, {
 includeWhitespace: false
 });
-(0,
+(0, chai_config_spec_1.expect)(tokens.map(x => x.kind)).to.eql([
 TokenKind_1.TokenKind.Identifier,
 TokenKind_1.TokenKind.Equal,
 TokenKind_1.TokenKind.IntegerLiteral,
@@ -1203,7 +1203,7 @@ describe('lexer', () => {
 ]);
 });
 it('only captures alphanumeric flags', () => {
-(0,
+(0, chai_config_spec_1.expect)(Lexer_1.Lexer.scan('speak(/a/)').tokens.map(x => x.kind)).to.eql([
 TokenKind_1.TokenKind.Identifier,
 TokenKind_1.TokenKind.LeftParen,
 TokenKind_1.TokenKind.RegexLiteral,
@@ -1219,11 +1219,57 @@ describe('lexer', () => {
 /\\\n/);
 });
 });
+it('detects "continue" as a keyword', () => {
+(0, chai_config_spec_1.expect)(Lexer_1.Lexer.scan('continue').tokens.map(x => x.kind)).to.eql([
+TokenKind_1.TokenKind.Continue,
+TokenKind_1.TokenKind.Eof
+]);
+});
+describe('trivia', () => {
+function stringify(tokens) {
+return tokens
+//exclude the explicit trivia tokens since they'll be included in the leading/trailing arrays
+.filter(x => !TokenKind_1.AllowedTriviaTokens.includes(x.kind))
+.flatMap(x => [...x.leadingTrivia, x])
+.map(x => x.text)
+.join('');
+}
+it('combining token text and trivia can reproduce full input', () => {
+const input = `
+function test( )
+'comment
+print alpha ' blabla
+end function 'trailing
+'trailing2
+`;
+(0, chai_config_spec_1.expect)(stringify(Lexer_1.Lexer.scan(input).tokens)).to.eql(input);
+});
+function expectTrivia(text, expected) {
+const tokens = Lexer_1.Lexer.scan(text).tokens.filter(x => !TokenKind_1.AllowedTriviaTokens.includes(x.kind));
+(0, chai_config_spec_1.expect)(tokens.map(x => {
+return {
+text: x.text,
+leadingTrivia: x.leadingTrivia.map(x => x.text)
+};
+})).to.eql(expected.map(x => (Object.assign({ leadingTrivia: [] }, x))));
+}
+it('associates trailing items on same line with the preceding token', () => {
+expectTrivia(`'leading\n` +
+`alpha = true 'trueComment\n` +
+`'eof`, [
+{ leadingTrivia: [`'leading`, `\n`], text: `alpha` },
+{ leadingTrivia: [` `], text: `=` },
+{ leadingTrivia: [` `], text: `true` },
+//EOF
+{ leadingTrivia: [` `, `'trueComment`, `\n`, `'eof`], text: `` }
+]);
+});
+});
 });
 function expectKinds(text, tokenKinds) {
 let actual = Lexer_1.Lexer.scan(text).tokens.map(x => x.kind);
 //remove the EOF token
 actual.pop();
-(0,
+(0, chai_config_spec_1.expect)(actual).to.eql(tokenKinds);
 }
 //# sourceMappingURL=Lexer.spec.js.map
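The newly added trivia suite is the substantive change in this file: comments and whitespace now attach to the next significant token as `leadingTrivia`, so token text plus trivia can rebuild the original source. A sketch of that round trip, assuming `AllowedTriviaTokens` is exported alongside `TokenKind`:

// Sketch: rebuild source text from tokens plus their leadingTrivia, mirroring
// the 'combining token text and trivia can reproduce full input' test above.
import { Lexer, AllowedTriviaTokens } from 'brighterscript';

const source = `alpha = true 'comment\n`;
const rebuilt = Lexer.scan(source).tokens
    //drop standalone trivia tokens; they reappear via each token's leadingTrivia
    .filter(t => !AllowedTriviaTokens.includes(t.kind))
    .flatMap(t => [...t.leadingTrivia, t])
    .map(t => t.text)
    .join('');
console.log(rebuilt === source); //true when trivia tracking is lossless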