brighterscript 1.0.0-alpha.23 → 1.0.0-alpha.25
This diff covers publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
- package/CHANGELOG.md +585 -218
- package/README.md +45 -139
- package/bsconfig.schema.json +41 -0
- package/dist/ActionPipeline.d.ts +10 -0
- package/dist/ActionPipeline.js +40 -0
- package/dist/ActionPipeline.js.map +1 -0
- package/dist/AstValidationSegmenter.d.ts +25 -0
- package/dist/AstValidationSegmenter.js +152 -0
- package/dist/AstValidationSegmenter.js.map +1 -0
- package/dist/BsConfig.d.ts +39 -4
- package/dist/BusyStatusTracker.d.ts +31 -0
- package/dist/BusyStatusTracker.js +83 -0
- package/dist/BusyStatusTracker.js.map +1 -0
- package/dist/Cache.js +3 -3
- package/dist/Cache.js.map +1 -1
- package/dist/CacheVerifier.d.ts +7 -0
- package/dist/CacheVerifier.js +20 -0
- package/dist/CacheVerifier.js.map +1 -0
- package/dist/CodeActionUtil.d.ts +3 -3
- package/dist/CodeActionUtil.js.map +1 -1
- package/dist/CommentFlagProcessor.d.ts +3 -2
- package/dist/CommentFlagProcessor.js +5 -4
- package/dist/CommentFlagProcessor.js.map +1 -1
- package/dist/DependencyGraph.d.ts +3 -2
- package/dist/DependencyGraph.js +11 -10
- package/dist/DependencyGraph.js.map +1 -1
- package/dist/DiagnosticCollection.js +9 -5
- package/dist/DiagnosticCollection.js.map +1 -1
- package/dist/DiagnosticFilterer.d.ts +1 -0
- package/dist/DiagnosticFilterer.js +5 -3
- package/dist/DiagnosticFilterer.js.map +1 -1
- package/dist/DiagnosticMessages.d.ts +79 -15
- package/dist/DiagnosticMessages.js +134 -21
- package/dist/DiagnosticMessages.js.map +1 -1
- package/dist/DiagnosticSeverityAdjuster.d.ts +7 -0
- package/dist/DiagnosticSeverityAdjuster.js +41 -0
- package/dist/DiagnosticSeverityAdjuster.js.map +1 -0
- package/dist/FunctionScope.d.ts +28 -0
- package/dist/FunctionScope.js +52 -0
- package/dist/FunctionScope.js.map +1 -0
- package/dist/KeyedThrottler.d.ts +3 -3
- package/dist/KeyedThrottler.js +3 -3
- package/dist/KeyedThrottler.js.map +1 -1
- package/dist/LanguageServer.d.ts +23 -11
- package/dist/LanguageServer.js +222 -87
- package/dist/LanguageServer.js.map +1 -1
- package/dist/Logger.d.ts +3 -2
- package/dist/Logger.js +11 -3
- package/dist/Logger.js.map +1 -1
- package/dist/PluginInterface.d.ts +21 -3
- package/dist/PluginInterface.js +74 -6
- package/dist/PluginInterface.js.map +1 -1
- package/dist/Program.d.ts +162 -81
- package/dist/Program.js +903 -732
- package/dist/Program.js.map +1 -1
- package/dist/ProgramBuilder.d.ts +22 -12
- package/dist/ProgramBuilder.js +132 -104
- package/dist/ProgramBuilder.js.map +1 -1
- package/dist/Scope.d.ts +95 -134
- package/dist/Scope.js +477 -551
- package/dist/Scope.js.map +1 -1
- package/dist/Stopwatch.js +1 -1
- package/dist/Stopwatch.js.map +1 -1
- package/dist/SymbolTable.d.ts +95 -29
- package/dist/SymbolTable.js +256 -102
- package/dist/SymbolTable.js.map +1 -1
- package/dist/Throttler.d.ts +12 -0
- package/dist/Throttler.js +39 -0
- package/dist/Throttler.js.map +1 -1
- package/dist/Watcher.d.ts +0 -3
- package/dist/Watcher.js +0 -3
- package/dist/Watcher.js.map +1 -1
- package/dist/XmlScope.d.ts +4 -6
- package/dist/XmlScope.js +74 -68
- package/dist/XmlScope.js.map +1 -1
- package/dist/astUtils/CachedLookups.d.ts +48 -0
- package/dist/astUtils/CachedLookups.js +323 -0
- package/dist/astUtils/CachedLookups.js.map +1 -0
- package/dist/astUtils/{AstEditor.d.ts → Editor.d.ts} +9 -5
- package/dist/astUtils/{AstEditor.js → Editor.js} +10 -4
- package/dist/astUtils/Editor.js.map +1 -0
- package/dist/astUtils/{AstEditor.spec.js → Editor.spec.js} +68 -64
- package/dist/astUtils/Editor.spec.js.map +1 -0
- package/dist/astUtils/creators.d.ts +10 -10
- package/dist/astUtils/creators.js +26 -16
- package/dist/astUtils/creators.js.map +1 -1
- package/dist/astUtils/creators.spec.js +5 -5
- package/dist/astUtils/creators.spec.js.map +1 -1
- package/dist/astUtils/reflection.d.ts +132 -100
- package/dist/astUtils/reflection.js +225 -166
- package/dist/astUtils/reflection.js.map +1 -1
- package/dist/astUtils/reflection.spec.js +208 -126
- package/dist/astUtils/reflection.spec.js.map +1 -1
- package/dist/astUtils/stackedVisitor.spec.js +12 -12
- package/dist/astUtils/stackedVisitor.spec.js.map +1 -1
- package/dist/astUtils/visitors.d.ts +54 -35
- package/dist/astUtils/visitors.js +29 -3
- package/dist/astUtils/visitors.js.map +1 -1
- package/dist/astUtils/visitors.spec.js +178 -33
- package/dist/astUtils/visitors.spec.js.map +1 -1
- package/dist/astUtils/xml.d.ts +9 -9
- package/dist/astUtils/xml.js +9 -9
- package/dist/astUtils/xml.js.map +1 -1
- package/dist/bscPlugin/BscPlugin.d.ts +12 -2
- package/dist/bscPlugin/BscPlugin.js +41 -3
- package/dist/bscPlugin/BscPlugin.js.map +1 -1
- package/dist/bscPlugin/CallExpressionInfo.d.ts +36 -0
- package/dist/bscPlugin/CallExpressionInfo.js +131 -0
- package/dist/bscPlugin/CallExpressionInfo.js.map +1 -0
- package/dist/bscPlugin/FileWriter.d.ts +6 -0
- package/dist/bscPlugin/FileWriter.js +24 -0
- package/dist/bscPlugin/FileWriter.js.map +1 -0
- package/dist/bscPlugin/SignatureHelpUtil.d.ts +10 -0
- package/dist/bscPlugin/SignatureHelpUtil.js +135 -0
- package/dist/bscPlugin/SignatureHelpUtil.js.map +1 -0
- package/dist/bscPlugin/codeActions/CodeActionsProcessor.d.ts +1 -1
- package/dist/bscPlugin/codeActions/CodeActionsProcessor.js +21 -12
- package/dist/bscPlugin/codeActions/CodeActionsProcessor.js.map +1 -1
- package/dist/bscPlugin/codeActions/CodeActionsProcessor.spec.js +86 -12
- package/dist/bscPlugin/codeActions/CodeActionsProcessor.spec.js.map +1 -1
- package/dist/bscPlugin/completions/CompletionsProcessor.d.ts +57 -0
- package/dist/bscPlugin/completions/CompletionsProcessor.js +544 -0
- package/dist/bscPlugin/completions/CompletionsProcessor.js.map +1 -0
- package/dist/bscPlugin/completions/CompletionsProcessor.spec.js +1909 -0
- package/dist/bscPlugin/completions/CompletionsProcessor.spec.js.map +1 -0
- package/dist/bscPlugin/fileProviders/FileProvider.d.ts +9 -0
- package/dist/bscPlugin/fileProviders/FileProvider.js +51 -0
- package/dist/bscPlugin/fileProviders/FileProvider.js.map +1 -0
- package/dist/bscPlugin/hover/HoverProcessor.d.ts +17 -0
- package/dist/bscPlugin/hover/HoverProcessor.js +188 -0
- package/dist/bscPlugin/hover/HoverProcessor.js.map +1 -0
- package/dist/bscPlugin/hover/HoverProcessor.spec.js +513 -0
- package/dist/bscPlugin/hover/HoverProcessor.spec.js.map +1 -0
- package/dist/bscPlugin/semanticTokens/BrsFileSemanticTokensProcessor.d.ts +3 -1
- package/dist/bscPlugin/semanticTokens/BrsFileSemanticTokensProcessor.js +102 -29
- package/dist/bscPlugin/semanticTokens/BrsFileSemanticTokensProcessor.js.map +1 -1
- package/dist/bscPlugin/semanticTokens/BrsFileSemanticTokensProcessor.spec.js +167 -6
- package/dist/bscPlugin/semanticTokens/BrsFileSemanticTokensProcessor.spec.js.map +1 -1
- package/dist/bscPlugin/serialize/BslibInjector.spec.d.ts +1 -0
- package/dist/bscPlugin/serialize/BslibInjector.spec.js +19 -0
- package/dist/bscPlugin/serialize/BslibInjector.spec.js.map +1 -0
- package/dist/bscPlugin/serialize/BslibManager.d.ts +9 -0
- package/dist/bscPlugin/serialize/BslibManager.js +40 -0
- package/dist/bscPlugin/serialize/BslibManager.js.map +1 -0
- package/dist/bscPlugin/serialize/FileSerializer.d.ts +9 -0
- package/dist/bscPlugin/serialize/FileSerializer.js +72 -0
- package/dist/bscPlugin/serialize/FileSerializer.js.map +1 -0
- package/dist/bscPlugin/transpile/BrsFileTranspileProcessor.d.ts +16 -0
- package/dist/bscPlugin/transpile/BrsFileTranspileProcessor.js +123 -0
- package/dist/bscPlugin/transpile/BrsFileTranspileProcessor.js.map +1 -0
- package/dist/bscPlugin/transpile/BrsFileTranspileProcessor.spec.d.ts +1 -0
- package/dist/bscPlugin/transpile/BrsFileTranspileProcessor.spec.js +41 -0
- package/dist/bscPlugin/transpile/BrsFileTranspileProcessor.spec.js.map +1 -0
- package/dist/bscPlugin/transpile/XmlFilePreTranspileProcessor.d.ts +12 -0
- package/dist/bscPlugin/transpile/XmlFilePreTranspileProcessor.js +99 -0
- package/dist/bscPlugin/transpile/XmlFilePreTranspileProcessor.js.map +1 -0
- package/dist/bscPlugin/validation/BrsFileValidator.d.ts +22 -1
- package/dist/bscPlugin/validation/BrsFileValidator.js +316 -29
- package/dist/bscPlugin/validation/BrsFileValidator.js.map +1 -1
- package/dist/bscPlugin/validation/BrsFileValidator.spec.d.ts +1 -0
- package/dist/bscPlugin/validation/BrsFileValidator.spec.js +264 -0
- package/dist/bscPlugin/validation/BrsFileValidator.spec.js.map +1 -0
- package/dist/bscPlugin/validation/ProgramValidator.d.ts +10 -0
- package/dist/bscPlugin/validation/ProgramValidator.js +32 -0
- package/dist/bscPlugin/validation/ProgramValidator.js.map +1 -0
- package/dist/bscPlugin/validation/ScopeValidator.d.ts +56 -8
- package/dist/bscPlugin/validation/ScopeValidator.js +514 -116
- package/dist/bscPlugin/validation/ScopeValidator.js.map +1 -1
- package/dist/bscPlugin/validation/ScopeValidator.spec.d.ts +1 -0
- package/dist/bscPlugin/validation/ScopeValidator.spec.js +2454 -0
- package/dist/bscPlugin/validation/ScopeValidator.spec.js.map +1 -0
- package/dist/bscPlugin/validation/XmlFileValidator.d.ts +8 -0
- package/dist/bscPlugin/validation/XmlFileValidator.js +44 -0
- package/dist/bscPlugin/validation/XmlFileValidator.js.map +1 -0
- package/dist/cli.js +107 -8
- package/dist/cli.js.map +1 -1
- package/dist/deferred.d.ts +3 -3
- package/dist/deferred.js.map +1 -1
- package/dist/diagnosticUtils.d.ts +8 -2
- package/dist/diagnosticUtils.js +47 -17
- package/dist/diagnosticUtils.js.map +1 -1
- package/dist/examples/plugins/removePrint.js +8 -10
- package/dist/examples/plugins/removePrint.js.map +1 -1
- package/dist/files/AssetFile.d.ts +26 -0
- package/dist/files/AssetFile.js +26 -0
- package/dist/files/AssetFile.js.map +1 -0
- package/dist/files/BrsFile.Class.spec.js +529 -486
- package/dist/files/BrsFile.Class.spec.js.map +1 -1
- package/dist/files/BrsFile.d.ts +124 -112
- package/dist/files/BrsFile.js +819 -1131
- package/dist/files/BrsFile.js.map +1 -1
- package/dist/files/BrsFile.spec.js +1869 -1277
- package/dist/files/BrsFile.spec.js.map +1 -1
- package/dist/files/BscFile.d.ts +104 -0
- package/dist/files/BscFile.js +16 -0
- package/dist/files/BscFile.js.map +1 -0
- package/dist/files/Factory.d.ts +25 -0
- package/dist/files/Factory.js +22 -0
- package/dist/files/Factory.js.map +1 -0
- package/dist/files/LazyFileData.d.ts +20 -0
- package/dist/files/LazyFileData.js +54 -0
- package/dist/files/LazyFileData.js.map +1 -0
- package/dist/files/LazyFileData.spec.d.ts +1 -0
- package/dist/files/LazyFileData.spec.js +27 -0
- package/dist/files/LazyFileData.spec.js.map +1 -0
- package/dist/files/XmlFile.d.ts +70 -32
- package/dist/files/XmlFile.js +106 -117
- package/dist/files/XmlFile.js.map +1 -1
- package/dist/files/XmlFile.spec.js +325 -262
- package/dist/files/XmlFile.spec.js.map +1 -1
- package/dist/files/tests/imports.spec.js +49 -41
- package/dist/files/tests/imports.spec.js.map +1 -1
- package/dist/files/tests/optionalChaning.spec.js +104 -40
- package/dist/files/tests/optionalChaning.spec.js.map +1 -1
- package/dist/globalCallables.js +16 -18
- package/dist/globalCallables.js.map +1 -1
- package/dist/index.d.ts +13 -2
- package/dist/index.js +15 -2
- package/dist/index.js.map +1 -1
- package/dist/interfaces.d.ts +440 -150
- package/dist/interfaces.js +27 -0
- package/dist/interfaces.js.map +1 -1
- package/dist/lexer/Character.spec.js +5 -5
- package/dist/lexer/Character.spec.js.map +1 -1
- package/dist/lexer/Lexer.d.ts +12 -5
- package/dist/lexer/Lexer.js +28 -13
- package/dist/lexer/Lexer.js.map +1 -1
- package/dist/lexer/Lexer.spec.js +187 -134
- package/dist/lexer/Lexer.spec.js.map +1 -1
- package/dist/lexer/Token.d.ts +9 -1
- package/dist/lexer/Token.js +9 -1
- package/dist/lexer/Token.js.map +1 -1
- package/dist/lexer/TokenKind.d.ts +9 -0
- package/dist/lexer/TokenKind.js +30 -5
- package/dist/lexer/TokenKind.js.map +1 -1
- package/dist/parser/AstNode.d.ts +162 -0
- package/dist/parser/AstNode.js +225 -0
- package/dist/parser/AstNode.js.map +1 -0
- package/dist/parser/AstNode.spec.d.ts +1 -0
- package/dist/parser/AstNode.spec.js +165 -0
- package/dist/parser/AstNode.spec.js.map +1 -0
- package/dist/parser/BrsTranspileState.d.ts +4 -7
- package/dist/parser/BrsTranspileState.js +4 -12
- package/dist/parser/BrsTranspileState.js.map +1 -1
- package/dist/parser/Expression.d.ts +126 -167
- package/dist/parser/Expression.js +524 -394
- package/dist/parser/Expression.js.map +1 -1
- package/dist/parser/Parser.Class.spec.js +152 -146
- package/dist/parser/Parser.Class.spec.js.map +1 -1
- package/dist/parser/Parser.d.ts +45 -196
- package/dist/parser/Parser.js +470 -926
- package/dist/parser/Parser.js.map +1 -1
- package/dist/parser/Parser.spec.d.ts +3 -1
- package/dist/parser/Parser.spec.js +1034 -805
- package/dist/parser/Parser.spec.js.map +1 -1
- package/dist/parser/SGParser.d.ts +9 -8
- package/dist/parser/SGParser.js +10 -8
- package/dist/parser/SGParser.js.map +1 -1
- package/dist/parser/SGParser.spec.js +27 -38
- package/dist/parser/SGParser.spec.js.map +1 -1
- package/dist/parser/SGTypes.d.ts +98 -35
- package/dist/parser/SGTypes.js +169 -99
- package/dist/parser/SGTypes.js.map +1 -1
- package/dist/parser/Statement.d.ts +208 -122
- package/dist/parser/Statement.js +599 -364
- package/dist/parser/Statement.js.map +1 -1
- package/dist/parser/Statement.spec.js +45 -21
- package/dist/parser/Statement.spec.js.map +1 -1
- package/dist/parser/TranspileState.d.ts +1 -1
- package/dist/parser/TranspileState.js +7 -12
- package/dist/parser/TranspileState.js.map +1 -1
- package/dist/parser/tests/Parser.spec.js +3 -2
- package/dist/parser/tests/Parser.spec.js.map +1 -1
- package/dist/parser/tests/controlFlow/For.spec.js +33 -23
- package/dist/parser/tests/controlFlow/For.spec.js.map +1 -1
- package/dist/parser/tests/controlFlow/ForEach.spec.js +25 -20
- package/dist/parser/tests/controlFlow/ForEach.spec.js.map +1 -1
- package/dist/parser/tests/controlFlow/If.spec.js +96 -94
- package/dist/parser/tests/controlFlow/If.spec.js.map +1 -1
- package/dist/parser/tests/controlFlow/While.spec.js +22 -16
- package/dist/parser/tests/controlFlow/While.spec.js.map +1 -1
- package/dist/parser/tests/expression/Additive.spec.js +8 -8
- package/dist/parser/tests/expression/Additive.spec.js.map +1 -1
- package/dist/parser/tests/expression/ArrayLiterals.spec.js +58 -21
- package/dist/parser/tests/expression/ArrayLiterals.spec.js.map +1 -1
- package/dist/parser/tests/expression/AssociativeArrayLiterals.spec.js +61 -20
- package/dist/parser/tests/expression/AssociativeArrayLiterals.spec.js.map +1 -1
- package/dist/parser/tests/expression/Boolean.spec.js +8 -8
- package/dist/parser/tests/expression/Boolean.spec.js.map +1 -1
- package/dist/parser/tests/expression/Call.spec.js +129 -21
- package/dist/parser/tests/expression/Call.spec.js.map +1 -1
- package/dist/parser/tests/expression/Exponential.spec.js +5 -5
- package/dist/parser/tests/expression/Exponential.spec.js.map +1 -1
- package/dist/parser/tests/expression/Function.spec.js +36 -36
- package/dist/parser/tests/expression/Function.spec.js.map +1 -1
- package/dist/parser/tests/expression/Indexing.spec.js +67 -22
- package/dist/parser/tests/expression/Indexing.spec.js.map +1 -1
- package/dist/parser/tests/expression/Multiplicative.spec.js +9 -9
- package/dist/parser/tests/expression/Multiplicative.spec.js.map +1 -1
- package/dist/parser/tests/expression/NullCoalescenceExpression.spec.js +123 -81
- package/dist/parser/tests/expression/NullCoalescenceExpression.spec.js.map +1 -1
- package/dist/parser/tests/expression/PrefixUnary.spec.js +12 -12
- package/dist/parser/tests/expression/PrefixUnary.spec.js.map +1 -1
- package/dist/parser/tests/expression/Primary.spec.js +12 -12
- package/dist/parser/tests/expression/Primary.spec.js.map +1 -1
- package/dist/parser/tests/expression/RegexLiteralExpression.spec.js +10 -10
- package/dist/parser/tests/expression/RegexLiteralExpression.spec.js.map +1 -1
- package/dist/parser/tests/expression/Relational.spec.js +13 -13
- package/dist/parser/tests/expression/Relational.spec.js.map +1 -1
- package/dist/parser/tests/expression/SourceLiteralExpression.spec.js +24 -24
- package/dist/parser/tests/expression/SourceLiteralExpression.spec.js.map +1 -1
- package/dist/parser/tests/expression/TemplateStringExpression.spec.js +221 -81
- package/dist/parser/tests/expression/TemplateStringExpression.spec.js.map +1 -1
- package/dist/parser/tests/expression/TernaryExpression.spec.js +287 -105
- package/dist/parser/tests/expression/TernaryExpression.spec.js.map +1 -1
- package/dist/parser/tests/expression/TypeExpression.spec.d.ts +1 -0
- package/dist/parser/tests/expression/TypeExpression.spec.js +127 -0
- package/dist/parser/tests/expression/TypeExpression.spec.js.map +1 -0
- package/dist/parser/tests/expression/UnaryExpression.spec.d.ts +1 -0
- package/dist/parser/tests/expression/UnaryExpression.spec.js +52 -0
- package/dist/parser/tests/expression/UnaryExpression.spec.js.map +1 -0
- package/dist/parser/tests/statement/AssignmentOperators.spec.js +15 -15
- package/dist/parser/tests/statement/AssignmentOperators.spec.js.map +1 -1
- package/dist/parser/tests/statement/ConstStatement.spec.d.ts +1 -0
- package/dist/parser/tests/statement/ConstStatement.spec.js +262 -0
- package/dist/parser/tests/statement/ConstStatement.spec.js.map +1 -0
- package/dist/parser/tests/statement/Continue.spec.d.ts +1 -0
- package/dist/parser/tests/statement/Continue.spec.js +119 -0
- package/dist/parser/tests/statement/Continue.spec.js.map +1 -0
- package/dist/parser/tests/statement/Declaration.spec.js +19 -19
- package/dist/parser/tests/statement/Declaration.spec.js.map +1 -1
- package/dist/parser/tests/statement/Dim.spec.js +22 -22
- package/dist/parser/tests/statement/Dim.spec.js.map +1 -1
- package/dist/parser/tests/statement/Enum.spec.js +111 -300
- package/dist/parser/tests/statement/Enum.spec.js.map +1 -1
- package/dist/parser/tests/statement/For.spec.js +9 -10
- package/dist/parser/tests/statement/For.spec.js.map +1 -1
- package/dist/parser/tests/statement/ForEach.spec.js +8 -9
- package/dist/parser/tests/statement/ForEach.spec.js.map +1 -1
- package/dist/parser/tests/statement/Function.spec.js +44 -35
- package/dist/parser/tests/statement/Function.spec.js.map +1 -1
- package/dist/parser/tests/statement/Goto.spec.js +5 -5
- package/dist/parser/tests/statement/Goto.spec.js.map +1 -1
- package/dist/parser/tests/statement/Increment.spec.js +20 -20
- package/dist/parser/tests/statement/Increment.spec.js.map +1 -1
- package/dist/parser/tests/statement/InterfaceStatement.spec.js +30 -196
- package/dist/parser/tests/statement/InterfaceStatement.spec.js.map +1 -1
- package/dist/parser/tests/statement/LibraryStatement.spec.js +11 -11
- package/dist/parser/tests/statement/LibraryStatement.spec.js.map +1 -1
- package/dist/parser/tests/statement/Misc.spec.js +16 -78
- package/dist/parser/tests/statement/Misc.spec.js.map +1 -1
- package/dist/parser/tests/statement/PrintStatement.spec.js +107 -90
- package/dist/parser/tests/statement/PrintStatement.spec.js.map +1 -1
- package/dist/parser/tests/statement/ReturnStatement.spec.js +14 -12
- package/dist/parser/tests/statement/ReturnStatement.spec.js.map +1 -1
- package/dist/parser/tests/statement/Set.spec.js +48 -35
- package/dist/parser/tests/statement/Set.spec.js.map +1 -1
- package/dist/parser/tests/statement/Stop.spec.js +6 -6
- package/dist/parser/tests/statement/Stop.spec.js.map +1 -1
- package/dist/parser/tests/statement/Throw.spec.js +6 -6
- package/dist/parser/tests/statement/Throw.spec.js.map +1 -1
- package/dist/parser/tests/statement/TryCatch.spec.js +18 -16
- package/dist/parser/tests/statement/TryCatch.spec.js.map +1 -1
- package/dist/preprocessor/Manifest.d.ts +1 -1
- package/dist/preprocessor/Manifest.js +3 -3
- package/dist/preprocessor/Manifest.js.map +1 -1
- package/dist/preprocessor/Manifest.spec.js +8 -8
- package/dist/preprocessor/Manifest.spec.js.map +1 -1
- package/dist/preprocessor/Preprocessor.d.ts +5 -6
- package/dist/preprocessor/Preprocessor.js +15 -11
- package/dist/preprocessor/Preprocessor.js.map +1 -1
- package/dist/preprocessor/Preprocessor.spec.js +25 -25
- package/dist/preprocessor/Preprocessor.spec.js.map +1 -1
- package/dist/preprocessor/PreprocessorParser.d.ts +1 -1
- package/dist/preprocessor/PreprocessorParser.js +7 -1
- package/dist/preprocessor/PreprocessorParser.js.map +1 -1
- package/dist/preprocessor/PreprocessorParser.spec.js +13 -13
- package/dist/preprocessor/PreprocessorParser.spec.js.map +1 -1
- package/dist/roku-types/data.json +6544 -10519
- package/dist/roku-types/index.d.ts +662 -1934
- package/dist/types/ArrayType.d.ts +10 -9
- package/dist/types/ArrayType.js +65 -60
- package/dist/types/ArrayType.js.map +1 -1
- package/dist/types/ArrayType.spec.js +36 -68
- package/dist/types/ArrayType.spec.js.map +1 -1
- package/dist/types/AssociativeArrayType.d.ts +11 -0
- package/dist/types/AssociativeArrayType.js +52 -0
- package/dist/types/AssociativeArrayType.js.map +1 -0
- package/dist/types/BaseFunctionType.d.ts +9 -0
- package/dist/types/BaseFunctionType.js +25 -0
- package/dist/types/BaseFunctionType.js.map +1 -0
- package/dist/types/BooleanType.d.ts +8 -5
- package/dist/types/BooleanType.js +14 -7
- package/dist/types/BooleanType.js.map +1 -1
- package/dist/types/BooleanType.spec.js +10 -6
- package/dist/types/BooleanType.spec.js.map +1 -1
- package/dist/types/BscType.d.ts +32 -21
- package/dist/types/BscType.js +118 -21
- package/dist/types/BscType.js.map +1 -1
- package/dist/types/BscTypeKind.d.ts +25 -0
- package/dist/types/BscTypeKind.js +30 -0
- package/dist/types/BscTypeKind.js.map +1 -0
- package/dist/types/BuiltInInterfaceAdder.d.ts +23 -0
- package/dist/types/BuiltInInterfaceAdder.js +164 -0
- package/dist/types/BuiltInInterfaceAdder.js.map +1 -0
- package/dist/types/BuiltInInterfaceAdder.spec.d.ts +1 -0
- package/dist/types/BuiltInInterfaceAdder.spec.js +116 -0
- package/dist/types/BuiltInInterfaceAdder.spec.js.map +1 -0
- package/dist/types/ClassType.d.ts +17 -0
- package/dist/types/ClassType.js +58 -0
- package/dist/types/ClassType.js.map +1 -0
- package/dist/types/ClassType.spec.d.ts +1 -0
- package/dist/types/ClassType.spec.js +77 -0
- package/dist/types/ClassType.spec.js.map +1 -0
- package/dist/types/ComponentType.d.ts +26 -0
- package/dist/types/ComponentType.js +83 -0
- package/dist/types/ComponentType.js.map +1 -0
- package/dist/types/DoubleType.d.ts +8 -5
- package/dist/types/DoubleType.js +18 -16
- package/dist/types/DoubleType.js.map +1 -1
- package/dist/types/DoubleType.spec.js +12 -6
- package/dist/types/DoubleType.spec.js.map +1 -1
- package/dist/types/DynamicType.d.ts +10 -5
- package/dist/types/DynamicType.js +16 -4
- package/dist/types/DynamicType.js.map +1 -1
- package/dist/types/DynamicType.spec.js +16 -5
- package/dist/types/DynamicType.spec.js.map +1 -1
- package/dist/types/EnumType.d.ts +30 -12
- package/dist/types/EnumType.js +43 -17
- package/dist/types/EnumType.js.map +1 -1
- package/dist/types/EnumType.spec.d.ts +1 -0
- package/dist/types/EnumType.spec.js +33 -0
- package/dist/types/EnumType.spec.js.map +1 -0
- package/dist/types/FloatType.d.ts +8 -5
- package/dist/types/FloatType.js +18 -16
- package/dist/types/FloatType.js.map +1 -1
- package/dist/types/FloatType.spec.js +4 -6
- package/dist/types/FloatType.spec.js.map +1 -1
- package/dist/types/FunctionType.d.ts +13 -8
- package/dist/types/FunctionType.js +30 -14
- package/dist/types/FunctionType.js.map +1 -1
- package/dist/types/InheritableType.d.ts +28 -0
- package/dist/types/InheritableType.js +152 -0
- package/dist/types/InheritableType.js.map +1 -0
- package/dist/types/IntegerType.d.ts +8 -5
- package/dist/types/IntegerType.js +18 -16
- package/dist/types/IntegerType.js.map +1 -1
- package/dist/types/IntegerType.spec.js +8 -6
- package/dist/types/IntegerType.spec.js.map +1 -1
- package/dist/types/InterfaceType.d.ts +12 -13
- package/dist/types/InterfaceType.js +20 -48
- package/dist/types/InterfaceType.js.map +1 -1
- package/dist/types/InterfaceType.spec.js +90 -56
- package/dist/types/InterfaceType.spec.js.map +1 -1
- package/dist/types/InvalidType.d.ts +7 -5
- package/dist/types/InvalidType.js +13 -7
- package/dist/types/InvalidType.js.map +1 -1
- package/dist/types/InvalidType.spec.js +8 -6
- package/dist/types/InvalidType.spec.js.map +1 -1
- package/dist/types/LongIntegerType.d.ts +8 -5
- package/dist/types/LongIntegerType.js +17 -15
- package/dist/types/LongIntegerType.js.map +1 -1
- package/dist/types/LongIntegerType.spec.js +10 -6
- package/dist/types/LongIntegerType.spec.js.map +1 -1
- package/dist/types/NamespaceType.d.ts +12 -0
- package/dist/types/NamespaceType.js +28 -0
- package/dist/types/NamespaceType.js.map +1 -0
- package/dist/types/ObjectType.d.ts +9 -8
- package/dist/types/ObjectType.js +21 -11
- package/dist/types/ObjectType.js.map +1 -1
- package/dist/types/ObjectType.spec.js +3 -3
- package/dist/types/ObjectType.spec.js.map +1 -1
- package/dist/types/ReferenceType.d.ts +63 -0
- package/dist/types/ReferenceType.js +423 -0
- package/dist/types/ReferenceType.js.map +1 -0
- package/dist/types/ReferenceType.spec.d.ts +1 -0
- package/dist/types/ReferenceType.spec.js +137 -0
- package/dist/types/ReferenceType.spec.js.map +1 -0
- package/dist/types/StringType.d.ts +11 -5
- package/dist/types/StringType.js +18 -7
- package/dist/types/StringType.js.map +1 -1
- package/dist/types/StringType.spec.js +3 -5
- package/dist/types/StringType.spec.js.map +1 -1
- package/dist/types/TypedFunctionType.d.ts +22 -17
- package/dist/types/TypedFunctionType.js +78 -60
- package/dist/types/TypedFunctionType.js.map +1 -1
- package/dist/types/TypedFunctionType.spec.js +105 -20
- package/dist/types/TypedFunctionType.spec.js.map +1 -1
- package/dist/types/UninitializedType.d.ts +8 -6
- package/dist/types/UninitializedType.js +13 -7
- package/dist/types/UninitializedType.js.map +1 -1
- package/dist/types/UnionType.d.ts +20 -0
- package/dist/types/UnionType.js +123 -0
- package/dist/types/UnionType.js.map +1 -0
- package/dist/types/UnionType.spec.d.ts +1 -0
- package/dist/types/UnionType.spec.js +130 -0
- package/dist/types/UnionType.spec.js.map +1 -0
- package/dist/types/VoidType.d.ts +8 -5
- package/dist/types/VoidType.js +14 -7
- package/dist/types/VoidType.js.map +1 -1
- package/dist/types/VoidType.spec.js +3 -3
- package/dist/types/VoidType.spec.js.map +1 -1
- package/dist/types/helper.spec.d.ts +1 -0
- package/dist/types/helper.spec.js +145 -0
- package/dist/types/helper.spec.js.map +1 -0
- package/dist/types/helpers.d.ts +19 -37
- package/dist/types/helpers.js +159 -99
- package/dist/types/helpers.js.map +1 -1
- package/dist/types/index.d.ts +22 -0
- package/dist/types/index.js +39 -0
- package/dist/types/index.js.map +1 -0
- package/dist/util.d.ts +167 -131
- package/dist/util.js +890 -350
- package/dist/util.js.map +1 -1
- package/dist/validators/ClassValidator.d.ts +7 -25
- package/dist/validators/ClassValidator.js +103 -194
- package/dist/validators/ClassValidator.js.map +1 -1
- package/package.json +165 -149
- package/dist/astUtils/AstEditor.js.map +0 -1
- package/dist/astUtils/AstEditor.spec.js.map +0 -1
- package/dist/bscPlugin/transpile/BrsFilePreTranspileProcessor.d.ts +0 -8
- package/dist/bscPlugin/transpile/BrsFilePreTranspileProcessor.js +0 -40
- package/dist/bscPlugin/transpile/BrsFilePreTranspileProcessor.js.map +0 -1
- package/dist/bscPlugin/transpile/BrsFilePreTranspileProcessor.spec.js +0 -32
- package/dist/bscPlugin/transpile/BrsFilePreTranspileProcessor.spec.js.map +0 -1
- package/dist/parser/SGTypes.spec.js +0 -351
- package/dist/parser/SGTypes.spec.js.map +0 -1
- package/dist/types/CustomType.d.ts +0 -12
- package/dist/types/CustomType.js +0 -44
- package/dist/types/CustomType.js.map +0 -1
- package/dist/types/LazyType.d.ts +0 -16
- package/dist/types/LazyType.js +0 -44
- package/dist/types/LazyType.js.map +0 -1
- /package/dist/astUtils/{AstEditor.spec.d.ts → Editor.spec.d.ts} +0 -0
- /package/dist/bscPlugin/{transpile/BrsFilePreTranspileProcessor.spec.d.ts → completions/CompletionsProcessor.spec.d.ts} +0 -0
- /package/dist/{parser/SGTypes.spec.d.ts → bscPlugin/hover/HoverProcessor.spec.d.ts} +0 -0
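In the dist/lexer/Lexer.spec.js diff below, two changes stand out: the compiled spec now imports `expect` from a local `../chai-config.spec` helper rather than from chai directly, and a new test asserts that the lexer recognizes `const` as its own keyword token. A minimal sketch of exercising that same lexer behavior from consumer code might look like this (the deep dist/ require paths are an assumption based on the file listing above, not documented entry points):

    // Assumed require paths: they simply mirror the dist/ layout shown in this diff.
    const { Lexer } = require('brighterscript/dist/lexer/Lexer');
    const { TokenKind } = require('brighterscript/dist/lexer/TokenKind');

    // Scan a lone `const` keyword, mirroring the new spec added below.
    const { tokens } = Lexer.scan('const');

    // Per the new test, the kinds should be [TokenKind.Const, TokenKind.Eof].
    console.log(tokens.map(t => t.kind));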
package/dist/lexer/Lexer.spec.js
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
3
|
/* eslint no-template-curly-in-string: 0 */
|
|
4
|
-
const
|
|
4
|
+
const chai_config_spec_1 = require("../chai-config.spec");
|
|
5
5
|
const TokenKind_1 = require("./TokenKind");
|
|
6
6
|
const Lexer_1 = require("./Lexer");
|
|
7
7
|
const Token_1 = require("./Token");
|
|
@@ -9,9 +9,16 @@ const Parser_spec_1 = require("../parser/Parser.spec");
|
|
|
9
9
|
const vscode_languageserver_1 = require("vscode-languageserver");
|
|
10
10
|
const util_1 = require("../util");
|
|
11
11
|
describe('lexer', () => {
|
|
12
|
+
it('recognizes the `const` keyword', () => {
|
|
13
|
+
let { tokens } = Lexer_1.Lexer.scan('const');
|
|
14
|
+
(0, chai_config_spec_1.expect)(tokens.map(x => x.kind)).to.eql([
|
|
15
|
+
TokenKind_1.TokenKind.Const,
|
|
16
|
+
TokenKind_1.TokenKind.Eof
|
|
17
|
+
]);
|
|
18
|
+
});
|
|
12
19
|
it('recognizes namespace keywords', () => {
|
|
13
20
|
let { tokens } = Lexer_1.Lexer.scan('namespace end namespace endnamespace end namespace');
|
|
14
|
-
(0,
|
|
21
|
+
(0, chai_config_spec_1.expect)(tokens.map(x => x.kind)).to.eql([
|
|
15
22
|
TokenKind_1.TokenKind.Namespace,
|
|
16
23
|
TokenKind_1.TokenKind.EndNamespace,
|
|
17
24
|
TokenKind_1.TokenKind.EndNamespace,
|
|
@@ -77,39 +84,39 @@ describe('lexer', () => {
|
|
|
77
84
|
});
|
|
78
85
|
it('recognizes the callfunc operator', () => {
|
|
79
86
|
let { tokens } = Lexer_1.Lexer.scan('@.');
|
|
80
|
-
(0,
|
|
87
|
+
(0, chai_config_spec_1.expect)(tokens[0].kind).to.equal(TokenKind_1.TokenKind.Callfunc);
|
|
81
88
|
});
|
|
82
89
|
it('recognizes the import token', () => {
|
|
83
90
|
let { tokens } = Lexer_1.Lexer.scan('import');
|
|
84
|
-
(0,
|
|
91
|
+
(0, chai_config_spec_1.expect)(tokens[0].kind).to.eql(TokenKind_1.TokenKind.Import);
|
|
85
92
|
});
|
|
86
93
|
it('recognizes library token', () => {
|
|
87
94
|
let { tokens } = Lexer_1.Lexer.scan('library');
|
|
88
|
-
(0,
|
|
95
|
+
(0, chai_config_spec_1.expect)(tokens[0].kind).to.eql(TokenKind_1.TokenKind.Library);
|
|
89
96
|
});
|
|
90
97
|
it('produces an at symbol token', () => {
|
|
91
98
|
let { tokens } = Lexer_1.Lexer.scan('@');
|
|
92
|
-
(0,
|
|
99
|
+
(0, chai_config_spec_1.expect)(tokens[0].kind).to.equal(TokenKind_1.TokenKind.At);
|
|
93
100
|
});
|
|
94
101
|
it('produces a semicolon token', () => {
|
|
95
102
|
let { tokens } = Lexer_1.Lexer.scan(';');
|
|
96
|
-
(0,
|
|
103
|
+
(0, chai_config_spec_1.expect)(tokens[0].kind).to.equal(TokenKind_1.TokenKind.Semicolon);
|
|
97
104
|
});
|
|
98
105
|
it('emits error on unknown character type', () => {
|
|
99
106
|
let { diagnostics } = Lexer_1.Lexer.scan('\0');
|
|
100
|
-
(0,
|
|
107
|
+
(0, chai_config_spec_1.expect)(diagnostics).to.be.lengthOf(1);
|
|
101
108
|
});
|
|
102
109
|
it('includes an end-of-file marker', () => {
|
|
103
110
|
let { tokens } = Lexer_1.Lexer.scan('');
|
|
104
|
-
(0,
|
|
111
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Eof]);
|
|
105
112
|
});
|
|
106
113
|
it('ignores tabs and spaces', () => {
|
|
107
114
|
let { tokens } = Lexer_1.Lexer.scan('\t\t \t \t');
|
|
108
|
-
(0,
|
|
115
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Eof]);
|
|
109
116
|
});
|
|
110
117
|
it('retains every single newline', () => {
|
|
111
118
|
let { tokens } = Lexer_1.Lexer.scan('\n\n\'foo\n\n\nprint 2\n\n');
|
|
112
|
-
(0,
|
|
119
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
|
|
113
120
|
TokenKind_1.TokenKind.Newline,
|
|
114
121
|
TokenKind_1.TokenKind.Newline,
|
|
115
122
|
TokenKind_1.TokenKind.Comment,
|
|
@@ -131,7 +138,7 @@ describe('lexer', () => {
|
|
|
131
138
|
' print 0\r\n' +
|
|
132
139
|
' end if\r\n' +
|
|
133
140
|
'end function\r\n').tokens.map(x => x.kind);
|
|
134
|
-
(0,
|
|
141
|
+
(0, chai_config_spec_1.expect)(kinds).to.eql([
|
|
135
142
|
TokenKind_1.TokenKind.Function, TokenKind_1.TokenKind.Identifier, TokenKind_1.TokenKind.LeftParen, TokenKind_1.TokenKind.RightParen, TokenKind_1.TokenKind.As, TokenKind_1.TokenKind.String, TokenKind_1.TokenKind.Newline,
|
|
136
143
|
TokenKind_1.TokenKind.If, TokenKind_1.TokenKind.True, TokenKind_1.TokenKind.Then, TokenKind_1.TokenKind.Newline,
|
|
137
144
|
TokenKind_1.TokenKind.Print, TokenKind_1.TokenKind.IntegerLiteral, TokenKind_1.TokenKind.Newline,
|
|
@@ -164,20 +171,20 @@ describe('lexer', () => {
|
|
|
164
171
|
[2, 7, 2, 8] //Eof
|
|
165
172
|
];
|
|
166
173
|
/*eslint-enable*/
|
|
167
|
-
(0,
|
|
168
|
-
(0,
|
|
174
|
+
(0, chai_config_spec_1.expect)(withoutWhitespace, 'Without whitespace').to.eql(expectedLocations);
|
|
175
|
+
(0, chai_config_spec_1.expect)(withWhitespace, 'With whitespace').to.eql(expectedLocations);
|
|
169
176
|
});
|
|
170
177
|
it('retains original line endings', () => {
|
|
171
178
|
let { tokens } = Lexer_1.Lexer.scan('print "hello"\r\nprint "world"\n');
|
|
172
|
-
(0,
|
|
179
|
+
(0, chai_config_spec_1.expect)([
|
|
173
180
|
tokens[2].text.charCodeAt(0),
|
|
174
181
|
tokens[2].text.charCodeAt(1)
|
|
175
182
|
], 'should contain \\r\\n').to.eql([13, 10]);
|
|
176
|
-
(0,
|
|
183
|
+
(0, chai_config_spec_1.expect)(tokens[5].text.charCodeAt(0), 'should contain \\r\\n').to.eql(10);
|
|
177
184
|
});
|
|
178
185
|
it('correctly splits the elseif token', () => {
|
|
179
186
|
let { tokens } = Lexer_1.Lexer.scan('else if elseif else if');
|
|
180
|
-
(0,
|
|
187
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
|
|
181
188
|
TokenKind_1.TokenKind.Else,
|
|
182
189
|
TokenKind_1.TokenKind.If,
|
|
183
190
|
TokenKind_1.TokenKind.Else,
|
|
@@ -189,20 +196,20 @@ describe('lexer', () => {
|
|
|
189
196
|
});
|
|
190
197
|
it('gives the `as` keyword its own TokenKind', () => {
|
|
191
198
|
let { tokens } = Lexer_1.Lexer.scan('as');
|
|
192
|
-
(0,
|
|
199
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.As, TokenKind_1.TokenKind.Eof]);
|
|
193
200
|
});
|
|
194
201
|
it('gives the `stop` keyword its own TokenKind', () => {
|
|
195
202
|
let { tokens } = Lexer_1.Lexer.scan('stop');
|
|
196
|
-
(0,
|
|
203
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Stop, TokenKind_1.TokenKind.Eof]);
|
|
197
204
|
});
|
|
198
205
|
it('does not alias \'?\' to \'print\' - the parser will do that', () => {
|
|
199
206
|
let { tokens } = Lexer_1.Lexer.scan('?2');
|
|
200
|
-
(0,
|
|
207
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Question, TokenKind_1.TokenKind.IntegerLiteral, TokenKind_1.TokenKind.Eof]);
|
|
201
208
|
});
|
|
202
209
|
describe('comments', () => {
|
|
203
210
|
it('does not include carriage return character', () => {
|
|
204
211
|
let tokens = Lexer_1.Lexer.scan(`'someComment\r\nprint "hello"`).tokens;
|
|
205
|
-
(0,
|
|
212
|
+
(0, chai_config_spec_1.expect)(tokens[0].text).to.equal(`'someComment`);
|
|
206
213
|
});
|
|
207
214
|
it('includes the comment characters in the text', () => {
|
|
208
215
|
let text = Lexer_1.Lexer.scan(`
|
|
@@ -211,7 +218,7 @@ describe('lexer', () => {
|
|
|
211
218
|
`).tokens
|
|
212
219
|
.filter(x => ![TokenKind_1.TokenKind.Newline, TokenKind_1.TokenKind.Eof].includes(x.kind))
|
|
213
220
|
.map(x => x.text);
|
|
214
|
-
(0,
|
|
221
|
+
(0, chai_config_spec_1.expect)(text).to.eql([
|
|
215
222
|
`'comment`,
|
|
216
223
|
'REM some comment'
|
|
217
224
|
]);
|
|
@@ -225,7 +232,7 @@ describe('lexer', () => {
|
|
|
225
232
|
`, {
|
|
226
233
|
includeWhitespace: true
|
|
227
234
|
}).tokens.map(x => [...(0, Parser_spec_1.rangeToArray)(x.range), x.text]);
|
|
228
|
-
(0,
|
|
235
|
+
(0, chai_config_spec_1.expect)(tokens).to.eql([
|
|
229
236
|
[0, 0, 0, 1, '\n'],
|
|
230
237
|
[1, 0, 1, 16, ' '],
|
|
231
238
|
[1, 16, 1, 19, 'sub'],
|
|
@@ -260,14 +267,14 @@ describe('lexer', () => {
|
|
|
260
267
|
'comment
|
|
261
268
|
REM some comment
|
|
262
269
|
`).tokens.filter(x => ![TokenKind_1.TokenKind.Newline, TokenKind_1.TokenKind.Eof].includes(x.kind));
|
|
263
|
-
(0,
|
|
264
|
-
(0,
|
|
270
|
+
(0, chai_config_spec_1.expect)(tokens[0].range).to.eql(vscode_languageserver_1.Range.create(1, 16, 1, 24));
|
|
271
|
+
(0, chai_config_spec_1.expect)(tokens[1].range).to.eql(vscode_languageserver_1.Range.create(2, 16, 2, 32));
|
|
265
272
|
});
|
|
266
273
|
it('finds correct location for newlines', () => {
|
|
267
274
|
let tokens = Lexer_1.Lexer.scan('sub\nsub\r\nsub\n\n').tokens
|
|
268
275
|
//ignore the Eof token
|
|
269
276
|
.filter(x => x.kind !== TokenKind_1.TokenKind.Eof);
|
|
270
|
-
(0,
|
|
277
|
+
(0, chai_config_spec_1.expect)(tokens.map(x => x.range)).to.eql([
|
|
271
278
|
vscode_languageserver_1.Range.create(0, 0, 0, 3),
|
|
272
279
|
vscode_languageserver_1.Range.create(0, 3, 0, 4),
|
|
273
280
|
vscode_languageserver_1.Range.create(1, 0, 1, 3),
|
|
@@ -290,26 +297,26 @@ describe('lexer', () => {
|
|
|
290
297
|
end sub
|
|
291
298
|
`);
|
|
292
299
|
let comments = tokens.filter(x => x.kind === TokenKind_1.TokenKind.Comment);
|
|
293
|
-
(0,
|
|
294
|
-
(0,
|
|
300
|
+
(0, chai_config_spec_1.expect)(comments).to.be.lengthOf(1);
|
|
301
|
+
(0, chai_config_spec_1.expect)(comments[0].range).to.eql(vscode_languageserver_1.Range.create(8, 27, 8, 35));
|
|
295
302
|
});
|
|
296
303
|
it('ignores everything after `\'`', () => {
|
|
297
304
|
let { tokens } = Lexer_1.Lexer.scan('= \' (');
|
|
298
|
-
(0,
|
|
305
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Equal, TokenKind_1.TokenKind.Comment, TokenKind_1.TokenKind.Eof]);
|
|
299
306
|
});
|
|
300
307
|
it('ignores everything after `REM`', () => {
|
|
301
308
|
let { tokens } = Lexer_1.Lexer.scan('= REM (');
|
|
302
|
-
(0,
|
|
309
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Equal, TokenKind_1.TokenKind.Comment, TokenKind_1.TokenKind.Eof]);
|
|
303
310
|
});
|
|
304
311
|
it('ignores everything after `rem`', () => {
|
|
305
312
|
let { tokens } = Lexer_1.Lexer.scan('= rem (');
|
|
306
|
-
(0,
|
|
313
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Equal, TokenKind_1.TokenKind.Comment, TokenKind_1.TokenKind.Eof]);
|
|
307
314
|
});
|
|
308
315
|
}); // comments
|
|
309
316
|
describe('non-literals', () => {
|
|
310
317
|
it('reads parens & braces', () => {
|
|
311
318
|
let { tokens } = Lexer_1.Lexer.scan('(){}');
|
|
312
|
-
(0,
|
|
319
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
|
|
313
320
|
TokenKind_1.TokenKind.LeftParen,
|
|
314
321
|
TokenKind_1.TokenKind.RightParen,
|
|
315
322
|
TokenKind_1.TokenKind.LeftCurlyBrace,
|
|
@@ -319,7 +326,7 @@ describe('lexer', () => {
|
|
|
319
326
|
});
|
|
320
327
|
it('reads operators', () => {
|
|
321
328
|
let { tokens } = Lexer_1.Lexer.scan('^ - + * MOD / \\ -- ++');
|
|
322
|
-
(0,
|
|
329
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
|
|
323
330
|
TokenKind_1.TokenKind.Caret,
|
|
324
331
|
TokenKind_1.TokenKind.Minus,
|
|
325
332
|
TokenKind_1.TokenKind.Plus,
|
|
@@ -334,7 +341,7 @@ describe('lexer', () => {
|
|
|
334
341
|
});
|
|
335
342
|
it('reads bitshift operators', () => {
|
|
336
343
|
let { tokens } = Lexer_1.Lexer.scan('<< >> <<');
|
|
337
|
-
(0,
|
|
344
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
|
|
338
345
|
TokenKind_1.TokenKind.LeftShift,
|
|
339
346
|
TokenKind_1.TokenKind.RightShift,
|
|
340
347
|
TokenKind_1.TokenKind.LeftShift,
|
|
@@ -343,7 +350,7 @@ describe('lexer', () => {
|
|
|
343
350
|
});
|
|
344
351
|
it('reads bitshift assignment operators', () => {
|
|
345
352
|
let { tokens } = Lexer_1.Lexer.scan('<<= >>=');
|
|
346
|
-
(0,
|
|
353
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
|
|
347
354
|
TokenKind_1.TokenKind.LeftShiftEqual,
|
|
348
355
|
TokenKind_1.TokenKind.RightShiftEqual,
|
|
349
356
|
TokenKind_1.TokenKind.Eof
|
|
@@ -351,7 +358,7 @@ describe('lexer', () => {
|
|
|
351
358
|
});
|
|
352
359
|
it('reads comparators', () => {
|
|
353
360
|
let { tokens } = Lexer_1.Lexer.scan('< <= > >= = <>');
|
|
354
|
-
(0,
|
|
361
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
|
|
355
362
|
TokenKind_1.TokenKind.Less,
|
|
356
363
|
TokenKind_1.TokenKind.LessEqual,
|
|
357
364
|
TokenKind_1.TokenKind.Greater,
|
|
@@ -365,23 +372,23 @@ describe('lexer', () => {
|
|
|
365
372
|
describe('string literals', () => {
|
|
366
373
|
it('produces string literal tokens', () => {
|
|
367
374
|
let { tokens } = Lexer_1.Lexer.scan(`"hello world"`);
|
|
368
|
-
(0,
|
|
375
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.StringLiteral, TokenKind_1.TokenKind.Eof]);
|
|
369
376
|
});
|
|
370
377
|
it(`safely escapes " literals`, () => {
|
|
371
378
|
let { tokens } = Lexer_1.Lexer.scan(`"the cat says ""meow"""`);
|
|
372
|
-
(0,
|
|
379
|
+
(0, chai_config_spec_1.expect)(tokens[0].kind).to.equal(TokenKind_1.TokenKind.StringLiteral);
|
|
373
380
|
});
|
|
374
381
|
it('captures text to end of line for unterminated strings with LF', () => {
|
|
375
382
|
let { tokens } = Lexer_1.Lexer.scan(`"unterminated!\n`);
|
|
376
|
-
(0,
|
|
383
|
+
(0, chai_config_spec_1.expect)(tokens[0].kind).to.eql(TokenKind_1.TokenKind.StringLiteral);
|
|
377
384
|
});
|
|
378
385
|
it('captures text to end of line for unterminated strings with CRLF', () => {
|
|
379
386
|
let { tokens } = Lexer_1.Lexer.scan(`"unterminated!\r\n`);
|
|
380
|
-
(0,
|
|
387
|
+
(0, chai_config_spec_1.expect)(tokens[0].text).to.equal('"unterminated!');
|
|
381
388
|
});
|
|
382
389
|
it('disallows multiline strings', () => {
|
|
383
390
|
let { diagnostics } = Lexer_1.Lexer.scan(`"multi-line\n\n`);
|
|
384
|
-
(0,
|
|
391
|
+
(0, chai_config_spec_1.expect)(diagnostics.map(err => err.message)).to.deep.equal([
|
|
385
392
|
'Unterminated string at end of line'
|
|
386
393
|
]);
|
|
387
394
|
});
|
|
@@ -390,7 +397,7 @@ describe('lexer', () => {
|
|
|
390
397
|
describe('template string literals', () => {
|
|
391
398
|
it('supports escaped chars', () => {
|
|
392
399
|
let { tokens } = Lexer_1.Lexer.scan('`\\n\\`\\r\\n`');
|
|
393
|
-
(0,
|
|
400
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
|
|
394
401
|
TokenKind_1.TokenKind.BackTick,
|
|
395
402
|
TokenKind_1.TokenKind.TemplateStringQuasi,
|
|
396
403
|
TokenKind_1.TokenKind.EscapedCharCodeLiteral,
|
|
@@ -404,7 +411,7 @@ describe('lexer', () => {
|
|
|
404
411
|
TokenKind_1.TokenKind.BackTick,
|
|
405
412
|
TokenKind_1.TokenKind.Eof
|
|
406
413
|
]);
|
|
407
|
-
(0,
|
|
414
|
+
(0, chai_config_spec_1.expect)(tokens.map(x => x.charCode).filter(x => !!x)).to.eql([
|
|
408
415
|
10,
|
|
409
416
|
96,
|
|
410
417
|
13,
|
|
@@ -413,7 +420,7 @@ describe('lexer', () => {
|
|
|
413
420
|
});
|
|
414
421
|
it('prevents expressions when escaping the dollar sign', () => {
|
|
415
422
|
let { tokens } = Lexer_1.Lexer.scan('`\\${just text}`');
|
|
416
|
-
(0,
|
|
423
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
|
|
417
424
|
TokenKind_1.TokenKind.BackTick,
|
|
418
425
|
TokenKind_1.TokenKind.TemplateStringQuasi,
|
|
419
426
|
TokenKind_1.TokenKind.EscapedCharCodeLiteral,
|
|
@@ -424,7 +431,7 @@ describe('lexer', () => {
|
|
|
424
431
|
});
|
|
425
432
|
it('supports escaping unicode char codes', () => {
|
|
426
433
|
let { tokens } = Lexer_1.Lexer.scan('`\\c1\\c12\\c123`');
|
|
427
|
-
(0,
|
|
434
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
|
|
428
435
|
TokenKind_1.TokenKind.BackTick,
|
|
429
436
|
TokenKind_1.TokenKind.TemplateStringQuasi,
|
|
430
437
|
TokenKind_1.TokenKind.EscapedCharCodeLiteral,
|
|
@@ -436,7 +443,7 @@ describe('lexer', () => {
|
|
|
436
443
|
TokenKind_1.TokenKind.BackTick,
|
|
437
444
|
TokenKind_1.TokenKind.Eof
|
|
438
445
|
]);
|
|
439
|
-
(0,
|
|
446
|
+
(0, chai_config_spec_1.expect)(tokens.map(x => x.charCode).filter(x => !!x)).to.eql([
|
|
440
447
|
1,
|
|
441
448
|
12,
|
|
442
449
|
123
|
|
@@ -444,7 +451,7 @@ describe('lexer', () => {
|
|
|
444
451
|
});
|
|
445
452
|
it('converts doublequote to EscapedCharCodeLiteral', () => {
|
|
446
453
|
let { tokens } = Lexer_1.Lexer.scan('`"`');
|
|
447
|
-
(0,
|
|
454
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
|
|
448
455
|
TokenKind_1.TokenKind.BackTick,
|
|
449
456
|
TokenKind_1.TokenKind.TemplateStringQuasi,
|
|
450
457
|
TokenKind_1.TokenKind.EscapedCharCodeLiteral,
|
|
@@ -452,11 +459,11 @@ describe('lexer', () => {
|
|
|
452
459
|
TokenKind_1.TokenKind.BackTick,
|
|
453
460
|
TokenKind_1.TokenKind.Eof
|
|
454
461
|
]);
|
|
455
|
-
(0,
|
|
462
|
+
(0, chai_config_spec_1.expect)(tokens[2].charCode).to.equal(34);
|
|
456
463
|
});
|
|
457
464
|
it(`safely escapes \` literals`, () => {
|
|
458
465
|
let { tokens } = Lexer_1.Lexer.scan('`the cat says \\`meow\\` a lot`');
|
|
459
|
-
(0,
|
|
466
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
|
|
460
467
|
TokenKind_1.TokenKind.BackTick,
|
|
461
468
|
TokenKind_1.TokenKind.TemplateStringQuasi,
|
|
462
469
|
TokenKind_1.TokenKind.EscapedCharCodeLiteral,
|
|
@@ -466,7 +473,7 @@ describe('lexer', () => {
|
|
|
466
473
|
TokenKind_1.TokenKind.BackTick,
|
|
467
474
|
TokenKind_1.TokenKind.Eof
|
|
468
475
|
]);
|
|
469
|
-
(0,
|
|
476
|
+
(0, chai_config_spec_1.expect)(tokens.map(x => x.text)).to.eql([
|
|
470
477
|
'`',
|
|
471
478
|
'the cat says ',
|
|
472
479
|
'\\`',
|
|
@@ -479,17 +486,17 @@ describe('lexer', () => {
|
|
|
479
486
|
});
|
|
480
487
|
it('produces template string literal tokens', () => {
|
|
481
488
|
let { tokens } = Lexer_1.Lexer.scan('`hello world`');
|
|
482
|
-
(0,
|
|
489
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
|
|
483
490
|
TokenKind_1.TokenKind.BackTick,
|
|
484
491
|
TokenKind_1.TokenKind.TemplateStringQuasi,
|
|
485
492
|
TokenKind_1.TokenKind.BackTick,
|
|
486
493
|
TokenKind_1.TokenKind.Eof
|
|
487
494
|
]);
|
|
488
|
-
(0,
|
|
495
|
+
(0, chai_config_spec_1.expect)(tokens[1].text).to.deep.equal('hello world');
|
|
489
496
|
});
|
|
490
497
|
it('collects quasis outside and expressions inside of template strings', () => {
|
|
491
498
|
let { tokens } = Lexer_1.Lexer.scan('`hello ${"world"}!`');
|
|
492
|
-
(0,
|
|
499
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
|
|
493
500
|
TokenKind_1.TokenKind.BackTick,
|
|
494
501
|
TokenKind_1.TokenKind.TemplateStringQuasi,
|
|
495
502
|
TokenKind_1.TokenKind.TemplateStringExpressionBegin,
|
|
@@ -499,7 +506,7 @@ describe('lexer', () => {
|
|
|
499
506
|
TokenKind_1.TokenKind.BackTick,
|
|
500
507
|
TokenKind_1.TokenKind.Eof
|
|
501
508
|
]);
|
|
502
|
-
(0,
|
|
509
|
+
(0, chai_config_spec_1.expect)(tokens[1].text).to.deep.equal(`hello `);
|
|
503
510
|
});
|
|
504
511
|
it('real example, which is causing issues in the formatter', () => {
|
|
505
512
|
let { tokens } = Lexer_1.Lexer.scan(`
|
|
@@ -518,7 +525,7 @@ describe('lexer', () => {
|
|
|
518
525
|
\`
|
|
519
526
|
end function
|
|
520
527
|
`);
|
|
521
|
-
(0,
|
|
528
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
|
|
522
529
|
TokenKind_1.TokenKind.Newline,
|
|
523
530
|
TokenKind_1.TokenKind.Function,
|
|
524
531
|
TokenKind_1.TokenKind.Identifier,
|
|
@@ -596,7 +603,7 @@ describe('lexer', () => {
|
|
|
596
603
|
});
|
|
597
604
|
it('complicated example', () => {
|
|
598
605
|
let { tokens } = Lexer_1.Lexer.scan('`hello ${"world"}!I am a ${"template" + "string"} and I am very ${["pleased"][0]} to meet you ${m.top.getChildCount()}.The end`');
|
|
599
|
-
(0,
|
|
606
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.eql([
|
|
600
607
|
TokenKind_1.TokenKind.BackTick,
|
|
601
608
|
TokenKind_1.TokenKind.TemplateStringQuasi,
|
|
602
609
|
TokenKind_1.TokenKind.TemplateStringExpressionBegin,
|
|
@@ -634,7 +641,7 @@ describe('lexer', () => {
|
|
|
634
641
|
});
|
|
635
642
|
it('allows multiline strings', () => {
|
|
636
643
|
let { tokens } = Lexer_1.Lexer.scan('`multi-line\n\n`');
|
|
637
|
-
(0,
|
|
644
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
|
|
638
645
|
TokenKind_1.TokenKind.BackTick,
|
|
639
646
|
TokenKind_1.TokenKind.TemplateStringQuasi,
|
|
640
647
|
TokenKind_1.TokenKind.EscapedCharCodeLiteral,
|
|
@@ -644,7 +651,7 @@ describe('lexer', () => {
|
|
|
644
651
|
TokenKind_1.TokenKind.BackTick,
|
|
645
652
|
TokenKind_1.TokenKind.Eof
|
|
646
653
|
]);
|
|
647
|
-
(0,
|
|
654
|
+
(0, chai_config_spec_1.expect)(tokens.map(x => x.text)).to.eql([
|
|
648
655
|
'`',
|
|
649
656
|
'multi-line',
|
|
650
657
|
'\n',
|
|
@@ -657,7 +664,7 @@ describe('lexer', () => {
|
|
|
657
664
|
});
|
|
658
665
|
it('maintains proper line/column locations for multiline strings', () => {
|
|
659
666
|
let { tokens } = Lexer_1.Lexer.scan('123 `multi\nline\r\nstrings` true\nfalse');
|
|
660
|
-
(0,
|
|
667
|
+
(0, chai_config_spec_1.expect)(tokens.map(x => {
|
|
661
668
|
return {
|
|
662
669
|
range: x.range,
|
|
663
670
|
kind: x.kind
|
|
@@ -680,7 +687,7 @@ describe('lexer', () => {
|
|
|
680
687
|
});
|
|
681
688
|
it('Example that tripped up the expression tests', () => {
|
|
682
689
|
let { tokens } = Lexer_1.Lexer.scan('`I am a complex example\n${a.isRunning(["a","b","c"])}\nmore ${m.finish(true)}`');
|
|
683
|
-
(0,
|
|
690
|
+
(0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
|
|
684
691
|
TokenKind_1.TokenKind.BackTick,
|
|
685
692
|
TokenKind_1.TokenKind.TemplateStringQuasi,
|
|
686
693
|
TokenKind_1.TokenKind.EscapedCharCodeLiteral,
|
|
@@ -719,111 +726,111 @@ describe('lexer', () => {
|
|
|
719
726
|
describe('double literals', () => {
|
|
720
727
|
it('respects \'#\' suffix', () => {
|
|
721
728
|
let d = Lexer_1.Lexer.scan('123#').tokens[0];
|
|
722
|
-
(0,
|
|
723
|
-
(0,
|
|
729
|
+
(0, chai_config_spec_1.expect)(d.kind).to.equal(TokenKind_1.TokenKind.DoubleLiteral);
|
|
730
|
+
(0, chai_config_spec_1.expect)(d.text).to.eql('123#');
|
|
724
731
|
});
|
|
725
732
|
it('forces literals >= 10 digits into doubles', () => {
|
|
726
733
|
let d = Lexer_1.Lexer.scan('0000000005').tokens[0];
|
|
727
|
-
(0,
|
|
728
|
-
(0,
|
|
734
|
+
(0, chai_config_spec_1.expect)(d.kind).to.equal(TokenKind_1.TokenKind.DoubleLiteral);
|
|
735
|
+
(0, chai_config_spec_1.expect)(d.text).to.eql('0000000005');
|
|
729
736
|
});
|
|
730
737
|
it('forces literals with \'D\' in exponent into doubles', () => {
|
|
731
738
|
let d = Lexer_1.Lexer.scan('2.5d3').tokens[0];
|
|
732
|
-
(0,
|
|
733
|
-
(0,
|
|
739
|
+
(0, chai_config_spec_1.expect)(d.kind).to.equal(TokenKind_1.TokenKind.DoubleLiteral);
|
|
740
|
+
(0, chai_config_spec_1.expect)(d.text).to.eql('2.5d3');
|
|
734
741
|
});
|
|
735
742
|
it('allows digits before `.` to be elided', () => {
|
|
736
743
|
let f = Lexer_1.Lexer.scan('.123#').tokens[0];
|
|
737
|
-
(0,
|
|
738
|
-
(0,
|
|
744
|
+
(0, chai_config_spec_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.DoubleLiteral);
|
|
745
|
+
(0, chai_config_spec_1.expect)(f.text).to.eql('.123#');
|
|
739
746
|
});
|
|
740
747
|
it('allows digits after `.` to be elided', () => {
|
|
741
748
|
let f = Lexer_1.Lexer.scan('12.#').tokens[0];
|
|
742
|
-
(0,
|
|
743
|
-
(0,
|
|
749
|
+
(0, chai_config_spec_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.DoubleLiteral);
|
|
750
|
+
(0, chai_config_spec_1.expect)(f.text).to.eql('12.#');
|
|
744
751
|
});
|
|
745
752
|
});
|
|
746
753
|
describe('float literals', () => {
|
|
747
754
|
it('respects \'!\' suffix', () => {
|
|
748
755
|
let f = Lexer_1.Lexer.scan('0.00000008!').tokens[0];
|
|
749
|
-
(0,
|
|
756
|
+
(0, chai_config_spec_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
|
|
750
757
|
// Floating precision will make this *not* equal
|
|
751
|
-
(0,
|
|
752
|
-
(0,
|
|
758
|
+
(0, chai_config_spec_1.expect)(f.text).not.to.equal(8e-8);
|
|
759
|
+
(0, chai_config_spec_1.expect)(f.text).to.eql('0.00000008!');
|
|
753
760
|
});
|
|
754
761
|
it('forces literals with a decimal into floats', () => {
|
|
755
762
|
let f = Lexer_1.Lexer.scan('1.0').tokens[0];
|
|
756
|
-
(0,
|
|
757
|
-
(0,
|
|
763
|
+
(0, chai_config_spec_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
|
|
764
|
+
(0, chai_config_spec_1.expect)(f.text).to.equal('1.0');
|
|
758
765
|
});
|
|
759
766
|
it('forces literals with \'E\' in exponent into floats', () => {
|
|
760
767
|
let f = Lexer_1.Lexer.scan('2.5e3').tokens[0];
|
|
761
|
-
(0,
|
|
762
|
-
(0,
|
|
768
|
+
(0, chai_config_spec_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
|
|
769
|
+
(0, chai_config_spec_1.expect)(f.text).to.eql('2.5e3');
|
|
763
770
|
  });
  it('supports larger-than-supported-precision floats to be defined with exponents', () => {
  let f = Lexer_1.Lexer.scan('2.3659475627512424e-38').tokens[0];
- (0,
- (0,
+ (0, chai_config_spec_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
+ (0, chai_config_spec_1.expect)(f.text).to.eql('2.3659475627512424e-38');
  });
  it('allows digits before `.` to be elided', () => {
  let f = Lexer_1.Lexer.scan('.123').tokens[0];
- (0,
- (0,
+ (0, chai_config_spec_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
+ (0, chai_config_spec_1.expect)(f.text).to.equal('.123');
  });
  it('allows digits after `.` to be elided', () => {
  let f = Lexer_1.Lexer.scan('12.').tokens[0];
- (0,
- (0,
+ (0, chai_config_spec_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
+ (0, chai_config_spec_1.expect)(f.text).to.equal('12.');
  });
  });
  describe('long integer literals', () => {
  it('respects \'&\' suffix', () => {
  let f = Lexer_1.Lexer.scan('1&').tokens[0];
- (0,
- (0,
+ (0, chai_config_spec_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.LongIntegerLiteral);
+ (0, chai_config_spec_1.expect)(f.text).to.eql('1&');
  });
  it('supports hexadecimal literals', () => {
  let i = Lexer_1.Lexer.scan('&hf00d&').tokens[0];
- (0,
- (0,
+ (0, chai_config_spec_1.expect)(i.kind).to.equal(TokenKind_1.TokenKind.LongIntegerLiteral);
+ (0, chai_config_spec_1.expect)(i.text).to.equal('&hf00d&');
  });
  it('allows very long Int64 literals', () => {
  let li = Lexer_1.Lexer.scan('9876543210&').tokens[0];
- (0,
- (0,
+ (0, chai_config_spec_1.expect)(li.kind).to.equal(TokenKind_1.TokenKind.LongIntegerLiteral);
+ (0, chai_config_spec_1.expect)(li.text).to.equal('9876543210&');
  });
  it('forces literals with \'&\' suffix into Int64s', () => {
  let li = Lexer_1.Lexer.scan('123&').tokens[0];
- (0,
- (0,
+ (0, chai_config_spec_1.expect)(li.kind).to.equal(TokenKind_1.TokenKind.LongIntegerLiteral);
+ (0, chai_config_spec_1.expect)(li.text).to.deep.equal('123&');
  });
  });
  describe('integer literals', () => {
  it('respects \'%\' suffix', () => {
  let f = Lexer_1.Lexer.scan('1%').tokens[0];
- (0,
- (0,
+ (0, chai_config_spec_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.IntegerLiteral);
+ (0, chai_config_spec_1.expect)(f.text).to.eql('1%');
  });
  it('does not allow decimal numbers to end with %', () => {
  let f = Lexer_1.Lexer.scan('1.2%').tokens[0];
- (0,
- (0,
+ (0, chai_config_spec_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
+ (0, chai_config_spec_1.expect)(f.text).to.eql('1.2');
  });
  it('supports hexadecimal literals', () => {
  let i = Lexer_1.Lexer.scan('&hFf').tokens[0];
- (0,
- (0,
+ (0, chai_config_spec_1.expect)(i.kind).to.equal(TokenKind_1.TokenKind.IntegerLiteral);
+ (0, chai_config_spec_1.expect)(i.text).to.deep.equal('&hFf');
  });
  it('falls back to a regular integer', () => {
  let i = Lexer_1.Lexer.scan('123').tokens[0];
- (0,
- (0,
+ (0, chai_config_spec_1.expect)(i.kind).to.equal(TokenKind_1.TokenKind.IntegerLiteral);
+ (0, chai_config_spec_1.expect)(i.text).to.deep.equal('123');
  });
  });
  describe('types', () => {
  it('captures type tokens', () => {
- (0,
+ (0, chai_config_spec_1.expect)(Lexer_1.Lexer.scan(`
  void boolean integer longinteger float double string object interface invalid dynamic
  `.trim()).tokens.map(x => x.kind)).to.eql([
  TokenKind_1.TokenKind.Void,
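
Every assertion in these hunks follows the same pattern: `Lexer.scan(source)` returns an object whose `tokens` array can be inspected for `kind` (a `TokenKind`) and `text`. A minimal consumer-side sketch of that usage, assuming the `Lexer` and `TokenKind` exports referenced by the compiled spec are also reachable from the package root:

    import { Lexer, TokenKind } from 'brighterscript';

    // Scan a long-integer literal; the lexer appends an Eof token at the end.
    const { tokens } = Lexer.scan('123&');
    const first = tokens[0];
    console.log(first.kind === TokenKind.LongIntegerLiteral); // true, per the spec above
    console.log(first.text); // '123&'
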
@@ -846,7 +853,7 @@ describe('lexer', () => {
  // test just a sample of single-word reserved words for now.
  // if we find any that we've missed
  let { tokens } = Lexer_1.Lexer.scan('and then or if else endif return true false line_num');
- (0,
+ (0, chai_config_spec_1.expect)(tokens.map(w => w.kind)).to.deep.equal([
  TokenKind_1.TokenKind.And,
  TokenKind_1.TokenKind.Then,
  TokenKind_1.TokenKind.Or,
@@ -862,7 +869,7 @@ describe('lexer', () => {
  });
  it('matches multi-word keywords', () => {
  let { tokens } = Lexer_1.Lexer.scan('end if end while End Sub end Function Exit wHILe');
- (0,
+ (0, chai_config_spec_1.expect)(tokens.map(w => w.kind)).to.deep.equal([
  TokenKind_1.TokenKind.EndIf,
  TokenKind_1.TokenKind.EndWhile,
  TokenKind_1.TokenKind.EndSub,
@@ -873,7 +880,7 @@ describe('lexer', () => {
  });
  it('accepts \'exit for\' but not \'exitfor\'', () => {
  let { tokens } = Lexer_1.Lexer.scan('exit for exitfor');
- (0,
+ (0, chai_config_spec_1.expect)(tokens.map(w => w.kind)).to.deep.equal([
  TokenKind_1.TokenKind.ExitFor,
  TokenKind_1.TokenKind.Identifier,
  TokenKind_1.TokenKind.Eof
@@ -881,7 +888,7 @@ describe('lexer', () => {
  });
  it('matches keywords with silly capitalization', () => {
  let { tokens } = Lexer_1.Lexer.scan('iF ELSE eNDIf FUncTioN');
- (0,
+ (0, chai_config_spec_1.expect)(tokens.map(w => w.kind)).to.deep.equal([
  TokenKind_1.TokenKind.If,
  TokenKind_1.TokenKind.Else,
  TokenKind_1.TokenKind.EndIf,
@@ -891,14 +898,14 @@ describe('lexer', () => {
  });
  it('allows alpha-numeric (plus \'_\') identifiers', () => {
  let identifier = Lexer_1.Lexer.scan('_abc_123_').tokens[0];
- (0,
- (0,
+ (0, chai_config_spec_1.expect)(identifier.kind).to.equal(TokenKind_1.TokenKind.Identifier);
+ (0, chai_config_spec_1.expect)(identifier.text).to.equal('_abc_123_');
  });
  it('allows identifiers with trailing type designators', () => {
  let { tokens } = Lexer_1.Lexer.scan('lorem$ ipsum% dolor! sit# amet&');
  let identifiers = tokens.filter(t => t.kind !== TokenKind_1.TokenKind.Eof);
- (0,
- (0,
+ (0, chai_config_spec_1.expect)(identifiers.every(t => t.kind === TokenKind_1.TokenKind.Identifier));
+ (0, chai_config_spec_1.expect)(identifiers.map(t => t.text)).to.deep.equal([
  'lorem$',
  'ipsum%',
  'dolor!',
@@ -910,7 +917,7 @@ describe('lexer', () => {
  describe('conditional compilation', () => {
  it('reads constant declarations', () => {
  let { tokens } = Lexer_1.Lexer.scan('#const foo true');
- (0,
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
  TokenKind_1.TokenKind.HashConst,
  TokenKind_1.TokenKind.Identifier,
  TokenKind_1.TokenKind.True,
@@ -919,7 +926,7 @@ describe('lexer', () => {
  });
  it('reads constant aliases', () => {
  let { tokens } = Lexer_1.Lexer.scan('#const bar foo');
- (0,
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
  TokenKind_1.TokenKind.HashConst,
  TokenKind_1.TokenKind.Identifier,
  TokenKind_1.TokenKind.Identifier,
@@ -937,7 +944,7 @@ describe('lexer', () => {
  `, {
  includeWhitespace: false
  });
- (0,
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind).filter(x => x !== TokenKind_1.TokenKind.Newline)).to.deep.equal([
  TokenKind_1.TokenKind.HashIf,
  TokenKind_1.TokenKind.HashElseIf,
  TokenKind_1.TokenKind.HashElseIf,
@@ -949,7 +956,7 @@ describe('lexer', () => {
  });
  it('treats text "constructor" as an identifier', () => {
  let lexer = Lexer_1.Lexer.scan(`function constructor()\nend function`);
- (0,
+ (0, chai_config_spec_1.expect)(lexer.tokens[1].kind).to.equal(TokenKind_1.TokenKind.Identifier);
  });
  it('reads upper case conditional directives', () => {
  let { tokens } = Lexer_1.Lexer.scan(`
@@ -962,7 +969,7 @@ describe('lexer', () => {
  `, {
  includeWhitespace: false
  });
- (0,
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind).filter(x => x !== TokenKind_1.TokenKind.Newline)).to.deep.equal([
  TokenKind_1.TokenKind.HashIf,
  TokenKind_1.TokenKind.HashElseIf,
  TokenKind_1.TokenKind.HashElseIf,
@@ -974,7 +981,7 @@ describe('lexer', () => {
  });
  it('supports various spacings between #endif', () => {
  let { tokens } = Lexer_1.Lexer.scan('#endif #end if #end\tif #end if #end\t\t if');
- (0,
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
  TokenKind_1.TokenKind.HashEndIf,
  TokenKind_1.TokenKind.HashEndIf,
  TokenKind_1.TokenKind.HashEndIf,
@@ -987,20 +994,20 @@ describe('lexer', () => {
  let { tokens } = Lexer_1.Lexer.scan('#error a message goes here\n', {
  includeWhitespace: true
  });
- (0,
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
  TokenKind_1.TokenKind.HashError,
  TokenKind_1.TokenKind.Whitespace,
  TokenKind_1.TokenKind.HashErrorMessage,
  TokenKind_1.TokenKind.Newline,
  TokenKind_1.TokenKind.Eof
  ]);
- (0,
+ (0, chai_config_spec_1.expect)(tokens[2].text).to.equal('a message goes here');
  });
  });
  describe('location tracking', () => {
  it('tracks starting and ending locations including whitespace', () => {
  let { tokens } = Lexer_1.Lexer.scan(`sub foo()\n print "bar"\r\nend sub`, { includeWhitespace: true });
- (0,
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.range)).to.eql([
  vscode_languageserver_1.Range.create(0, 0, 0, 3),
  vscode_languageserver_1.Range.create(0, 3, 0, 4),
  vscode_languageserver_1.Range.create(0, 4, 0, 7),
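
The location-tracking and `rem` tests in the hunks below compare each token's `range` against `vscode-languageserver` `Range` values, i.e. zero-based line/character spans. A small sketch of reading those positions, on the same assumption that `Lexer` is importable from the package root and that tokens expose the `range` property used in the spec:

    import { Lexer } from 'brighterscript';

    // Print the start/end position of every token (line and character are zero-based).
    const { tokens } = Lexer.scan('sub foo()\nend sub');
    for (const token of tokens) {
        const { start, end } = token.range;
        console.log(`${token.text || '<eof>'} -> ${start.line}:${start.character}-${end.line}:${end.character}`);
    }
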
@@ -1018,7 +1025,7 @@ describe('lexer', () => {
  });
  it('tracks starting and ending locations excluding whitespace', () => {
  let { tokens } = Lexer_1.Lexer.scan(`sub foo()\n print "bar"\r\nend sub`, { includeWhitespace: false });
- (0,
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.range)).to.eql([
  vscode_languageserver_1.Range.create(0, 0, 0, 3),
  vscode_languageserver_1.Range.create(0, 4, 0, 7),
  vscode_languageserver_1.Range.create(0, 7, 0, 8),
@@ -1035,7 +1042,7 @@ describe('lexer', () => {
  describe('two word keywords', () => {
  it('supports various spacing between for each', () => {
  let { tokens } = Lexer_1.Lexer.scan('for each for each for each for\teach for\t each for \teach for \t each');
- (0,
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
  TokenKind_1.TokenKind.ForEach,
  TokenKind_1.TokenKind.ForEach,
  TokenKind_1.TokenKind.ForEach,
@@ -1049,7 +1056,7 @@ describe('lexer', () => {
  });
  it('detects rem when used as keyword', () => {
  let { tokens } = Lexer_1.Lexer.scan('person.rem=true');
- (0,
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.eql([
  TokenKind_1.TokenKind.Identifier,
  TokenKind_1.TokenKind.Dot,
  TokenKind_1.TokenKind.Identifier,
@@ -1058,7 +1065,7 @@ describe('lexer', () => {
  TokenKind_1.TokenKind.Eof
  ]);
  //verify the location of `rem`
- (0,
+ (0, chai_config_spec_1.expect)(tokens.map(t => [t.range.start.character, t.range.end.character])).to.eql([
  [0, 6],
  [6, 7],
  [7, 10],
@@ -1070,12 +1077,12 @@ describe('lexer', () => {
  describe('isToken', () => {
  it('works', () => {
  let range = vscode_languageserver_1.Range.create(0, 0, 0, 2);
- (0,
- (0,
+ (0, chai_config_spec_1.expect)((0, Token_1.isToken)({ kind: TokenKind_1.TokenKind.And, text: 'and', range: range })).is.true;
+ (0, chai_config_spec_1.expect)((0, Token_1.isToken)({ text: 'and', range: range })).is.false;
  });
  });
  it('recognizes enum-related keywords', () => {
- (0,
+ (0, chai_config_spec_1.expect)(Lexer_1.Lexer.scan('enum end enum endenum').tokens.map(x => x.kind)).to.eql([
  TokenKind_1.TokenKind.Enum,
  TokenKind_1.TokenKind.EndEnum,
  TokenKind_1.TokenKind.EndEnum,
@@ -1083,7 +1090,7 @@ describe('lexer', () => {
  ]);
  });
  it('recognizes class-related keywords', () => {
- (0,
+ (0, chai_config_spec_1.expect)(Lexer_1.Lexer.scan('class public protected private end class endclass new override').tokens.map(x => x.kind)).to.eql([
  TokenKind_1.TokenKind.Class,
  TokenKind_1.TokenKind.Public,
  TokenKind_1.TokenKind.Protected,
@@ -1098,14 +1105,14 @@ describe('lexer', () => {
  describe('whitespace', () => {
  it('preserves the exact number of whitespace characterswhitespace', () => {
  let { tokens } = Lexer_1.Lexer.scan(' ', { includeWhitespace: true });
- (0,
+ (0, chai_config_spec_1.expect)(tokens[0]).to.include({
  kind: TokenKind_1.TokenKind.Whitespace,
  text: ' '
  });
  });
  it('tokenizes whitespace between things', () => {
  let { tokens } = Lexer_1.Lexer.scan('sub main ( ) \n end sub', { includeWhitespace: true });
- (0,
+ (0, chai_config_spec_1.expect)(tokens.map(x => x.kind)).to.eql([
  TokenKind_1.TokenKind.Sub,
  TokenKind_1.TokenKind.Whitespace,
  TokenKind_1.TokenKind.Identifier,
@@ -1123,7 +1130,7 @@ describe('lexer', () => {
  });
  it('identifies brighterscript source literals', () => {
  let { tokens } = Lexer_1.Lexer.scan('LINE_NUM SOURCE_FILE_PATH SOURCE_LINE_NUM FUNCTION_NAME SOURCE_FUNCTION_NAME SOURCE_LOCATION PKG_PATH PKG_LOCATION');
- (0,
+ (0, chai_config_spec_1.expect)(tokens.map(x => x.kind)).to.eql([
  TokenKind_1.TokenKind.LineNumLiteral,
  TokenKind_1.TokenKind.SourceFilePathLiteral,
  TokenKind_1.TokenKind.SourceLineNumLiteral,
@@ -1144,11 +1151,11 @@ describe('lexer', () => {
  end sub
  `;
  const { tokens } = Lexer_1.Lexer.scan(text, { includeWhitespace: false });
- (0,
+ (0, chai_config_spec_1.expect)(util_1.default.tokensToString(tokens)).to.equal(text);
  });
  it('properly detects try/catch tokens', () => {
  const { tokens } = Lexer_1.Lexer.scan(`try catch endtry end try throw`, { includeWhitespace: false });
- (0,
+ (0, chai_config_spec_1.expect)(tokens.map(x => x.kind)).to.eql([
  TokenKind_1.TokenKind.Try,
  TokenKind_1.TokenKind.Catch,
  TokenKind_1.TokenKind.EndTry,
@@ -1165,7 +1172,7 @@ describe('lexer', () => {
  const { tokens } = Lexer_1.Lexer.scan(regexp);
  results.push(tokens[0].text);
  }
- (0,
+ (0, chai_config_spec_1.expect)(results).to.eql(regexps);
  }
  it('recognizes regex literals', () => {
  testRegex(/simple/, /SimpleWithValidFlags/g, /UnknownFlags/gi, /with spaces/s, /with(parens)and[squarebraces]/,
@@ -1178,7 +1185,7 @@ describe('lexer', () => {
  const { tokens } = Lexer_1.Lexer.scan(`one = 1/2 + 1/4 + 1/4`, {
  includeWhitespace: false
  });
- (0,
+ (0, chai_config_spec_1.expect)(tokens.map(x => x.kind)).to.eql([
  TokenKind_1.TokenKind.Identifier,
  TokenKind_1.TokenKind.Equal,
  TokenKind_1.TokenKind.IntegerLiteral,
@@ -1196,7 +1203,7 @@ describe('lexer', () => {
  ]);
  });
  it('only captures alphanumeric flags', () => {
- (0,
+ (0, chai_config_spec_1.expect)(Lexer_1.Lexer.scan('speak(/a/)').tokens.map(x => x.kind)).to.eql([
  TokenKind_1.TokenKind.Identifier,
  TokenKind_1.TokenKind.LeftParen,
  TokenKind_1.TokenKind.RegexLiteral,
@@ -1212,11 +1219,57 @@ describe('lexer', () => {
  /\\\n/);
  });
  });
+ it('detects "continue" as a keyword', () => {
+ (0, chai_config_spec_1.expect)(Lexer_1.Lexer.scan('continue').tokens.map(x => x.kind)).to.eql([
+ TokenKind_1.TokenKind.Continue,
+ TokenKind_1.TokenKind.Eof
+ ]);
+ });
+ describe('trivia', () => {
+ function stringify(tokens) {
+ return tokens
+ //exclude the explicit triva tokens since they'll be included in the leading/trailing arrays
+ .filter(x => !TokenKind_1.AllowedTriviaTokens.includes(x.kind))
+ .flatMap(x => [...x.leadingTrivia, x])
+ .map(x => x.text)
+ .join('');
+ }
+ it('combining token text and trivia can reproduce full input', () => {
+ const input = `
+ function test( )
+ 'comment
+ print alpha ' blabla
+ end function 'trailing
+ 'trailing2
+ `;
+ (0, chai_config_spec_1.expect)(stringify(Lexer_1.Lexer.scan(input).tokens)).to.eql(input);
+ });
+ function expectTrivia(text, expected) {
+ const tokens = Lexer_1.Lexer.scan(text).tokens.filter(x => !TokenKind_1.AllowedTriviaTokens.includes(x.kind));
+ (0, chai_config_spec_1.expect)(tokens.map(x => {
+ return {
+ text: x.text,
+ leadingTrivia: x.leadingTrivia.map(x => x.text)
+ };
+ })).to.eql(expected.map(x => (Object.assign({ leadingTrivia: [] }, x))));
+ }
+ it('associates trailing items on same line with the preceeding token', () => {
+ expectTrivia(`'leading\n` +
+ `alpha = true 'trueComment\n` +
+ `'eof`, [
+ { leadingTrivia: [`'leading`, `\n`], text: `alpha` },
+ { leadingTrivia: [` `], text: `=` },
+ { leadingTrivia: [` `], text: `true` },
+ //EOF
+ { leadingTrivia: [` `, `'trueComment`, `\n`, `'eof`], text: `` }
+ ]);
+ });
+ });
  });
  function expectKinds(text, tokenKinds) {
  let actual = Lexer_1.Lexer.scan(text).tokens.map(x => x.kind);
  //remove the EOF token
  actual.pop();
- (0,
+ (0, chai_config_spec_1.expect)(actual).to.eql(tokenKinds);
  }
  //# sourceMappingURL=Lexer.spec.js.map
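
The newly added trivia tests show that tokens in these alphas carry a `leadingTrivia` array of whitespace/comment tokens, and that interleaving each non-trivia token's leading trivia with its own text reproduces the original source exactly. A sketch of that round-trip, modeled on the spec's `stringify` helper; the import path for `AllowedTriviaTokens` is an assumption (the compiled spec pulls it from the TokenKind module, and it may not be re-exported from the package root):

    import { Lexer, AllowedTriviaTokens } from 'brighterscript'; // AllowedTriviaTokens export location assumed

    // Rebuild source text from non-trivia tokens plus their leading trivia.
    function reproduceSource(source: string): string {
        return Lexer.scan(source).tokens
            // drop standalone trivia tokens; their text is already carried in `leadingTrivia`
            .filter(token => !AllowedTriviaTokens.includes(token.kind))
            .flatMap(token => [...token.leadingTrivia, token])
            .map(token => token.text)
            .join('');
    }

    const input = 'sub main()\n    print 1 \'comment\nend sub\n';
    console.log(reproduceSource(input) === input); // expected true, per the round-trip test above
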
|