brighterscript 1.0.0-alpha.23 → 1.0.0-alpha.25

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (536)
  1. package/CHANGELOG.md +585 -218
  2. package/README.md +45 -139
  3. package/bsconfig.schema.json +41 -0
  4. package/dist/ActionPipeline.d.ts +10 -0
  5. package/dist/ActionPipeline.js +40 -0
  6. package/dist/ActionPipeline.js.map +1 -0
  7. package/dist/AstValidationSegmenter.d.ts +25 -0
  8. package/dist/AstValidationSegmenter.js +152 -0
  9. package/dist/AstValidationSegmenter.js.map +1 -0
  10. package/dist/BsConfig.d.ts +39 -4
  11. package/dist/BusyStatusTracker.d.ts +31 -0
  12. package/dist/BusyStatusTracker.js +83 -0
  13. package/dist/BusyStatusTracker.js.map +1 -0
  14. package/dist/Cache.js +3 -3
  15. package/dist/Cache.js.map +1 -1
  16. package/dist/CacheVerifier.d.ts +7 -0
  17. package/dist/CacheVerifier.js +20 -0
  18. package/dist/CacheVerifier.js.map +1 -0
  19. package/dist/CodeActionUtil.d.ts +3 -3
  20. package/dist/CodeActionUtil.js.map +1 -1
  21. package/dist/CommentFlagProcessor.d.ts +3 -2
  22. package/dist/CommentFlagProcessor.js +5 -4
  23. package/dist/CommentFlagProcessor.js.map +1 -1
  24. package/dist/DependencyGraph.d.ts +3 -2
  25. package/dist/DependencyGraph.js +11 -10
  26. package/dist/DependencyGraph.js.map +1 -1
  27. package/dist/DiagnosticCollection.js +9 -5
  28. package/dist/DiagnosticCollection.js.map +1 -1
  29. package/dist/DiagnosticFilterer.d.ts +1 -0
  30. package/dist/DiagnosticFilterer.js +5 -3
  31. package/dist/DiagnosticFilterer.js.map +1 -1
  32. package/dist/DiagnosticMessages.d.ts +79 -15
  33. package/dist/DiagnosticMessages.js +134 -21
  34. package/dist/DiagnosticMessages.js.map +1 -1
  35. package/dist/DiagnosticSeverityAdjuster.d.ts +7 -0
  36. package/dist/DiagnosticSeverityAdjuster.js +41 -0
  37. package/dist/DiagnosticSeverityAdjuster.js.map +1 -0
  38. package/dist/FunctionScope.d.ts +28 -0
  39. package/dist/FunctionScope.js +52 -0
  40. package/dist/FunctionScope.js.map +1 -0
  41. package/dist/KeyedThrottler.d.ts +3 -3
  42. package/dist/KeyedThrottler.js +3 -3
  43. package/dist/KeyedThrottler.js.map +1 -1
  44. package/dist/LanguageServer.d.ts +23 -11
  45. package/dist/LanguageServer.js +222 -87
  46. package/dist/LanguageServer.js.map +1 -1
  47. package/dist/Logger.d.ts +3 -2
  48. package/dist/Logger.js +11 -3
  49. package/dist/Logger.js.map +1 -1
  50. package/dist/PluginInterface.d.ts +21 -3
  51. package/dist/PluginInterface.js +74 -6
  52. package/dist/PluginInterface.js.map +1 -1
  53. package/dist/Program.d.ts +162 -81
  54. package/dist/Program.js +903 -732
  55. package/dist/Program.js.map +1 -1
  56. package/dist/ProgramBuilder.d.ts +22 -12
  57. package/dist/ProgramBuilder.js +132 -104
  58. package/dist/ProgramBuilder.js.map +1 -1
  59. package/dist/Scope.d.ts +95 -134
  60. package/dist/Scope.js +477 -551
  61. package/dist/Scope.js.map +1 -1
  62. package/dist/Stopwatch.js +1 -1
  63. package/dist/Stopwatch.js.map +1 -1
  64. package/dist/SymbolTable.d.ts +95 -29
  65. package/dist/SymbolTable.js +256 -102
  66. package/dist/SymbolTable.js.map +1 -1
  67. package/dist/Throttler.d.ts +12 -0
  68. package/dist/Throttler.js +39 -0
  69. package/dist/Throttler.js.map +1 -1
  70. package/dist/Watcher.d.ts +0 -3
  71. package/dist/Watcher.js +0 -3
  72. package/dist/Watcher.js.map +1 -1
  73. package/dist/XmlScope.d.ts +4 -6
  74. package/dist/XmlScope.js +74 -68
  75. package/dist/XmlScope.js.map +1 -1
  76. package/dist/astUtils/CachedLookups.d.ts +48 -0
  77. package/dist/astUtils/CachedLookups.js +323 -0
  78. package/dist/astUtils/CachedLookups.js.map +1 -0
  79. package/dist/astUtils/{AstEditor.d.ts → Editor.d.ts} +9 -5
  80. package/dist/astUtils/{AstEditor.js → Editor.js} +10 -4
  81. package/dist/astUtils/Editor.js.map +1 -0
  82. package/dist/astUtils/{AstEditor.spec.js → Editor.spec.js} +68 -64
  83. package/dist/astUtils/Editor.spec.js.map +1 -0
  84. package/dist/astUtils/creators.d.ts +10 -10
  85. package/dist/astUtils/creators.js +26 -16
  86. package/dist/astUtils/creators.js.map +1 -1
  87. package/dist/astUtils/creators.spec.js +5 -5
  88. package/dist/astUtils/creators.spec.js.map +1 -1
  89. package/dist/astUtils/reflection.d.ts +132 -100
  90. package/dist/astUtils/reflection.js +225 -166
  91. package/dist/astUtils/reflection.js.map +1 -1
  92. package/dist/astUtils/reflection.spec.js +208 -126
  93. package/dist/astUtils/reflection.spec.js.map +1 -1
  94. package/dist/astUtils/stackedVisitor.spec.js +12 -12
  95. package/dist/astUtils/stackedVisitor.spec.js.map +1 -1
  96. package/dist/astUtils/visitors.d.ts +54 -35
  97. package/dist/astUtils/visitors.js +29 -3
  98. package/dist/astUtils/visitors.js.map +1 -1
  99. package/dist/astUtils/visitors.spec.js +178 -33
  100. package/dist/astUtils/visitors.spec.js.map +1 -1
  101. package/dist/astUtils/xml.d.ts +9 -9
  102. package/dist/astUtils/xml.js +9 -9
  103. package/dist/astUtils/xml.js.map +1 -1
  104. package/dist/bscPlugin/BscPlugin.d.ts +12 -2
  105. package/dist/bscPlugin/BscPlugin.js +41 -3
  106. package/dist/bscPlugin/BscPlugin.js.map +1 -1
  107. package/dist/bscPlugin/CallExpressionInfo.d.ts +36 -0
  108. package/dist/bscPlugin/CallExpressionInfo.js +131 -0
  109. package/dist/bscPlugin/CallExpressionInfo.js.map +1 -0
  110. package/dist/bscPlugin/FileWriter.d.ts +6 -0
  111. package/dist/bscPlugin/FileWriter.js +24 -0
  112. package/dist/bscPlugin/FileWriter.js.map +1 -0
  113. package/dist/bscPlugin/SignatureHelpUtil.d.ts +10 -0
  114. package/dist/bscPlugin/SignatureHelpUtil.js +135 -0
  115. package/dist/bscPlugin/SignatureHelpUtil.js.map +1 -0
  116. package/dist/bscPlugin/codeActions/CodeActionsProcessor.d.ts +1 -1
  117. package/dist/bscPlugin/codeActions/CodeActionsProcessor.js +21 -12
  118. package/dist/bscPlugin/codeActions/CodeActionsProcessor.js.map +1 -1
  119. package/dist/bscPlugin/codeActions/CodeActionsProcessor.spec.js +86 -12
  120. package/dist/bscPlugin/codeActions/CodeActionsProcessor.spec.js.map +1 -1
  121. package/dist/bscPlugin/completions/CompletionsProcessor.d.ts +57 -0
  122. package/dist/bscPlugin/completions/CompletionsProcessor.js +544 -0
  123. package/dist/bscPlugin/completions/CompletionsProcessor.js.map +1 -0
  124. package/dist/bscPlugin/completions/CompletionsProcessor.spec.js +1909 -0
  125. package/dist/bscPlugin/completions/CompletionsProcessor.spec.js.map +1 -0
  126. package/dist/bscPlugin/fileProviders/FileProvider.d.ts +9 -0
  127. package/dist/bscPlugin/fileProviders/FileProvider.js +51 -0
  128. package/dist/bscPlugin/fileProviders/FileProvider.js.map +1 -0
  129. package/dist/bscPlugin/hover/HoverProcessor.d.ts +17 -0
  130. package/dist/bscPlugin/hover/HoverProcessor.js +188 -0
  131. package/dist/bscPlugin/hover/HoverProcessor.js.map +1 -0
  132. package/dist/bscPlugin/hover/HoverProcessor.spec.js +513 -0
  133. package/dist/bscPlugin/hover/HoverProcessor.spec.js.map +1 -0
  134. package/dist/bscPlugin/semanticTokens/BrsFileSemanticTokensProcessor.d.ts +3 -1
  135. package/dist/bscPlugin/semanticTokens/BrsFileSemanticTokensProcessor.js +102 -29
  136. package/dist/bscPlugin/semanticTokens/BrsFileSemanticTokensProcessor.js.map +1 -1
  137. package/dist/bscPlugin/semanticTokens/BrsFileSemanticTokensProcessor.spec.js +167 -6
  138. package/dist/bscPlugin/semanticTokens/BrsFileSemanticTokensProcessor.spec.js.map +1 -1
  139. package/dist/bscPlugin/serialize/BslibInjector.spec.d.ts +1 -0
  140. package/dist/bscPlugin/serialize/BslibInjector.spec.js +19 -0
  141. package/dist/bscPlugin/serialize/BslibInjector.spec.js.map +1 -0
  142. package/dist/bscPlugin/serialize/BslibManager.d.ts +9 -0
  143. package/dist/bscPlugin/serialize/BslibManager.js +40 -0
  144. package/dist/bscPlugin/serialize/BslibManager.js.map +1 -0
  145. package/dist/bscPlugin/serialize/FileSerializer.d.ts +9 -0
  146. package/dist/bscPlugin/serialize/FileSerializer.js +72 -0
  147. package/dist/bscPlugin/serialize/FileSerializer.js.map +1 -0
  148. package/dist/bscPlugin/transpile/BrsFileTranspileProcessor.d.ts +16 -0
  149. package/dist/bscPlugin/transpile/BrsFileTranspileProcessor.js +123 -0
  150. package/dist/bscPlugin/transpile/BrsFileTranspileProcessor.js.map +1 -0
  151. package/dist/bscPlugin/transpile/BrsFileTranspileProcessor.spec.d.ts +1 -0
  152. package/dist/bscPlugin/transpile/BrsFileTranspileProcessor.spec.js +41 -0
  153. package/dist/bscPlugin/transpile/BrsFileTranspileProcessor.spec.js.map +1 -0
  154. package/dist/bscPlugin/transpile/XmlFilePreTranspileProcessor.d.ts +12 -0
  155. package/dist/bscPlugin/transpile/XmlFilePreTranspileProcessor.js +99 -0
  156. package/dist/bscPlugin/transpile/XmlFilePreTranspileProcessor.js.map +1 -0
  157. package/dist/bscPlugin/validation/BrsFileValidator.d.ts +22 -1
  158. package/dist/bscPlugin/validation/BrsFileValidator.js +316 -29
  159. package/dist/bscPlugin/validation/BrsFileValidator.js.map +1 -1
  160. package/dist/bscPlugin/validation/BrsFileValidator.spec.d.ts +1 -0
  161. package/dist/bscPlugin/validation/BrsFileValidator.spec.js +264 -0
  162. package/dist/bscPlugin/validation/BrsFileValidator.spec.js.map +1 -0
  163. package/dist/bscPlugin/validation/ProgramValidator.d.ts +10 -0
  164. package/dist/bscPlugin/validation/ProgramValidator.js +32 -0
  165. package/dist/bscPlugin/validation/ProgramValidator.js.map +1 -0
  166. package/dist/bscPlugin/validation/ScopeValidator.d.ts +56 -8
  167. package/dist/bscPlugin/validation/ScopeValidator.js +514 -116
  168. package/dist/bscPlugin/validation/ScopeValidator.js.map +1 -1
  169. package/dist/bscPlugin/validation/ScopeValidator.spec.d.ts +1 -0
  170. package/dist/bscPlugin/validation/ScopeValidator.spec.js +2454 -0
  171. package/dist/bscPlugin/validation/ScopeValidator.spec.js.map +1 -0
  172. package/dist/bscPlugin/validation/XmlFileValidator.d.ts +8 -0
  173. package/dist/bscPlugin/validation/XmlFileValidator.js +44 -0
  174. package/dist/bscPlugin/validation/XmlFileValidator.js.map +1 -0
  175. package/dist/cli.js +107 -8
  176. package/dist/cli.js.map +1 -1
  177. package/dist/deferred.d.ts +3 -3
  178. package/dist/deferred.js.map +1 -1
  179. package/dist/diagnosticUtils.d.ts +8 -2
  180. package/dist/diagnosticUtils.js +47 -17
  181. package/dist/diagnosticUtils.js.map +1 -1
  182. package/dist/examples/plugins/removePrint.js +8 -10
  183. package/dist/examples/plugins/removePrint.js.map +1 -1
  184. package/dist/files/AssetFile.d.ts +26 -0
  185. package/dist/files/AssetFile.js +26 -0
  186. package/dist/files/AssetFile.js.map +1 -0
  187. package/dist/files/BrsFile.Class.spec.js +529 -486
  188. package/dist/files/BrsFile.Class.spec.js.map +1 -1
  189. package/dist/files/BrsFile.d.ts +124 -112
  190. package/dist/files/BrsFile.js +819 -1131
  191. package/dist/files/BrsFile.js.map +1 -1
  192. package/dist/files/BrsFile.spec.js +1869 -1277
  193. package/dist/files/BrsFile.spec.js.map +1 -1
  194. package/dist/files/BscFile.d.ts +104 -0
  195. package/dist/files/BscFile.js +16 -0
  196. package/dist/files/BscFile.js.map +1 -0
  197. package/dist/files/Factory.d.ts +25 -0
  198. package/dist/files/Factory.js +22 -0
  199. package/dist/files/Factory.js.map +1 -0
  200. package/dist/files/LazyFileData.d.ts +20 -0
  201. package/dist/files/LazyFileData.js +54 -0
  202. package/dist/files/LazyFileData.js.map +1 -0
  203. package/dist/files/LazyFileData.spec.d.ts +1 -0
  204. package/dist/files/LazyFileData.spec.js +27 -0
  205. package/dist/files/LazyFileData.spec.js.map +1 -0
  206. package/dist/files/XmlFile.d.ts +70 -32
  207. package/dist/files/XmlFile.js +106 -117
  208. package/dist/files/XmlFile.js.map +1 -1
  209. package/dist/files/XmlFile.spec.js +325 -262
  210. package/dist/files/XmlFile.spec.js.map +1 -1
  211. package/dist/files/tests/imports.spec.js +49 -41
  212. package/dist/files/tests/imports.spec.js.map +1 -1
  213. package/dist/files/tests/optionalChaning.spec.js +104 -40
  214. package/dist/files/tests/optionalChaning.spec.js.map +1 -1
  215. package/dist/globalCallables.js +16 -18
  216. package/dist/globalCallables.js.map +1 -1
  217. package/dist/index.d.ts +13 -2
  218. package/dist/index.js +15 -2
  219. package/dist/index.js.map +1 -1
  220. package/dist/interfaces.d.ts +440 -150
  221. package/dist/interfaces.js +27 -0
  222. package/dist/interfaces.js.map +1 -1
  223. package/dist/lexer/Character.spec.js +5 -5
  224. package/dist/lexer/Character.spec.js.map +1 -1
  225. package/dist/lexer/Lexer.d.ts +12 -5
  226. package/dist/lexer/Lexer.js +28 -13
  227. package/dist/lexer/Lexer.js.map +1 -1
  228. package/dist/lexer/Lexer.spec.js +187 -134
  229. package/dist/lexer/Lexer.spec.js.map +1 -1
  230. package/dist/lexer/Token.d.ts +9 -1
  231. package/dist/lexer/Token.js +9 -1
  232. package/dist/lexer/Token.js.map +1 -1
  233. package/dist/lexer/TokenKind.d.ts +9 -0
  234. package/dist/lexer/TokenKind.js +30 -5
  235. package/dist/lexer/TokenKind.js.map +1 -1
  236. package/dist/parser/AstNode.d.ts +162 -0
  237. package/dist/parser/AstNode.js +225 -0
  238. package/dist/parser/AstNode.js.map +1 -0
  239. package/dist/parser/AstNode.spec.d.ts +1 -0
  240. package/dist/parser/AstNode.spec.js +165 -0
  241. package/dist/parser/AstNode.spec.js.map +1 -0
  242. package/dist/parser/BrsTranspileState.d.ts +4 -7
  243. package/dist/parser/BrsTranspileState.js +4 -12
  244. package/dist/parser/BrsTranspileState.js.map +1 -1
  245. package/dist/parser/Expression.d.ts +126 -167
  246. package/dist/parser/Expression.js +524 -394
  247. package/dist/parser/Expression.js.map +1 -1
  248. package/dist/parser/Parser.Class.spec.js +152 -146
  249. package/dist/parser/Parser.Class.spec.js.map +1 -1
  250. package/dist/parser/Parser.d.ts +45 -196
  251. package/dist/parser/Parser.js +470 -926
  252. package/dist/parser/Parser.js.map +1 -1
  253. package/dist/parser/Parser.spec.d.ts +3 -1
  254. package/dist/parser/Parser.spec.js +1034 -805
  255. package/dist/parser/Parser.spec.js.map +1 -1
  256. package/dist/parser/SGParser.d.ts +9 -8
  257. package/dist/parser/SGParser.js +10 -8
  258. package/dist/parser/SGParser.js.map +1 -1
  259. package/dist/parser/SGParser.spec.js +27 -38
  260. package/dist/parser/SGParser.spec.js.map +1 -1
  261. package/dist/parser/SGTypes.d.ts +98 -35
  262. package/dist/parser/SGTypes.js +169 -99
  263. package/dist/parser/SGTypes.js.map +1 -1
  264. package/dist/parser/Statement.d.ts +208 -122
  265. package/dist/parser/Statement.js +599 -364
  266. package/dist/parser/Statement.js.map +1 -1
  267. package/dist/parser/Statement.spec.js +45 -21
  268. package/dist/parser/Statement.spec.js.map +1 -1
  269. package/dist/parser/TranspileState.d.ts +1 -1
  270. package/dist/parser/TranspileState.js +7 -12
  271. package/dist/parser/TranspileState.js.map +1 -1
  272. package/dist/parser/tests/Parser.spec.js +3 -2
  273. package/dist/parser/tests/Parser.spec.js.map +1 -1
  274. package/dist/parser/tests/controlFlow/For.spec.js +33 -23
  275. package/dist/parser/tests/controlFlow/For.spec.js.map +1 -1
  276. package/dist/parser/tests/controlFlow/ForEach.spec.js +25 -20
  277. package/dist/parser/tests/controlFlow/ForEach.spec.js.map +1 -1
  278. package/dist/parser/tests/controlFlow/If.spec.js +96 -94
  279. package/dist/parser/tests/controlFlow/If.spec.js.map +1 -1
  280. package/dist/parser/tests/controlFlow/While.spec.js +22 -16
  281. package/dist/parser/tests/controlFlow/While.spec.js.map +1 -1
  282. package/dist/parser/tests/expression/Additive.spec.js +8 -8
  283. package/dist/parser/tests/expression/Additive.spec.js.map +1 -1
  284. package/dist/parser/tests/expression/ArrayLiterals.spec.js +58 -21
  285. package/dist/parser/tests/expression/ArrayLiterals.spec.js.map +1 -1
  286. package/dist/parser/tests/expression/AssociativeArrayLiterals.spec.js +61 -20
  287. package/dist/parser/tests/expression/AssociativeArrayLiterals.spec.js.map +1 -1
  288. package/dist/parser/tests/expression/Boolean.spec.js +8 -8
  289. package/dist/parser/tests/expression/Boolean.spec.js.map +1 -1
  290. package/dist/parser/tests/expression/Call.spec.js +129 -21
  291. package/dist/parser/tests/expression/Call.spec.js.map +1 -1
  292. package/dist/parser/tests/expression/Exponential.spec.js +5 -5
  293. package/dist/parser/tests/expression/Exponential.spec.js.map +1 -1
  294. package/dist/parser/tests/expression/Function.spec.js +36 -36
  295. package/dist/parser/tests/expression/Function.spec.js.map +1 -1
  296. package/dist/parser/tests/expression/Indexing.spec.js +67 -22
  297. package/dist/parser/tests/expression/Indexing.spec.js.map +1 -1
  298. package/dist/parser/tests/expression/Multiplicative.spec.js +9 -9
  299. package/dist/parser/tests/expression/Multiplicative.spec.js.map +1 -1
  300. package/dist/parser/tests/expression/NullCoalescenceExpression.spec.js +123 -81
  301. package/dist/parser/tests/expression/NullCoalescenceExpression.spec.js.map +1 -1
  302. package/dist/parser/tests/expression/PrefixUnary.spec.js +12 -12
  303. package/dist/parser/tests/expression/PrefixUnary.spec.js.map +1 -1
  304. package/dist/parser/tests/expression/Primary.spec.js +12 -12
  305. package/dist/parser/tests/expression/Primary.spec.js.map +1 -1
  306. package/dist/parser/tests/expression/RegexLiteralExpression.spec.js +10 -10
  307. package/dist/parser/tests/expression/RegexLiteralExpression.spec.js.map +1 -1
  308. package/dist/parser/tests/expression/Relational.spec.js +13 -13
  309. package/dist/parser/tests/expression/Relational.spec.js.map +1 -1
  310. package/dist/parser/tests/expression/SourceLiteralExpression.spec.js +24 -24
  311. package/dist/parser/tests/expression/SourceLiteralExpression.spec.js.map +1 -1
  312. package/dist/parser/tests/expression/TemplateStringExpression.spec.js +221 -81
  313. package/dist/parser/tests/expression/TemplateStringExpression.spec.js.map +1 -1
  314. package/dist/parser/tests/expression/TernaryExpression.spec.js +287 -105
  315. package/dist/parser/tests/expression/TernaryExpression.spec.js.map +1 -1
  316. package/dist/parser/tests/expression/TypeExpression.spec.d.ts +1 -0
  317. package/dist/parser/tests/expression/TypeExpression.spec.js +127 -0
  318. package/dist/parser/tests/expression/TypeExpression.spec.js.map +1 -0
  319. package/dist/parser/tests/expression/UnaryExpression.spec.d.ts +1 -0
  320. package/dist/parser/tests/expression/UnaryExpression.spec.js +52 -0
  321. package/dist/parser/tests/expression/UnaryExpression.spec.js.map +1 -0
  322. package/dist/parser/tests/statement/AssignmentOperators.spec.js +15 -15
  323. package/dist/parser/tests/statement/AssignmentOperators.spec.js.map +1 -1
  324. package/dist/parser/tests/statement/ConstStatement.spec.d.ts +1 -0
  325. package/dist/parser/tests/statement/ConstStatement.spec.js +262 -0
  326. package/dist/parser/tests/statement/ConstStatement.spec.js.map +1 -0
  327. package/dist/parser/tests/statement/Continue.spec.d.ts +1 -0
  328. package/dist/parser/tests/statement/Continue.spec.js +119 -0
  329. package/dist/parser/tests/statement/Continue.spec.js.map +1 -0
  330. package/dist/parser/tests/statement/Declaration.spec.js +19 -19
  331. package/dist/parser/tests/statement/Declaration.spec.js.map +1 -1
  332. package/dist/parser/tests/statement/Dim.spec.js +22 -22
  333. package/dist/parser/tests/statement/Dim.spec.js.map +1 -1
  334. package/dist/parser/tests/statement/Enum.spec.js +111 -300
  335. package/dist/parser/tests/statement/Enum.spec.js.map +1 -1
  336. package/dist/parser/tests/statement/For.spec.js +9 -10
  337. package/dist/parser/tests/statement/For.spec.js.map +1 -1
  338. package/dist/parser/tests/statement/ForEach.spec.js +8 -9
  339. package/dist/parser/tests/statement/ForEach.spec.js.map +1 -1
  340. package/dist/parser/tests/statement/Function.spec.js +44 -35
  341. package/dist/parser/tests/statement/Function.spec.js.map +1 -1
  342. package/dist/parser/tests/statement/Goto.spec.js +5 -5
  343. package/dist/parser/tests/statement/Goto.spec.js.map +1 -1
  344. package/dist/parser/tests/statement/Increment.spec.js +20 -20
  345. package/dist/parser/tests/statement/Increment.spec.js.map +1 -1
  346. package/dist/parser/tests/statement/InterfaceStatement.spec.js +30 -196
  347. package/dist/parser/tests/statement/InterfaceStatement.spec.js.map +1 -1
  348. package/dist/parser/tests/statement/LibraryStatement.spec.js +11 -11
  349. package/dist/parser/tests/statement/LibraryStatement.spec.js.map +1 -1
  350. package/dist/parser/tests/statement/Misc.spec.js +16 -78
  351. package/dist/parser/tests/statement/Misc.spec.js.map +1 -1
  352. package/dist/parser/tests/statement/PrintStatement.spec.js +107 -90
  353. package/dist/parser/tests/statement/PrintStatement.spec.js.map +1 -1
  354. package/dist/parser/tests/statement/ReturnStatement.spec.js +14 -12
  355. package/dist/parser/tests/statement/ReturnStatement.spec.js.map +1 -1
  356. package/dist/parser/tests/statement/Set.spec.js +48 -35
  357. package/dist/parser/tests/statement/Set.spec.js.map +1 -1
  358. package/dist/parser/tests/statement/Stop.spec.js +6 -6
  359. package/dist/parser/tests/statement/Stop.spec.js.map +1 -1
  360. package/dist/parser/tests/statement/Throw.spec.js +6 -6
  361. package/dist/parser/tests/statement/Throw.spec.js.map +1 -1
  362. package/dist/parser/tests/statement/TryCatch.spec.js +18 -16
  363. package/dist/parser/tests/statement/TryCatch.spec.js.map +1 -1
  364. package/dist/preprocessor/Manifest.d.ts +1 -1
  365. package/dist/preprocessor/Manifest.js +3 -3
  366. package/dist/preprocessor/Manifest.js.map +1 -1
  367. package/dist/preprocessor/Manifest.spec.js +8 -8
  368. package/dist/preprocessor/Manifest.spec.js.map +1 -1
  369. package/dist/preprocessor/Preprocessor.d.ts +5 -6
  370. package/dist/preprocessor/Preprocessor.js +15 -11
  371. package/dist/preprocessor/Preprocessor.js.map +1 -1
  372. package/dist/preprocessor/Preprocessor.spec.js +25 -25
  373. package/dist/preprocessor/Preprocessor.spec.js.map +1 -1
  374. package/dist/preprocessor/PreprocessorParser.d.ts +1 -1
  375. package/dist/preprocessor/PreprocessorParser.js +7 -1
  376. package/dist/preprocessor/PreprocessorParser.js.map +1 -1
  377. package/dist/preprocessor/PreprocessorParser.spec.js +13 -13
  378. package/dist/preprocessor/PreprocessorParser.spec.js.map +1 -1
  379. package/dist/roku-types/data.json +6544 -10519
  380. package/dist/roku-types/index.d.ts +662 -1934
  381. package/dist/types/ArrayType.d.ts +10 -9
  382. package/dist/types/ArrayType.js +65 -60
  383. package/dist/types/ArrayType.js.map +1 -1
  384. package/dist/types/ArrayType.spec.js +36 -68
  385. package/dist/types/ArrayType.spec.js.map +1 -1
  386. package/dist/types/AssociativeArrayType.d.ts +11 -0
  387. package/dist/types/AssociativeArrayType.js +52 -0
  388. package/dist/types/AssociativeArrayType.js.map +1 -0
  389. package/dist/types/BaseFunctionType.d.ts +9 -0
  390. package/dist/types/BaseFunctionType.js +25 -0
  391. package/dist/types/BaseFunctionType.js.map +1 -0
  392. package/dist/types/BooleanType.d.ts +8 -5
  393. package/dist/types/BooleanType.js +14 -7
  394. package/dist/types/BooleanType.js.map +1 -1
  395. package/dist/types/BooleanType.spec.js +10 -6
  396. package/dist/types/BooleanType.spec.js.map +1 -1
  397. package/dist/types/BscType.d.ts +32 -21
  398. package/dist/types/BscType.js +118 -21
  399. package/dist/types/BscType.js.map +1 -1
  400. package/dist/types/BscTypeKind.d.ts +25 -0
  401. package/dist/types/BscTypeKind.js +30 -0
  402. package/dist/types/BscTypeKind.js.map +1 -0
  403. package/dist/types/BuiltInInterfaceAdder.d.ts +23 -0
  404. package/dist/types/BuiltInInterfaceAdder.js +164 -0
  405. package/dist/types/BuiltInInterfaceAdder.js.map +1 -0
  406. package/dist/types/BuiltInInterfaceAdder.spec.d.ts +1 -0
  407. package/dist/types/BuiltInInterfaceAdder.spec.js +116 -0
  408. package/dist/types/BuiltInInterfaceAdder.spec.js.map +1 -0
  409. package/dist/types/ClassType.d.ts +17 -0
  410. package/dist/types/ClassType.js +58 -0
  411. package/dist/types/ClassType.js.map +1 -0
  412. package/dist/types/ClassType.spec.d.ts +1 -0
  413. package/dist/types/ClassType.spec.js +77 -0
  414. package/dist/types/ClassType.spec.js.map +1 -0
  415. package/dist/types/ComponentType.d.ts +26 -0
  416. package/dist/types/ComponentType.js +83 -0
  417. package/dist/types/ComponentType.js.map +1 -0
  418. package/dist/types/DoubleType.d.ts +8 -5
  419. package/dist/types/DoubleType.js +18 -16
  420. package/dist/types/DoubleType.js.map +1 -1
  421. package/dist/types/DoubleType.spec.js +12 -6
  422. package/dist/types/DoubleType.spec.js.map +1 -1
  423. package/dist/types/DynamicType.d.ts +10 -5
  424. package/dist/types/DynamicType.js +16 -4
  425. package/dist/types/DynamicType.js.map +1 -1
  426. package/dist/types/DynamicType.spec.js +16 -5
  427. package/dist/types/DynamicType.spec.js.map +1 -1
  428. package/dist/types/EnumType.d.ts +30 -12
  429. package/dist/types/EnumType.js +43 -17
  430. package/dist/types/EnumType.js.map +1 -1
  431. package/dist/types/EnumType.spec.d.ts +1 -0
  432. package/dist/types/EnumType.spec.js +33 -0
  433. package/dist/types/EnumType.spec.js.map +1 -0
  434. package/dist/types/FloatType.d.ts +8 -5
  435. package/dist/types/FloatType.js +18 -16
  436. package/dist/types/FloatType.js.map +1 -1
  437. package/dist/types/FloatType.spec.js +4 -6
  438. package/dist/types/FloatType.spec.js.map +1 -1
  439. package/dist/types/FunctionType.d.ts +13 -8
  440. package/dist/types/FunctionType.js +30 -14
  441. package/dist/types/FunctionType.js.map +1 -1
  442. package/dist/types/InheritableType.d.ts +28 -0
  443. package/dist/types/InheritableType.js +152 -0
  444. package/dist/types/InheritableType.js.map +1 -0
  445. package/dist/types/IntegerType.d.ts +8 -5
  446. package/dist/types/IntegerType.js +18 -16
  447. package/dist/types/IntegerType.js.map +1 -1
  448. package/dist/types/IntegerType.spec.js +8 -6
  449. package/dist/types/IntegerType.spec.js.map +1 -1
  450. package/dist/types/InterfaceType.d.ts +12 -13
  451. package/dist/types/InterfaceType.js +20 -48
  452. package/dist/types/InterfaceType.js.map +1 -1
  453. package/dist/types/InterfaceType.spec.js +90 -56
  454. package/dist/types/InterfaceType.spec.js.map +1 -1
  455. package/dist/types/InvalidType.d.ts +7 -5
  456. package/dist/types/InvalidType.js +13 -7
  457. package/dist/types/InvalidType.js.map +1 -1
  458. package/dist/types/InvalidType.spec.js +8 -6
  459. package/dist/types/InvalidType.spec.js.map +1 -1
  460. package/dist/types/LongIntegerType.d.ts +8 -5
  461. package/dist/types/LongIntegerType.js +17 -15
  462. package/dist/types/LongIntegerType.js.map +1 -1
  463. package/dist/types/LongIntegerType.spec.js +10 -6
  464. package/dist/types/LongIntegerType.spec.js.map +1 -1
  465. package/dist/types/NamespaceType.d.ts +12 -0
  466. package/dist/types/NamespaceType.js +28 -0
  467. package/dist/types/NamespaceType.js.map +1 -0
  468. package/dist/types/ObjectType.d.ts +9 -8
  469. package/dist/types/ObjectType.js +21 -11
  470. package/dist/types/ObjectType.js.map +1 -1
  471. package/dist/types/ObjectType.spec.js +3 -3
  472. package/dist/types/ObjectType.spec.js.map +1 -1
  473. package/dist/types/ReferenceType.d.ts +63 -0
  474. package/dist/types/ReferenceType.js +423 -0
  475. package/dist/types/ReferenceType.js.map +1 -0
  476. package/dist/types/ReferenceType.spec.d.ts +1 -0
  477. package/dist/types/ReferenceType.spec.js +137 -0
  478. package/dist/types/ReferenceType.spec.js.map +1 -0
  479. package/dist/types/StringType.d.ts +11 -5
  480. package/dist/types/StringType.js +18 -7
  481. package/dist/types/StringType.js.map +1 -1
  482. package/dist/types/StringType.spec.js +3 -5
  483. package/dist/types/StringType.spec.js.map +1 -1
  484. package/dist/types/TypedFunctionType.d.ts +22 -17
  485. package/dist/types/TypedFunctionType.js +78 -60
  486. package/dist/types/TypedFunctionType.js.map +1 -1
  487. package/dist/types/TypedFunctionType.spec.js +105 -20
  488. package/dist/types/TypedFunctionType.spec.js.map +1 -1
  489. package/dist/types/UninitializedType.d.ts +8 -6
  490. package/dist/types/UninitializedType.js +13 -7
  491. package/dist/types/UninitializedType.js.map +1 -1
  492. package/dist/types/UnionType.d.ts +20 -0
  493. package/dist/types/UnionType.js +123 -0
  494. package/dist/types/UnionType.js.map +1 -0
  495. package/dist/types/UnionType.spec.d.ts +1 -0
  496. package/dist/types/UnionType.spec.js +130 -0
  497. package/dist/types/UnionType.spec.js.map +1 -0
  498. package/dist/types/VoidType.d.ts +8 -5
  499. package/dist/types/VoidType.js +14 -7
  500. package/dist/types/VoidType.js.map +1 -1
  501. package/dist/types/VoidType.spec.js +3 -3
  502. package/dist/types/VoidType.spec.js.map +1 -1
  503. package/dist/types/helper.spec.d.ts +1 -0
  504. package/dist/types/helper.spec.js +145 -0
  505. package/dist/types/helper.spec.js.map +1 -0
  506. package/dist/types/helpers.d.ts +19 -37
  507. package/dist/types/helpers.js +159 -99
  508. package/dist/types/helpers.js.map +1 -1
  509. package/dist/types/index.d.ts +22 -0
  510. package/dist/types/index.js +39 -0
  511. package/dist/types/index.js.map +1 -0
  512. package/dist/util.d.ts +167 -131
  513. package/dist/util.js +890 -350
  514. package/dist/util.js.map +1 -1
  515. package/dist/validators/ClassValidator.d.ts +7 -25
  516. package/dist/validators/ClassValidator.js +103 -194
  517. package/dist/validators/ClassValidator.js.map +1 -1
  518. package/package.json +165 -149
  519. package/dist/astUtils/AstEditor.js.map +0 -1
  520. package/dist/astUtils/AstEditor.spec.js.map +0 -1
  521. package/dist/bscPlugin/transpile/BrsFilePreTranspileProcessor.d.ts +0 -8
  522. package/dist/bscPlugin/transpile/BrsFilePreTranspileProcessor.js +0 -40
  523. package/dist/bscPlugin/transpile/BrsFilePreTranspileProcessor.js.map +0 -1
  524. package/dist/bscPlugin/transpile/BrsFilePreTranspileProcessor.spec.js +0 -32
  525. package/dist/bscPlugin/transpile/BrsFilePreTranspileProcessor.spec.js.map +0 -1
  526. package/dist/parser/SGTypes.spec.js +0 -351
  527. package/dist/parser/SGTypes.spec.js.map +0 -1
  528. package/dist/types/CustomType.d.ts +0 -12
  529. package/dist/types/CustomType.js +0 -44
  530. package/dist/types/CustomType.js.map +0 -1
  531. package/dist/types/LazyType.d.ts +0 -16
  532. package/dist/types/LazyType.js +0 -44
  533. package/dist/types/LazyType.js.map +0 -1
  534. /package/dist/astUtils/{AstEditor.spec.d.ts → Editor.spec.d.ts} +0 -0
  535. /package/dist/bscPlugin/{transpile/BrsFilePreTranspileProcessor.spec.d.ts → completions/CompletionsProcessor.spec.d.ts} +0 -0
  536. /package/dist/{parser/SGTypes.spec.d.ts → bscPlugin/hover/HoverProcessor.spec.d.ts} +0 -0
@@ -1,7 +1,7 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  /* eslint no-template-curly-in-string: 0 */
4
- const chai_1 = require("chai");
4
+ const chai_config_spec_1 = require("../chai-config.spec");
5
5
  const TokenKind_1 = require("./TokenKind");
6
6
  const Lexer_1 = require("./Lexer");
7
7
  const Token_1 = require("./Token");
@@ -9,9 +9,16 @@ const Parser_spec_1 = require("../parser/Parser.spec");
9
9
  const vscode_languageserver_1 = require("vscode-languageserver");
10
10
  const util_1 = require("../util");
11
11
  describe('lexer', () => {
12
+ it('recognizes the `const` keyword', () => {
13
+ let { tokens } = Lexer_1.Lexer.scan('const');
14
+ (0, chai_config_spec_1.expect)(tokens.map(x => x.kind)).to.eql([
15
+ TokenKind_1.TokenKind.Const,
16
+ TokenKind_1.TokenKind.Eof
17
+ ]);
18
+ });
12
19
  it('recognizes namespace keywords', () => {
13
20
  let { tokens } = Lexer_1.Lexer.scan('namespace end namespace endnamespace end namespace');
14
- (0, chai_1.expect)(tokens.map(x => x.kind)).to.eql([
21
+ (0, chai_config_spec_1.expect)(tokens.map(x => x.kind)).to.eql([
15
22
  TokenKind_1.TokenKind.Namespace,
16
23
  TokenKind_1.TokenKind.EndNamespace,
17
24
  TokenKind_1.TokenKind.EndNamespace,
@@ -77,39 +84,39 @@ describe('lexer', () => {
77
84
  });
78
85
  it('recognizes the callfunc operator', () => {
79
86
  let { tokens } = Lexer_1.Lexer.scan('@.');
80
- (0, chai_1.expect)(tokens[0].kind).to.equal(TokenKind_1.TokenKind.Callfunc);
87
+ (0, chai_config_spec_1.expect)(tokens[0].kind).to.equal(TokenKind_1.TokenKind.Callfunc);
81
88
  });
82
89
  it('recognizes the import token', () => {
83
90
  let { tokens } = Lexer_1.Lexer.scan('import');
84
- (0, chai_1.expect)(tokens[0].kind).to.eql(TokenKind_1.TokenKind.Import);
91
+ (0, chai_config_spec_1.expect)(tokens[0].kind).to.eql(TokenKind_1.TokenKind.Import);
85
92
  });
86
93
  it('recognizes library token', () => {
87
94
  let { tokens } = Lexer_1.Lexer.scan('library');
88
- (0, chai_1.expect)(tokens[0].kind).to.eql(TokenKind_1.TokenKind.Library);
95
+ (0, chai_config_spec_1.expect)(tokens[0].kind).to.eql(TokenKind_1.TokenKind.Library);
89
96
  });
90
97
  it('produces an at symbol token', () => {
91
98
  let { tokens } = Lexer_1.Lexer.scan('@');
92
- (0, chai_1.expect)(tokens[0].kind).to.equal(TokenKind_1.TokenKind.At);
99
+ (0, chai_config_spec_1.expect)(tokens[0].kind).to.equal(TokenKind_1.TokenKind.At);
93
100
  });
94
101
  it('produces a semicolon token', () => {
95
102
  let { tokens } = Lexer_1.Lexer.scan(';');
96
- (0, chai_1.expect)(tokens[0].kind).to.equal(TokenKind_1.TokenKind.Semicolon);
103
+ (0, chai_config_spec_1.expect)(tokens[0].kind).to.equal(TokenKind_1.TokenKind.Semicolon);
97
104
  });
98
105
  it('emits error on unknown character type', () => {
99
106
  let { diagnostics } = Lexer_1.Lexer.scan('\0');
100
- (0, chai_1.expect)(diagnostics).to.be.lengthOf(1);
107
+ (0, chai_config_spec_1.expect)(diagnostics).to.be.lengthOf(1);
101
108
  });
102
109
  it('includes an end-of-file marker', () => {
103
110
  let { tokens } = Lexer_1.Lexer.scan('');
104
- (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Eof]);
111
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Eof]);
105
112
  });
106
113
  it('ignores tabs and spaces', () => {
107
114
  let { tokens } = Lexer_1.Lexer.scan('\t\t \t \t');
108
- (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Eof]);
115
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Eof]);
109
116
  });
110
117
  it('retains every single newline', () => {
111
118
  let { tokens } = Lexer_1.Lexer.scan('\n\n\'foo\n\n\nprint 2\n\n');
112
- (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
119
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
113
120
  TokenKind_1.TokenKind.Newline,
114
121
  TokenKind_1.TokenKind.Newline,
115
122
  TokenKind_1.TokenKind.Comment,
@@ -131,7 +138,7 @@ describe('lexer', () => {
131
138
  ' print 0\r\n' +
132
139
  ' end if\r\n' +
133
140
  'end function\r\n').tokens.map(x => x.kind);
134
- (0, chai_1.expect)(kinds).to.eql([
141
+ (0, chai_config_spec_1.expect)(kinds).to.eql([
135
142
  TokenKind_1.TokenKind.Function, TokenKind_1.TokenKind.Identifier, TokenKind_1.TokenKind.LeftParen, TokenKind_1.TokenKind.RightParen, TokenKind_1.TokenKind.As, TokenKind_1.TokenKind.String, TokenKind_1.TokenKind.Newline,
136
143
  TokenKind_1.TokenKind.If, TokenKind_1.TokenKind.True, TokenKind_1.TokenKind.Then, TokenKind_1.TokenKind.Newline,
137
144
  TokenKind_1.TokenKind.Print, TokenKind_1.TokenKind.IntegerLiteral, TokenKind_1.TokenKind.Newline,
@@ -164,20 +171,20 @@ describe('lexer', () => {
164
171
  [2, 7, 2, 8] //Eof
165
172
  ];
166
173
  /*eslint-enable*/
167
- (0, chai_1.expect)(withoutWhitespace, 'Without whitespace').to.eql(expectedLocations);
168
- (0, chai_1.expect)(withWhitespace, 'With whitespace').to.eql(expectedLocations);
174
+ (0, chai_config_spec_1.expect)(withoutWhitespace, 'Without whitespace').to.eql(expectedLocations);
175
+ (0, chai_config_spec_1.expect)(withWhitespace, 'With whitespace').to.eql(expectedLocations);
169
176
  });
170
177
  it('retains original line endings', () => {
171
178
  let { tokens } = Lexer_1.Lexer.scan('print "hello"\r\nprint "world"\n');
172
- (0, chai_1.expect)([
179
+ (0, chai_config_spec_1.expect)([
173
180
  tokens[2].text.charCodeAt(0),
174
181
  tokens[2].text.charCodeAt(1)
175
182
  ], 'should contain \\r\\n').to.eql([13, 10]);
176
- (0, chai_1.expect)(tokens[5].text.charCodeAt(0), 'should contain \\r\\n').to.eql(10);
183
+ (0, chai_config_spec_1.expect)(tokens[5].text.charCodeAt(0), 'should contain \\r\\n').to.eql(10);
177
184
  });
178
185
  it('correctly splits the elseif token', () => {
179
186
  let { tokens } = Lexer_1.Lexer.scan('else if elseif else if');
180
- (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
187
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
181
188
  TokenKind_1.TokenKind.Else,
182
189
  TokenKind_1.TokenKind.If,
183
190
  TokenKind_1.TokenKind.Else,
@@ -189,20 +196,20 @@ describe('lexer', () => {
189
196
  });
190
197
  it('gives the `as` keyword its own TokenKind', () => {
191
198
  let { tokens } = Lexer_1.Lexer.scan('as');
192
- (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.As, TokenKind_1.TokenKind.Eof]);
199
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.As, TokenKind_1.TokenKind.Eof]);
193
200
  });
194
201
  it('gives the `stop` keyword its own TokenKind', () => {
195
202
  let { tokens } = Lexer_1.Lexer.scan('stop');
196
- (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Stop, TokenKind_1.TokenKind.Eof]);
203
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Stop, TokenKind_1.TokenKind.Eof]);
197
204
  });
198
205
  it('does not alias \'?\' to \'print\' - the parser will do that', () => {
199
206
  let { tokens } = Lexer_1.Lexer.scan('?2');
200
- (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Question, TokenKind_1.TokenKind.IntegerLiteral, TokenKind_1.TokenKind.Eof]);
207
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Question, TokenKind_1.TokenKind.IntegerLiteral, TokenKind_1.TokenKind.Eof]);
201
208
  });
202
209
  describe('comments', () => {
203
210
  it('does not include carriage return character', () => {
204
211
  let tokens = Lexer_1.Lexer.scan(`'someComment\r\nprint "hello"`).tokens;
205
- (0, chai_1.expect)(tokens[0].text).to.equal(`'someComment`);
212
+ (0, chai_config_spec_1.expect)(tokens[0].text).to.equal(`'someComment`);
206
213
  });
207
214
  it('includes the comment characters in the text', () => {
208
215
  let text = Lexer_1.Lexer.scan(`
@@ -211,7 +218,7 @@ describe('lexer', () => {
211
218
  `).tokens
212
219
  .filter(x => ![TokenKind_1.TokenKind.Newline, TokenKind_1.TokenKind.Eof].includes(x.kind))
213
220
  .map(x => x.text);
214
- (0, chai_1.expect)(text).to.eql([
221
+ (0, chai_config_spec_1.expect)(text).to.eql([
215
222
  `'comment`,
216
223
  'REM some comment'
217
224
  ]);
@@ -225,7 +232,7 @@ describe('lexer', () => {
225
232
  `, {
226
233
  includeWhitespace: true
227
234
  }).tokens.map(x => [...(0, Parser_spec_1.rangeToArray)(x.range), x.text]);
228
- (0, chai_1.expect)(tokens).to.eql([
235
+ (0, chai_config_spec_1.expect)(tokens).to.eql([
229
236
  [0, 0, 0, 1, '\n'],
230
237
  [1, 0, 1, 16, ' '],
231
238
  [1, 16, 1, 19, 'sub'],
@@ -260,14 +267,14 @@ describe('lexer', () => {
260
267
  'comment
261
268
  REM some comment
262
269
  `).tokens.filter(x => ![TokenKind_1.TokenKind.Newline, TokenKind_1.TokenKind.Eof].includes(x.kind));
263
- (0, chai_1.expect)(tokens[0].range).to.eql(vscode_languageserver_1.Range.create(1, 16, 1, 24));
264
- (0, chai_1.expect)(tokens[1].range).to.eql(vscode_languageserver_1.Range.create(2, 16, 2, 32));
270
+ (0, chai_config_spec_1.expect)(tokens[0].range).to.eql(vscode_languageserver_1.Range.create(1, 16, 1, 24));
271
+ (0, chai_config_spec_1.expect)(tokens[1].range).to.eql(vscode_languageserver_1.Range.create(2, 16, 2, 32));
265
272
  });
266
273
  it('finds correct location for newlines', () => {
267
274
  let tokens = Lexer_1.Lexer.scan('sub\nsub\r\nsub\n\n').tokens
268
275
  //ignore the Eof token
269
276
  .filter(x => x.kind !== TokenKind_1.TokenKind.Eof);
270
- (0, chai_1.expect)(tokens.map(x => x.range)).to.eql([
277
+ (0, chai_config_spec_1.expect)(tokens.map(x => x.range)).to.eql([
271
278
  vscode_languageserver_1.Range.create(0, 0, 0, 3),
272
279
  vscode_languageserver_1.Range.create(0, 3, 0, 4),
273
280
  vscode_languageserver_1.Range.create(1, 0, 1, 3),
@@ -290,26 +297,26 @@ describe('lexer', () => {
290
297
  end sub
291
298
  `);
292
299
  let comments = tokens.filter(x => x.kind === TokenKind_1.TokenKind.Comment);
293
- (0, chai_1.expect)(comments).to.be.lengthOf(1);
294
- (0, chai_1.expect)(comments[0].range).to.eql(vscode_languageserver_1.Range.create(8, 27, 8, 35));
300
+ (0, chai_config_spec_1.expect)(comments).to.be.lengthOf(1);
301
+ (0, chai_config_spec_1.expect)(comments[0].range).to.eql(vscode_languageserver_1.Range.create(8, 27, 8, 35));
295
302
  });
296
303
  it('ignores everything after `\'`', () => {
297
304
  let { tokens } = Lexer_1.Lexer.scan('= \' (');
298
- (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Equal, TokenKind_1.TokenKind.Comment, TokenKind_1.TokenKind.Eof]);
305
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Equal, TokenKind_1.TokenKind.Comment, TokenKind_1.TokenKind.Eof]);
299
306
  });
300
307
  it('ignores everything after `REM`', () => {
301
308
  let { tokens } = Lexer_1.Lexer.scan('= REM (');
302
- (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Equal, TokenKind_1.TokenKind.Comment, TokenKind_1.TokenKind.Eof]);
309
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Equal, TokenKind_1.TokenKind.Comment, TokenKind_1.TokenKind.Eof]);
303
310
  });
304
311
  it('ignores everything after `rem`', () => {
305
312
  let { tokens } = Lexer_1.Lexer.scan('= rem (');
306
- (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Equal, TokenKind_1.TokenKind.Comment, TokenKind_1.TokenKind.Eof]);
313
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Equal, TokenKind_1.TokenKind.Comment, TokenKind_1.TokenKind.Eof]);
307
314
  });
308
315
  }); // comments
309
316
  describe('non-literals', () => {
310
317
  it('reads parens & braces', () => {
311
318
  let { tokens } = Lexer_1.Lexer.scan('(){}');
312
- (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
319
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
313
320
  TokenKind_1.TokenKind.LeftParen,
314
321
  TokenKind_1.TokenKind.RightParen,
315
322
  TokenKind_1.TokenKind.LeftCurlyBrace,
@@ -319,7 +326,7 @@ describe('lexer', () => {
319
326
  });
320
327
  it('reads operators', () => {
321
328
  let { tokens } = Lexer_1.Lexer.scan('^ - + * MOD / \\ -- ++');
322
- (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
329
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
323
330
  TokenKind_1.TokenKind.Caret,
324
331
  TokenKind_1.TokenKind.Minus,
325
332
  TokenKind_1.TokenKind.Plus,
@@ -334,7 +341,7 @@ describe('lexer', () => {
334
341
  });
335
342
  it('reads bitshift operators', () => {
336
343
  let { tokens } = Lexer_1.Lexer.scan('<< >> <<');
337
- (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
344
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
338
345
  TokenKind_1.TokenKind.LeftShift,
339
346
  TokenKind_1.TokenKind.RightShift,
340
347
  TokenKind_1.TokenKind.LeftShift,
@@ -343,7 +350,7 @@ describe('lexer', () => {
343
350
  });
344
351
  it('reads bitshift assignment operators', () => {
345
352
  let { tokens } = Lexer_1.Lexer.scan('<<= >>=');
346
- (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
353
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
347
354
  TokenKind_1.TokenKind.LeftShiftEqual,
348
355
  TokenKind_1.TokenKind.RightShiftEqual,
349
356
  TokenKind_1.TokenKind.Eof
@@ -351,7 +358,7 @@ describe('lexer', () => {
351
358
  });
352
359
  it('reads comparators', () => {
353
360
  let { tokens } = Lexer_1.Lexer.scan('< <= > >= = <>');
354
- (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
361
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
355
362
  TokenKind_1.TokenKind.Less,
356
363
  TokenKind_1.TokenKind.LessEqual,
357
364
  TokenKind_1.TokenKind.Greater,
@@ -365,23 +372,23 @@ describe('lexer', () => {
365
372
  describe('string literals', () => {
366
373
  it('produces string literal tokens', () => {
367
374
  let { tokens } = Lexer_1.Lexer.scan(`"hello world"`);
368
- (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.StringLiteral, TokenKind_1.TokenKind.Eof]);
375
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.StringLiteral, TokenKind_1.TokenKind.Eof]);
369
376
  });
370
377
  it(`safely escapes " literals`, () => {
371
378
  let { tokens } = Lexer_1.Lexer.scan(`"the cat says ""meow"""`);
372
- (0, chai_1.expect)(tokens[0].kind).to.equal(TokenKind_1.TokenKind.StringLiteral);
379
+ (0, chai_config_spec_1.expect)(tokens[0].kind).to.equal(TokenKind_1.TokenKind.StringLiteral);
373
380
  });
374
381
  it('captures text to end of line for unterminated strings with LF', () => {
375
382
  let { tokens } = Lexer_1.Lexer.scan(`"unterminated!\n`);
376
- (0, chai_1.expect)(tokens[0].kind).to.eql(TokenKind_1.TokenKind.StringLiteral);
383
+ (0, chai_config_spec_1.expect)(tokens[0].kind).to.eql(TokenKind_1.TokenKind.StringLiteral);
377
384
  });
378
385
  it('captures text to end of line for unterminated strings with CRLF', () => {
379
386
  let { tokens } = Lexer_1.Lexer.scan(`"unterminated!\r\n`);
380
- (0, chai_1.expect)(tokens[0].text).to.equal('"unterminated!');
387
+ (0, chai_config_spec_1.expect)(tokens[0].text).to.equal('"unterminated!');
381
388
  });
382
389
  it('disallows multiline strings', () => {
383
390
  let { diagnostics } = Lexer_1.Lexer.scan(`"multi-line\n\n`);
384
- (0, chai_1.expect)(diagnostics.map(err => err.message)).to.deep.equal([
391
+ (0, chai_config_spec_1.expect)(diagnostics.map(err => err.message)).to.deep.equal([
385
392
  'Unterminated string at end of line'
386
393
  ]);
387
394
  });
@@ -390,7 +397,7 @@ describe('lexer', () => {
390
397
  describe('template string literals', () => {
391
398
  it('supports escaped chars', () => {
392
399
  let { tokens } = Lexer_1.Lexer.scan('`\\n\\`\\r\\n`');
393
- (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
400
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
394
401
  TokenKind_1.TokenKind.BackTick,
395
402
  TokenKind_1.TokenKind.TemplateStringQuasi,
396
403
  TokenKind_1.TokenKind.EscapedCharCodeLiteral,
@@ -404,7 +411,7 @@ describe('lexer', () => {
404
411
  TokenKind_1.TokenKind.BackTick,
405
412
  TokenKind_1.TokenKind.Eof
406
413
  ]);
407
- (0, chai_1.expect)(tokens.map(x => x.charCode).filter(x => !!x)).to.eql([
414
+ (0, chai_config_spec_1.expect)(tokens.map(x => x.charCode).filter(x => !!x)).to.eql([
408
415
  10,
409
416
  96,
410
417
  13,
@@ -413,7 +420,7 @@ describe('lexer', () => {
413
420
  });
414
421
  it('prevents expressions when escaping the dollar sign', () => {
415
422
  let { tokens } = Lexer_1.Lexer.scan('`\\${just text}`');
416
- (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
423
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
417
424
  TokenKind_1.TokenKind.BackTick,
418
425
  TokenKind_1.TokenKind.TemplateStringQuasi,
419
426
  TokenKind_1.TokenKind.EscapedCharCodeLiteral,
@@ -424,7 +431,7 @@ describe('lexer', () => {
424
431
  });
425
432
  it('supports escaping unicode char codes', () => {
426
433
  let { tokens } = Lexer_1.Lexer.scan('`\\c1\\c12\\c123`');
427
- (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
434
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
428
435
  TokenKind_1.TokenKind.BackTick,
429
436
  TokenKind_1.TokenKind.TemplateStringQuasi,
430
437
  TokenKind_1.TokenKind.EscapedCharCodeLiteral,
@@ -436,7 +443,7 @@ describe('lexer', () => {
436
443
  TokenKind_1.TokenKind.BackTick,
437
444
  TokenKind_1.TokenKind.Eof
438
445
  ]);
439
- (0, chai_1.expect)(tokens.map(x => x.charCode).filter(x => !!x)).to.eql([
446
+ (0, chai_config_spec_1.expect)(tokens.map(x => x.charCode).filter(x => !!x)).to.eql([
440
447
  1,
441
448
  12,
442
449
  123
@@ -444,7 +451,7 @@ describe('lexer', () => {
444
451
  });
445
452
  it('converts doublequote to EscapedCharCodeLiteral', () => {
446
453
  let { tokens } = Lexer_1.Lexer.scan('`"`');
447
- (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
454
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
448
455
  TokenKind_1.TokenKind.BackTick,
449
456
  TokenKind_1.TokenKind.TemplateStringQuasi,
450
457
  TokenKind_1.TokenKind.EscapedCharCodeLiteral,
@@ -452,11 +459,11 @@ describe('lexer', () => {
452
459
  TokenKind_1.TokenKind.BackTick,
453
460
  TokenKind_1.TokenKind.Eof
454
461
  ]);
455
- (0, chai_1.expect)(tokens[2].charCode).to.equal(34);
462
+ (0, chai_config_spec_1.expect)(tokens[2].charCode).to.equal(34);
456
463
  });
457
464
  it(`safely escapes \` literals`, () => {
458
465
  let { tokens } = Lexer_1.Lexer.scan('`the cat says \\`meow\\` a lot`');
459
- (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
466
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
460
467
  TokenKind_1.TokenKind.BackTick,
461
468
  TokenKind_1.TokenKind.TemplateStringQuasi,
462
469
  TokenKind_1.TokenKind.EscapedCharCodeLiteral,
@@ -466,7 +473,7 @@ describe('lexer', () => {
466
473
  TokenKind_1.TokenKind.BackTick,
467
474
  TokenKind_1.TokenKind.Eof
468
475
  ]);
469
- (0, chai_1.expect)(tokens.map(x => x.text)).to.eql([
476
+ (0, chai_config_spec_1.expect)(tokens.map(x => x.text)).to.eql([
470
477
  '`',
471
478
  'the cat says ',
472
479
  '\\`',
@@ -479,17 +486,17 @@ describe('lexer', () => {
479
486
  });
480
487
  it('produces template string literal tokens', () => {
481
488
  let { tokens } = Lexer_1.Lexer.scan('`hello world`');
482
- (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
489
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
483
490
  TokenKind_1.TokenKind.BackTick,
484
491
  TokenKind_1.TokenKind.TemplateStringQuasi,
485
492
  TokenKind_1.TokenKind.BackTick,
486
493
  TokenKind_1.TokenKind.Eof
487
494
  ]);
488
- (0, chai_1.expect)(tokens[1].text).to.deep.equal('hello world');
495
+ (0, chai_config_spec_1.expect)(tokens[1].text).to.deep.equal('hello world');
489
496
  });
490
497
  it('collects quasis outside and expressions inside of template strings', () => {
491
498
  let { tokens } = Lexer_1.Lexer.scan('`hello ${"world"}!`');
492
- (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
499
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
493
500
  TokenKind_1.TokenKind.BackTick,
494
501
  TokenKind_1.TokenKind.TemplateStringQuasi,
495
502
  TokenKind_1.TokenKind.TemplateStringExpressionBegin,
@@ -499,7 +506,7 @@ describe('lexer', () => {
499
506
  TokenKind_1.TokenKind.BackTick,
500
507
  TokenKind_1.TokenKind.Eof
501
508
  ]);
502
- (0, chai_1.expect)(tokens[1].text).to.deep.equal(`hello `);
509
+ (0, chai_config_spec_1.expect)(tokens[1].text).to.deep.equal(`hello `);
503
510
  });
504
511
  it('real example, which is causing issues in the formatter', () => {
505
512
  let { tokens } = Lexer_1.Lexer.scan(`
@@ -518,7 +525,7 @@ describe('lexer', () => {
518
525
  \`
519
526
  end function
520
527
  `);
521
- (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
528
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
522
529
  TokenKind_1.TokenKind.Newline,
523
530
  TokenKind_1.TokenKind.Function,
524
531
  TokenKind_1.TokenKind.Identifier,
@@ -596,7 +603,7 @@ describe('lexer', () => {
596
603
  });
597
604
  it('complicated example', () => {
598
605
  let { tokens } = Lexer_1.Lexer.scan('`hello ${"world"}!I am a ${"template" + "string"} and I am very ${["pleased"][0]} to meet you ${m.top.getChildCount()}.The end`');
599
- (0, chai_1.expect)(tokens.map(t => t.kind)).to.eql([
606
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.eql([
600
607
  TokenKind_1.TokenKind.BackTick,
601
608
  TokenKind_1.TokenKind.TemplateStringQuasi,
602
609
  TokenKind_1.TokenKind.TemplateStringExpressionBegin,
@@ -634,7 +641,7 @@ describe('lexer', () => {
634
641
  });
635
642
  it('allows multiline strings', () => {
636
643
  let { tokens } = Lexer_1.Lexer.scan('`multi-line\n\n`');
637
- (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
644
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
638
645
  TokenKind_1.TokenKind.BackTick,
639
646
  TokenKind_1.TokenKind.TemplateStringQuasi,
640
647
  TokenKind_1.TokenKind.EscapedCharCodeLiteral,
@@ -644,7 +651,7 @@ describe('lexer', () => {
644
651
  TokenKind_1.TokenKind.BackTick,
645
652
  TokenKind_1.TokenKind.Eof
646
653
  ]);
647
- (0, chai_1.expect)(tokens.map(x => x.text)).to.eql([
654
+ (0, chai_config_spec_1.expect)(tokens.map(x => x.text)).to.eql([
648
655
  '`',
649
656
  'multi-line',
650
657
  '\n',
@@ -657,7 +664,7 @@ describe('lexer', () => {
657
664
  });
658
665
  it('maintains proper line/column locations for multiline strings', () => {
659
666
  let { tokens } = Lexer_1.Lexer.scan('123 `multi\nline\r\nstrings` true\nfalse');
660
- (0, chai_1.expect)(tokens.map(x => {
667
+ (0, chai_config_spec_1.expect)(tokens.map(x => {
661
668
  return {
662
669
  range: x.range,
663
670
  kind: x.kind
@@ -680,7 +687,7 @@ describe('lexer', () => {
680
687
  });
681
688
  it('Example that tripped up the expression tests', () => {
682
689
  let { tokens } = Lexer_1.Lexer.scan('`I am a complex example\n${a.isRunning(["a","b","c"])}\nmore ${m.finish(true)}`');
683
- (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
690
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
684
691
  TokenKind_1.TokenKind.BackTick,
685
692
  TokenKind_1.TokenKind.TemplateStringQuasi,
686
693
  TokenKind_1.TokenKind.EscapedCharCodeLiteral,
@@ -719,111 +726,111 @@ describe('lexer', () => {
719
726
  describe('double literals', () => {
720
727
  it('respects \'#\' suffix', () => {
721
728
  let d = Lexer_1.Lexer.scan('123#').tokens[0];
722
- (0, chai_1.expect)(d.kind).to.equal(TokenKind_1.TokenKind.DoubleLiteral);
723
- (0, chai_1.expect)(d.text).to.eql('123#');
729
+ (0, chai_config_spec_1.expect)(d.kind).to.equal(TokenKind_1.TokenKind.DoubleLiteral);
730
+ (0, chai_config_spec_1.expect)(d.text).to.eql('123#');
724
731
  });
725
732
  it('forces literals >= 10 digits into doubles', () => {
726
733
  let d = Lexer_1.Lexer.scan('0000000005').tokens[0];
727
- (0, chai_1.expect)(d.kind).to.equal(TokenKind_1.TokenKind.DoubleLiteral);
728
- (0, chai_1.expect)(d.text).to.eql('0000000005');
734
+ (0, chai_config_spec_1.expect)(d.kind).to.equal(TokenKind_1.TokenKind.DoubleLiteral);
735
+ (0, chai_config_spec_1.expect)(d.text).to.eql('0000000005');
729
736
  });
730
737
  it('forces literals with \'D\' in exponent into doubles', () => {
731
738
  let d = Lexer_1.Lexer.scan('2.5d3').tokens[0];
732
- (0, chai_1.expect)(d.kind).to.equal(TokenKind_1.TokenKind.DoubleLiteral);
733
- (0, chai_1.expect)(d.text).to.eql('2.5d3');
739
+ (0, chai_config_spec_1.expect)(d.kind).to.equal(TokenKind_1.TokenKind.DoubleLiteral);
740
+ (0, chai_config_spec_1.expect)(d.text).to.eql('2.5d3');
734
741
  });
735
742
  it('allows digits before `.` to be elided', () => {
736
743
  let f = Lexer_1.Lexer.scan('.123#').tokens[0];
737
- (0, chai_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.DoubleLiteral);
738
- (0, chai_1.expect)(f.text).to.eql('.123#');
744
+ (0, chai_config_spec_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.DoubleLiteral);
745
+ (0, chai_config_spec_1.expect)(f.text).to.eql('.123#');
739
746
  });
740
747
  it('allows digits after `.` to be elided', () => {
741
748
  let f = Lexer_1.Lexer.scan('12.#').tokens[0];
742
- (0, chai_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.DoubleLiteral);
743
- (0, chai_1.expect)(f.text).to.eql('12.#');
749
+ (0, chai_config_spec_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.DoubleLiteral);
750
+ (0, chai_config_spec_1.expect)(f.text).to.eql('12.#');
744
751
  });
745
752
  });
746
753
  describe('float literals', () => {
747
754
  it('respects \'!\' suffix', () => {
748
755
  let f = Lexer_1.Lexer.scan('0.00000008!').tokens[0];
749
- (0, chai_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
756
+ (0, chai_config_spec_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
750
757
  // Floating precision will make this *not* equal
751
- (0, chai_1.expect)(f.text).not.to.equal(8e-8);
752
- (0, chai_1.expect)(f.text).to.eql('0.00000008!');
758
+ (0, chai_config_spec_1.expect)(f.text).not.to.equal(8e-8);
759
+ (0, chai_config_spec_1.expect)(f.text).to.eql('0.00000008!');
753
760
  });
754
761
  it('forces literals with a decimal into floats', () => {
755
762
  let f = Lexer_1.Lexer.scan('1.0').tokens[0];
756
- (0, chai_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
757
- (0, chai_1.expect)(f.text).to.equal('1.0');
763
+ (0, chai_config_spec_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
764
+ (0, chai_config_spec_1.expect)(f.text).to.equal('1.0');
758
765
  });
759
766
  it('forces literals with \'E\' in exponent into floats', () => {
760
767
  let f = Lexer_1.Lexer.scan('2.5e3').tokens[0];
761
- (0, chai_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
762
- (0, chai_1.expect)(f.text).to.eql('2.5e3');
768
+ (0, chai_config_spec_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
769
+ (0, chai_config_spec_1.expect)(f.text).to.eql('2.5e3');
763
770
  });
764
771
  it('supports larger-than-supported-precision floats to be defined with exponents', () => {
765
772
  let f = Lexer_1.Lexer.scan('2.3659475627512424e-38').tokens[0];
766
- (0, chai_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
767
- (0, chai_1.expect)(f.text).to.eql('2.3659475627512424e-38');
773
+ (0, chai_config_spec_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
774
+ (0, chai_config_spec_1.expect)(f.text).to.eql('2.3659475627512424e-38');
768
775
  });
769
776
  it('allows digits before `.` to be elided', () => {
770
777
  let f = Lexer_1.Lexer.scan('.123').tokens[0];
771
- (0, chai_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
772
- (0, chai_1.expect)(f.text).to.equal('.123');
778
+ (0, chai_config_spec_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
779
+ (0, chai_config_spec_1.expect)(f.text).to.equal('.123');
773
780
  });
774
781
  it('allows digits after `.` to be elided', () => {
775
782
  let f = Lexer_1.Lexer.scan('12.').tokens[0];
776
- (0, chai_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
777
- (0, chai_1.expect)(f.text).to.equal('12.');
783
+ (0, chai_config_spec_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
784
+ (0, chai_config_spec_1.expect)(f.text).to.equal('12.');
778
785
  });
779
786
  });
780
787
  describe('long integer literals', () => {
781
788
  it('respects \'&\' suffix', () => {
782
789
  let f = Lexer_1.Lexer.scan('1&').tokens[0];
783
- (0, chai_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.LongIntegerLiteral);
784
- (0, chai_1.expect)(f.text).to.eql('1&');
790
+ (0, chai_config_spec_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.LongIntegerLiteral);
791
+ (0, chai_config_spec_1.expect)(f.text).to.eql('1&');
785
792
  });
786
793
  it('supports hexadecimal literals', () => {
787
794
  let i = Lexer_1.Lexer.scan('&hf00d&').tokens[0];
788
- (0, chai_1.expect)(i.kind).to.equal(TokenKind_1.TokenKind.LongIntegerLiteral);
789
- (0, chai_1.expect)(i.text).to.equal('&hf00d&');
795
+ (0, chai_config_spec_1.expect)(i.kind).to.equal(TokenKind_1.TokenKind.LongIntegerLiteral);
796
+ (0, chai_config_spec_1.expect)(i.text).to.equal('&hf00d&');
790
797
  });
791
798
  it('allows very long Int64 literals', () => {
792
799
  let li = Lexer_1.Lexer.scan('9876543210&').tokens[0];
793
- (0, chai_1.expect)(li.kind).to.equal(TokenKind_1.TokenKind.LongIntegerLiteral);
794
- (0, chai_1.expect)(li.text).to.equal('9876543210&');
800
+ (0, chai_config_spec_1.expect)(li.kind).to.equal(TokenKind_1.TokenKind.LongIntegerLiteral);
801
+ (0, chai_config_spec_1.expect)(li.text).to.equal('9876543210&');
795
802
  });
796
803
  it('forces literals with \'&\' suffix into Int64s', () => {
797
804
  let li = Lexer_1.Lexer.scan('123&').tokens[0];
798
- (0, chai_1.expect)(li.kind).to.equal(TokenKind_1.TokenKind.LongIntegerLiteral);
799
- (0, chai_1.expect)(li.text).to.deep.equal('123&');
805
+ (0, chai_config_spec_1.expect)(li.kind).to.equal(TokenKind_1.TokenKind.LongIntegerLiteral);
806
+ (0, chai_config_spec_1.expect)(li.text).to.deep.equal('123&');
800
807
  });
801
808
  });
802
809
  describe('integer literals', () => {
803
810
  it('respects \'%\' suffix', () => {
804
811
  let f = Lexer_1.Lexer.scan('1%').tokens[0];
805
- (0, chai_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.IntegerLiteral);
806
- (0, chai_1.expect)(f.text).to.eql('1%');
812
+ (0, chai_config_spec_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.IntegerLiteral);
813
+ (0, chai_config_spec_1.expect)(f.text).to.eql('1%');
807
814
  });
808
815
  it('does not allow decimal numbers to end with %', () => {
809
816
  let f = Lexer_1.Lexer.scan('1.2%').tokens[0];
810
- (0, chai_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
811
- (0, chai_1.expect)(f.text).to.eql('1.2');
817
+ (0, chai_config_spec_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
818
+ (0, chai_config_spec_1.expect)(f.text).to.eql('1.2');
812
819
  });
813
820
  it('supports hexadecimal literals', () => {
814
821
  let i = Lexer_1.Lexer.scan('&hFf').tokens[0];
815
- (0, chai_1.expect)(i.kind).to.equal(TokenKind_1.TokenKind.IntegerLiteral);
816
- (0, chai_1.expect)(i.text).to.deep.equal('&hFf');
822
+ (0, chai_config_spec_1.expect)(i.kind).to.equal(TokenKind_1.TokenKind.IntegerLiteral);
823
+ (0, chai_config_spec_1.expect)(i.text).to.deep.equal('&hFf');
817
824
  });
818
825
  it('falls back to a regular integer', () => {
819
826
  let i = Lexer_1.Lexer.scan('123').tokens[0];
820
- (0, chai_1.expect)(i.kind).to.equal(TokenKind_1.TokenKind.IntegerLiteral);
821
- (0, chai_1.expect)(i.text).to.deep.equal('123');
827
+ (0, chai_config_spec_1.expect)(i.kind).to.equal(TokenKind_1.TokenKind.IntegerLiteral);
828
+ (0, chai_config_spec_1.expect)(i.text).to.deep.equal('123');
822
829
  });
823
830
  });
824
831
  describe('types', () => {
825
832
  it('captures type tokens', () => {
826
- (0, chai_1.expect)(Lexer_1.Lexer.scan(`
833
+ (0, chai_config_spec_1.expect)(Lexer_1.Lexer.scan(`
827
834
  void boolean integer longinteger float double string object interface invalid dynamic
828
835
  `.trim()).tokens.map(x => x.kind)).to.eql([
829
836
  TokenKind_1.TokenKind.Void,
@@ -846,7 +853,7 @@ describe('lexer', () => {
846
853
  // test just a sample of single-word reserved words for now.
847
854
  // if we find any that we've missed
848
855
  let { tokens } = Lexer_1.Lexer.scan('and then or if else endif return true false line_num');
849
- (0, chai_1.expect)(tokens.map(w => w.kind)).to.deep.equal([
856
+ (0, chai_config_spec_1.expect)(tokens.map(w => w.kind)).to.deep.equal([
850
857
  TokenKind_1.TokenKind.And,
851
858
  TokenKind_1.TokenKind.Then,
852
859
  TokenKind_1.TokenKind.Or,
@@ -862,7 +869,7 @@ describe('lexer', () => {
862
869
  });
863
870
  it('matches multi-word keywords', () => {
864
871
  let { tokens } = Lexer_1.Lexer.scan('end if end while End Sub end Function Exit wHILe');
865
- (0, chai_1.expect)(tokens.map(w => w.kind)).to.deep.equal([
872
+ (0, chai_config_spec_1.expect)(tokens.map(w => w.kind)).to.deep.equal([
866
873
  TokenKind_1.TokenKind.EndIf,
867
874
  TokenKind_1.TokenKind.EndWhile,
868
875
  TokenKind_1.TokenKind.EndSub,
@@ -873,7 +880,7 @@ describe('lexer', () => {
873
880
  });
874
881
  it('accepts \'exit for\' but not \'exitfor\'', () => {
875
882
  let { tokens } = Lexer_1.Lexer.scan('exit for exitfor');
876
- (0, chai_1.expect)(tokens.map(w => w.kind)).to.deep.equal([
883
+ (0, chai_config_spec_1.expect)(tokens.map(w => w.kind)).to.deep.equal([
877
884
  TokenKind_1.TokenKind.ExitFor,
878
885
  TokenKind_1.TokenKind.Identifier,
879
886
  TokenKind_1.TokenKind.Eof
@@ -881,7 +888,7 @@ describe('lexer', () => {
881
888
  });
882
889
  it('matches keywords with silly capitalization', () => {
883
890
  let { tokens } = Lexer_1.Lexer.scan('iF ELSE eNDIf FUncTioN');
884
- (0, chai_1.expect)(tokens.map(w => w.kind)).to.deep.equal([
891
+ (0, chai_config_spec_1.expect)(tokens.map(w => w.kind)).to.deep.equal([
885
892
  TokenKind_1.TokenKind.If,
886
893
  TokenKind_1.TokenKind.Else,
887
894
  TokenKind_1.TokenKind.EndIf,
@@ -891,14 +898,14 @@ describe('lexer', () => {
891
898
  });
892
899
  it('allows alpha-numeric (plus \'_\') identifiers', () => {
893
900
  let identifier = Lexer_1.Lexer.scan('_abc_123_').tokens[0];
894
- (0, chai_1.expect)(identifier.kind).to.equal(TokenKind_1.TokenKind.Identifier);
895
- (0, chai_1.expect)(identifier.text).to.equal('_abc_123_');
901
+ (0, chai_config_spec_1.expect)(identifier.kind).to.equal(TokenKind_1.TokenKind.Identifier);
902
+ (0, chai_config_spec_1.expect)(identifier.text).to.equal('_abc_123_');
896
903
  });
897
904
  it('allows identifiers with trailing type designators', () => {
898
905
  let { tokens } = Lexer_1.Lexer.scan('lorem$ ipsum% dolor! sit# amet&');
899
906
  let identifiers = tokens.filter(t => t.kind !== TokenKind_1.TokenKind.Eof);
900
- (0, chai_1.expect)(identifiers.every(t => t.kind === TokenKind_1.TokenKind.Identifier));
901
- (0, chai_1.expect)(identifiers.map(t => t.text)).to.deep.equal([
907
+ (0, chai_config_spec_1.expect)(identifiers.every(t => t.kind === TokenKind_1.TokenKind.Identifier));
908
+ (0, chai_config_spec_1.expect)(identifiers.map(t => t.text)).to.deep.equal([
902
909
  'lorem$',
903
910
  'ipsum%',
904
911
  'dolor!',
@@ -910,7 +917,7 @@ describe('lexer', () => {
910
917
  describe('conditional compilation', () => {
911
918
  it('reads constant declarations', () => {
912
919
  let { tokens } = Lexer_1.Lexer.scan('#const foo true');
913
- (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
920
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
914
921
  TokenKind_1.TokenKind.HashConst,
915
922
  TokenKind_1.TokenKind.Identifier,
916
923
  TokenKind_1.TokenKind.True,
@@ -919,7 +926,7 @@ describe('lexer', () => {
919
926
  });
920
927
  it('reads constant aliases', () => {
921
928
  let { tokens } = Lexer_1.Lexer.scan('#const bar foo');
922
- (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
929
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
923
930
  TokenKind_1.TokenKind.HashConst,
924
931
  TokenKind_1.TokenKind.Identifier,
925
932
  TokenKind_1.TokenKind.Identifier,
@@ -937,7 +944,7 @@ describe('lexer', () => {
937
944
  `, {
938
945
  includeWhitespace: false
939
946
  });
940
- (0, chai_1.expect)(tokens.map(t => t.kind).filter(x => x !== TokenKind_1.TokenKind.Newline)).to.deep.equal([
947
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind).filter(x => x !== TokenKind_1.TokenKind.Newline)).to.deep.equal([
941
948
  TokenKind_1.TokenKind.HashIf,
942
949
  TokenKind_1.TokenKind.HashElseIf,
943
950
  TokenKind_1.TokenKind.HashElseIf,
@@ -949,7 +956,7 @@ describe('lexer', () => {
949
956
  });
950
957
  it('treats text "constructor" as an identifier', () => {
951
958
  let lexer = Lexer_1.Lexer.scan(`function constructor()\nend function`);
952
- (0, chai_1.expect)(lexer.tokens[1].kind).to.equal(TokenKind_1.TokenKind.Identifier);
959
+ (0, chai_config_spec_1.expect)(lexer.tokens[1].kind).to.equal(TokenKind_1.TokenKind.Identifier);
953
960
  });
954
961
  it('reads upper case conditional directives', () => {
955
962
  let { tokens } = Lexer_1.Lexer.scan(`
@@ -962,7 +969,7 @@ describe('lexer', () => {
962
969
  `, {
963
970
  includeWhitespace: false
964
971
  });
965
- (0, chai_1.expect)(tokens.map(t => t.kind).filter(x => x !== TokenKind_1.TokenKind.Newline)).to.deep.equal([
972
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind).filter(x => x !== TokenKind_1.TokenKind.Newline)).to.deep.equal([
966
973
  TokenKind_1.TokenKind.HashIf,
967
974
  TokenKind_1.TokenKind.HashElseIf,
968
975
  TokenKind_1.TokenKind.HashElseIf,
@@ -974,7 +981,7 @@ describe('lexer', () => {
974
981
  });
975
982
  it('supports various spacings between #endif', () => {
976
983
  let { tokens } = Lexer_1.Lexer.scan('#endif #end if #end\tif #end if #end\t\t if');
977
- (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
984
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
978
985
  TokenKind_1.TokenKind.HashEndIf,
979
986
  TokenKind_1.TokenKind.HashEndIf,
980
987
  TokenKind_1.TokenKind.HashEndIf,
@@ -987,20 +994,20 @@ describe('lexer', () => {
987
994
  let { tokens } = Lexer_1.Lexer.scan('#error a message goes here\n', {
988
995
  includeWhitespace: true
989
996
  });
990
- (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
997
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
991
998
  TokenKind_1.TokenKind.HashError,
992
999
  TokenKind_1.TokenKind.Whitespace,
993
1000
  TokenKind_1.TokenKind.HashErrorMessage,
994
1001
  TokenKind_1.TokenKind.Newline,
995
1002
  TokenKind_1.TokenKind.Eof
996
1003
  ]);
997
- (0, chai_1.expect)(tokens[2].text).to.equal('a message goes here');
1004
+ (0, chai_config_spec_1.expect)(tokens[2].text).to.equal('a message goes here');
998
1005
  });
999
1006
  });
1000
1007
  describe('location tracking', () => {
1001
1008
  it('tracks starting and ending locations including whitespace', () => {
1002
1009
  let { tokens } = Lexer_1.Lexer.scan(`sub foo()\n print "bar"\r\nend sub`, { includeWhitespace: true });
1003
- (0, chai_1.expect)(tokens.map(t => t.range)).to.eql([
1010
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.range)).to.eql([
1004
1011
  vscode_languageserver_1.Range.create(0, 0, 0, 3),
1005
1012
  vscode_languageserver_1.Range.create(0, 3, 0, 4),
1006
1013
  vscode_languageserver_1.Range.create(0, 4, 0, 7),
@@ -1018,7 +1025,7 @@ describe('lexer', () => {
1018
1025
  });
1019
1026
  it('tracks starting and ending locations excluding whitespace', () => {
1020
1027
  let { tokens } = Lexer_1.Lexer.scan(`sub foo()\n print "bar"\r\nend sub`, { includeWhitespace: false });
1021
- (0, chai_1.expect)(tokens.map(t => t.range)).to.eql([
1028
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.range)).to.eql([
1022
1029
  vscode_languageserver_1.Range.create(0, 0, 0, 3),
1023
1030
  vscode_languageserver_1.Range.create(0, 4, 0, 7),
1024
1031
  vscode_languageserver_1.Range.create(0, 7, 0, 8),
@@ -1035,7 +1042,7 @@ describe('lexer', () => {
1035
1042
  describe('two word keywords', () => {
1036
1043
  it('supports various spacing between for each', () => {
1037
1044
  let { tokens } = Lexer_1.Lexer.scan('for each for each for each for\teach for\t each for \teach for \t each');
1038
- (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
1045
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
1039
1046
  TokenKind_1.TokenKind.ForEach,
1040
1047
  TokenKind_1.TokenKind.ForEach,
1041
1048
  TokenKind_1.TokenKind.ForEach,
@@ -1049,7 +1056,7 @@ describe('lexer', () => {
1049
1056
  });
1050
1057
  it('detects rem when used as keyword', () => {
1051
1058
  let { tokens } = Lexer_1.Lexer.scan('person.rem=true');
1052
- (0, chai_1.expect)(tokens.map(t => t.kind)).to.eql([
1059
+ (0, chai_config_spec_1.expect)(tokens.map(t => t.kind)).to.eql([
1053
1060
  TokenKind_1.TokenKind.Identifier,
1054
1061
  TokenKind_1.TokenKind.Dot,
1055
1062
  TokenKind_1.TokenKind.Identifier,
@@ -1058,7 +1065,7 @@ describe('lexer', () => {
1058
1065
  TokenKind_1.TokenKind.Eof
1059
1066
  ]);
1060
1067
  //verify the location of `rem`
1061
- (0, chai_1.expect)(tokens.map(t => [t.range.start.character, t.range.end.character])).to.eql([
1068
+ (0, chai_config_spec_1.expect)(tokens.map(t => [t.range.start.character, t.range.end.character])).to.eql([
1062
1069
  [0, 6],
1063
1070
  [6, 7],
1064
1071
  [7, 10],
@@ -1070,12 +1077,12 @@ describe('lexer', () => {
1070
1077
  describe('isToken', () => {
1071
1078
  it('works', () => {
1072
1079
  let range = vscode_languageserver_1.Range.create(0, 0, 0, 2);
1073
- (0, chai_1.expect)((0, Token_1.isToken)({ kind: TokenKind_1.TokenKind.And, text: 'and', range: range })).is.true;
1074
- (0, chai_1.expect)((0, Token_1.isToken)({ text: 'and', range: range })).is.false;
1080
+ (0, chai_config_spec_1.expect)((0, Token_1.isToken)({ kind: TokenKind_1.TokenKind.And, text: 'and', range: range })).is.true;
1081
+ (0, chai_config_spec_1.expect)((0, Token_1.isToken)({ text: 'and', range: range })).is.false;
1075
1082
  });
1076
1083
  });
1077
1084
  it('recognizes enum-related keywords', () => {
1078
- (0, chai_1.expect)(Lexer_1.Lexer.scan('enum end enum endenum').tokens.map(x => x.kind)).to.eql([
1085
+ (0, chai_config_spec_1.expect)(Lexer_1.Lexer.scan('enum end enum endenum').tokens.map(x => x.kind)).to.eql([
1079
1086
  TokenKind_1.TokenKind.Enum,
1080
1087
  TokenKind_1.TokenKind.EndEnum,
1081
1088
  TokenKind_1.TokenKind.EndEnum,
@@ -1083,7 +1090,7 @@ describe('lexer', () => {
1083
1090
  ]);
1084
1091
  });
1085
1092
  it('recognizes class-related keywords', () => {
1086
- (0, chai_1.expect)(Lexer_1.Lexer.scan('class public protected private end class endclass new override').tokens.map(x => x.kind)).to.eql([
1093
+ (0, chai_config_spec_1.expect)(Lexer_1.Lexer.scan('class public protected private end class endclass new override').tokens.map(x => x.kind)).to.eql([
1087
1094
  TokenKind_1.TokenKind.Class,
1088
1095
  TokenKind_1.TokenKind.Public,
1089
1096
  TokenKind_1.TokenKind.Protected,
@@ -1098,14 +1105,14 @@ describe('lexer', () => {
1098
1105
  describe('whitespace', () => {
1099
1106
  it('preserves the exact number of whitespace characterswhitespace', () => {
1100
1107
  let { tokens } = Lexer_1.Lexer.scan(' ', { includeWhitespace: true });
1101
- (0, chai_1.expect)(tokens[0]).to.include({
1108
+ (0, chai_config_spec_1.expect)(tokens[0]).to.include({
1102
1109
  kind: TokenKind_1.TokenKind.Whitespace,
1103
1110
  text: ' '
1104
1111
  });
1105
1112
  });
1106
1113
  it('tokenizes whitespace between things', () => {
1107
1114
  let { tokens } = Lexer_1.Lexer.scan('sub main ( ) \n end sub', { includeWhitespace: true });
1108
- (0, chai_1.expect)(tokens.map(x => x.kind)).to.eql([
1115
+ (0, chai_config_spec_1.expect)(tokens.map(x => x.kind)).to.eql([
1109
1116
  TokenKind_1.TokenKind.Sub,
1110
1117
  TokenKind_1.TokenKind.Whitespace,
1111
1118
  TokenKind_1.TokenKind.Identifier,
@@ -1123,7 +1130,7 @@ describe('lexer', () => {
1123
1130
  });
1124
1131
  it('identifies brighterscript source literals', () => {
1125
1132
  let { tokens } = Lexer_1.Lexer.scan('LINE_NUM SOURCE_FILE_PATH SOURCE_LINE_NUM FUNCTION_NAME SOURCE_FUNCTION_NAME SOURCE_LOCATION PKG_PATH PKG_LOCATION');
1126
- (0, chai_1.expect)(tokens.map(x => x.kind)).to.eql([
1133
+ (0, chai_config_spec_1.expect)(tokens.map(x => x.kind)).to.eql([
1127
1134
  TokenKind_1.TokenKind.LineNumLiteral,
1128
1135
  TokenKind_1.TokenKind.SourceFilePathLiteral,
1129
1136
  TokenKind_1.TokenKind.SourceLineNumLiteral,
@@ -1144,11 +1151,11 @@ describe('lexer', () => {
1144
1151
  end sub
1145
1152
  `;
1146
1153
  const { tokens } = Lexer_1.Lexer.scan(text, { includeWhitespace: false });
1147
- (0, chai_1.expect)(util_1.default.tokensToString(tokens)).to.equal(text);
1154
+ (0, chai_config_spec_1.expect)(util_1.default.tokensToString(tokens)).to.equal(text);
1148
1155
  });
1149
1156
  it('properly detects try/catch tokens', () => {
1150
1157
  const { tokens } = Lexer_1.Lexer.scan(`try catch endtry end try throw`, { includeWhitespace: false });
1151
- (0, chai_1.expect)(tokens.map(x => x.kind)).to.eql([
1158
+ (0, chai_config_spec_1.expect)(tokens.map(x => x.kind)).to.eql([
1152
1159
  TokenKind_1.TokenKind.Try,
1153
1160
  TokenKind_1.TokenKind.Catch,
1154
1161
  TokenKind_1.TokenKind.EndTry,
@@ -1165,7 +1172,7 @@ describe('lexer', () => {
1165
1172
  const { tokens } = Lexer_1.Lexer.scan(regexp);
1166
1173
  results.push(tokens[0].text);
1167
1174
  }
1168
- (0, chai_1.expect)(results).to.eql(regexps);
1175
+ (0, chai_config_spec_1.expect)(results).to.eql(regexps);
1169
1176
  }
1170
1177
  it('recognizes regex literals', () => {
1171
1178
  testRegex(/simple/, /SimpleWithValidFlags/g, /UnknownFlags/gi, /with spaces/s, /with(parens)and[squarebraces]/,
@@ -1178,7 +1185,7 @@ describe('lexer', () => {
1178
1185
  const { tokens } = Lexer_1.Lexer.scan(`one = 1/2 + 1/4 + 1/4`, {
1179
1186
  includeWhitespace: false
1180
1187
  });
1181
- (0, chai_1.expect)(tokens.map(x => x.kind)).to.eql([
1188
+ (0, chai_config_spec_1.expect)(tokens.map(x => x.kind)).to.eql([
1182
1189
  TokenKind_1.TokenKind.Identifier,
1183
1190
  TokenKind_1.TokenKind.Equal,
1184
1191
  TokenKind_1.TokenKind.IntegerLiteral,
@@ -1196,7 +1203,7 @@ describe('lexer', () => {
1196
1203
  ]);
1197
1204
  });
1198
1205
  it('only captures alphanumeric flags', () => {
1199
- (0, chai_1.expect)(Lexer_1.Lexer.scan('speak(/a/)').tokens.map(x => x.kind)).to.eql([
1206
+ (0, chai_config_spec_1.expect)(Lexer_1.Lexer.scan('speak(/a/)').tokens.map(x => x.kind)).to.eql([
1200
1207
  TokenKind_1.TokenKind.Identifier,
1201
1208
  TokenKind_1.TokenKind.LeftParen,
1202
1209
  TokenKind_1.TokenKind.RegexLiteral,
@@ -1212,11 +1219,57 @@ describe('lexer', () => {
1212
1219
  /\\\n/);
1213
1220
  });
1214
1221
  });
1222
+ it('detects "continue" as a keyword', () => {
1223
+ (0, chai_config_spec_1.expect)(Lexer_1.Lexer.scan('continue').tokens.map(x => x.kind)).to.eql([
1224
+ TokenKind_1.TokenKind.Continue,
1225
+ TokenKind_1.TokenKind.Eof
1226
+ ]);
1227
+ });
1228
+ describe('trivia', () => {
1229
+ function stringify(tokens) {
1230
+ return tokens
1231
+ //exclude the explicit triva tokens since they'll be included in the leading/trailing arrays
1232
+ .filter(x => !TokenKind_1.AllowedTriviaTokens.includes(x.kind))
1233
+ .flatMap(x => [...x.leadingTrivia, x])
1234
+ .map(x => x.text)
1235
+ .join('');
1236
+ }
1237
+ it('combining token text and trivia can reproduce full input', () => {
1238
+ const input = `
1239
+ function test( )
1240
+ 'comment
1241
+ print alpha ' blabla
1242
+ end function 'trailing
1243
+ 'trailing2
1244
+ `;
1245
+ (0, chai_config_spec_1.expect)(stringify(Lexer_1.Lexer.scan(input).tokens)).to.eql(input);
1246
+ });
1247
+ function expectTrivia(text, expected) {
1248
+ const tokens = Lexer_1.Lexer.scan(text).tokens.filter(x => !TokenKind_1.AllowedTriviaTokens.includes(x.kind));
1249
+ (0, chai_config_spec_1.expect)(tokens.map(x => {
1250
+ return {
1251
+ text: x.text,
1252
+ leadingTrivia: x.leadingTrivia.map(x => x.text)
1253
+ };
1254
+ })).to.eql(expected.map(x => (Object.assign({ leadingTrivia: [] }, x))));
1255
+ }
1256
+ it('associates trailing items on same line with the preceeding token', () => {
1257
+ expectTrivia(`'leading\n` +
1258
+ `alpha = true 'trueComment\n` +
1259
+ `'eof`, [
1260
+ { leadingTrivia: [`'leading`, `\n`], text: `alpha` },
1261
+ { leadingTrivia: [` `], text: `=` },
1262
+ { leadingTrivia: [` `], text: `true` },
1263
+ //EOF
1264
+ { leadingTrivia: [` `, `'trueComment`, `\n`, `'eof`], text: `` }
1265
+ ]);
1266
+ });
1267
+ });
1215
1268
  });
1216
1269
  function expectKinds(text, tokenKinds) {
1217
1270
  let actual = Lexer_1.Lexer.scan(text).tokens.map(x => x.kind);
1218
1271
  //remove the EOF token
1219
1272
  actual.pop();
1220
- (0, chai_1.expect)(actual).to.eql(tokenKinds);
1273
+ (0, chai_config_spec_1.expect)(actual).to.eql(tokenKinds);
1221
1274
  }
1222
1275
  //# sourceMappingURL=Lexer.spec.js.map