brighterscript 1.0.0-alpha.2 → 1.0.0-alpha.22

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (388)
  1. package/CHANGELOG.md +643 -253
  2. package/README.md +33 -9
  3. package/bsconfig.schema.json +22 -2
  4. package/dist/BsConfig.d.ts +9 -0
  5. package/dist/Cache.d.ts +5 -6
  6. package/dist/Cache.js +12 -11
  7. package/dist/Cache.js.map +1 -1
  8. package/dist/CodeActionUtil.d.ts +11 -2
  9. package/dist/CodeActionUtil.js +17 -3
  10. package/dist/CodeActionUtil.js.map +1 -1
  11. package/dist/CommentFlagProcessor.d.ts +4 -4
  12. package/dist/CommentFlagProcessor.js +5 -3
  13. package/dist/CommentFlagProcessor.js.map +1 -1
  14. package/dist/DependencyGraph.d.ts +2 -2
  15. package/dist/DependencyGraph.js +20 -7
  16. package/dist/DependencyGraph.js.map +1 -1
  17. package/dist/DiagnosticCollection.d.ts +3 -3
  18. package/dist/DiagnosticCollection.js +11 -11
  19. package/dist/DiagnosticCollection.js.map +1 -1
  20. package/dist/DiagnosticFilterer.js +5 -4
  21. package/dist/DiagnosticFilterer.js.map +1 -1
  22. package/dist/DiagnosticMessages.d.ts +59 -4
  23. package/dist/DiagnosticMessages.js +65 -7
  24. package/dist/DiagnosticMessages.js.map +1 -1
  25. package/dist/LanguageServer.d.ts +51 -39
  26. package/dist/LanguageServer.js +316 -232
  27. package/dist/LanguageServer.js.map +1 -1
  28. package/dist/Logger.d.ts +2 -0
  29. package/dist/Logger.js +10 -8
  30. package/dist/Logger.js.map +1 -1
  31. package/dist/PluginInterface.d.ts +7 -3
  32. package/dist/PluginInterface.js +9 -0
  33. package/dist/PluginInterface.js.map +1 -1
  34. package/dist/Program.d.ts +43 -25
  35. package/dist/Program.js +212 -95
  36. package/dist/Program.js.map +1 -1
  37. package/dist/ProgramBuilder.d.ts +4 -0
  38. package/dist/ProgramBuilder.js +36 -20
  39. package/dist/ProgramBuilder.js.map +1 -1
  40. package/dist/Scope.d.ts +126 -29
  41. package/dist/Scope.js +433 -156
  42. package/dist/Scope.js.map +1 -1
  43. package/dist/SemanticTokenUtils.d.ts +14 -0
  44. package/dist/SemanticTokenUtils.js +81 -0
  45. package/dist/SemanticTokenUtils.js.map +1 -0
  46. package/dist/SymbolTable.d.ts +10 -4
  47. package/dist/SymbolTable.js +55 -13
  48. package/dist/SymbolTable.js.map +1 -1
  49. package/dist/XmlScope.d.ts +7 -2
  50. package/dist/XmlScope.js +65 -27
  51. package/dist/XmlScope.js.map +1 -1
  52. package/dist/astUtils/AstEditor.d.ts +65 -0
  53. package/dist/astUtils/AstEditor.js +239 -0
  54. package/dist/astUtils/AstEditor.js.map +1 -0
  55. package/dist/{types/FunctionType.spec.d.ts → astUtils/AstEditor.spec.d.ts} +0 -0
  56. package/dist/astUtils/AstEditor.spec.js +254 -0
  57. package/dist/astUtils/AstEditor.spec.js.map +1 -0
  58. package/dist/astUtils/creators.d.ts +28 -6
  59. package/dist/astUtils/creators.js +137 -19
  60. package/dist/astUtils/creators.js.map +1 -1
  61. package/dist/astUtils/creators.spec.js +14 -4
  62. package/dist/astUtils/creators.spec.js.map +1 -1
  63. package/dist/astUtils/reflection.d.ts +32 -10
  64. package/dist/astUtils/reflection.js +82 -7
  65. package/dist/astUtils/reflection.js.map +1 -1
  66. package/dist/astUtils/reflection.spec.js +130 -119
  67. package/dist/astUtils/reflection.spec.js.map +1 -1
  68. package/dist/astUtils/stackedVisitor.js.map +1 -1
  69. package/dist/astUtils/stackedVisitor.spec.js +13 -13
  70. package/dist/astUtils/stackedVisitor.spec.js.map +1 -1
  71. package/dist/astUtils/visitors.d.ts +76 -51
  72. package/dist/astUtils/visitors.js +31 -11
  73. package/dist/astUtils/visitors.js.map +1 -1
  74. package/dist/astUtils/visitors.spec.js +126 -32
  75. package/dist/astUtils/visitors.spec.js.map +1 -1
  76. package/dist/astUtils/xml.d.ts +4 -3
  77. package/dist/astUtils/xml.js +8 -3
  78. package/dist/astUtils/xml.js.map +1 -1
  79. package/dist/bscPlugin/BscPlugin.d.ts +7 -1
  80. package/dist/bscPlugin/BscPlugin.js +28 -0
  81. package/dist/bscPlugin/BscPlugin.js.map +1 -1
  82. package/dist/bscPlugin/codeActions/CodeActionsProcessor.js +4 -4
  83. package/dist/bscPlugin/codeActions/CodeActionsProcessor.js.map +1 -1
  84. package/dist/bscPlugin/codeActions/CodeActionsProcessor.spec.js +26 -26
  85. package/dist/bscPlugin/codeActions/CodeActionsProcessor.spec.js.map +1 -1
  86. package/dist/bscPlugin/semanticTokens/BrsFileSemanticTokensProcessor.d.ts +9 -0
  87. package/dist/bscPlugin/semanticTokens/BrsFileSemanticTokensProcessor.js +108 -0
  88. package/dist/bscPlugin/semanticTokens/BrsFileSemanticTokensProcessor.js.map +1 -0
  89. package/dist/bscPlugin/semanticTokens/BrsFileSemanticTokensProcessor.spec.d.ts +1 -0
  90. package/dist/bscPlugin/semanticTokens/BrsFileSemanticTokensProcessor.spec.js +130 -0
  91. package/dist/bscPlugin/semanticTokens/BrsFileSemanticTokensProcessor.spec.js.map +1 -0
  92. package/dist/bscPlugin/transpile/BrsFilePreTranspileProcessor.d.ts +8 -0
  93. package/dist/bscPlugin/transpile/BrsFilePreTranspileProcessor.js +52 -0
  94. package/dist/bscPlugin/transpile/BrsFilePreTranspileProcessor.js.map +1 -0
  95. package/dist/bscPlugin/transpile/BrsFilePreTranspileProcessor.spec.d.ts +1 -0
  96. package/dist/bscPlugin/transpile/BrsFilePreTranspileProcessor.spec.js +32 -0
  97. package/dist/bscPlugin/transpile/BrsFilePreTranspileProcessor.spec.js.map +1 -0
  98. package/dist/bscPlugin/validation/BrsFileValidator.d.ts +9 -0
  99. package/dist/bscPlugin/validation/BrsFileValidator.js +66 -0
  100. package/dist/bscPlugin/validation/BrsFileValidator.js.map +1 -0
  101. package/dist/bscPlugin/validation/ScopeValidator.d.ts +29 -0
  102. package/dist/bscPlugin/validation/ScopeValidator.js +183 -0
  103. package/dist/bscPlugin/validation/ScopeValidator.js.map +1 -0
  104. package/dist/cli.js +10 -3
  105. package/dist/cli.js.map +1 -1
  106. package/dist/diagnosticUtils.d.ts +1 -0
  107. package/dist/diagnosticUtils.js +15 -8
  108. package/dist/diagnosticUtils.js.map +1 -1
  109. package/dist/examples/plugins/removePrint.js +12 -14
  110. package/dist/examples/plugins/removePrint.js.map +1 -1
  111. package/dist/files/BrsFile.Class.spec.js +717 -147
  112. package/dist/files/BrsFile.Class.spec.js.map +1 -1
  113. package/dist/files/BrsFile.d.ts +70 -30
  114. package/dist/files/BrsFile.js +719 -353
  115. package/dist/files/BrsFile.js.map +1 -1
  116. package/dist/files/BrsFile.spec.js +1238 -449
  117. package/dist/files/BrsFile.spec.js.map +1 -1
  118. package/dist/files/XmlFile.d.ts +6 -5
  119. package/dist/files/XmlFile.js +38 -30
  120. package/dist/files/XmlFile.js.map +1 -1
  121. package/dist/files/XmlFile.spec.js +302 -237
  122. package/dist/files/XmlFile.spec.js.map +1 -1
  123. package/dist/files/tests/imports.spec.js +44 -42
  124. package/dist/files/tests/imports.spec.js.map +1 -1
  125. package/dist/files/tests/optionalChaning.spec.d.ts +1 -0
  126. package/dist/files/tests/optionalChaning.spec.js +88 -0
  127. package/dist/files/tests/optionalChaning.spec.js.map +1 -0
  128. package/dist/globalCallables.d.ts +3 -1
  129. package/dist/globalCallables.js +424 -152
  130. package/dist/globalCallables.js.map +1 -1
  131. package/dist/index.d.ts +13 -3
  132. package/dist/index.js +28 -5
  133. package/dist/index.js.map +1 -1
  134. package/dist/interfaces.d.ts +133 -16
  135. package/dist/lexer/Lexer.d.ts +19 -1
  136. package/dist/lexer/Lexer.js +127 -21
  137. package/dist/lexer/Lexer.js.map +1 -1
  138. package/dist/lexer/Lexer.spec.js +657 -536
  139. package/dist/lexer/Lexer.spec.js.map +1 -1
  140. package/dist/lexer/Token.d.ts +2 -2
  141. package/dist/lexer/TokenKind.d.ts +13 -1
  142. package/dist/lexer/TokenKind.js +60 -3
  143. package/dist/lexer/TokenKind.js.map +1 -1
  144. package/dist/parser/BrsTranspileState.d.ts +9 -0
  145. package/dist/parser/BrsTranspileState.js +14 -0
  146. package/dist/parser/BrsTranspileState.js.map +1 -1
  147. package/dist/parser/Expression.d.ts +150 -34
  148. package/dist/parser/Expression.js +335 -165
  149. package/dist/parser/Expression.js.map +1 -1
  150. package/dist/parser/Parser.Class.spec.js +189 -89
  151. package/dist/parser/Parser.Class.spec.js.map +1 -1
  152. package/dist/parser/Parser.d.ts +153 -30
  153. package/dist/parser/Parser.js +1100 -503
  154. package/dist/parser/Parser.js.map +1 -1
  155. package/dist/parser/Parser.spec.js +687 -266
  156. package/dist/parser/Parser.spec.js.map +1 -1
  157. package/dist/parser/SGParser.d.ts +41 -4
  158. package/dist/parser/SGParser.js +186 -175
  159. package/dist/parser/SGParser.js.map +1 -1
  160. package/dist/parser/SGParser.spec.js +35 -22
  161. package/dist/parser/SGParser.spec.js.map +1 -1
  162. package/dist/parser/SGTypes.d.ts +206 -38
  163. package/dist/parser/SGTypes.js +470 -161
  164. package/dist/parser/SGTypes.js.map +1 -1
  165. package/dist/parser/SGTypes.spec.d.ts +1 -0
  166. package/dist/parser/SGTypes.spec.js +351 -0
  167. package/dist/parser/SGTypes.spec.js.map +1 -0
  168. package/dist/parser/Statement.d.ts +202 -48
  169. package/dist/parser/Statement.js +648 -193
  170. package/dist/parser/Statement.js.map +1 -1
  171. package/dist/parser/Statement.spec.js +11 -11
  172. package/dist/parser/Statement.spec.js.map +1 -1
  173. package/dist/parser/TranspileState.d.ts +1 -1
  174. package/dist/parser/TranspileState.js +15 -7
  175. package/dist/parser/TranspileState.js.map +1 -1
  176. package/dist/parser/tests/Parser.spec.d.ts +10 -9
  177. package/dist/parser/tests/Parser.spec.js +15 -11
  178. package/dist/parser/tests/Parser.spec.js.map +1 -1
  179. package/dist/parser/tests/controlFlow/For.spec.js +60 -60
  180. package/dist/parser/tests/controlFlow/For.spec.js.map +1 -1
  181. package/dist/parser/tests/controlFlow/ForEach.spec.js +40 -39
  182. package/dist/parser/tests/controlFlow/ForEach.spec.js.map +1 -1
  183. package/dist/parser/tests/controlFlow/If.spec.js +213 -194
  184. package/dist/parser/tests/controlFlow/If.spec.js.map +1 -1
  185. package/dist/parser/tests/controlFlow/While.spec.js +37 -37
  186. package/dist/parser/tests/controlFlow/While.spec.js.map +1 -1
  187. package/dist/parser/tests/expression/Additive.spec.js +30 -30
  188. package/dist/parser/tests/expression/Additive.spec.js.map +1 -1
  189. package/dist/parser/tests/expression/ArrayLiterals.spec.js +119 -119
  190. package/dist/parser/tests/expression/ArrayLiterals.spec.js.map +1 -1
  191. package/dist/parser/tests/expression/AssociativeArrayLiterals.spec.js +162 -138
  192. package/dist/parser/tests/expression/AssociativeArrayLiterals.spec.js.map +1 -1
  193. package/dist/parser/tests/expression/Boolean.spec.js +24 -24
  194. package/dist/parser/tests/expression/Boolean.spec.js.map +1 -1
  195. package/dist/parser/tests/expression/Call.spec.js +41 -40
  196. package/dist/parser/tests/expression/Call.spec.js.map +1 -1
  197. package/dist/parser/tests/expression/Exponential.spec.js +17 -17
  198. package/dist/parser/tests/expression/Exponential.spec.js.map +1 -1
  199. package/dist/parser/tests/expression/Function.spec.js +256 -256
  200. package/dist/parser/tests/expression/Function.spec.js.map +1 -1
  201. package/dist/parser/tests/expression/Indexing.spec.js +87 -87
  202. package/dist/parser/tests/expression/Indexing.spec.js.map +1 -1
  203. package/dist/parser/tests/expression/Multiplicative.spec.js +37 -37
  204. package/dist/parser/tests/expression/Multiplicative.spec.js.map +1 -1
  205. package/dist/parser/tests/expression/NullCoalescenceExpression.spec.js +75 -63
  206. package/dist/parser/tests/expression/NullCoalescenceExpression.spec.js.map +1 -1
  207. package/dist/parser/tests/expression/PrefixUnary.spec.js +41 -41
  208. package/dist/parser/tests/expression/PrefixUnary.spec.js.map +1 -1
  209. package/dist/parser/tests/expression/Primary.spec.js +41 -41
  210. package/dist/parser/tests/expression/Primary.spec.js.map +1 -1
  211. package/dist/parser/tests/expression/RegexLiteralExpression.spec.d.ts +1 -0
  212. package/dist/parser/tests/expression/RegexLiteralExpression.spec.js +171 -0
  213. package/dist/parser/tests/expression/RegexLiteralExpression.spec.js.map +1 -0
  214. package/dist/parser/tests/expression/Relational.spec.js +43 -43
  215. package/dist/parser/tests/expression/Relational.spec.js.map +1 -1
  216. package/dist/parser/tests/expression/SourceLiteralExpression.spec.js +9 -9
  217. package/dist/parser/tests/expression/SourceLiteralExpression.spec.js.map +1 -1
  218. package/dist/parser/tests/expression/TemplateStringExpression.spec.js +28 -28
  219. package/dist/parser/tests/expression/TemplateStringExpression.spec.js.map +1 -1
  220. package/dist/parser/tests/expression/TernaryExpression.spec.js +102 -102
  221. package/dist/parser/tests/expression/TernaryExpression.spec.js.map +1 -1
  222. package/dist/parser/tests/statement/AssignmentOperators.spec.js +36 -36
  223. package/dist/parser/tests/statement/AssignmentOperators.spec.js.map +1 -1
  224. package/dist/parser/tests/statement/Declaration.spec.js +44 -44
  225. package/dist/parser/tests/statement/Declaration.spec.js.map +1 -1
  226. package/dist/parser/tests/statement/Dim.spec.js +21 -21
  227. package/dist/parser/tests/statement/Dim.spec.js.map +1 -1
  228. package/dist/parser/tests/statement/Enum.spec.d.ts +1 -0
  229. package/dist/parser/tests/statement/Enum.spec.js +840 -0
  230. package/dist/parser/tests/statement/Enum.spec.js.map +1 -0
  231. package/dist/parser/tests/statement/For.spec.d.ts +1 -0
  232. package/dist/parser/tests/statement/For.spec.js +46 -0
  233. package/dist/parser/tests/statement/For.spec.js.map +1 -0
  234. package/dist/parser/tests/statement/ForEach.spec.d.ts +1 -0
  235. package/dist/parser/tests/statement/ForEach.spec.js +37 -0
  236. package/dist/parser/tests/statement/ForEach.spec.js.map +1 -0
  237. package/dist/parser/tests/statement/Function.spec.js +198 -197
  238. package/dist/parser/tests/statement/Function.spec.js.map +1 -1
  239. package/dist/parser/tests/statement/Goto.spec.js +15 -14
  240. package/dist/parser/tests/statement/Goto.spec.js.map +1 -1
  241. package/dist/parser/tests/statement/Increment.spec.js +50 -50
  242. package/dist/parser/tests/statement/Increment.spec.js.map +1 -1
  243. package/dist/parser/tests/statement/InterfaceStatement.spec.d.ts +1 -0
  244. package/dist/parser/tests/statement/InterfaceStatement.spec.js +254 -0
  245. package/dist/parser/tests/statement/InterfaceStatement.spec.js.map +1 -0
  246. package/dist/parser/tests/statement/LibraryStatement.spec.js +17 -17
  247. package/dist/parser/tests/statement/LibraryStatement.spec.js.map +1 -1
  248. package/dist/parser/tests/statement/Misc.spec.js +108 -106
  249. package/dist/parser/tests/statement/Misc.spec.js.map +1 -1
  250. package/dist/parser/tests/statement/PrintStatement.spec.js +40 -40
  251. package/dist/parser/tests/statement/PrintStatement.spec.js.map +1 -1
  252. package/dist/parser/tests/statement/ReturnStatement.spec.js +46 -46
  253. package/dist/parser/tests/statement/ReturnStatement.spec.js.map +1 -1
  254. package/dist/parser/tests/statement/Set.spec.js +83 -83
  255. package/dist/parser/tests/statement/Set.spec.js.map +1 -1
  256. package/dist/parser/tests/statement/Stop.spec.js +12 -11
  257. package/dist/parser/tests/statement/Stop.spec.js.map +1 -1
  258. package/dist/parser/tests/statement/Throw.spec.js +5 -5
  259. package/dist/parser/tests/statement/Throw.spec.js.map +1 -1
  260. package/dist/parser/tests/statement/TryCatch.spec.js +15 -13
  261. package/dist/parser/tests/statement/TryCatch.spec.js.map +1 -1
  262. package/dist/preprocessor/Chunk.d.ts +1 -1
  263. package/dist/preprocessor/Chunk.js.map +1 -1
  264. package/dist/preprocessor/Manifest.d.ts +5 -5
  265. package/dist/preprocessor/Manifest.js +14 -35
  266. package/dist/preprocessor/Manifest.js.map +1 -1
  267. package/dist/preprocessor/Manifest.spec.d.ts +1 -0
  268. package/dist/preprocessor/Manifest.spec.js +78 -103
  269. package/dist/preprocessor/Manifest.spec.js.map +1 -1
  270. package/dist/preprocessor/Preprocessor.d.ts +1 -1
  271. package/dist/preprocessor/Preprocessor.js +8 -8
  272. package/dist/preprocessor/Preprocessor.js.map +1 -1
  273. package/dist/preprocessor/Preprocessor.spec.js +49 -49
  274. package/dist/preprocessor/Preprocessor.spec.js.map +1 -1
  275. package/dist/preprocessor/PreprocessorParser.spec.js +72 -72
  276. package/dist/preprocessor/PreprocessorParser.spec.js.map +1 -1
  277. package/dist/roku-types/data.json +21891 -0
  278. package/dist/roku-types/index.d.ts +6776 -0
  279. package/dist/roku-types/index.js +11 -0
  280. package/dist/roku-types/index.js.map +1 -0
  281. package/dist/types/ArrayType.d.ts +8 -5
  282. package/dist/types/ArrayType.js +52 -12
  283. package/dist/types/ArrayType.js.map +1 -1
  284. package/dist/types/ArrayType.spec.js +72 -11
  285. package/dist/types/ArrayType.spec.js.map +1 -1
  286. package/dist/types/BooleanType.d.ts +4 -2
  287. package/dist/types/BooleanType.js +9 -4
  288. package/dist/types/BooleanType.js.map +1 -1
  289. package/dist/types/BooleanType.spec.js +5 -3
  290. package/dist/types/BooleanType.spec.js.map +1 -1
  291. package/dist/types/BscType.d.ts +20 -5
  292. package/dist/types/BscType.js +24 -0
  293. package/dist/types/BscType.js.map +1 -1
  294. package/dist/types/CustomType.d.ts +8 -6
  295. package/dist/types/CustomType.js +20 -11
  296. package/dist/types/CustomType.js.map +1 -1
  297. package/dist/types/DoubleType.d.ts +2 -0
  298. package/dist/types/DoubleType.js +14 -9
  299. package/dist/types/DoubleType.js.map +1 -1
  300. package/dist/types/DoubleType.spec.js +5 -3
  301. package/dist/types/DoubleType.spec.js.map +1 -1
  302. package/dist/types/DynamicType.d.ts +2 -0
  303. package/dist/types/DynamicType.js +6 -2
  304. package/dist/types/DynamicType.js.map +1 -1
  305. package/dist/types/DynamicType.spec.js +2 -2
  306. package/dist/types/DynamicType.spec.js.map +1 -1
  307. package/dist/types/EnumType.d.ts +22 -0
  308. package/dist/types/EnumType.js +55 -0
  309. package/dist/types/EnumType.js.map +1 -0
  310. package/dist/types/FloatType.d.ts +2 -0
  311. package/dist/types/FloatType.js +14 -9
  312. package/dist/types/FloatType.js.map +1 -1
  313. package/dist/types/FloatType.spec.js +4 -2
  314. package/dist/types/FloatType.spec.js.map +1 -1
  315. package/dist/types/FunctionType.d.ts +7 -31
  316. package/dist/types/FunctionType.js +11 -57
  317. package/dist/types/FunctionType.js.map +1 -1
  318. package/dist/types/IntegerType.d.ts +2 -0
  319. package/dist/types/IntegerType.js +14 -9
  320. package/dist/types/IntegerType.js.map +1 -1
  321. package/dist/types/IntegerType.spec.js +5 -3
  322. package/dist/types/IntegerType.spec.js.map +1 -1
  323. package/dist/types/InterfaceType.d.ts +13 -4
  324. package/dist/types/InterfaceType.js +48 -8
  325. package/dist/types/InterfaceType.js.map +1 -1
  326. package/dist/types/InterfaceType.spec.d.ts +1 -0
  327. package/dist/types/InterfaceType.spec.js +194 -0
  328. package/dist/types/InterfaceType.spec.js.map +1 -0
  329. package/dist/types/InvalidType.d.ts +4 -2
  330. package/dist/types/InvalidType.js +10 -5
  331. package/dist/types/InvalidType.js.map +1 -1
  332. package/dist/types/InvalidType.spec.js +4 -2
  333. package/dist/types/InvalidType.spec.js.map +1 -1
  334. package/dist/types/LazyType.d.ts +8 -7
  335. package/dist/types/LazyType.js +22 -10
  336. package/dist/types/LazyType.js.map +1 -1
  337. package/dist/types/LongIntegerType.d.ts +2 -0
  338. package/dist/types/LongIntegerType.js +14 -9
  339. package/dist/types/LongIntegerType.js.map +1 -1
  340. package/dist/types/LongIntegerType.spec.js +4 -2
  341. package/dist/types/LongIntegerType.spec.js.map +1 -1
  342. package/dist/types/ObjectType.d.ts +8 -4
  343. package/dist/types/ObjectType.js +9 -4
  344. package/dist/types/ObjectType.js.map +1 -1
  345. package/dist/types/ObjectType.spec.js +2 -2
  346. package/dist/types/ObjectType.spec.js.map +1 -1
  347. package/dist/types/StringType.d.ts +4 -2
  348. package/dist/types/StringType.js +9 -4
  349. package/dist/types/StringType.js.map +1 -1
  350. package/dist/types/StringType.spec.js +4 -2
  351. package/dist/types/StringType.spec.js.map +1 -1
  352. package/dist/types/TypedFunctionType.d.ts +28 -0
  353. package/dist/types/TypedFunctionType.js +88 -0
  354. package/dist/types/TypedFunctionType.js.map +1 -0
  355. package/dist/types/TypedFunctionType.spec.d.ts +1 -0
  356. package/dist/types/TypedFunctionType.spec.js +37 -0
  357. package/dist/types/TypedFunctionType.spec.js.map +1 -0
  358. package/dist/types/UninitializedType.js +3 -3
  359. package/dist/types/UninitializedType.js.map +1 -1
  360. package/dist/types/VoidType.d.ts +4 -2
  361. package/dist/types/VoidType.js +8 -4
  362. package/dist/types/VoidType.js.map +1 -1
  363. package/dist/types/VoidType.spec.js +2 -2
  364. package/dist/types/VoidType.spec.js.map +1 -1
  365. package/dist/types/helpers.d.ts +42 -0
  366. package/dist/types/helpers.js +118 -0
  367. package/dist/types/helpers.js.map +1 -0
  368. package/dist/util.d.ts +91 -21
  369. package/dist/util.js +364 -114
  370. package/dist/util.js.map +1 -1
  371. package/dist/validators/ClassValidator.d.ts +19 -2
  372. package/dist/validators/ClassValidator.js +164 -103
  373. package/dist/validators/ClassValidator.js.map +1 -1
  374. package/package.json +30 -19
  375. package/dist/astUtils/index.d.ts +0 -7
  376. package/dist/astUtils/index.js +0 -26
  377. package/dist/astUtils/index.js.map +0 -1
  378. package/dist/lexer/index.d.ts +0 -3
  379. package/dist/lexer/index.js +0 -17
  380. package/dist/lexer/index.js.map +0 -1
  381. package/dist/parser/index.d.ts +0 -3
  382. package/dist/parser/index.js +0 -16
  383. package/dist/parser/index.js.map +0 -1
  384. package/dist/preprocessor/index.d.ts +0 -3
  385. package/dist/preprocessor/index.js +0 -16
  386. package/dist/preprocessor/index.js.map +0 -1
  387. package/dist/types/FunctionType.spec.js +0 -23
  388. package/dist/types/FunctionType.spec.js.map +0 -1
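The excerpt below is the rendered diff for package/dist/lexer/Lexer.spec.js (entry 138, +657 -536), truncated. Most of its churn comes from two mechanical changes: the barrel index modules listed in entries 375-386 (dist/astUtils/index.js, dist/lexer/index.js, dist/parser/index.js, dist/preprocessor/index.js) were removed, so the compiled tests now require the concrete modules directly, and the alpha.22 output wraps imported function calls in the (0, chai_1.expect)(...) indirect-call form. A minimal illustration of the import change, reconstructed from the diff below (not code shipped in the package):

// alpha.2: the compiled test pulled everything from the lexer barrel index
//   const _1 = require('.');
//   let kind = _1.TokenKind.Namespace;

// alpha.22: the barrel index was removed, so the test requires the module directly
const TokenKind_1 = require('./TokenKind');
let kind = TokenKind_1.TokenKind.Namespace;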
@@ -2,7 +2,7 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  /* eslint no-template-curly-in-string: 0 */
  const chai_1 = require("chai");
- const _1 = require(".");
+ const TokenKind_1 = require("./TokenKind");
  const Lexer_1 = require("./Lexer");
  const Token_1 = require("./Token");
  const Parser_spec_1 = require("../parser/Parser.spec");
@@ -11,64 +11,116 @@ const util_1 = require("../util");
11
11
  describe('lexer', () => {
12
12
  it('recognizes namespace keywords', () => {
13
13
  let { tokens } = Lexer_1.Lexer.scan('namespace end namespace endnamespace end namespace');
14
- chai_1.expect(tokens.map(x => x.kind)).to.eql([
15
- _1.TokenKind.Namespace,
16
- _1.TokenKind.EndNamespace,
17
- _1.TokenKind.EndNamespace,
18
- _1.TokenKind.EndNamespace,
19
- _1.TokenKind.Eof
14
+ (0, chai_1.expect)(tokens.map(x => x.kind)).to.eql([
15
+ TokenKind_1.TokenKind.Namespace,
16
+ TokenKind_1.TokenKind.EndNamespace,
17
+ TokenKind_1.TokenKind.EndNamespace,
18
+ TokenKind_1.TokenKind.EndNamespace,
19
+ TokenKind_1.TokenKind.Eof
20
+ ]);
21
+ });
22
+ it('recognizes the question mark operator in various contexts', () => {
23
+ expectKinds('? ?? ?. ?[ ?.[ ?( ?@', [
24
+ TokenKind_1.TokenKind.Question,
25
+ TokenKind_1.TokenKind.QuestionQuestion,
26
+ TokenKind_1.TokenKind.QuestionDot,
27
+ TokenKind_1.TokenKind.QuestionLeftSquare,
28
+ TokenKind_1.TokenKind.QuestionDot,
29
+ TokenKind_1.TokenKind.LeftSquareBracket,
30
+ TokenKind_1.TokenKind.QuestionLeftParen,
31
+ TokenKind_1.TokenKind.QuestionAt
32
+ ]);
33
+ });
34
+ it('separates optional chain characters and LeftSquare when found at beginning of statement locations', () => {
35
+ //a statement starting with a question mark is actually a print statement, so we need to keep the ? separate from [
36
+ expectKinds(`?[ ?[ : ?[ ?[`, [
37
+ TokenKind_1.TokenKind.Question,
38
+ TokenKind_1.TokenKind.LeftSquareBracket,
39
+ TokenKind_1.TokenKind.QuestionLeftSquare,
40
+ TokenKind_1.TokenKind.Colon,
41
+ TokenKind_1.TokenKind.Question,
42
+ TokenKind_1.TokenKind.LeftSquareBracket,
43
+ TokenKind_1.TokenKind.QuestionLeftSquare
44
+ ]);
45
+ });
46
+ it('separates optional chain characters and LeftParen when found at beginning of statement locations', () => {
47
+ //a statement starting with a question mark is actually a print statement, so we need to keep the ? separate from [
48
+ expectKinds(`?( ?( : ?( ?(`, [
49
+ TokenKind_1.TokenKind.Question,
50
+ TokenKind_1.TokenKind.LeftParen,
51
+ TokenKind_1.TokenKind.QuestionLeftParen,
52
+ TokenKind_1.TokenKind.Colon,
53
+ TokenKind_1.TokenKind.Question,
54
+ TokenKind_1.TokenKind.LeftParen,
55
+ TokenKind_1.TokenKind.QuestionLeftParen
56
+ ]);
57
+ });
58
+ it('handles QuestionDot and Square properly', () => {
59
+ expectKinds('?.[ ?. [', [
60
+ TokenKind_1.TokenKind.QuestionDot,
61
+ TokenKind_1.TokenKind.LeftSquareBracket,
62
+ TokenKind_1.TokenKind.QuestionDot,
63
+ TokenKind_1.TokenKind.LeftSquareBracket
64
+ ]);
65
+ });
66
+ it('does not make conditional chaining tokens with space between', () => {
67
+ expectKinds('? . ? [ ? ( ? @', [
68
+ TokenKind_1.TokenKind.Question,
69
+ TokenKind_1.TokenKind.Dot,
70
+ TokenKind_1.TokenKind.Question,
71
+ TokenKind_1.TokenKind.LeftSquareBracket,
72
+ TokenKind_1.TokenKind.Question,
73
+ TokenKind_1.TokenKind.LeftParen,
74
+ TokenKind_1.TokenKind.Question,
75
+ TokenKind_1.TokenKind.At
20
76
  ]);
21
77
  });
22
78
  it('recognizes the callfunc operator', () => {
23
79
  let { tokens } = Lexer_1.Lexer.scan('@.');
24
- chai_1.expect(tokens[0].kind).to.equal(_1.TokenKind.Callfunc);
80
+ (0, chai_1.expect)(tokens[0].kind).to.equal(TokenKind_1.TokenKind.Callfunc);
25
81
  });
26
82
  it('recognizes the import token', () => {
27
83
  let { tokens } = Lexer_1.Lexer.scan('import');
28
- chai_1.expect(tokens[0].kind).to.eql(_1.TokenKind.Import);
84
+ (0, chai_1.expect)(tokens[0].kind).to.eql(TokenKind_1.TokenKind.Import);
29
85
  });
30
86
  it('recognizes library token', () => {
31
87
  let { tokens } = Lexer_1.Lexer.scan('library');
32
- chai_1.expect(tokens[0].kind).to.eql(_1.TokenKind.Library);
33
- });
34
- it('recognizes the question mark operator', () => {
35
- let { tokens } = Lexer_1.Lexer.scan('?');
36
- chai_1.expect(tokens[0].kind).to.equal(_1.TokenKind.Question);
88
+ (0, chai_1.expect)(tokens[0].kind).to.eql(TokenKind_1.TokenKind.Library);
37
89
  });
38
90
  it('produces an at symbol token', () => {
39
91
  let { tokens } = Lexer_1.Lexer.scan('@');
40
- chai_1.expect(tokens[0].kind).to.equal(_1.TokenKind.At);
92
+ (0, chai_1.expect)(tokens[0].kind).to.equal(TokenKind_1.TokenKind.At);
41
93
  });
42
94
  it('produces a semicolon token', () => {
43
95
  let { tokens } = Lexer_1.Lexer.scan(';');
44
- chai_1.expect(tokens[0].kind).to.equal(_1.TokenKind.Semicolon);
96
+ (0, chai_1.expect)(tokens[0].kind).to.equal(TokenKind_1.TokenKind.Semicolon);
45
97
  });
46
98
  it('emits error on unknown character type', () => {
47
99
  let { diagnostics } = Lexer_1.Lexer.scan('\0');
48
- chai_1.expect(diagnostics).to.be.lengthOf(1);
100
+ (0, chai_1.expect)(diagnostics).to.be.lengthOf(1);
49
101
  });
50
102
  it('includes an end-of-file marker', () => {
51
103
  let { tokens } = Lexer_1.Lexer.scan('');
52
- chai_1.expect(tokens.map(t => t.kind)).to.deep.equal([_1.TokenKind.Eof]);
104
+ (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Eof]);
53
105
  });
54
106
  it('ignores tabs and spaces', () => {
55
107
  let { tokens } = Lexer_1.Lexer.scan('\t\t \t \t');
56
- chai_1.expect(tokens.map(t => t.kind)).to.deep.equal([_1.TokenKind.Eof]);
108
+ (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Eof]);
57
109
  });
58
110
  it('retains every single newline', () => {
59
111
  let { tokens } = Lexer_1.Lexer.scan('\n\n\'foo\n\n\nprint 2\n\n');
60
- chai_1.expect(tokens.map(t => t.kind)).to.deep.equal([
61
- _1.TokenKind.Newline,
62
- _1.TokenKind.Newline,
63
- _1.TokenKind.Comment,
64
- _1.TokenKind.Newline,
65
- _1.TokenKind.Newline,
66
- _1.TokenKind.Newline,
67
- _1.TokenKind.Print,
68
- _1.TokenKind.IntegerLiteral,
69
- _1.TokenKind.Newline,
70
- _1.TokenKind.Newline,
71
- _1.TokenKind.Eof
112
+ (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
113
+ TokenKind_1.TokenKind.Newline,
114
+ TokenKind_1.TokenKind.Newline,
115
+ TokenKind_1.TokenKind.Comment,
116
+ TokenKind_1.TokenKind.Newline,
117
+ TokenKind_1.TokenKind.Newline,
118
+ TokenKind_1.TokenKind.Newline,
119
+ TokenKind_1.TokenKind.Print,
120
+ TokenKind_1.TokenKind.IntegerLiteral,
121
+ TokenKind_1.TokenKind.Newline,
122
+ TokenKind_1.TokenKind.Newline,
123
+ TokenKind_1.TokenKind.Eof
72
124
  ]);
73
125
  });
74
126
  it('does not insert double newlines with the windows \\r\\n newline', () => {
@@ -79,24 +131,24 @@ describe('lexer', () => {
  ' print 0\r\n' +
  ' end if\r\n' +
  'end function\r\n').tokens.map(x => x.kind);
- chai_1.expect(kinds).to.eql([
- _1.TokenKind.Function, _1.TokenKind.Identifier, _1.TokenKind.LeftParen, _1.TokenKind.RightParen, _1.TokenKind.As, _1.TokenKind.String, _1.TokenKind.Newline,
- _1.TokenKind.If, _1.TokenKind.True, _1.TokenKind.Then, _1.TokenKind.Newline,
- _1.TokenKind.Print, _1.TokenKind.IntegerLiteral, _1.TokenKind.Newline,
- _1.TokenKind.Else, _1.TokenKind.Newline,
- _1.TokenKind.Print, _1.TokenKind.IntegerLiteral, _1.TokenKind.Newline,
- _1.TokenKind.EndIf, _1.TokenKind.Newline,
- _1.TokenKind.EndFunction, _1.TokenKind.Newline,
- _1.TokenKind.Eof
+ (0, chai_1.expect)(kinds).to.eql([
+ TokenKind_1.TokenKind.Function, TokenKind_1.TokenKind.Identifier, TokenKind_1.TokenKind.LeftParen, TokenKind_1.TokenKind.RightParen, TokenKind_1.TokenKind.As, TokenKind_1.TokenKind.String, TokenKind_1.TokenKind.Newline,
+ TokenKind_1.TokenKind.If, TokenKind_1.TokenKind.True, TokenKind_1.TokenKind.Then, TokenKind_1.TokenKind.Newline,
+ TokenKind_1.TokenKind.Print, TokenKind_1.TokenKind.IntegerLiteral, TokenKind_1.TokenKind.Newline,
+ TokenKind_1.TokenKind.Else, TokenKind_1.TokenKind.Newline,
+ TokenKind_1.TokenKind.Print, TokenKind_1.TokenKind.IntegerLiteral, TokenKind_1.TokenKind.Newline,
+ TokenKind_1.TokenKind.EndIf, TokenKind_1.TokenKind.Newline,
+ TokenKind_1.TokenKind.EndFunction, TokenKind_1.TokenKind.Newline,
+ TokenKind_1.TokenKind.Eof
  ]);
  });
  it('computes range properly both with and without whitespace', () => {
  let withoutWhitespace = Lexer_1.Lexer.scan(`sub Main()\n bob = true\nend sub`).tokens
- .map(x => Parser_spec_1.rangeToArray(x.range));
+ .map(x => (0, Parser_spec_1.rangeToArray)(x.range));
  let withWhitespace = Lexer_1.Lexer.scan(`sub Main()\n bob = true\nend sub`).tokens
  //filter out the whitespace...we only care that it was computed during the scan
- .filter(x => x.kind !== _1.TokenKind.Whitespace)
- .map(x => Parser_spec_1.rangeToArray(x.range));
+ .filter(x => x.kind !== TokenKind_1.TokenKind.Whitespace)
+ .map(x => (0, Parser_spec_1.rangeToArray)(x.range));
  /*eslint-disable */
  let expectedLocations = [
  [0, 0, 0, 3],
@@ -112,54 +164,54 @@ describe('lexer', () => {
112
164
  [2, 7, 2, 8] //Eof
113
165
  ];
114
166
  /*eslint-enable*/
115
- chai_1.expect(withoutWhitespace, 'Without whitespace').to.eql(expectedLocations);
116
- chai_1.expect(withWhitespace, 'With whitespace').to.eql(expectedLocations);
167
+ (0, chai_1.expect)(withoutWhitespace, 'Without whitespace').to.eql(expectedLocations);
168
+ (0, chai_1.expect)(withWhitespace, 'With whitespace').to.eql(expectedLocations);
117
169
  });
118
170
  it('retains original line endings', () => {
119
171
  let { tokens } = Lexer_1.Lexer.scan('print "hello"\r\nprint "world"\n');
120
- chai_1.expect([
172
+ (0, chai_1.expect)([
121
173
  tokens[2].text.charCodeAt(0),
122
174
  tokens[2].text.charCodeAt(1)
123
175
  ], 'should contain \\r\\n').to.eql([13, 10]);
124
- chai_1.expect(tokens[5].text.charCodeAt(0), 'should contain \\r\\n').to.eql(10);
176
+ (0, chai_1.expect)(tokens[5].text.charCodeAt(0), 'should contain \\r\\n').to.eql(10);
125
177
  });
126
178
  it('correctly splits the elseif token', () => {
127
179
  let { tokens } = Lexer_1.Lexer.scan('else if elseif else if');
128
- chai_1.expect(tokens.map(t => t.kind)).to.deep.equal([
129
- _1.TokenKind.Else,
130
- _1.TokenKind.If,
131
- _1.TokenKind.Else,
132
- _1.TokenKind.If,
133
- _1.TokenKind.Else,
134
- _1.TokenKind.If,
135
- _1.TokenKind.Eof
180
+ (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
181
+ TokenKind_1.TokenKind.Else,
182
+ TokenKind_1.TokenKind.If,
183
+ TokenKind_1.TokenKind.Else,
184
+ TokenKind_1.TokenKind.If,
185
+ TokenKind_1.TokenKind.Else,
186
+ TokenKind_1.TokenKind.If,
187
+ TokenKind_1.TokenKind.Eof
136
188
  ]);
137
189
  });
138
190
  it('gives the `as` keyword its own TokenKind', () => {
139
191
  let { tokens } = Lexer_1.Lexer.scan('as');
140
- chai_1.expect(tokens.map(t => t.kind)).to.deep.equal([_1.TokenKind.As, _1.TokenKind.Eof]);
192
+ (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.As, TokenKind_1.TokenKind.Eof]);
141
193
  });
142
194
  it('gives the `stop` keyword its own TokenKind', () => {
143
195
  let { tokens } = Lexer_1.Lexer.scan('stop');
144
- chai_1.expect(tokens.map(t => t.kind)).to.deep.equal([_1.TokenKind.Stop, _1.TokenKind.Eof]);
196
+ (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Stop, TokenKind_1.TokenKind.Eof]);
145
197
  });
146
198
  it('does not alias \'?\' to \'print\' - the parser will do that', () => {
147
199
  let { tokens } = Lexer_1.Lexer.scan('?2');
148
- chai_1.expect(tokens.map(t => t.kind)).to.deep.equal([_1.TokenKind.Question, _1.TokenKind.IntegerLiteral, _1.TokenKind.Eof]);
200
+ (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Question, TokenKind_1.TokenKind.IntegerLiteral, TokenKind_1.TokenKind.Eof]);
149
201
  });
150
202
  describe('comments', () => {
151
203
  it('does not include carriage return character', () => {
152
204
  let tokens = Lexer_1.Lexer.scan(`'someComment\r\nprint "hello"`).tokens;
153
- chai_1.expect(tokens[0].text).to.equal(`'someComment`);
205
+ (0, chai_1.expect)(tokens[0].text).to.equal(`'someComment`);
154
206
  });
155
207
  it('includes the comment characters in the text', () => {
156
208
  let text = Lexer_1.Lexer.scan(`
157
209
  'comment
158
210
  REM some comment
159
211
  `).tokens
160
- .filter(x => ![_1.TokenKind.Newline, _1.TokenKind.Eof].includes(x.kind))
212
+ .filter(x => ![TokenKind_1.TokenKind.Newline, TokenKind_1.TokenKind.Eof].includes(x.kind))
161
213
  .map(x => x.text);
162
- chai_1.expect(text).to.eql([
214
+ (0, chai_1.expect)(text).to.eql([
163
215
  `'comment`,
164
216
  'REM some comment'
165
217
  ]);
@@ -172,8 +224,8 @@ describe('lexer', () => {
  end sub
  `, {
  includeWhitespace: true
- }).tokens.map(x => [...Parser_spec_1.rangeToArray(x.range), x.text]);
- chai_1.expect(tokens).to.eql([
+ }).tokens.map(x => [...(0, Parser_spec_1.rangeToArray)(x.range), x.text]);
+ (0, chai_1.expect)(tokens).to.eql([
  [0, 0, 0, 1, '\n'],
  [1, 0, 1, 16, ' '],
  [1, 16, 1, 19, 'sub'],
@@ -207,15 +259,15 @@ describe('lexer', () => {
  let tokens = Lexer_1.Lexer.scan(`
  'comment
  REM some comment
- `).tokens.filter(x => ![_1.TokenKind.Newline, _1.TokenKind.Eof].includes(x.kind));
- chai_1.expect(tokens[0].range).to.eql(vscode_languageserver_1.Range.create(1, 16, 1, 24));
- chai_1.expect(tokens[1].range).to.eql(vscode_languageserver_1.Range.create(2, 16, 2, 32));
+ `).tokens.filter(x => ![TokenKind_1.TokenKind.Newline, TokenKind_1.TokenKind.Eof].includes(x.kind));
+ (0, chai_1.expect)(tokens[0].range).to.eql(vscode_languageserver_1.Range.create(1, 16, 1, 24));
+ (0, chai_1.expect)(tokens[1].range).to.eql(vscode_languageserver_1.Range.create(2, 16, 2, 32));
  });
  it('finds correct location for newlines', () => {
  let tokens = Lexer_1.Lexer.scan('sub\nsub\r\nsub\n\n').tokens
  //ignore the Eof token
- .filter(x => x.kind !== _1.TokenKind.Eof);
- chai_1.expect(tokens.map(x => x.range)).to.eql([
+ .filter(x => x.kind !== TokenKind_1.TokenKind.Eof);
+ (0, chai_1.expect)(tokens.map(x => x.range)).to.eql([
  vscode_languageserver_1.Range.create(0, 0, 0, 3),
  vscode_languageserver_1.Range.create(0, 3, 0, 4),
  vscode_languageserver_1.Range.create(1, 0, 1, 3),
@@ -237,99 +289,99 @@ describe('lexer', () => {
237
289
  end if 'comment
238
290
  end sub
239
291
  `);
240
- let comments = tokens.filter(x => x.kind === _1.TokenKind.Comment);
241
- chai_1.expect(comments).to.be.lengthOf(1);
242
- chai_1.expect(comments[0].range).to.eql(vscode_languageserver_1.Range.create(8, 27, 8, 35));
292
+ let comments = tokens.filter(x => x.kind === TokenKind_1.TokenKind.Comment);
293
+ (0, chai_1.expect)(comments).to.be.lengthOf(1);
294
+ (0, chai_1.expect)(comments[0].range).to.eql(vscode_languageserver_1.Range.create(8, 27, 8, 35));
243
295
  });
244
296
  it('ignores everything after `\'`', () => {
245
297
  let { tokens } = Lexer_1.Lexer.scan('= \' (');
246
- chai_1.expect(tokens.map(t => t.kind)).to.deep.equal([_1.TokenKind.Equal, _1.TokenKind.Comment, _1.TokenKind.Eof]);
298
+ (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Equal, TokenKind_1.TokenKind.Comment, TokenKind_1.TokenKind.Eof]);
247
299
  });
248
300
  it('ignores everything after `REM`', () => {
249
301
  let { tokens } = Lexer_1.Lexer.scan('= REM (');
250
- chai_1.expect(tokens.map(t => t.kind)).to.deep.equal([_1.TokenKind.Equal, _1.TokenKind.Comment, _1.TokenKind.Eof]);
302
+ (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Equal, TokenKind_1.TokenKind.Comment, TokenKind_1.TokenKind.Eof]);
251
303
  });
252
304
  it('ignores everything after `rem`', () => {
253
305
  let { tokens } = Lexer_1.Lexer.scan('= rem (');
254
- chai_1.expect(tokens.map(t => t.kind)).to.deep.equal([_1.TokenKind.Equal, _1.TokenKind.Comment, _1.TokenKind.Eof]);
306
+ (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.Equal, TokenKind_1.TokenKind.Comment, TokenKind_1.TokenKind.Eof]);
255
307
  });
256
308
  }); // comments
257
309
  describe('non-literals', () => {
258
310
  it('reads parens & braces', () => {
259
311
  let { tokens } = Lexer_1.Lexer.scan('(){}');
260
- chai_1.expect(tokens.map(t => t.kind)).to.deep.equal([
261
- _1.TokenKind.LeftParen,
262
- _1.TokenKind.RightParen,
263
- _1.TokenKind.LeftCurlyBrace,
264
- _1.TokenKind.RightCurlyBrace,
265
- _1.TokenKind.Eof
312
+ (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
313
+ TokenKind_1.TokenKind.LeftParen,
314
+ TokenKind_1.TokenKind.RightParen,
315
+ TokenKind_1.TokenKind.LeftCurlyBrace,
316
+ TokenKind_1.TokenKind.RightCurlyBrace,
317
+ TokenKind_1.TokenKind.Eof
266
318
  ]);
267
319
  });
268
320
  it('reads operators', () => {
269
321
  let { tokens } = Lexer_1.Lexer.scan('^ - + * MOD / \\ -- ++');
270
- chai_1.expect(tokens.map(t => t.kind)).to.deep.equal([
271
- _1.TokenKind.Caret,
272
- _1.TokenKind.Minus,
273
- _1.TokenKind.Plus,
274
- _1.TokenKind.Star,
275
- _1.TokenKind.Mod,
276
- _1.TokenKind.Forwardslash,
277
- _1.TokenKind.Backslash,
278
- _1.TokenKind.MinusMinus,
279
- _1.TokenKind.PlusPlus,
280
- _1.TokenKind.Eof
322
+ (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
323
+ TokenKind_1.TokenKind.Caret,
324
+ TokenKind_1.TokenKind.Minus,
325
+ TokenKind_1.TokenKind.Plus,
326
+ TokenKind_1.TokenKind.Star,
327
+ TokenKind_1.TokenKind.Mod,
328
+ TokenKind_1.TokenKind.Forwardslash,
329
+ TokenKind_1.TokenKind.Backslash,
330
+ TokenKind_1.TokenKind.MinusMinus,
331
+ TokenKind_1.TokenKind.PlusPlus,
332
+ TokenKind_1.TokenKind.Eof
281
333
  ]);
282
334
  });
283
335
  it('reads bitshift operators', () => {
284
336
  let { tokens } = Lexer_1.Lexer.scan('<< >> <<');
285
- chai_1.expect(tokens.map(t => t.kind)).to.deep.equal([
286
- _1.TokenKind.LeftShift,
287
- _1.TokenKind.RightShift,
288
- _1.TokenKind.LeftShift,
289
- _1.TokenKind.Eof
337
+ (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
338
+ TokenKind_1.TokenKind.LeftShift,
339
+ TokenKind_1.TokenKind.RightShift,
340
+ TokenKind_1.TokenKind.LeftShift,
341
+ TokenKind_1.TokenKind.Eof
290
342
  ]);
291
343
  });
292
344
  it('reads bitshift assignment operators', () => {
293
345
  let { tokens } = Lexer_1.Lexer.scan('<<= >>=');
294
- chai_1.expect(tokens.map(t => t.kind)).to.deep.equal([
295
- _1.TokenKind.LeftShiftEqual,
296
- _1.TokenKind.RightShiftEqual,
297
- _1.TokenKind.Eof
346
+ (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
347
+ TokenKind_1.TokenKind.LeftShiftEqual,
348
+ TokenKind_1.TokenKind.RightShiftEqual,
349
+ TokenKind_1.TokenKind.Eof
298
350
  ]);
299
351
  });
300
352
  it('reads comparators', () => {
301
353
  let { tokens } = Lexer_1.Lexer.scan('< <= > >= = <>');
302
- chai_1.expect(tokens.map(t => t.kind)).to.deep.equal([
303
- _1.TokenKind.Less,
304
- _1.TokenKind.LessEqual,
305
- _1.TokenKind.Greater,
306
- _1.TokenKind.GreaterEqual,
307
- _1.TokenKind.Equal,
308
- _1.TokenKind.LessGreater,
309
- _1.TokenKind.Eof
354
+ (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
355
+ TokenKind_1.TokenKind.Less,
356
+ TokenKind_1.TokenKind.LessEqual,
357
+ TokenKind_1.TokenKind.Greater,
358
+ TokenKind_1.TokenKind.GreaterEqual,
359
+ TokenKind_1.TokenKind.Equal,
360
+ TokenKind_1.TokenKind.LessGreater,
361
+ TokenKind_1.TokenKind.Eof
310
362
  ]);
311
363
  });
312
364
  }); // non-literals
313
365
  describe('string literals', () => {
314
366
  it('produces string literal tokens', () => {
315
367
  let { tokens } = Lexer_1.Lexer.scan(`"hello world"`);
316
- chai_1.expect(tokens.map(t => t.kind)).to.deep.equal([_1.TokenKind.StringLiteral, _1.TokenKind.Eof]);
368
+ (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([TokenKind_1.TokenKind.StringLiteral, TokenKind_1.TokenKind.Eof]);
317
369
  });
318
370
  it(`safely escapes " literals`, () => {
319
371
  let { tokens } = Lexer_1.Lexer.scan(`"the cat says ""meow"""`);
320
- chai_1.expect(tokens[0].kind).to.equal(_1.TokenKind.StringLiteral);
372
+ (0, chai_1.expect)(tokens[0].kind).to.equal(TokenKind_1.TokenKind.StringLiteral);
321
373
  });
322
374
  it('captures text to end of line for unterminated strings with LF', () => {
323
375
  let { tokens } = Lexer_1.Lexer.scan(`"unterminated!\n`);
324
- chai_1.expect(tokens[0].kind).to.eql(_1.TokenKind.StringLiteral);
376
+ (0, chai_1.expect)(tokens[0].kind).to.eql(TokenKind_1.TokenKind.StringLiteral);
325
377
  });
326
378
  it('captures text to end of line for unterminated strings with CRLF', () => {
327
379
  let { tokens } = Lexer_1.Lexer.scan(`"unterminated!\r\n`);
328
- chai_1.expect(tokens[0].text).to.equal('"unterminated!');
380
+ (0, chai_1.expect)(tokens[0].text).to.equal('"unterminated!');
329
381
  });
330
382
  it('disallows multiline strings', () => {
331
383
  let { diagnostics } = Lexer_1.Lexer.scan(`"multi-line\n\n`);
332
- chai_1.expect(diagnostics.map(err => err.message)).to.deep.equal([
384
+ (0, chai_1.expect)(diagnostics.map(err => err.message)).to.deep.equal([
333
385
  'Unterminated string at end of line'
334
386
  ]);
335
387
  });
@@ -338,21 +390,21 @@ describe('lexer', () => {
338
390
  describe('template string literals', () => {
339
391
  it('supports escaped chars', () => {
340
392
  let { tokens } = Lexer_1.Lexer.scan('`\\n\\`\\r\\n`');
341
- chai_1.expect(tokens.map(t => t.kind)).to.deep.equal([
342
- _1.TokenKind.BackTick,
343
- _1.TokenKind.TemplateStringQuasi,
344
- _1.TokenKind.EscapedCharCodeLiteral,
345
- _1.TokenKind.TemplateStringQuasi,
346
- _1.TokenKind.EscapedCharCodeLiteral,
347
- _1.TokenKind.TemplateStringQuasi,
348
- _1.TokenKind.EscapedCharCodeLiteral,
349
- _1.TokenKind.TemplateStringQuasi,
350
- _1.TokenKind.EscapedCharCodeLiteral,
351
- _1.TokenKind.TemplateStringQuasi,
352
- _1.TokenKind.BackTick,
353
- _1.TokenKind.Eof
393
+ (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
394
+ TokenKind_1.TokenKind.BackTick,
395
+ TokenKind_1.TokenKind.TemplateStringQuasi,
396
+ TokenKind_1.TokenKind.EscapedCharCodeLiteral,
397
+ TokenKind_1.TokenKind.TemplateStringQuasi,
398
+ TokenKind_1.TokenKind.EscapedCharCodeLiteral,
399
+ TokenKind_1.TokenKind.TemplateStringQuasi,
400
+ TokenKind_1.TokenKind.EscapedCharCodeLiteral,
401
+ TokenKind_1.TokenKind.TemplateStringQuasi,
402
+ TokenKind_1.TokenKind.EscapedCharCodeLiteral,
403
+ TokenKind_1.TokenKind.TemplateStringQuasi,
404
+ TokenKind_1.TokenKind.BackTick,
405
+ TokenKind_1.TokenKind.Eof
354
406
  ]);
355
- chai_1.expect(tokens.map(x => x.charCode).filter(x => !!x)).to.eql([
407
+ (0, chai_1.expect)(tokens.map(x => x.charCode).filter(x => !!x)).to.eql([
356
408
  10,
357
409
  96,
358
410
  13,
@@ -361,30 +413,30 @@ describe('lexer', () => {
361
413
  });
362
414
  it('prevents expressions when escaping the dollar sign', () => {
363
415
  let { tokens } = Lexer_1.Lexer.scan('`\\${just text}`');
364
- chai_1.expect(tokens.map(t => t.kind)).to.deep.equal([
365
- _1.TokenKind.BackTick,
366
- _1.TokenKind.TemplateStringQuasi,
367
- _1.TokenKind.EscapedCharCodeLiteral,
368
- _1.TokenKind.TemplateStringQuasi,
369
- _1.TokenKind.BackTick,
370
- _1.TokenKind.Eof
416
+ (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
417
+ TokenKind_1.TokenKind.BackTick,
418
+ TokenKind_1.TokenKind.TemplateStringQuasi,
419
+ TokenKind_1.TokenKind.EscapedCharCodeLiteral,
420
+ TokenKind_1.TokenKind.TemplateStringQuasi,
421
+ TokenKind_1.TokenKind.BackTick,
422
+ TokenKind_1.TokenKind.Eof
371
423
  ]);
372
424
  });
373
425
  it('supports escaping unicode char codes', () => {
374
426
  let { tokens } = Lexer_1.Lexer.scan('`\\c1\\c12\\c123`');
375
- chai_1.expect(tokens.map(t => t.kind)).to.deep.equal([
376
- _1.TokenKind.BackTick,
377
- _1.TokenKind.TemplateStringQuasi,
378
- _1.TokenKind.EscapedCharCodeLiteral,
379
- _1.TokenKind.TemplateStringQuasi,
380
- _1.TokenKind.EscapedCharCodeLiteral,
381
- _1.TokenKind.TemplateStringQuasi,
382
- _1.TokenKind.EscapedCharCodeLiteral,
383
- _1.TokenKind.TemplateStringQuasi,
384
- _1.TokenKind.BackTick,
385
- _1.TokenKind.Eof
427
+ (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
428
+ TokenKind_1.TokenKind.BackTick,
429
+ TokenKind_1.TokenKind.TemplateStringQuasi,
430
+ TokenKind_1.TokenKind.EscapedCharCodeLiteral,
431
+ TokenKind_1.TokenKind.TemplateStringQuasi,
432
+ TokenKind_1.TokenKind.EscapedCharCodeLiteral,
433
+ TokenKind_1.TokenKind.TemplateStringQuasi,
434
+ TokenKind_1.TokenKind.EscapedCharCodeLiteral,
435
+ TokenKind_1.TokenKind.TemplateStringQuasi,
436
+ TokenKind_1.TokenKind.BackTick,
437
+ TokenKind_1.TokenKind.Eof
386
438
  ]);
387
- chai_1.expect(tokens.map(x => x.charCode).filter(x => !!x)).to.eql([
439
+ (0, chai_1.expect)(tokens.map(x => x.charCode).filter(x => !!x)).to.eql([
388
440
  1,
389
441
  12,
390
442
  123
@@ -392,29 +444,29 @@ describe('lexer', () => {
392
444
  });
393
445
  it('converts doublequote to EscapedCharCodeLiteral', () => {
394
446
  let { tokens } = Lexer_1.Lexer.scan('`"`');
395
- chai_1.expect(tokens.map(t => t.kind)).to.deep.equal([
396
- _1.TokenKind.BackTick,
397
- _1.TokenKind.TemplateStringQuasi,
398
- _1.TokenKind.EscapedCharCodeLiteral,
399
- _1.TokenKind.TemplateStringQuasi,
400
- _1.TokenKind.BackTick,
401
- _1.TokenKind.Eof
447
+ (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
448
+ TokenKind_1.TokenKind.BackTick,
449
+ TokenKind_1.TokenKind.TemplateStringQuasi,
450
+ TokenKind_1.TokenKind.EscapedCharCodeLiteral,
451
+ TokenKind_1.TokenKind.TemplateStringQuasi,
452
+ TokenKind_1.TokenKind.BackTick,
453
+ TokenKind_1.TokenKind.Eof
402
454
  ]);
403
- chai_1.expect(tokens[2].charCode).to.equal(34);
455
+ (0, chai_1.expect)(tokens[2].charCode).to.equal(34);
404
456
  });
405
457
  it(`safely escapes \` literals`, () => {
406
458
  let { tokens } = Lexer_1.Lexer.scan('`the cat says \\`meow\\` a lot`');
407
- chai_1.expect(tokens.map(t => t.kind)).to.deep.equal([
408
- _1.TokenKind.BackTick,
409
- _1.TokenKind.TemplateStringQuasi,
410
- _1.TokenKind.EscapedCharCodeLiteral,
411
- _1.TokenKind.TemplateStringQuasi,
412
- _1.TokenKind.EscapedCharCodeLiteral,
413
- _1.TokenKind.TemplateStringQuasi,
414
- _1.TokenKind.BackTick,
415
- _1.TokenKind.Eof
459
+ (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
460
+ TokenKind_1.TokenKind.BackTick,
461
+ TokenKind_1.TokenKind.TemplateStringQuasi,
462
+ TokenKind_1.TokenKind.EscapedCharCodeLiteral,
463
+ TokenKind_1.TokenKind.TemplateStringQuasi,
464
+ TokenKind_1.TokenKind.EscapedCharCodeLiteral,
465
+ TokenKind_1.TokenKind.TemplateStringQuasi,
466
+ TokenKind_1.TokenKind.BackTick,
467
+ TokenKind_1.TokenKind.Eof
416
468
  ]);
417
- chai_1.expect(tokens.map(x => x.text)).to.eql([
469
+ (0, chai_1.expect)(tokens.map(x => x.text)).to.eql([
418
470
  '`',
419
471
  'the cat says ',
420
472
  '\\`',
@@ -427,27 +479,27 @@ describe('lexer', () => {
427
479
  });
428
480
  it('produces template string literal tokens', () => {
429
481
  let { tokens } = Lexer_1.Lexer.scan('`hello world`');
430
- chai_1.expect(tokens.map(t => t.kind)).to.deep.equal([
431
- _1.TokenKind.BackTick,
432
- _1.TokenKind.TemplateStringQuasi,
433
- _1.TokenKind.BackTick,
434
- _1.TokenKind.Eof
482
+ (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
483
+ TokenKind_1.TokenKind.BackTick,
484
+ TokenKind_1.TokenKind.TemplateStringQuasi,
485
+ TokenKind_1.TokenKind.BackTick,
486
+ TokenKind_1.TokenKind.Eof
435
487
  ]);
436
- chai_1.expect(tokens[1].text).to.deep.equal('hello world');
488
+ (0, chai_1.expect)(tokens[1].text).to.deep.equal('hello world');
437
489
  });
438
490
  it('collects quasis outside and expressions inside of template strings', () => {
439
491
  let { tokens } = Lexer_1.Lexer.scan('`hello ${"world"}!`');
440
- chai_1.expect(tokens.map(t => t.kind)).to.deep.equal([
441
- _1.TokenKind.BackTick,
442
- _1.TokenKind.TemplateStringQuasi,
443
- _1.TokenKind.TemplateStringExpressionBegin,
444
- _1.TokenKind.StringLiteral,
445
- _1.TokenKind.TemplateStringExpressionEnd,
446
- _1.TokenKind.TemplateStringQuasi,
447
- _1.TokenKind.BackTick,
448
- _1.TokenKind.Eof
492
+ (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
493
+ TokenKind_1.TokenKind.BackTick,
494
+ TokenKind_1.TokenKind.TemplateStringQuasi,
495
+ TokenKind_1.TokenKind.TemplateStringExpressionBegin,
496
+ TokenKind_1.TokenKind.StringLiteral,
497
+ TokenKind_1.TokenKind.TemplateStringExpressionEnd,
498
+ TokenKind_1.TokenKind.TemplateStringQuasi,
499
+ TokenKind_1.TokenKind.BackTick,
500
+ TokenKind_1.TokenKind.Eof
449
501
  ]);
450
- chai_1.expect(tokens[1].text).to.deep.equal(`hello `);
502
+ (0, chai_1.expect)(tokens[1].text).to.deep.equal(`hello `);
451
503
  });
452
504
  it('real example, which is causing issues in the formatter', () => {
453
505
  let { tokens } = Lexer_1.Lexer.scan(`
@@ -466,133 +518,133 @@ describe('lexer', () => {
466
518
  \`
467
519
  end function
468
520
  `);
469
- chai_1.expect(tokens.map(t => t.kind)).to.deep.equal([
470
- _1.TokenKind.Newline,
471
- _1.TokenKind.Function,
472
- _1.TokenKind.Identifier,
473
- _1.TokenKind.LeftParen,
474
- _1.TokenKind.Identifier,
475
- _1.TokenKind.RightParen,
476
- _1.TokenKind.Newline,
477
- _1.TokenKind.Return,
478
- _1.TokenKind.BackTick,
479
- _1.TokenKind.TemplateStringQuasi,
480
- _1.TokenKind.EscapedCharCodeLiteral,
481
- _1.TokenKind.TemplateStringQuasi,
482
- _1.TokenKind.EscapedCharCodeLiteral,
483
- _1.TokenKind.TemplateStringQuasi,
484
- _1.TokenKind.EscapedCharCodeLiteral,
485
- _1.TokenKind.TemplateStringQuasi,
486
- _1.TokenKind.EscapedCharCodeLiteral,
487
- _1.TokenKind.TemplateStringQuasi,
488
- _1.TokenKind.EscapedCharCodeLiteral,
489
- _1.TokenKind.TemplateStringQuasi,
490
- _1.TokenKind.EscapedCharCodeLiteral,
491
- _1.TokenKind.TemplateStringQuasi,
492
- _1.TokenKind.EscapedCharCodeLiteral,
493
- _1.TokenKind.TemplateStringQuasi,
494
- _1.TokenKind.EscapedCharCodeLiteral,
495
- _1.TokenKind.TemplateStringQuasi,
496
- _1.TokenKind.EscapedCharCodeLiteral,
497
- _1.TokenKind.TemplateStringQuasi,
498
- _1.TokenKind.TemplateStringExpressionBegin,
499
- _1.TokenKind.Identifier,
500
- _1.TokenKind.Dot,
501
- _1.TokenKind.Identifier,
502
- _1.TokenKind.TemplateStringExpressionEnd,
503
- _1.TokenKind.TemplateStringQuasi,
504
- _1.TokenKind.EscapedCharCodeLiteral,
505
- _1.TokenKind.TemplateStringQuasi,
506
- _1.TokenKind.TemplateStringExpressionBegin,
507
- _1.TokenKind.Identifier,
508
- _1.TokenKind.Dot,
509
- _1.TokenKind.Identifier,
510
- _1.TokenKind.TemplateStringExpressionEnd,
511
- _1.TokenKind.TemplateStringQuasi,
512
- _1.TokenKind.EscapedCharCodeLiteral,
513
- _1.TokenKind.TemplateStringQuasi,
514
- _1.TokenKind.EscapedCharCodeLiteral,
515
- _1.TokenKind.TemplateStringQuasi,
516
- _1.TokenKind.EscapedCharCodeLiteral,
- _1.TokenKind.TemplateStringQuasi,
- _1.TokenKind.TemplateStringExpressionBegin,
- _1.TokenKind.Identifier,
- _1.TokenKind.Dot,
- _1.TokenKind.Identifier,
- _1.TokenKind.Dot,
- _1.TokenKind.Identifier,
- _1.TokenKind.Dot,
- _1.TokenKind.Identifier,
- _1.TokenKind.Dot,
- _1.TokenKind.Identifier,
- _1.TokenKind.TemplateStringExpressionEnd,
- _1.TokenKind.TemplateStringQuasi,
- _1.TokenKind.EscapedCharCodeLiteral,
- _1.TokenKind.TemplateStringQuasi,
- _1.TokenKind.EscapedCharCodeLiteral,
- _1.TokenKind.TemplateStringQuasi,
- _1.TokenKind.EscapedCharCodeLiteral,
- _1.TokenKind.TemplateStringQuasi,
- _1.TokenKind.EscapedCharCodeLiteral,
- _1.TokenKind.TemplateStringQuasi,
- _1.TokenKind.BackTick,
- _1.TokenKind.Newline,
- _1.TokenKind.EndFunction,
- _1.TokenKind.Newline,
- _1.TokenKind.Eof
+ (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
+ TokenKind_1.TokenKind.Newline,
+ TokenKind_1.TokenKind.Function,
+ TokenKind_1.TokenKind.Identifier,
+ TokenKind_1.TokenKind.LeftParen,
+ TokenKind_1.TokenKind.Identifier,
+ TokenKind_1.TokenKind.RightParen,
+ TokenKind_1.TokenKind.Newline,
+ TokenKind_1.TokenKind.Return,
+ TokenKind_1.TokenKind.BackTick,
+ TokenKind_1.TokenKind.TemplateStringQuasi,
+ TokenKind_1.TokenKind.EscapedCharCodeLiteral,
+ TokenKind_1.TokenKind.TemplateStringQuasi,
+ TokenKind_1.TokenKind.EscapedCharCodeLiteral,
+ TokenKind_1.TokenKind.TemplateStringQuasi,
+ TokenKind_1.TokenKind.EscapedCharCodeLiteral,
+ TokenKind_1.TokenKind.TemplateStringQuasi,
+ TokenKind_1.TokenKind.EscapedCharCodeLiteral,
+ TokenKind_1.TokenKind.TemplateStringQuasi,
+ TokenKind_1.TokenKind.EscapedCharCodeLiteral,
+ TokenKind_1.TokenKind.TemplateStringQuasi,
+ TokenKind_1.TokenKind.EscapedCharCodeLiteral,
+ TokenKind_1.TokenKind.TemplateStringQuasi,
+ TokenKind_1.TokenKind.EscapedCharCodeLiteral,
+ TokenKind_1.TokenKind.TemplateStringQuasi,
+ TokenKind_1.TokenKind.EscapedCharCodeLiteral,
+ TokenKind_1.TokenKind.TemplateStringQuasi,
+ TokenKind_1.TokenKind.EscapedCharCodeLiteral,
+ TokenKind_1.TokenKind.TemplateStringQuasi,
+ TokenKind_1.TokenKind.TemplateStringExpressionBegin,
+ TokenKind_1.TokenKind.Identifier,
+ TokenKind_1.TokenKind.Dot,
+ TokenKind_1.TokenKind.Identifier,
+ TokenKind_1.TokenKind.TemplateStringExpressionEnd,
+ TokenKind_1.TokenKind.TemplateStringQuasi,
+ TokenKind_1.TokenKind.EscapedCharCodeLiteral,
+ TokenKind_1.TokenKind.TemplateStringQuasi,
+ TokenKind_1.TokenKind.TemplateStringExpressionBegin,
+ TokenKind_1.TokenKind.Identifier,
+ TokenKind_1.TokenKind.Dot,
+ TokenKind_1.TokenKind.Identifier,
+ TokenKind_1.TokenKind.TemplateStringExpressionEnd,
+ TokenKind_1.TokenKind.TemplateStringQuasi,
+ TokenKind_1.TokenKind.EscapedCharCodeLiteral,
+ TokenKind_1.TokenKind.TemplateStringQuasi,
+ TokenKind_1.TokenKind.EscapedCharCodeLiteral,
+ TokenKind_1.TokenKind.TemplateStringQuasi,
+ TokenKind_1.TokenKind.EscapedCharCodeLiteral,
+ TokenKind_1.TokenKind.TemplateStringQuasi,
+ TokenKind_1.TokenKind.TemplateStringExpressionBegin,
+ TokenKind_1.TokenKind.Identifier,
+ TokenKind_1.TokenKind.Dot,
+ TokenKind_1.TokenKind.Identifier,
+ TokenKind_1.TokenKind.Dot,
+ TokenKind_1.TokenKind.Identifier,
+ TokenKind_1.TokenKind.Dot,
+ TokenKind_1.TokenKind.Identifier,
+ TokenKind_1.TokenKind.Dot,
+ TokenKind_1.TokenKind.Identifier,
+ TokenKind_1.TokenKind.TemplateStringExpressionEnd,
+ TokenKind_1.TokenKind.TemplateStringQuasi,
+ TokenKind_1.TokenKind.EscapedCharCodeLiteral,
+ TokenKind_1.TokenKind.TemplateStringQuasi,
+ TokenKind_1.TokenKind.EscapedCharCodeLiteral,
+ TokenKind_1.TokenKind.TemplateStringQuasi,
+ TokenKind_1.TokenKind.EscapedCharCodeLiteral,
+ TokenKind_1.TokenKind.TemplateStringQuasi,
+ TokenKind_1.TokenKind.EscapedCharCodeLiteral,
+ TokenKind_1.TokenKind.TemplateStringQuasi,
+ TokenKind_1.TokenKind.BackTick,
+ TokenKind_1.TokenKind.Newline,
+ TokenKind_1.TokenKind.EndFunction,
+ TokenKind_1.TokenKind.Newline,
+ TokenKind_1.TokenKind.Eof
  ]);
  });
  it('complicated example', () => {
  let { tokens } = Lexer_1.Lexer.scan('`hello ${"world"}!I am a ${"template" + "string"} and I am very ${["pleased"][0]} to meet you ${m.top.getChildCount()}.The end`');
- chai_1.expect(tokens.map(t => t.kind)).to.eql([
- _1.TokenKind.BackTick,
- _1.TokenKind.TemplateStringQuasi,
- _1.TokenKind.TemplateStringExpressionBegin,
- _1.TokenKind.StringLiteral,
- _1.TokenKind.TemplateStringExpressionEnd,
- _1.TokenKind.TemplateStringQuasi,
- _1.TokenKind.TemplateStringExpressionBegin,
- _1.TokenKind.StringLiteral,
- _1.TokenKind.Plus,
- _1.TokenKind.StringLiteral,
- _1.TokenKind.TemplateStringExpressionEnd,
- _1.TokenKind.TemplateStringQuasi,
- _1.TokenKind.TemplateStringExpressionBegin,
- _1.TokenKind.LeftSquareBracket,
- _1.TokenKind.StringLiteral,
- _1.TokenKind.RightSquareBracket,
- _1.TokenKind.LeftSquareBracket,
- _1.TokenKind.IntegerLiteral,
- _1.TokenKind.RightSquareBracket,
- _1.TokenKind.TemplateStringExpressionEnd,
- _1.TokenKind.TemplateStringQuasi,
- _1.TokenKind.TemplateStringExpressionBegin,
- _1.TokenKind.Identifier,
- _1.TokenKind.Dot,
- _1.TokenKind.Identifier,
- _1.TokenKind.Dot,
- _1.TokenKind.Identifier,
- _1.TokenKind.LeftParen,
- _1.TokenKind.RightParen,
- _1.TokenKind.TemplateStringExpressionEnd,
- _1.TokenKind.TemplateStringQuasi,
- _1.TokenKind.BackTick,
- _1.TokenKind.Eof
+ (0, chai_1.expect)(tokens.map(t => t.kind)).to.eql([
+ TokenKind_1.TokenKind.BackTick,
+ TokenKind_1.TokenKind.TemplateStringQuasi,
+ TokenKind_1.TokenKind.TemplateStringExpressionBegin,
+ TokenKind_1.TokenKind.StringLiteral,
+ TokenKind_1.TokenKind.TemplateStringExpressionEnd,
+ TokenKind_1.TokenKind.TemplateStringQuasi,
+ TokenKind_1.TokenKind.TemplateStringExpressionBegin,
+ TokenKind_1.TokenKind.StringLiteral,
+ TokenKind_1.TokenKind.Plus,
+ TokenKind_1.TokenKind.StringLiteral,
+ TokenKind_1.TokenKind.TemplateStringExpressionEnd,
+ TokenKind_1.TokenKind.TemplateStringQuasi,
+ TokenKind_1.TokenKind.TemplateStringExpressionBegin,
+ TokenKind_1.TokenKind.LeftSquareBracket,
+ TokenKind_1.TokenKind.StringLiteral,
+ TokenKind_1.TokenKind.RightSquareBracket,
+ TokenKind_1.TokenKind.LeftSquareBracket,
+ TokenKind_1.TokenKind.IntegerLiteral,
+ TokenKind_1.TokenKind.RightSquareBracket,
+ TokenKind_1.TokenKind.TemplateStringExpressionEnd,
+ TokenKind_1.TokenKind.TemplateStringQuasi,
+ TokenKind_1.TokenKind.TemplateStringExpressionBegin,
+ TokenKind_1.TokenKind.Identifier,
+ TokenKind_1.TokenKind.Dot,
+ TokenKind_1.TokenKind.Identifier,
+ TokenKind_1.TokenKind.Dot,
+ TokenKind_1.TokenKind.Identifier,
+ TokenKind_1.TokenKind.LeftParen,
+ TokenKind_1.TokenKind.RightParen,
+ TokenKind_1.TokenKind.TemplateStringExpressionEnd,
+ TokenKind_1.TokenKind.TemplateStringQuasi,
+ TokenKind_1.TokenKind.BackTick,
+ TokenKind_1.TokenKind.Eof
  ]);
  });
  it('allows multiline strings', () => {
  let { tokens } = Lexer_1.Lexer.scan('`multi-line\n\n`');
- chai_1.expect(tokens.map(t => t.kind)).to.deep.equal([
- _1.TokenKind.BackTick,
- _1.TokenKind.TemplateStringQuasi,
- _1.TokenKind.EscapedCharCodeLiteral,
- _1.TokenKind.TemplateStringQuasi,
- _1.TokenKind.EscapedCharCodeLiteral,
- _1.TokenKind.TemplateStringQuasi,
- _1.TokenKind.BackTick,
- _1.TokenKind.Eof
+ (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
+ TokenKind_1.TokenKind.BackTick,
+ TokenKind_1.TokenKind.TemplateStringQuasi,
+ TokenKind_1.TokenKind.EscapedCharCodeLiteral,
+ TokenKind_1.TokenKind.TemplateStringQuasi,
+ TokenKind_1.TokenKind.EscapedCharCodeLiteral,
+ TokenKind_1.TokenKind.TemplateStringQuasi,
+ TokenKind_1.TokenKind.BackTick,
+ TokenKind_1.TokenKind.Eof
  ]);
- chai_1.expect(tokens.map(x => x.text)).to.eql([
+ (0, chai_1.expect)(tokens.map(x => x.text)).to.eql([
  '`',
  'multi-line',
  '\n',
@@ -605,187 +657,187 @@ describe('lexer', () => {
  });
  it('maintains proper line/column locations for multiline strings', () => {
  let { tokens } = Lexer_1.Lexer.scan('123 `multi\nline\r\nstrings` true\nfalse');
- chai_1.expect(tokens.map(x => {
+ (0, chai_1.expect)(tokens.map(x => {
  return {
  range: x.range,
  kind: x.kind
  };
  })).to.eql([
- { range: vscode_languageserver_1.Range.create(0, 0, 0, 3), kind: _1.TokenKind.IntegerLiteral },
- { range: vscode_languageserver_1.Range.create(0, 4, 0, 5), kind: _1.TokenKind.BackTick },
- { range: vscode_languageserver_1.Range.create(0, 5, 0, 10), kind: _1.TokenKind.TemplateStringQuasi },
- { range: vscode_languageserver_1.Range.create(0, 10, 0, 11), kind: _1.TokenKind.EscapedCharCodeLiteral },
- { range: vscode_languageserver_1.Range.create(1, 0, 1, 4), kind: _1.TokenKind.TemplateStringQuasi },
- { range: vscode_languageserver_1.Range.create(1, 4, 1, 5), kind: _1.TokenKind.EscapedCharCodeLiteral },
- { range: vscode_languageserver_1.Range.create(1, 5, 1, 6), kind: _1.TokenKind.EscapedCharCodeLiteral },
- { range: vscode_languageserver_1.Range.create(2, 0, 2, 7), kind: _1.TokenKind.TemplateStringQuasi },
- { range: vscode_languageserver_1.Range.create(2, 7, 2, 8), kind: _1.TokenKind.BackTick },
- { range: vscode_languageserver_1.Range.create(2, 9, 2, 13), kind: _1.TokenKind.True },
- { range: vscode_languageserver_1.Range.create(2, 13, 2, 14), kind: _1.TokenKind.Newline },
- { range: vscode_languageserver_1.Range.create(3, 0, 3, 5), kind: _1.TokenKind.False },
- { range: vscode_languageserver_1.Range.create(3, 5, 3, 6), kind: _1.TokenKind.Eof }
+ { range: vscode_languageserver_1.Range.create(0, 0, 0, 3), kind: TokenKind_1.TokenKind.IntegerLiteral },
+ { range: vscode_languageserver_1.Range.create(0, 4, 0, 5), kind: TokenKind_1.TokenKind.BackTick },
+ { range: vscode_languageserver_1.Range.create(0, 5, 0, 10), kind: TokenKind_1.TokenKind.TemplateStringQuasi },
+ { range: vscode_languageserver_1.Range.create(0, 10, 0, 11), kind: TokenKind_1.TokenKind.EscapedCharCodeLiteral },
+ { range: vscode_languageserver_1.Range.create(1, 0, 1, 4), kind: TokenKind_1.TokenKind.TemplateStringQuasi },
+ { range: vscode_languageserver_1.Range.create(1, 4, 1, 5), kind: TokenKind_1.TokenKind.EscapedCharCodeLiteral },
+ { range: vscode_languageserver_1.Range.create(1, 5, 1, 6), kind: TokenKind_1.TokenKind.EscapedCharCodeLiteral },
+ { range: vscode_languageserver_1.Range.create(2, 0, 2, 7), kind: TokenKind_1.TokenKind.TemplateStringQuasi },
+ { range: vscode_languageserver_1.Range.create(2, 7, 2, 8), kind: TokenKind_1.TokenKind.BackTick },
+ { range: vscode_languageserver_1.Range.create(2, 9, 2, 13), kind: TokenKind_1.TokenKind.True },
+ { range: vscode_languageserver_1.Range.create(2, 13, 2, 14), kind: TokenKind_1.TokenKind.Newline },
+ { range: vscode_languageserver_1.Range.create(3, 0, 3, 5), kind: TokenKind_1.TokenKind.False },
+ { range: vscode_languageserver_1.Range.create(3, 5, 3, 6), kind: TokenKind_1.TokenKind.Eof }
  ]);
  });
  it('Example that tripped up the expression tests', () => {
  let { tokens } = Lexer_1.Lexer.scan('`I am a complex example\n${a.isRunning(["a","b","c"])}\nmore ${m.finish(true)}`');
- chai_1.expect(tokens.map(t => t.kind)).to.deep.equal([
- _1.TokenKind.BackTick,
- _1.TokenKind.TemplateStringQuasi,
- _1.TokenKind.EscapedCharCodeLiteral,
- _1.TokenKind.TemplateStringQuasi,
- _1.TokenKind.TemplateStringExpressionBegin,
- _1.TokenKind.Identifier,
- _1.TokenKind.Dot,
- _1.TokenKind.Identifier,
- _1.TokenKind.LeftParen,
- _1.TokenKind.LeftSquareBracket,
- _1.TokenKind.StringLiteral,
- _1.TokenKind.Comma,
- _1.TokenKind.StringLiteral,
- _1.TokenKind.Comma,
- _1.TokenKind.StringLiteral,
- _1.TokenKind.RightSquareBracket,
- _1.TokenKind.RightParen,
- _1.TokenKind.TemplateStringExpressionEnd,
- _1.TokenKind.TemplateStringQuasi,
- _1.TokenKind.EscapedCharCodeLiteral,
- _1.TokenKind.TemplateStringQuasi,
- _1.TokenKind.TemplateStringExpressionBegin,
- _1.TokenKind.Identifier,
- _1.TokenKind.Dot,
- _1.TokenKind.Identifier,
- _1.TokenKind.LeftParen,
- _1.TokenKind.True,
- _1.TokenKind.RightParen,
- _1.TokenKind.TemplateStringExpressionEnd,
- _1.TokenKind.TemplateStringQuasi,
- _1.TokenKind.BackTick,
- _1.TokenKind.Eof
+ (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
+ TokenKind_1.TokenKind.BackTick,
+ TokenKind_1.TokenKind.TemplateStringQuasi,
+ TokenKind_1.TokenKind.EscapedCharCodeLiteral,
+ TokenKind_1.TokenKind.TemplateStringQuasi,
+ TokenKind_1.TokenKind.TemplateStringExpressionBegin,
+ TokenKind_1.TokenKind.Identifier,
+ TokenKind_1.TokenKind.Dot,
+ TokenKind_1.TokenKind.Identifier,
+ TokenKind_1.TokenKind.LeftParen,
+ TokenKind_1.TokenKind.LeftSquareBracket,
+ TokenKind_1.TokenKind.StringLiteral,
+ TokenKind_1.TokenKind.Comma,
+ TokenKind_1.TokenKind.StringLiteral,
+ TokenKind_1.TokenKind.Comma,
+ TokenKind_1.TokenKind.StringLiteral,
+ TokenKind_1.TokenKind.RightSquareBracket,
+ TokenKind_1.TokenKind.RightParen,
+ TokenKind_1.TokenKind.TemplateStringExpressionEnd,
+ TokenKind_1.TokenKind.TemplateStringQuasi,
+ TokenKind_1.TokenKind.EscapedCharCodeLiteral,
+ TokenKind_1.TokenKind.TemplateStringQuasi,
+ TokenKind_1.TokenKind.TemplateStringExpressionBegin,
+ TokenKind_1.TokenKind.Identifier,
+ TokenKind_1.TokenKind.Dot,
+ TokenKind_1.TokenKind.Identifier,
+ TokenKind_1.TokenKind.LeftParen,
+ TokenKind_1.TokenKind.True,
+ TokenKind_1.TokenKind.RightParen,
+ TokenKind_1.TokenKind.TemplateStringExpressionEnd,
+ TokenKind_1.TokenKind.TemplateStringQuasi,
+ TokenKind_1.TokenKind.BackTick,
+ TokenKind_1.TokenKind.Eof
  ]);
  });
  }); // string literals
  describe('double literals', () => {
  it('respects \'#\' suffix', () => {
  let d = Lexer_1.Lexer.scan('123#').tokens[0];
- chai_1.expect(d.kind).to.equal(_1.TokenKind.DoubleLiteral);
- chai_1.expect(d.text).to.eql('123#');
+ (0, chai_1.expect)(d.kind).to.equal(TokenKind_1.TokenKind.DoubleLiteral);
+ (0, chai_1.expect)(d.text).to.eql('123#');
  });
  it('forces literals >= 10 digits into doubles', () => {
  let d = Lexer_1.Lexer.scan('0000000005').tokens[0];
- chai_1.expect(d.kind).to.equal(_1.TokenKind.DoubleLiteral);
- chai_1.expect(d.text).to.eql('0000000005');
+ (0, chai_1.expect)(d.kind).to.equal(TokenKind_1.TokenKind.DoubleLiteral);
+ (0, chai_1.expect)(d.text).to.eql('0000000005');
  });
  it('forces literals with \'D\' in exponent into doubles', () => {
  let d = Lexer_1.Lexer.scan('2.5d3').tokens[0];
- chai_1.expect(d.kind).to.equal(_1.TokenKind.DoubleLiteral);
- chai_1.expect(d.text).to.eql('2.5d3');
+ (0, chai_1.expect)(d.kind).to.equal(TokenKind_1.TokenKind.DoubleLiteral);
+ (0, chai_1.expect)(d.text).to.eql('2.5d3');
  });
  it('allows digits before `.` to be elided', () => {
  let f = Lexer_1.Lexer.scan('.123#').tokens[0];
- chai_1.expect(f.kind).to.equal(_1.TokenKind.DoubleLiteral);
- chai_1.expect(f.text).to.eql('.123#');
+ (0, chai_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.DoubleLiteral);
+ (0, chai_1.expect)(f.text).to.eql('.123#');
  });
  it('allows digits after `.` to be elided', () => {
  let f = Lexer_1.Lexer.scan('12.#').tokens[0];
- chai_1.expect(f.kind).to.equal(_1.TokenKind.DoubleLiteral);
- chai_1.expect(f.text).to.eql('12.#');
+ (0, chai_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.DoubleLiteral);
+ (0, chai_1.expect)(f.text).to.eql('12.#');
  });
  });
  describe('float literals', () => {
  it('respects \'!\' suffix', () => {
  let f = Lexer_1.Lexer.scan('0.00000008!').tokens[0];
- chai_1.expect(f.kind).to.equal(_1.TokenKind.FloatLiteral);
+ (0, chai_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
  // Floating precision will make this *not* equal
- chai_1.expect(f.text).not.to.equal(8e-8);
- chai_1.expect(f.text).to.eql('0.00000008!');
+ (0, chai_1.expect)(f.text).not.to.equal(8e-8);
+ (0, chai_1.expect)(f.text).to.eql('0.00000008!');
  });
  it('forces literals with a decimal into floats', () => {
  let f = Lexer_1.Lexer.scan('1.0').tokens[0];
- chai_1.expect(f.kind).to.equal(_1.TokenKind.FloatLiteral);
- chai_1.expect(f.text).to.equal('1.0');
+ (0, chai_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
+ (0, chai_1.expect)(f.text).to.equal('1.0');
  });
  it('forces literals with \'E\' in exponent into floats', () => {
  let f = Lexer_1.Lexer.scan('2.5e3').tokens[0];
- chai_1.expect(f.kind).to.equal(_1.TokenKind.FloatLiteral);
- chai_1.expect(f.text).to.eql('2.5e3');
+ (0, chai_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
+ (0, chai_1.expect)(f.text).to.eql('2.5e3');
  });
  it('supports larger-than-supported-precision floats to be defined with exponents', () => {
  let f = Lexer_1.Lexer.scan('2.3659475627512424e-38').tokens[0];
- chai_1.expect(f.kind).to.equal(_1.TokenKind.FloatLiteral);
- chai_1.expect(f.text).to.eql('2.3659475627512424e-38');
+ (0, chai_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
+ (0, chai_1.expect)(f.text).to.eql('2.3659475627512424e-38');
  });
  it('allows digits before `.` to be elided', () => {
  let f = Lexer_1.Lexer.scan('.123').tokens[0];
- chai_1.expect(f.kind).to.equal(_1.TokenKind.FloatLiteral);
- chai_1.expect(f.text).to.equal('.123');
+ (0, chai_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
+ (0, chai_1.expect)(f.text).to.equal('.123');
  });
  it('allows digits after `.` to be elided', () => {
  let f = Lexer_1.Lexer.scan('12.').tokens[0];
- chai_1.expect(f.kind).to.equal(_1.TokenKind.FloatLiteral);
- chai_1.expect(f.text).to.equal('12.');
+ (0, chai_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
+ (0, chai_1.expect)(f.text).to.equal('12.');
  });
  });
  describe('long integer literals', () => {
  it('respects \'&\' suffix', () => {
  let f = Lexer_1.Lexer.scan('1&').tokens[0];
- chai_1.expect(f.kind).to.equal(_1.TokenKind.LongIntegerLiteral);
- chai_1.expect(f.text).to.eql('1&');
+ (0, chai_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.LongIntegerLiteral);
+ (0, chai_1.expect)(f.text).to.eql('1&');
  });
  it('supports hexadecimal literals', () => {
  let i = Lexer_1.Lexer.scan('&hf00d&').tokens[0];
- chai_1.expect(i.kind).to.equal(_1.TokenKind.LongIntegerLiteral);
- chai_1.expect(i.text).to.equal('&hf00d&');
+ (0, chai_1.expect)(i.kind).to.equal(TokenKind_1.TokenKind.LongIntegerLiteral);
+ (0, chai_1.expect)(i.text).to.equal('&hf00d&');
  });
  it('allows very long Int64 literals', () => {
  let li = Lexer_1.Lexer.scan('9876543210&').tokens[0];
- chai_1.expect(li.kind).to.equal(_1.TokenKind.LongIntegerLiteral);
- chai_1.expect(li.text).to.equal('9876543210&');
+ (0, chai_1.expect)(li.kind).to.equal(TokenKind_1.TokenKind.LongIntegerLiteral);
+ (0, chai_1.expect)(li.text).to.equal('9876543210&');
  });
  it('forces literals with \'&\' suffix into Int64s', () => {
  let li = Lexer_1.Lexer.scan('123&').tokens[0];
- chai_1.expect(li.kind).to.equal(_1.TokenKind.LongIntegerLiteral);
- chai_1.expect(li.text).to.deep.equal('123&');
+ (0, chai_1.expect)(li.kind).to.equal(TokenKind_1.TokenKind.LongIntegerLiteral);
+ (0, chai_1.expect)(li.text).to.deep.equal('123&');
  });
  });
  describe('integer literals', () => {
  it('respects \'%\' suffix', () => {
  let f = Lexer_1.Lexer.scan('1%').tokens[0];
- chai_1.expect(f.kind).to.equal(_1.TokenKind.IntegerLiteral);
- chai_1.expect(f.text).to.eql('1%');
+ (0, chai_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.IntegerLiteral);
+ (0, chai_1.expect)(f.text).to.eql('1%');
  });
  it('does not allow decimal numbers to end with %', () => {
  let f = Lexer_1.Lexer.scan('1.2%').tokens[0];
- chai_1.expect(f.kind).to.equal(_1.TokenKind.FloatLiteral);
- chai_1.expect(f.text).to.eql('1.2');
+ (0, chai_1.expect)(f.kind).to.equal(TokenKind_1.TokenKind.FloatLiteral);
+ (0, chai_1.expect)(f.text).to.eql('1.2');
  });
  it('supports hexadecimal literals', () => {
  let i = Lexer_1.Lexer.scan('&hFf').tokens[0];
- chai_1.expect(i.kind).to.equal(_1.TokenKind.IntegerLiteral);
- chai_1.expect(i.text).to.deep.equal('&hFf');
+ (0, chai_1.expect)(i.kind).to.equal(TokenKind_1.TokenKind.IntegerLiteral);
+ (0, chai_1.expect)(i.text).to.deep.equal('&hFf');
  });
  it('falls back to a regular integer', () => {
  let i = Lexer_1.Lexer.scan('123').tokens[0];
- chai_1.expect(i.kind).to.equal(_1.TokenKind.IntegerLiteral);
- chai_1.expect(i.text).to.deep.equal('123');
+ (0, chai_1.expect)(i.kind).to.equal(TokenKind_1.TokenKind.IntegerLiteral);
+ (0, chai_1.expect)(i.text).to.deep.equal('123');
  });
  });
  describe('types', () => {
  it('captures type tokens', () => {
- chai_1.expect(Lexer_1.Lexer.scan(`
+ (0, chai_1.expect)(Lexer_1.Lexer.scan(`
  void boolean integer longinteger float double string object interface invalid dynamic
  `.trim()).tokens.map(x => x.kind)).to.eql([
- _1.TokenKind.Void,
- _1.TokenKind.Boolean,
- _1.TokenKind.Integer,
- _1.TokenKind.LongInteger,
- _1.TokenKind.Float,
- _1.TokenKind.Double,
- _1.TokenKind.String,
- _1.TokenKind.Object,
- _1.TokenKind.Interface,
- _1.TokenKind.Invalid,
- _1.TokenKind.Dynamic,
- _1.TokenKind.Eof
+ TokenKind_1.TokenKind.Void,
+ TokenKind_1.TokenKind.Boolean,
+ TokenKind_1.TokenKind.Integer,
+ TokenKind_1.TokenKind.LongInteger,
+ TokenKind_1.TokenKind.Float,
+ TokenKind_1.TokenKind.Double,
+ TokenKind_1.TokenKind.String,
+ TokenKind_1.TokenKind.Object,
+ TokenKind_1.TokenKind.Interface,
+ TokenKind_1.TokenKind.Invalid,
+ TokenKind_1.TokenKind.Dynamic,
+ TokenKind_1.TokenKind.Eof
  ]);
  });
  });
@@ -794,59 +846,59 @@ describe('lexer', () => {
  // test just a sample of single-word reserved words for now.
  // if we find any that we've missed
  let { tokens } = Lexer_1.Lexer.scan('and then or if else endif return true false line_num');
- chai_1.expect(tokens.map(w => w.kind)).to.deep.equal([
- _1.TokenKind.And,
- _1.TokenKind.Then,
- _1.TokenKind.Or,
- _1.TokenKind.If,
- _1.TokenKind.Else,
- _1.TokenKind.EndIf,
- _1.TokenKind.Return,
- _1.TokenKind.True,
- _1.TokenKind.False,
- _1.TokenKind.LineNumLiteral,
- _1.TokenKind.Eof
+ (0, chai_1.expect)(tokens.map(w => w.kind)).to.deep.equal([
+ TokenKind_1.TokenKind.And,
+ TokenKind_1.TokenKind.Then,
+ TokenKind_1.TokenKind.Or,
+ TokenKind_1.TokenKind.If,
+ TokenKind_1.TokenKind.Else,
+ TokenKind_1.TokenKind.EndIf,
+ TokenKind_1.TokenKind.Return,
+ TokenKind_1.TokenKind.True,
+ TokenKind_1.TokenKind.False,
+ TokenKind_1.TokenKind.LineNumLiteral,
+ TokenKind_1.TokenKind.Eof
  ]);
  });
  it('matches multi-word keywords', () => {
  let { tokens } = Lexer_1.Lexer.scan('end if end while End Sub end Function Exit wHILe');
- chai_1.expect(tokens.map(w => w.kind)).to.deep.equal([
- _1.TokenKind.EndIf,
- _1.TokenKind.EndWhile,
- _1.TokenKind.EndSub,
- _1.TokenKind.EndFunction,
- _1.TokenKind.ExitWhile,
- _1.TokenKind.Eof
+ (0, chai_1.expect)(tokens.map(w => w.kind)).to.deep.equal([
+ TokenKind_1.TokenKind.EndIf,
+ TokenKind_1.TokenKind.EndWhile,
+ TokenKind_1.TokenKind.EndSub,
+ TokenKind_1.TokenKind.EndFunction,
+ TokenKind_1.TokenKind.ExitWhile,
+ TokenKind_1.TokenKind.Eof
  ]);
  });
  it('accepts \'exit for\' but not \'exitfor\'', () => {
  let { tokens } = Lexer_1.Lexer.scan('exit for exitfor');
- chai_1.expect(tokens.map(w => w.kind)).to.deep.equal([
- _1.TokenKind.ExitFor,
- _1.TokenKind.Identifier,
- _1.TokenKind.Eof
+ (0, chai_1.expect)(tokens.map(w => w.kind)).to.deep.equal([
+ TokenKind_1.TokenKind.ExitFor,
+ TokenKind_1.TokenKind.Identifier,
+ TokenKind_1.TokenKind.Eof
  ]);
  });
  it('matches keywords with silly capitalization', () => {
  let { tokens } = Lexer_1.Lexer.scan('iF ELSE eNDIf FUncTioN');
- chai_1.expect(tokens.map(w => w.kind)).to.deep.equal([
- _1.TokenKind.If,
- _1.TokenKind.Else,
- _1.TokenKind.EndIf,
- _1.TokenKind.Function,
- _1.TokenKind.Eof
+ (0, chai_1.expect)(tokens.map(w => w.kind)).to.deep.equal([
+ TokenKind_1.TokenKind.If,
+ TokenKind_1.TokenKind.Else,
+ TokenKind_1.TokenKind.EndIf,
+ TokenKind_1.TokenKind.Function,
+ TokenKind_1.TokenKind.Eof
  ]);
  });
  it('allows alpha-numeric (plus \'_\') identifiers', () => {
  let identifier = Lexer_1.Lexer.scan('_abc_123_').tokens[0];
- chai_1.expect(identifier.kind).to.equal(_1.TokenKind.Identifier);
- chai_1.expect(identifier.text).to.equal('_abc_123_');
+ (0, chai_1.expect)(identifier.kind).to.equal(TokenKind_1.TokenKind.Identifier);
+ (0, chai_1.expect)(identifier.text).to.equal('_abc_123_');
  });
  it('allows identifiers with trailing type designators', () => {
  let { tokens } = Lexer_1.Lexer.scan('lorem$ ipsum% dolor! sit# amet&');
- let identifiers = tokens.filter(t => t.kind !== _1.TokenKind.Eof);
- chai_1.expect(identifiers.every(t => t.kind === _1.TokenKind.Identifier));
- chai_1.expect(identifiers.map(t => t.text)).to.deep.equal([
+ let identifiers = tokens.filter(t => t.kind !== TokenKind_1.TokenKind.Eof);
+ (0, chai_1.expect)(identifiers.every(t => t.kind === TokenKind_1.TokenKind.Identifier));
+ (0, chai_1.expect)(identifiers.map(t => t.text)).to.deep.equal([
  'lorem$',
  'ipsum%',
  'dolor!',
@@ -858,20 +910,20 @@ describe('lexer', () => {
  describe('conditional compilation', () => {
  it('reads constant declarations', () => {
  let { tokens } = Lexer_1.Lexer.scan('#const foo true');
- chai_1.expect(tokens.map(t => t.kind)).to.deep.equal([
- _1.TokenKind.HashConst,
- _1.TokenKind.Identifier,
- _1.TokenKind.True,
- _1.TokenKind.Eof
+ (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
+ TokenKind_1.TokenKind.HashConst,
+ TokenKind_1.TokenKind.Identifier,
+ TokenKind_1.TokenKind.True,
+ TokenKind_1.TokenKind.Eof
  ]);
  });
  it('reads constant aliases', () => {
  let { tokens } = Lexer_1.Lexer.scan('#const bar foo');
- chai_1.expect(tokens.map(t => t.kind)).to.deep.equal([
- _1.TokenKind.HashConst,
- _1.TokenKind.Identifier,
- _1.TokenKind.Identifier,
- _1.TokenKind.Eof
+ (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
+ TokenKind_1.TokenKind.HashConst,
+ TokenKind_1.TokenKind.Identifier,
+ TokenKind_1.TokenKind.Identifier,
+ TokenKind_1.TokenKind.Eof
  ]);
  });
  it('reads conditional directives', () => {
@@ -885,19 +937,19 @@ describe('lexer', () => {
  `, {
  includeWhitespace: false
  });
- chai_1.expect(tokens.map(t => t.kind).filter(x => x !== _1.TokenKind.Newline)).to.deep.equal([
- _1.TokenKind.HashIf,
- _1.TokenKind.HashElseIf,
- _1.TokenKind.HashElseIf,
- _1.TokenKind.HashElse,
- _1.TokenKind.HashEndIf,
- _1.TokenKind.HashEndIf,
- _1.TokenKind.Eof
+ (0, chai_1.expect)(tokens.map(t => t.kind).filter(x => x !== TokenKind_1.TokenKind.Newline)).to.deep.equal([
+ TokenKind_1.TokenKind.HashIf,
+ TokenKind_1.TokenKind.HashElseIf,
+ TokenKind_1.TokenKind.HashElseIf,
+ TokenKind_1.TokenKind.HashElse,
+ TokenKind_1.TokenKind.HashEndIf,
+ TokenKind_1.TokenKind.HashEndIf,
+ TokenKind_1.TokenKind.Eof
  ]);
  });
  it('treats text "constructor" as an identifier', () => {
  let lexer = Lexer_1.Lexer.scan(`function constructor()\nend function`);
- chai_1.expect(lexer.tokens[1].kind).to.equal(_1.TokenKind.Identifier);
+ (0, chai_1.expect)(lexer.tokens[1].kind).to.equal(TokenKind_1.TokenKind.Identifier);
  });
  it('reads upper case conditional directives', () => {
  let { tokens } = Lexer_1.Lexer.scan(`
@@ -910,45 +962,45 @@ describe('lexer', () => {
  `, {
  includeWhitespace: false
  });
- chai_1.expect(tokens.map(t => t.kind).filter(x => x !== _1.TokenKind.Newline)).to.deep.equal([
- _1.TokenKind.HashIf,
- _1.TokenKind.HashElseIf,
- _1.TokenKind.HashElseIf,
- _1.TokenKind.HashElse,
- _1.TokenKind.HashEndIf,
- _1.TokenKind.HashEndIf,
- _1.TokenKind.Eof
+ (0, chai_1.expect)(tokens.map(t => t.kind).filter(x => x !== TokenKind_1.TokenKind.Newline)).to.deep.equal([
+ TokenKind_1.TokenKind.HashIf,
+ TokenKind_1.TokenKind.HashElseIf,
+ TokenKind_1.TokenKind.HashElseIf,
+ TokenKind_1.TokenKind.HashElse,
+ TokenKind_1.TokenKind.HashEndIf,
+ TokenKind_1.TokenKind.HashEndIf,
+ TokenKind_1.TokenKind.Eof
  ]);
  });
  it('supports various spacings between #endif', () => {
  let { tokens } = Lexer_1.Lexer.scan('#endif #end if #end\tif #end if #end\t\t if');
- chai_1.expect(tokens.map(t => t.kind)).to.deep.equal([
- _1.TokenKind.HashEndIf,
- _1.TokenKind.HashEndIf,
- _1.TokenKind.HashEndIf,
- _1.TokenKind.HashEndIf,
- _1.TokenKind.HashEndIf,
- _1.TokenKind.Eof
+ (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
+ TokenKind_1.TokenKind.HashEndIf,
+ TokenKind_1.TokenKind.HashEndIf,
+ TokenKind_1.TokenKind.HashEndIf,
+ TokenKind_1.TokenKind.HashEndIf,
+ TokenKind_1.TokenKind.HashEndIf,
+ TokenKind_1.TokenKind.Eof
  ]);
  });
  it('reads forced compilation diagnostics with messages', () => {
  let { tokens } = Lexer_1.Lexer.scan('#error a message goes here\n', {
  includeWhitespace: true
  });
- chai_1.expect(tokens.map(t => t.kind)).to.deep.equal([
- _1.TokenKind.HashError,
- _1.TokenKind.Whitespace,
- _1.TokenKind.HashErrorMessage,
- _1.TokenKind.Newline,
- _1.TokenKind.Eof
+ (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
+ TokenKind_1.TokenKind.HashError,
+ TokenKind_1.TokenKind.Whitespace,
+ TokenKind_1.TokenKind.HashErrorMessage,
+ TokenKind_1.TokenKind.Newline,
+ TokenKind_1.TokenKind.Eof
  ]);
- chai_1.expect(tokens[2].text).to.equal('a message goes here');
+ (0, chai_1.expect)(tokens[2].text).to.equal('a message goes here');
  });
  });
  describe('location tracking', () => {
  it('tracks starting and ending locations including whitespace', () => {
  let { tokens } = Lexer_1.Lexer.scan(`sub foo()\n print "bar"\r\nend sub`, { includeWhitespace: true });
- chai_1.expect(tokens.map(t => t.range)).to.eql([
+ (0, chai_1.expect)(tokens.map(t => t.range)).to.eql([
  vscode_languageserver_1.Range.create(0, 0, 0, 3),
  vscode_languageserver_1.Range.create(0, 3, 0, 4),
  vscode_languageserver_1.Range.create(0, 4, 0, 7),
@@ -966,7 +1018,7 @@ describe('lexer', () => {
  });
  it('tracks starting and ending locations excluding whitespace', () => {
  let { tokens } = Lexer_1.Lexer.scan(`sub foo()\n print "bar"\r\nend sub`, { includeWhitespace: false });
- chai_1.expect(tokens.map(t => t.range)).to.eql([
+ (0, chai_1.expect)(tokens.map(t => t.range)).to.eql([
  vscode_languageserver_1.Range.create(0, 0, 0, 3),
  vscode_languageserver_1.Range.create(0, 4, 0, 7),
  vscode_languageserver_1.Range.create(0, 7, 0, 8),
@@ -983,30 +1035,30 @@ describe('lexer', () => {
  describe('two word keywords', () => {
  it('supports various spacing between for each', () => {
  let { tokens } = Lexer_1.Lexer.scan('for each for each for each for\teach for\t each for \teach for \t each');
- chai_1.expect(tokens.map(t => t.kind)).to.deep.equal([
- _1.TokenKind.ForEach,
- _1.TokenKind.ForEach,
- _1.TokenKind.ForEach,
- _1.TokenKind.ForEach,
- _1.TokenKind.ForEach,
- _1.TokenKind.ForEach,
- _1.TokenKind.ForEach,
- _1.TokenKind.Eof
+ (0, chai_1.expect)(tokens.map(t => t.kind)).to.deep.equal([
+ TokenKind_1.TokenKind.ForEach,
+ TokenKind_1.TokenKind.ForEach,
+ TokenKind_1.TokenKind.ForEach,
+ TokenKind_1.TokenKind.ForEach,
+ TokenKind_1.TokenKind.ForEach,
+ TokenKind_1.TokenKind.ForEach,
+ TokenKind_1.TokenKind.ForEach,
+ TokenKind_1.TokenKind.Eof
  ]);
  });
  });
  it('detects rem when used as keyword', () => {
  let { tokens } = Lexer_1.Lexer.scan('person.rem=true');
- chai_1.expect(tokens.map(t => t.kind)).to.eql([
- _1.TokenKind.Identifier,
- _1.TokenKind.Dot,
- _1.TokenKind.Identifier,
- _1.TokenKind.Equal,
- _1.TokenKind.True,
- _1.TokenKind.Eof
+ (0, chai_1.expect)(tokens.map(t => t.kind)).to.eql([
+ TokenKind_1.TokenKind.Identifier,
+ TokenKind_1.TokenKind.Dot,
+ TokenKind_1.TokenKind.Identifier,
+ TokenKind_1.TokenKind.Equal,
+ TokenKind_1.TokenKind.True,
+ TokenKind_1.TokenKind.Eof
  ]);
  //verify the location of `rem`
- chai_1.expect(tokens.map(t => [t.range.start.character, t.range.end.character])).to.eql([
+ (0, chai_1.expect)(tokens.map(t => [t.range.start.character, t.range.end.character])).to.eql([
  [0, 6],
  [6, 7],
  [7, 10],
@@ -1018,61 +1070,69 @@ describe('lexer', () => {
  describe('isToken', () => {
  it('works', () => {
  let range = vscode_languageserver_1.Range.create(0, 0, 0, 2);
- chai_1.expect(Token_1.isToken({ kind: _1.TokenKind.And, text: 'and', range: range })).is.true;
- chai_1.expect(Token_1.isToken({ text: 'and', range: range })).is.false;
+ (0, chai_1.expect)((0, Token_1.isToken)({ kind: TokenKind_1.TokenKind.And, text: 'and', range: range })).is.true;
+ (0, chai_1.expect)((0, Token_1.isToken)({ text: 'and', range: range })).is.false;
  });
  });
+ it('recognizes enum-related keywords', () => {
+ (0, chai_1.expect)(Lexer_1.Lexer.scan('enum end enum endenum').tokens.map(x => x.kind)).to.eql([
+ TokenKind_1.TokenKind.Enum,
+ TokenKind_1.TokenKind.EndEnum,
+ TokenKind_1.TokenKind.EndEnum,
+ TokenKind_1.TokenKind.Eof
+ ]);
+ });
  it('recognizes class-related keywords', () => {
- chai_1.expect(Lexer_1.Lexer.scan('class public protected private end class endclass new override').tokens.map(x => x.kind)).to.eql([
- _1.TokenKind.Class,
- _1.TokenKind.Public,
- _1.TokenKind.Protected,
- _1.TokenKind.Private,
- _1.TokenKind.EndClass,
- _1.TokenKind.EndClass,
- _1.TokenKind.New,
- _1.TokenKind.Override,
- _1.TokenKind.Eof
+ (0, chai_1.expect)(Lexer_1.Lexer.scan('class public protected private end class endclass new override').tokens.map(x => x.kind)).to.eql([
+ TokenKind_1.TokenKind.Class,
+ TokenKind_1.TokenKind.Public,
+ TokenKind_1.TokenKind.Protected,
+ TokenKind_1.TokenKind.Private,
+ TokenKind_1.TokenKind.EndClass,
+ TokenKind_1.TokenKind.EndClass,
+ TokenKind_1.TokenKind.New,
+ TokenKind_1.TokenKind.Override,
+ TokenKind_1.TokenKind.Eof
  ]);
  });
  describe('whitespace', () => {
  it('preserves the exact number of whitespace characterswhitespace', () => {
  let { tokens } = Lexer_1.Lexer.scan(' ', { includeWhitespace: true });
- chai_1.expect(tokens[0]).to.include({
- kind: _1.TokenKind.Whitespace,
+ (0, chai_1.expect)(tokens[0]).to.include({
+ kind: TokenKind_1.TokenKind.Whitespace,
  text: ' '
  });
  });
  it('tokenizes whitespace between things', () => {
  let { tokens } = Lexer_1.Lexer.scan('sub main ( ) \n end sub', { includeWhitespace: true });
- chai_1.expect(tokens.map(x => x.kind)).to.eql([
- _1.TokenKind.Sub,
- _1.TokenKind.Whitespace,
- _1.TokenKind.Identifier,
- _1.TokenKind.Whitespace,
- _1.TokenKind.LeftParen,
- _1.TokenKind.Whitespace,
- _1.TokenKind.RightParen,
- _1.TokenKind.Whitespace,
- _1.TokenKind.Newline,
- _1.TokenKind.Whitespace,
- _1.TokenKind.EndSub,
- _1.TokenKind.Eof
+ (0, chai_1.expect)(tokens.map(x => x.kind)).to.eql([
+ TokenKind_1.TokenKind.Sub,
+ TokenKind_1.TokenKind.Whitespace,
+ TokenKind_1.TokenKind.Identifier,
+ TokenKind_1.TokenKind.Whitespace,
+ TokenKind_1.TokenKind.LeftParen,
+ TokenKind_1.TokenKind.Whitespace,
+ TokenKind_1.TokenKind.RightParen,
+ TokenKind_1.TokenKind.Whitespace,
+ TokenKind_1.TokenKind.Newline,
+ TokenKind_1.TokenKind.Whitespace,
+ TokenKind_1.TokenKind.EndSub,
+ TokenKind_1.TokenKind.Eof
  ]);
  });
  });
  it('identifies brighterscript source literals', () => {
  let { tokens } = Lexer_1.Lexer.scan('LINE_NUM SOURCE_FILE_PATH SOURCE_LINE_NUM FUNCTION_NAME SOURCE_FUNCTION_NAME SOURCE_LOCATION PKG_PATH PKG_LOCATION');
- chai_1.expect(tokens.map(x => x.kind)).to.eql([
- _1.TokenKind.LineNumLiteral,
- _1.TokenKind.SourceFilePathLiteral,
- _1.TokenKind.SourceLineNumLiteral,
- _1.TokenKind.FunctionNameLiteral,
- _1.TokenKind.SourceFunctionNameLiteral,
- _1.TokenKind.SourceLocationLiteral,
- _1.TokenKind.PkgPathLiteral,
- _1.TokenKind.PkgLocationLiteral,
- _1.TokenKind.Eof
+ (0, chai_1.expect)(tokens.map(x => x.kind)).to.eql([
+ TokenKind_1.TokenKind.LineNumLiteral,
+ TokenKind_1.TokenKind.SourceFilePathLiteral,
+ TokenKind_1.TokenKind.SourceLineNumLiteral,
+ TokenKind_1.TokenKind.FunctionNameLiteral,
+ TokenKind_1.TokenKind.SourceFunctionNameLiteral,
+ TokenKind_1.TokenKind.SourceLocationLiteral,
+ TokenKind_1.TokenKind.PkgPathLiteral,
+ TokenKind_1.TokenKind.PkgLocationLiteral,
+ TokenKind_1.TokenKind.Eof
  ]);
  });
  it('properly tracks leadingWhitespace', () => {
@@ -1084,18 +1144,79 @@ describe('lexer', () => {
  end sub
  `;
  const { tokens } = Lexer_1.Lexer.scan(text, { includeWhitespace: false });
- chai_1.expect(util_1.default.tokensToString(tokens)).to.equal(text);
+ (0, chai_1.expect)(util_1.default.tokensToString(tokens)).to.equal(text);
  });
  it('properly detects try/catch tokens', () => {
  const { tokens } = Lexer_1.Lexer.scan(`try catch endtry end try throw`, { includeWhitespace: false });
- chai_1.expect(tokens.map(x => x.kind)).to.eql([
- _1.TokenKind.Try,
- _1.TokenKind.Catch,
- _1.TokenKind.EndTry,
- _1.TokenKind.EndTry,
- _1.TokenKind.Throw,
- _1.TokenKind.Eof
+ (0, chai_1.expect)(tokens.map(x => x.kind)).to.eql([
+ TokenKind_1.TokenKind.Try,
+ TokenKind_1.TokenKind.Catch,
+ TokenKind_1.TokenKind.EndTry,
+ TokenKind_1.TokenKind.EndTry,
+ TokenKind_1.TokenKind.Throw,
+ TokenKind_1.TokenKind.Eof
  ]);
  });
+ describe('regular expression literals', () => {
+ function testRegex(...regexps) {
+ regexps = regexps.map(x => x.toString());
+ const results = [];
+ for (const regexp of regexps) {
+ const { tokens } = Lexer_1.Lexer.scan(regexp);
+ results.push(tokens[0].text);
+ }
+ (0, chai_1.expect)(results).to.eql(regexps);
+ }
+ it('recognizes regex literals', () => {
+ testRegex(/simple/, /SimpleWithValidFlags/g, /UnknownFlags/gi, /with spaces/s, /with(parens)and[squarebraces]/,
+ //lots of special characters
+ /.*()^$@/,
+ //captures quote char
+ /"/);
+ });
+ it('does not capture multiple divisions on one line as regex', () => {
+ const { tokens } = Lexer_1.Lexer.scan(`one = 1/2 + 1/4 + 1/4`, {
+ includeWhitespace: false
+ });
+ (0, chai_1.expect)(tokens.map(x => x.kind)).to.eql([
+ TokenKind_1.TokenKind.Identifier,
+ TokenKind_1.TokenKind.Equal,
+ TokenKind_1.TokenKind.IntegerLiteral,
+ TokenKind_1.TokenKind.Forwardslash,
+ TokenKind_1.TokenKind.IntegerLiteral,
+ TokenKind_1.TokenKind.Plus,
+ TokenKind_1.TokenKind.IntegerLiteral,
+ TokenKind_1.TokenKind.Forwardslash,
+ TokenKind_1.TokenKind.IntegerLiteral,
+ TokenKind_1.TokenKind.Plus,
+ TokenKind_1.TokenKind.IntegerLiteral,
+ TokenKind_1.TokenKind.Forwardslash,
+ TokenKind_1.TokenKind.IntegerLiteral,
+ TokenKind_1.TokenKind.Eof
+ ]);
+ });
+ it('only captures alphanumeric flags', () => {
+ (0, chai_1.expect)(Lexer_1.Lexer.scan('speak(/a/)').tokens.map(x => x.kind)).to.eql([
+ TokenKind_1.TokenKind.Identifier,
+ TokenKind_1.TokenKind.LeftParen,
+ TokenKind_1.TokenKind.RegexLiteral,
+ TokenKind_1.TokenKind.RightParen,
+ TokenKind_1.TokenKind.Eof
+ ]);
+ });
+ it('handles escape characters properly', () => {
+ testRegex(
+ //an escaped forward slash right next to the end-regexp forwardslash
+ /\//, /\r/, /\n/, /\r\n/,
+ //a literal backslash in front of an escape backslash
+ /\\\n/);
+ });
+ });
  });
+ function expectKinds(text, tokenKinds) {
+ let actual = Lexer_1.Lexer.scan(text).tokens.map(x => x.kind);
+ //remove the EOF token
+ actual.pop();
+ (0, chai_1.expect)(actual).to.eql(tokenKinds);
+ }
  //# sourceMappingURL=Lexer.spec.js.map