@zzzen/pyright-internal 1.2.0-dev.20231119 → 1.2.0-dev.20231126

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (386)
  1. package/dist/analyzer/analysis.d.ts +1 -1
  2. package/dist/analyzer/analysis.js +5 -1
  3. package/dist/analyzer/analysis.js.map +1 -1
  4. package/dist/analyzer/analyzerFileInfo.d.ts +1 -1
  5. package/dist/analyzer/analyzerNodeInfo.d.ts +1 -1
  6. package/dist/analyzer/analyzerNodeInfo.js +1 -1
  7. package/dist/analyzer/analyzerNodeInfo.js.map +1 -1
  8. package/dist/analyzer/backgroundAnalysisProgram.d.ts +1 -1
  9. package/dist/analyzer/backgroundAnalysisProgram.js +1 -1
  10. package/dist/analyzer/backgroundAnalysisProgram.js.map +1 -1
  11. package/dist/analyzer/binder.js +198 -194
  12. package/dist/analyzer/binder.js.map +1 -1
  13. package/dist/analyzer/cacheManager.js +1 -1
  14. package/dist/analyzer/cacheManager.js.map +1 -1
  15. package/dist/analyzer/checker.js +257 -239
  16. package/dist/analyzer/checker.js.map +1 -1
  17. package/dist/analyzer/codeFlowEngine.d.ts +1 -1
  18. package/dist/analyzer/codeFlowEngine.js +26 -26
  19. package/dist/analyzer/codeFlowEngine.js.map +1 -1
  20. package/dist/analyzer/codeFlowTypes.d.ts +1 -1
  21. package/dist/analyzer/codeFlowTypes.js +23 -23
  22. package/dist/analyzer/codeFlowTypes.js.map +1 -1
  23. package/dist/analyzer/codeFlowUtils.js +39 -37
  24. package/dist/analyzer/codeFlowUtils.js.map +1 -1
  25. package/dist/analyzer/constraintSolver.js +40 -47
  26. package/dist/analyzer/constraintSolver.js.map +1 -1
  27. package/dist/analyzer/constructorTransform.js +8 -8
  28. package/dist/analyzer/constructorTransform.js.map +1 -1
  29. package/dist/analyzer/constructors.js +23 -25
  30. package/dist/analyzer/constructors.js.map +1 -1
  31. package/dist/analyzer/dataClasses.js +67 -63
  32. package/dist/analyzer/dataClasses.js.map +1 -1
  33. package/dist/analyzer/declaration.d.ts +2 -2
  34. package/dist/analyzer/declaration.js +10 -10
  35. package/dist/analyzer/declaration.js.map +1 -1
  36. package/dist/analyzer/declarationUtils.js +41 -41
  37. package/dist/analyzer/declarationUtils.js.map +1 -1
  38. package/dist/analyzer/decorators.js +51 -51
  39. package/dist/analyzer/decorators.js.map +1 -1
  40. package/dist/analyzer/enums.js +27 -27
  41. package/dist/analyzer/enums.js.map +1 -1
  42. package/dist/analyzer/functionTransform.js +4 -4
  43. package/dist/analyzer/functionTransform.js.map +1 -1
  44. package/dist/analyzer/importResolver.d.ts +1 -1
  45. package/dist/analyzer/importResolver.js +21 -17
  46. package/dist/analyzer/importResolver.js.map +1 -1
  47. package/dist/analyzer/importResult.js +1 -1
  48. package/dist/analyzer/importResult.js.map +1 -1
  49. package/dist/analyzer/importStatementUtils.js +31 -27
  50. package/dist/analyzer/importStatementUtils.js.map +1 -1
  51. package/dist/analyzer/namedTuples.js +36 -32
  52. package/dist/analyzer/namedTuples.js.map +1 -1
  53. package/dist/analyzer/operations.js +78 -78
  54. package/dist/analyzer/operations.js.map +1 -1
  55. package/dist/analyzer/packageTypeReport.d.ts +1 -1
  56. package/dist/analyzer/packageTypeReport.js +2 -2
  57. package/dist/analyzer/packageTypeReport.js.map +1 -1
  58. package/dist/analyzer/packageTypeVerifier.js +90 -90
  59. package/dist/analyzer/packageTypeVerifier.js.map +1 -1
  60. package/dist/analyzer/parameterUtils.js +14 -14
  61. package/dist/analyzer/parameterUtils.js.map +1 -1
  62. package/dist/analyzer/parentDirectoryCache.d.ts +2 -2
  63. package/dist/analyzer/parseTreeCleaner.js +5 -1
  64. package/dist/analyzer/parseTreeCleaner.js.map +1 -1
  65. package/dist/analyzer/parseTreeUtils.d.ts +1 -1
  66. package/dist/analyzer/parseTreeUtils.js +383 -379
  67. package/dist/analyzer/parseTreeUtils.js.map +1 -1
  68. package/dist/analyzer/parseTreeWalker.js +161 -157
  69. package/dist/analyzer/parseTreeWalker.js.map +1 -1
  70. package/dist/analyzer/patternMatching.d.ts +1 -1
  71. package/dist/analyzer/patternMatching.js +52 -50
  72. package/dist/analyzer/patternMatching.js.map +1 -1
  73. package/dist/analyzer/program.d.ts +1 -1
  74. package/dist/analyzer/program.js +18 -14
  75. package/dist/analyzer/program.js.map +1 -1
  76. package/dist/analyzer/properties.js +53 -44
  77. package/dist/analyzer/properties.js.map +1 -1
  78. package/dist/analyzer/protocols.js +14 -14
  79. package/dist/analyzer/protocols.js.map +1 -1
  80. package/dist/analyzer/pythonPathUtils.js +5 -1
  81. package/dist/analyzer/pythonPathUtils.js.map +1 -1
  82. package/dist/analyzer/regions.js +3 -3
  83. package/dist/analyzer/regions.js.map +1 -1
  84. package/dist/analyzer/scope.js +8 -8
  85. package/dist/analyzer/scope.js.map +1 -1
  86. package/dist/analyzer/scopeUtils.js +1 -1
  87. package/dist/analyzer/scopeUtils.js.map +1 -1
  88. package/dist/analyzer/service.js +5 -1
  89. package/dist/analyzer/service.js.map +1 -1
  90. package/dist/analyzer/sourceFile.js +25 -21
  91. package/dist/analyzer/sourceFile.js.map +1 -1
  92. package/dist/analyzer/sourceMapper.d.ts +2 -2
  93. package/dist/analyzer/sourceMapper.js +12 -8
  94. package/dist/analyzer/sourceMapper.js.map +1 -1
  95. package/dist/analyzer/staticExpressions.js +40 -40
  96. package/dist/analyzer/staticExpressions.js.map +1 -1
  97. package/dist/analyzer/symbol.d.ts +1 -1
  98. package/dist/analyzer/symbol.js +26 -26
  99. package/dist/analyzer/symbol.js.map +1 -1
  100. package/dist/analyzer/symbolUtils.js +1 -1
  101. package/dist/analyzer/symbolUtils.js.map +1 -1
  102. package/dist/analyzer/testWalker.js +5 -5
  103. package/dist/analyzer/testWalker.js.map +1 -1
  104. package/dist/analyzer/tracePrinter.d.ts +1 -1
  105. package/dist/analyzer/tracePrinter.js +35 -31
  106. package/dist/analyzer/tracePrinter.js.map +1 -1
  107. package/dist/analyzer/typeCacheUtils.js +5 -1
  108. package/dist/analyzer/typeCacheUtils.js.map +1 -1
  109. package/dist/analyzer/typeDocStringUtils.js +13 -9
  110. package/dist/analyzer/typeDocStringUtils.js.map +1 -1
  111. package/dist/analyzer/typeEvaluator.js +1233 -1153
  112. package/dist/analyzer/typeEvaluator.js.map +1 -1
  113. package/dist/analyzer/typeEvaluatorTypes.d.ts +3 -1
  114. package/dist/analyzer/typeEvaluatorTypes.js +3 -1
  115. package/dist/analyzer/typeEvaluatorTypes.js.map +1 -1
  116. package/dist/analyzer/typeGuards.d.ts +1 -1
  117. package/dist/analyzer/typeGuards.js +79 -65
  118. package/dist/analyzer/typeGuards.js.map +1 -1
  119. package/dist/analyzer/typePrinter.d.ts +1 -1
  120. package/dist/analyzer/typePrinter.js +80 -76
  121. package/dist/analyzer/typePrinter.js.map +1 -1
  122. package/dist/analyzer/typeStubWriter.js +26 -22
  123. package/dist/analyzer/typeStubWriter.js.map +1 -1
  124. package/dist/analyzer/typeUtils.js +123 -123
  125. package/dist/analyzer/typeUtils.js.map +1 -1
  126. package/dist/analyzer/typeVarContext.js +9 -9
  127. package/dist/analyzer/typeVarContext.js.map +1 -1
  128. package/dist/analyzer/typeWalker.js +10 -10
  129. package/dist/analyzer/typedDicts.js +74 -70
  130. package/dist/analyzer/typedDicts.js.map +1 -1
  131. package/dist/analyzer/types.d.ts +16 -11
  132. package/dist/analyzer/types.js +179 -177
  133. package/dist/analyzer/types.js.map +1 -1
  134. package/dist/backgroundAnalysisBase.d.ts +2 -2
  135. package/dist/backgroundAnalysisBase.js +5 -1
  136. package/dist/backgroundAnalysisBase.js.map +1 -1
  137. package/dist/backgroundThreadBase.js +5 -1
  138. package/dist/backgroundThreadBase.js.map +1 -1
  139. package/dist/commands/commandController.js +7 -7
  140. package/dist/commands/commandController.js.map +1 -1
  141. package/dist/commands/commandResult.js +1 -1
  142. package/dist/commands/commandResult.js.map +1 -1
  143. package/dist/commands/commands.js +1 -1
  144. package/dist/commands/commands.js.map +1 -1
  145. package/dist/commands/dumpFileDebugInfoCommand.js +196 -196
  146. package/dist/commands/dumpFileDebugInfoCommand.js.map +1 -1
  147. package/dist/commands/quickActionCommand.js +1 -1
  148. package/dist/commands/quickActionCommand.js.map +1 -1
  149. package/dist/common/charCodes.js +1 -1
  150. package/dist/common/charCodes.js.map +1 -1
  151. package/dist/common/chokidarFileWatcherProvider.js +5 -1
  152. package/dist/common/chokidarFileWatcherProvider.js.map +1 -1
  153. package/dist/common/collectionUtils.d.ts +3 -3
  154. package/dist/common/collectionUtils.js +3 -3
  155. package/dist/common/collectionUtils.js.map +1 -1
  156. package/dist/common/commandLineOptions.d.ts +1 -1
  157. package/dist/common/commandLineOptions.js +5 -5
  158. package/dist/common/commandLineOptions.js.map +1 -1
  159. package/dist/common/configOptions.d.ts +1 -1
  160. package/dist/common/configOptions.js +11 -7
  161. package/dist/common/configOptions.js.map +1 -1
  162. package/dist/common/console.js +7 -3
  163. package/dist/common/console.js.map +1 -1
  164. package/dist/common/core.d.ts +2 -2
  165. package/dist/common/core.js +6 -6
  166. package/dist/common/core.js.map +1 -1
  167. package/dist/common/diagnostic.js +6 -6
  168. package/dist/common/diagnostic.js.map +1 -1
  169. package/dist/common/diagnosticRules.js +1 -1
  170. package/dist/common/diagnosticRules.js.map +1 -1
  171. package/dist/common/diagnosticSink.js +12 -12
  172. package/dist/common/diagnosticSink.js.map +1 -1
  173. package/dist/common/editAction.d.ts +1 -1
  174. package/dist/common/editAction.js +2 -2
  175. package/dist/common/editAction.js.map +1 -1
  176. package/dist/common/envVarUtils.js +5 -1
  177. package/dist/common/envVarUtils.js.map +1 -1
  178. package/dist/common/extensibility.js +1 -1
  179. package/dist/common/extensibility.js.map +1 -1
  180. package/dist/common/fileBasedCancellationUtils.js +5 -1
  181. package/dist/common/fileBasedCancellationUtils.js.map +1 -1
  182. package/dist/common/fileSystem.d.ts +1 -0
  183. package/dist/common/fileSystem.js +2 -2
  184. package/dist/common/fileSystem.js.map +1 -1
  185. package/dist/common/fileWatcher.d.ts +2 -2
  186. package/dist/common/fullAccessHost.js +10 -6
  187. package/dist/common/fullAccessHost.js.map +1 -1
  188. package/dist/common/host.d.ts +1 -1
  189. package/dist/common/host.js +2 -2
  190. package/dist/common/host.js.map +1 -1
  191. package/dist/common/lspUtils.js +7 -7
  192. package/dist/common/lspUtils.js.map +1 -1
  193. package/dist/common/memUtils.d.ts +1 -1
  194. package/dist/common/pathUtils.js +12 -8
  195. package/dist/common/pathUtils.js.map +1 -1
  196. package/dist/common/pythonVersion.js +1 -1
  197. package/dist/common/pythonVersion.js.map +1 -1
  198. package/dist/common/realFileSystem.js +5 -1
  199. package/dist/common/realFileSystem.js.map +1 -1
  200. package/dist/common/serviceProvider.d.ts +1 -1
  201. package/dist/common/serviceProvider.js +5 -1
  202. package/dist/common/serviceProvider.js.map +1 -1
  203. package/dist/common/serviceProviderExtensions.js +1 -1
  204. package/dist/common/serviceProviderExtensions.js.map +1 -1
  205. package/dist/common/stringUtils.js +5 -5
  206. package/dist/common/stringUtils.js.map +1 -1
  207. package/dist/common/textEditTracker.js +11 -7
  208. package/dist/common/textEditTracker.js.map +1 -1
  209. package/dist/common/textRange.js +3 -3
  210. package/dist/common/textRange.js.map +1 -1
  211. package/dist/languageServerBase.js +13 -13
  212. package/dist/languageServerBase.js.map +1 -1
  213. package/dist/languageService/autoImporter.d.ts +3 -3
  214. package/dist/languageService/autoImporter.js +12 -8
  215. package/dist/languageService/autoImporter.js.map +1 -1
  216. package/dist/languageService/callHierarchyProvider.js +27 -23
  217. package/dist/languageService/callHierarchyProvider.js.map +1 -1
  218. package/dist/languageService/codeActionProvider.js +8 -8
  219. package/dist/languageService/codeActionProvider.js.map +1 -1
  220. package/dist/languageService/completionProvider.js +166 -162
  221. package/dist/languageService/completionProvider.js.map +1 -1
  222. package/dist/languageService/completionProviderUtils.js +9 -9
  223. package/dist/languageService/completionProviderUtils.js.map +1 -1
  224. package/dist/languageService/definitionProvider.js +14 -10
  225. package/dist/languageService/definitionProvider.js.map +1 -1
  226. package/dist/languageService/documentHighlightProvider.js +7 -3
  227. package/dist/languageService/documentHighlightProvider.js.map +1 -1
  228. package/dist/languageService/documentSymbolCollector.d.ts +1 -1
  229. package/dist/languageService/documentSymbolCollector.js +21 -17
  230. package/dist/languageService/documentSymbolCollector.js.map +1 -1
  231. package/dist/languageService/hoverProvider.js +30 -26
  232. package/dist/languageService/hoverProvider.js.map +1 -1
  233. package/dist/languageService/importSorter.js +1 -1
  234. package/dist/languageService/importSorter.js.map +1 -1
  235. package/dist/languageService/quickActions.js +10 -6
  236. package/dist/languageService/quickActions.js.map +1 -1
  237. package/dist/languageService/referencesProvider.d.ts +1 -1
  238. package/dist/languageService/referencesProvider.js +24 -20
  239. package/dist/languageService/referencesProvider.js.map +1 -1
  240. package/dist/languageService/signatureHelpProvider.js +8 -4
  241. package/dist/languageService/signatureHelpProvider.js.map +1 -1
  242. package/dist/languageService/symbolIndexer.js +8 -4
  243. package/dist/languageService/symbolIndexer.js.map +1 -1
  244. package/dist/languageService/tooltipUtils.js +20 -16
  245. package/dist/languageService/tooltipUtils.js.map +1 -1
  246. package/dist/languageService/workspaceSymbolProvider.js +5 -1
  247. package/dist/languageService/workspaceSymbolProvider.js.map +1 -1
  248. package/dist/localization/localize.d.ts +7 -9
  249. package/dist/localization/localize.js +5 -4
  250. package/dist/localization/localize.js.map +1 -1
  251. package/dist/localization/package.nls.cs.json +0 -3
  252. package/dist/localization/package.nls.de.json +0 -3
  253. package/dist/localization/package.nls.en-us.json +4 -3
  254. package/dist/localization/package.nls.es.json +0 -3
  255. package/dist/localization/package.nls.fr.json +0 -3
  256. package/dist/localization/package.nls.it.json +0 -3
  257. package/dist/localization/package.nls.ja.json +0 -3
  258. package/dist/localization/package.nls.ko.json +0 -3
  259. package/dist/localization/package.nls.pl.json +0 -3
  260. package/dist/localization/package.nls.pt-br.json +0 -3
  261. package/dist/localization/package.nls.qps-ploc.json +0 -3
  262. package/dist/localization/package.nls.ru.json +0 -3
  263. package/dist/localization/package.nls.tr.json +0 -3
  264. package/dist/localization/package.nls.zh-cn.json +0 -3
  265. package/dist/localization/package.nls.zh-tw.json +0 -3
  266. package/dist/parser/characterStream.js +3 -3
  267. package/dist/parser/characterStream.js.map +1 -1
  268. package/dist/parser/characters.js +13 -9
  269. package/dist/parser/characters.js.map +1 -1
  270. package/dist/parser/parseNodes.d.ts +12 -12
  271. package/dist/parser/parseNodes.js +193 -193
  272. package/dist/parser/parseNodes.js.map +1 -1
  273. package/dist/parser/parser.js +563 -559
  274. package/dist/parser/parser.js.map +1 -1
  275. package/dist/parser/stringTokenUtils.js +47 -47
  276. package/dist/parser/stringTokenUtils.js.map +1 -1
  277. package/dist/parser/tokenizer.js +288 -288
  278. package/dist/parser/tokenizer.js.map +1 -1
  279. package/dist/parser/tokenizerTypes.js +35 -35
  280. package/dist/parser/tokenizerTypes.js.map +1 -1
  281. package/dist/parser/unicode.d.ts +3 -3
  282. package/dist/pyright.js +21 -17
  283. package/dist/pyright.js.map +1 -1
  284. package/dist/pyrightFileSystem.d.ts +1 -0
  285. package/dist/pyrightFileSystem.js +1 -1
  286. package/dist/pyrightFileSystem.js.map +1 -1
  287. package/dist/readonlyAugmentedFileSystem.d.ts +1 -0
  288. package/dist/tests/chainedSourceFiles.test.js +5 -1
  289. package/dist/tests/chainedSourceFiles.test.js.map +1 -1
  290. package/dist/tests/checker.test.js +5 -1
  291. package/dist/tests/checker.test.js.map +1 -1
  292. package/dist/tests/collectionUtils.test.js +5 -1
  293. package/dist/tests/collectionUtils.test.js.map +1 -1
  294. package/dist/tests/common.test.js +5 -1
  295. package/dist/tests/common.test.js.map +1 -1
  296. package/dist/tests/debug.test.js +8 -4
  297. package/dist/tests/debug.test.js.map +1 -1
  298. package/dist/tests/deferred.test.js +5 -1
  299. package/dist/tests/deferred.test.js.map +1 -1
  300. package/dist/tests/diagnosticOverrides.test.js +5 -1
  301. package/dist/tests/diagnosticOverrides.test.js.map +1 -1
  302. package/dist/tests/docStringUtils.test.js +5 -1
  303. package/dist/tests/docStringUtils.test.js.map +1 -1
  304. package/dist/tests/filesystem.test.js +5 -1
  305. package/dist/tests/filesystem.test.js.map +1 -1
  306. package/dist/tests/fourSlashParser.test.js +5 -1
  307. package/dist/tests/fourSlashParser.test.js.map +1 -1
  308. package/dist/tests/fourSlashRunner.test.js +5 -1
  309. package/dist/tests/fourSlashRunner.test.js.map +1 -1
  310. package/dist/tests/harness/fourslash/fourSlashParser.js +13 -13
  311. package/dist/tests/harness/fourslash/fourSlashParser.js.map +1 -1
  312. package/dist/tests/harness/fourslash/fourSlashTypes.js +11 -7
  313. package/dist/tests/harness/fourslash/fourSlashTypes.js.map +1 -1
  314. package/dist/tests/harness/fourslash/runner.d.ts +1 -1
  315. package/dist/tests/harness/fourslash/runner.js +5 -1
  316. package/dist/tests/harness/fourslash/runner.js.map +1 -1
  317. package/dist/tests/harness/fourslash/testLanguageService.js +5 -1
  318. package/dist/tests/harness/fourslash/testLanguageService.js.map +1 -1
  319. package/dist/tests/harness/fourslash/testState.Consts.js +6 -2
  320. package/dist/tests/harness/fourslash/testState.Consts.js.map +1 -1
  321. package/dist/tests/harness/fourslash/testState.js +17 -13
  322. package/dist/tests/harness/fourslash/testState.js.map +1 -1
  323. package/dist/tests/harness/fourslash/testStateUtils.js +9 -5
  324. package/dist/tests/harness/fourslash/testStateUtils.js.map +1 -1
  325. package/dist/tests/harness/fourslash/workspaceEditTestUtils.js +5 -1
  326. package/dist/tests/harness/fourslash/workspaceEditTestUtils.js.map +1 -1
  327. package/dist/tests/harness/testHost.js +5 -1
  328. package/dist/tests/harness/testHost.js.map +1 -1
  329. package/dist/tests/harness/utils.d.ts +13 -12
  330. package/dist/tests/harness/utils.js.map +1 -1
  331. package/dist/tests/harness/vfs/factory.js +6 -2
  332. package/dist/tests/harness/vfs/factory.js.map +1 -1
  333. package/dist/tests/harness/vfs/filesystem.d.ts +4 -3
  334. package/dist/tests/harness/vfs/filesystem.js +11 -7
  335. package/dist/tests/harness/vfs/filesystem.js.map +1 -1
  336. package/dist/tests/harness/vfs/pathValidation.js +30 -26
  337. package/dist/tests/harness/vfs/pathValidation.js.map +1 -1
  338. package/dist/tests/importStatementUtils.test.js +25 -25
  339. package/dist/tests/importStatementUtils.test.js.map +1 -1
  340. package/dist/tests/ipythonMode.test.js +6 -6
  341. package/dist/tests/ipythonMode.test.js.map +1 -1
  342. package/dist/tests/localizer.test.js +5 -1
  343. package/dist/tests/localizer.test.js.map +1 -1
  344. package/dist/tests/logger.test.js +5 -1
  345. package/dist/tests/logger.test.js.map +1 -1
  346. package/dist/tests/parseTreeUtils.test.js +10 -10
  347. package/dist/tests/parseTreeUtils.test.js.map +1 -1
  348. package/dist/tests/parser.test.js +13 -9
  349. package/dist/tests/parser.test.js.map +1 -1
  350. package/dist/tests/pathUtils.test.js +5 -1
  351. package/dist/tests/pathUtils.test.js.map +1 -1
  352. package/dist/tests/sourceFile.test.js +5 -1
  353. package/dist/tests/sourceFile.test.js.map +1 -1
  354. package/dist/tests/sourceMapperUtils.test.js +5 -1
  355. package/dist/tests/sourceMapperUtils.test.js.map +1 -1
  356. package/dist/tests/stringUtils.test.js +10 -6
  357. package/dist/tests/stringUtils.test.js.map +1 -1
  358. package/dist/tests/symbolNameUtils.test.js +5 -1
  359. package/dist/tests/symbolNameUtils.test.js.map +1 -1
  360. package/dist/tests/testState.test.js +5 -1
  361. package/dist/tests/testState.test.js.map +1 -1
  362. package/dist/tests/testUtils.js +11 -7
  363. package/dist/tests/testUtils.js.map +1 -1
  364. package/dist/tests/textRange.test.js +5 -1
  365. package/dist/tests/textRange.test.js.map +1 -1
  366. package/dist/tests/tokenizer.test.js +463 -459
  367. package/dist/tests/tokenizer.test.js.map +1 -1
  368. package/dist/tests/typeEvaluator1.test.js +14 -2
  369. package/dist/tests/typeEvaluator1.test.js.map +1 -1
  370. package/dist/tests/typeEvaluator2.test.js +11 -3
  371. package/dist/tests/typeEvaluator2.test.js.map +1 -1
  372. package/dist/tests/typeEvaluator3.test.js +11 -3
  373. package/dist/tests/typeEvaluator3.test.js.map +1 -1
  374. package/dist/tests/typeEvaluator4.test.js +10 -2
  375. package/dist/tests/typeEvaluator4.test.js.map +1 -1
  376. package/dist/tests/typeEvaluator5.test.js +5 -1
  377. package/dist/tests/typeEvaluator5.test.js.map +1 -1
  378. package/dist/tests/typePrinter.test.js +44 -40
  379. package/dist/tests/typePrinter.test.js.map +1 -1
  380. package/dist/tests/workspaceEditUtils.test.js +5 -1
  381. package/dist/tests/workspaceEditUtils.test.js.map +1 -1
  382. package/dist/tests/zipfs.test.js +5 -1
  383. package/dist/tests/zipfs.test.js.map +1 -1
  384. package/dist/workspaceFactory.js +2 -2
  385. package/dist/workspaceFactory.js.map +1 -1
  386. package/package.json +2 -2
@@ -12,7 +12,11 @@
  */
  var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
  if (k2 === undefined) k2 = k;
- Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
  }) : (function(o, m, k, k2) {
  if (k2 === undefined) k2 = k;
  o[k2] = m[k];
@@ -44,8 +48,8 @@ test('Empty', () => {
  const results = t.tokenize('');
  assert_1.default.equal(results.tokens.count, 0 + _implicitTokenCount);
  assert_1.default.equal(results.tokens.length, 0);
- assert_1.default.equal(results.tokens.getItemAt(0).type, 2 /* NewLine */);
- assert_1.default.equal(results.tokens.getItemAt(1).type, 1 /* EndOfStream */);
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 2 /* TokenType.NewLine */);
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 1 /* TokenType.EndOfStream */);
  assert_1.default.equal(results.tokens.getItemAtPosition(-1), -1);
  assert_1.default.equal(results.tokens.getItemAtPosition(2), -1);
  assert_1.default.throws(() => results.tokens.getItemAt(-1), Error);
@@ -57,13 +61,13 @@ test('NewLines', () => {
  const t = new tokenizer_1.Tokenizer();
  const results = t.tokenize('\na\r\nb\r');
  assert_1.default.equal(results.tokens.count, 5 + _implicitTokenCountNoImplicitNewLine);
- assert_1.default.equal(results.tokens.getItemAt(0).type, 2 /* NewLine */);
- assert_1.default.equal(results.tokens.getItemAt(0).newLineType, 1 /* LineFeed */);
- assert_1.default.equal(results.tokens.getItemAt(2).type, 2 /* NewLine */);
- assert_1.default.equal(results.tokens.getItemAt(2).newLineType, 2 /* CarriageReturnLineFeed */);
- assert_1.default.equal(results.tokens.getItemAt(4).type, 2 /* NewLine */);
- assert_1.default.equal(results.tokens.getItemAt(4).newLineType, 0 /* CarriageReturn */);
- assert_1.default.equal(results.tokens.getItemAt(5).type, 1 /* EndOfStream */);
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 2 /* TokenType.NewLine */);
+ assert_1.default.equal(results.tokens.getItemAt(0).newLineType, 1 /* NewLineType.LineFeed */);
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 2 /* TokenType.NewLine */);
+ assert_1.default.equal(results.tokens.getItemAt(2).newLineType, 2 /* NewLineType.CarriageReturnLineFeed */);
+ assert_1.default.equal(results.tokens.getItemAt(4).type, 2 /* TokenType.NewLine */);
+ assert_1.default.equal(results.tokens.getItemAt(4).newLineType, 0 /* NewLineType.CarriageReturn */);
+ assert_1.default.equal(results.tokens.getItemAt(5).type, 1 /* TokenType.EndOfStream */);
  assert_1.default.equal(results.tokens.getItemAtPosition(0), 0);
  assert_1.default.equal(results.tokens.getItemAtPosition(1), 1);
  assert_1.default.equal(results.tokens.getItemAtPosition(2), 2);
@@ -78,33 +82,33 @@ test('InvalidWithNewLine', () => {
  const t = new tokenizer_1.Tokenizer();
  const results = t.tokenize('\\\\\r\n\\aaa \t\f\n');
  assert_1.default.equal(results.tokens.count, 4 + _implicitTokenCountNoImplicitNewLine);
- assert_1.default.equal(results.tokens.getItemAt(0).type, 0 /* Invalid */);
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 0 /* TokenType.Invalid */);
  assert_1.default.equal(results.tokens.getItemAt(0).length, 2);
- assert_1.default.equal(results.tokens.getItemAt(1).type, 2 /* NewLine */);
- assert_1.default.equal(results.tokens.getItemAt(1).newLineType, 2 /* CarriageReturnLineFeed */);
- assert_1.default.equal(results.tokens.getItemAt(2).type, 0 /* Invalid */);
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 2 /* TokenType.NewLine */);
+ assert_1.default.equal(results.tokens.getItemAt(1).newLineType, 2 /* NewLineType.CarriageReturnLineFeed */);
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 0 /* TokenType.Invalid */);
  assert_1.default.equal(results.tokens.getItemAt(2).length, 4);
- assert_1.default.equal(results.tokens.getItemAt(3).type, 2 /* NewLine */);
- assert_1.default.equal(results.tokens.getItemAt(3).newLineType, 1 /* LineFeed */);
+ assert_1.default.equal(results.tokens.getItemAt(3).type, 2 /* TokenType.NewLine */);
+ assert_1.default.equal(results.tokens.getItemAt(3).newLineType, 1 /* NewLineType.LineFeed */);
  });
  test('InvalidIndent', () => {
  const t = new tokenizer_1.Tokenizer();
  const results = t.tokenize('\tpass\n');
  assert_1.default.equal(results.tokens.count, 4 + _implicitTokenCountNoImplicitNewLine);
- assert_1.default.equal(results.tokens.getItemAt(0).type, 3 /* Indent */);
- assert_1.default.equal(results.tokens.getItemAt(1).type, 8 /* Keyword */);
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 3 /* TokenType.Indent */);
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 8 /* TokenType.Keyword */);
  });
  test('ParenNewLines', () => {
  const t = new tokenizer_1.Tokenizer();
  const results = t.tokenize('\n(\n(\n)\n)\n)\n');
  assert_1.default.equal(results.tokens.count, 8 + _implicitTokenCountNoImplicitNewLine);
- assert_1.default.equal(results.tokens.getItemAt(0).type, 2 /* NewLine */);
- assert_1.default.equal(results.tokens.getItemAt(1).type, 13 /* OpenParenthesis */);
- assert_1.default.equal(results.tokens.getItemAt(2).type, 13 /* OpenParenthesis */);
- assert_1.default.equal(results.tokens.getItemAt(3).type, 14 /* CloseParenthesis */);
- assert_1.default.equal(results.tokens.getItemAt(4).type, 14 /* CloseParenthesis */);
- assert_1.default.equal(results.tokens.getItemAt(5).type, 2 /* NewLine */);
- assert_1.default.equal(results.tokens.getItemAt(6).type, 14 /* CloseParenthesis */);
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 2 /* TokenType.NewLine */);
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 13 /* TokenType.OpenParenthesis */);
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 13 /* TokenType.OpenParenthesis */);
+ assert_1.default.equal(results.tokens.getItemAt(3).type, 14 /* TokenType.CloseParenthesis */);
+ assert_1.default.equal(results.tokens.getItemAt(4).type, 14 /* TokenType.CloseParenthesis */);
+ assert_1.default.equal(results.tokens.getItemAt(5).type, 2 /* TokenType.NewLine */);
+ assert_1.default.equal(results.tokens.getItemAt(6).type, 14 /* TokenType.CloseParenthesis */);
  assert_1.default.equal(results.tokens.getItemAtPosition(0), 0);
  assert_1.default.equal(results.tokens.getItemAtPosition(1), 1);
  assert_1.default.equal(results.tokens.getItemAtPosition(2), 1);
@@ -124,13 +128,13 @@ test('BraceNewLines', () => {
  const t = new tokenizer_1.Tokenizer();
  const results = t.tokenize('\n{\n{\n}\n}\n}\n');
  assert_1.default.equal(results.tokens.count, 8 + _implicitTokenCountNoImplicitNewLine);
- assert_1.default.equal(results.tokens.getItemAt(0).type, 2 /* NewLine */);
- assert_1.default.equal(results.tokens.getItemAt(1).type, 17 /* OpenCurlyBrace */);
- assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* OpenCurlyBrace */);
- assert_1.default.equal(results.tokens.getItemAt(3).type, 18 /* CloseCurlyBrace */);
- assert_1.default.equal(results.tokens.getItemAt(4).type, 18 /* CloseCurlyBrace */);
- assert_1.default.equal(results.tokens.getItemAt(5).type, 2 /* NewLine */);
- assert_1.default.equal(results.tokens.getItemAt(6).type, 18 /* CloseCurlyBrace */);
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 2 /* TokenType.NewLine */);
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 17 /* TokenType.OpenCurlyBrace */);
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* TokenType.OpenCurlyBrace */);
+ assert_1.default.equal(results.tokens.getItemAt(3).type, 18 /* TokenType.CloseCurlyBrace */);
+ assert_1.default.equal(results.tokens.getItemAt(4).type, 18 /* TokenType.CloseCurlyBrace */);
+ assert_1.default.equal(results.tokens.getItemAt(5).type, 2 /* TokenType.NewLine */);
+ assert_1.default.equal(results.tokens.getItemAt(6).type, 18 /* TokenType.CloseCurlyBrace */);
  assert_1.default.equal(results.tokens.getItemAtPosition(0), 0);
  assert_1.default.equal(results.tokens.getItemAtPosition(1), 1);
  assert_1.default.equal(results.tokens.getItemAtPosition(2), 1);
@@ -150,13 +154,13 @@ test('BracketNewLines', () => {
  const t = new tokenizer_1.Tokenizer();
  const results = t.tokenize('\n[\n[\n]\n]\n]\n');
  assert_1.default.equal(results.tokens.count, 8 + _implicitTokenCountNoImplicitNewLine);
- assert_1.default.equal(results.tokens.getItemAt(0).type, 2 /* NewLine */);
- assert_1.default.equal(results.tokens.getItemAt(1).type, 15 /* OpenBracket */);
- assert_1.default.equal(results.tokens.getItemAt(2).type, 15 /* OpenBracket */);
- assert_1.default.equal(results.tokens.getItemAt(3).type, 16 /* CloseBracket */);
- assert_1.default.equal(results.tokens.getItemAt(4).type, 16 /* CloseBracket */);
- assert_1.default.equal(results.tokens.getItemAt(5).type, 2 /* NewLine */);
- assert_1.default.equal(results.tokens.getItemAt(6).type, 16 /* CloseBracket */);
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 2 /* TokenType.NewLine */);
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 15 /* TokenType.OpenBracket */);
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 15 /* TokenType.OpenBracket */);
+ assert_1.default.equal(results.tokens.getItemAt(3).type, 16 /* TokenType.CloseBracket */);
+ assert_1.default.equal(results.tokens.getItemAt(4).type, 16 /* TokenType.CloseBracket */);
+ assert_1.default.equal(results.tokens.getItemAt(5).type, 2 /* TokenType.NewLine */);
+ assert_1.default.equal(results.tokens.getItemAt(6).type, 16 /* TokenType.CloseBracket */);
  assert_1.default.equal(results.tokens.getItemAtPosition(0), 0);
  assert_1.default.equal(results.tokens.getItemAtPosition(1), 1);
  assert_1.default.equal(results.tokens.getItemAtPosition(2), 1);
@@ -176,17 +180,17 @@ test('NewLinesWithWhiteSpace', () => {
  const t = new tokenizer_1.Tokenizer();
  const results = t.tokenize(' \na \r\nb \rc');
  assert_1.default.equal(results.tokens.count, 6 + _implicitTokenCount);
- assert_1.default.equal(results.tokens.getItemAt(0).type, 2 /* NewLine */);
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 2 /* TokenType.NewLine */);
  assert_1.default.equal(results.tokens.getItemAt(0).length, 1);
- assert_1.default.equal(results.tokens.getItemAt(0).newLineType, 1 /* LineFeed */);
- assert_1.default.equal(results.tokens.getItemAt(2).type, 2 /* NewLine */);
- assert_1.default.equal(results.tokens.getItemAt(2).newLineType, 2 /* CarriageReturnLineFeed */);
+ assert_1.default.equal(results.tokens.getItemAt(0).newLineType, 1 /* NewLineType.LineFeed */);
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 2 /* TokenType.NewLine */);
+ assert_1.default.equal(results.tokens.getItemAt(2).newLineType, 2 /* NewLineType.CarriageReturnLineFeed */);
  assert_1.default.equal(results.tokens.getItemAt(2).length, 2);
- assert_1.default.equal(results.tokens.getItemAt(4).type, 2 /* NewLine */);
- assert_1.default.equal(results.tokens.getItemAt(4).newLineType, 0 /* CarriageReturn */);
+ assert_1.default.equal(results.tokens.getItemAt(4).type, 2 /* TokenType.NewLine */);
+ assert_1.default.equal(results.tokens.getItemAt(4).newLineType, 0 /* NewLineType.CarriageReturn */);
  assert_1.default.equal(results.tokens.getItemAt(4).length, 1);
- assert_1.default.equal(results.tokens.getItemAt(6).type, 2 /* NewLine */);
- assert_1.default.equal(results.tokens.getItemAt(6).newLineType, 3 /* Implied */);
+ assert_1.default.equal(results.tokens.getItemAt(6).type, 2 /* TokenType.NewLine */);
+ assert_1.default.equal(results.tokens.getItemAt(6).newLineType, 3 /* NewLineType.Implied */);
  assert_1.default.equal(results.tokens.getItemAt(6).length, 0);
  assert_1.default.equal(results.tokens.getItemAtPosition(0), -1);
  assert_1.default.equal(results.tokens.getItemAtPosition(1), -1);
@@ -207,9 +211,9 @@ test('NewLineEliding', () => {
  const t = new tokenizer_1.Tokenizer();
  const results = t.tokenize('\n\r\n\r');
  assert_1.default.equal(results.tokens.count, 1 + _implicitTokenCountNoImplicitNewLine);
- assert_1.default.equal(results.tokens.getItemAt(0).type, 2 /* NewLine */);
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 2 /* TokenType.NewLine */);
  assert_1.default.equal(results.tokens.getItemAt(0).length, 1);
- assert_1.default.equal(results.tokens.getItemAt(0).newLineType, 1 /* LineFeed */);
+ assert_1.default.equal(results.tokens.getItemAt(0).newLineType, 1 /* NewLineType.LineFeed */);
  assert_1.default.equal(results.tokens.getItemAtPosition(0), 0);
  assert_1.default.equal(results.tokens.getItemAtPosition(3), 0);
  assert_1.default.equal(results.tokens.getItemAtPosition(4), 1);
@@ -220,12 +224,12 @@ test('LineContinuation', () => {
  const t = new tokenizer_1.Tokenizer();
  const results = t.tokenize('foo \\\na \\\r\nb \\\rc \\ \n # Comment \\\n');
  assert_1.default.equal(results.tokens.count, 6 + _implicitTokenCountNoImplicitNewLine);
- assert_1.default.equal(results.tokens.getItemAt(0).type, 7 /* Identifier */);
- assert_1.default.equal(results.tokens.getItemAt(1).type, 7 /* Identifier */);
- assert_1.default.equal(results.tokens.getItemAt(2).type, 7 /* Identifier */);
- assert_1.default.equal(results.tokens.getItemAt(3).type, 7 /* Identifier */);
- assert_1.default.equal(results.tokens.getItemAt(4).type, 0 /* Invalid */);
- assert_1.default.equal(results.tokens.getItemAt(5).type, 2 /* NewLine */);
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 7 /* TokenType.Identifier */);
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 7 /* TokenType.Identifier */);
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 7 /* TokenType.Identifier */);
+ assert_1.default.equal(results.tokens.getItemAt(3).type, 7 /* TokenType.Identifier */);
+ assert_1.default.equal(results.tokens.getItemAt(4).type, 0 /* TokenType.Invalid */);
+ assert_1.default.equal(results.tokens.getItemAt(5).type, 2 /* TokenType.NewLine */);
  assert_1.default.equal(results.tokens.getItemAtPosition(0), 0);
  assert_1.default.equal(results.tokens.getItemAtPosition(6), 0);
  assert_1.default.equal(results.tokens.getItemAtPosition(7), 1);
@@ -246,12 +250,12 @@ test('Dots', () => {
  const t = new tokenizer_1.Tokenizer();
  const results = t.tokenize('. .. ... ....');
  assert_1.default.equal(results.tokens.count, 6 + _implicitTokenCount);
- assert_1.default.equal(results.tokens.getItemAt(0).type, 20 /* Dot */);
- assert_1.default.equal(results.tokens.getItemAt(1).type, 20 /* Dot */);
- assert_1.default.equal(results.tokens.getItemAt(2).type, 20 /* Dot */);
- assert_1.default.equal(results.tokens.getItemAt(3).type, 19 /* Ellipsis */);
- assert_1.default.equal(results.tokens.getItemAt(4).type, 19 /* Ellipsis */);
- assert_1.default.equal(results.tokens.getItemAt(5).type, 20 /* Dot */);
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 20 /* TokenType.Dot */);
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 20 /* TokenType.Dot */);
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 20 /* TokenType.Dot */);
+ assert_1.default.equal(results.tokens.getItemAt(3).type, 19 /* TokenType.Ellipsis */);
+ assert_1.default.equal(results.tokens.getItemAt(4).type, 19 /* TokenType.Ellipsis */);
+ assert_1.default.equal(results.tokens.getItemAt(5).type, 20 /* TokenType.Dot */);
  assert_1.default.equal(results.tokens.getItemAtPosition(0), 0);
  assert_1.default.equal(results.tokens.getItemAtPosition(1), 0);
  assert_1.default.equal(results.tokens.getItemAtPosition(2), 1);
@@ -270,49 +274,49 @@ test('PunctuationTokens', () => {
  const t = new tokenizer_1.Tokenizer();
  const results = t.tokenize(':;,()[]{}->');
  assert_1.default.equal(results.tokens.count, 10 + _implicitTokenCount);
- assert_1.default.equal(results.tokens.getItemAt(0).type, 10 /* Colon */);
- assert_1.default.equal(results.tokens.getItemAt(1).type, 11 /* Semicolon */);
- assert_1.default.equal(results.tokens.getItemAt(2).type, 12 /* Comma */);
- assert_1.default.equal(results.tokens.getItemAt(3).type, 13 /* OpenParenthesis */);
- assert_1.default.equal(results.tokens.getItemAt(4).type, 14 /* CloseParenthesis */);
- assert_1.default.equal(results.tokens.getItemAt(5).type, 15 /* OpenBracket */);
- assert_1.default.equal(results.tokens.getItemAt(6).type, 16 /* CloseBracket */);
- assert_1.default.equal(results.tokens.getItemAt(7).type, 17 /* OpenCurlyBrace */);
- assert_1.default.equal(results.tokens.getItemAt(8).type, 18 /* CloseCurlyBrace */);
- assert_1.default.equal(results.tokens.getItemAt(9).type, 21 /* Arrow */);
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 10 /* TokenType.Colon */);
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 11 /* TokenType.Semicolon */);
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 12 /* TokenType.Comma */);
+ assert_1.default.equal(results.tokens.getItemAt(3).type, 13 /* TokenType.OpenParenthesis */);
+ assert_1.default.equal(results.tokens.getItemAt(4).type, 14 /* TokenType.CloseParenthesis */);
+ assert_1.default.equal(results.tokens.getItemAt(5).type, 15 /* TokenType.OpenBracket */);
+ assert_1.default.equal(results.tokens.getItemAt(6).type, 16 /* TokenType.CloseBracket */);
+ assert_1.default.equal(results.tokens.getItemAt(7).type, 17 /* TokenType.OpenCurlyBrace */);
+ assert_1.default.equal(results.tokens.getItemAt(8).type, 18 /* TokenType.CloseCurlyBrace */);
+ assert_1.default.equal(results.tokens.getItemAt(9).type, 21 /* TokenType.Arrow */);
  });
  test('IndentDedent', () => {
  const t = new tokenizer_1.Tokenizer();
  const results = t.tokenize('test\n' + ' i1\n' + ' i2 # \n' + ' # \n' + ' \ti3\n' + '\ti4\n' + ' i1');
  assert_1.default.equal(results.tokens.count, 16 + _implicitTokenCount);
- assert_1.default.equal(results.tokens.getItemAt(0).type, 7 /* Identifier */);
- assert_1.default.equal(results.tokens.getItemAt(1).type, 2 /* NewLine */);
- assert_1.default.equal(results.tokens.getItemAt(2).type, 3 /* Indent */);
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 7 /* TokenType.Identifier */);
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 2 /* TokenType.NewLine */);
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 3 /* TokenType.Indent */);
  assert_1.default.equal(results.tokens.getItemAt(2).indentAmount, 2);
  assert_1.default.equal(results.tokens.getItemAt(2).length, 2);
- assert_1.default.equal(results.tokens.getItemAt(3).type, 7 /* Identifier */);
- assert_1.default.equal(results.tokens.getItemAt(4).type, 2 /* NewLine */);
- assert_1.default.equal(results.tokens.getItemAt(5).type, 7 /* Identifier */);
- assert_1.default.equal(results.tokens.getItemAt(6).type, 2 /* NewLine */);
- assert_1.default.equal(results.tokens.getItemAt(7).type, 3 /* Indent */);
+ assert_1.default.equal(results.tokens.getItemAt(3).type, 7 /* TokenType.Identifier */);
+ assert_1.default.equal(results.tokens.getItemAt(4).type, 2 /* TokenType.NewLine */);
+ assert_1.default.equal(results.tokens.getItemAt(5).type, 7 /* TokenType.Identifier */);
+ assert_1.default.equal(results.tokens.getItemAt(6).type, 2 /* TokenType.NewLine */);
+ assert_1.default.equal(results.tokens.getItemAt(7).type, 3 /* TokenType.Indent */);
  assert_1.default.equal(results.tokens.getItemAt(7).indentAmount, 8);
  assert_1.default.equal(results.tokens.getItemAt(7).length, 3);
- assert_1.default.equal(results.tokens.getItemAt(8).type, 7 /* Identifier */);
- assert_1.default.equal(results.tokens.getItemAt(9).type, 2 /* NewLine */);
- assert_1.default.equal(results.tokens.getItemAt(10).type, 3 /* Indent */);
+ assert_1.default.equal(results.tokens.getItemAt(8).type, 7 /* TokenType.Identifier */);
+ assert_1.default.equal(results.tokens.getItemAt(9).type, 2 /* TokenType.NewLine */);
+ assert_1.default.equal(results.tokens.getItemAt(10).type, 3 /* TokenType.Indent */);
  assert_1.default.equal(results.tokens.getItemAt(10).isIndentAmbiguous, true);
  assert_1.default.equal(results.tokens.getItemAt(10).length, 1);
- assert_1.default.equal(results.tokens.getItemAt(11).type, 7 /* Identifier */);
- assert_1.default.equal(results.tokens.getItemAt(12).type, 2 /* NewLine */);
- assert_1.default.equal(results.tokens.getItemAt(13).type, 4 /* Dedent */);
+ assert_1.default.equal(results.tokens.getItemAt(11).type, 7 /* TokenType.Identifier */);
+ assert_1.default.equal(results.tokens.getItemAt(12).type, 2 /* TokenType.NewLine */);
+ assert_1.default.equal(results.tokens.getItemAt(13).type, 4 /* TokenType.Dedent */);
  assert_1.default.equal(results.tokens.getItemAt(13).indentAmount, 2);
  assert_1.default.equal(results.tokens.getItemAt(13).matchesIndent, true);
- assert_1.default.equal(results.tokens.getItemAt(14).type, 4 /* Dedent */);
+ assert_1.default.equal(results.tokens.getItemAt(14).type, 4 /* TokenType.Dedent */);
  assert_1.default.equal(results.tokens.getItemAt(14).indentAmount, 1);
  assert_1.default.equal(results.tokens.getItemAt(14).matchesIndent, false);
- assert_1.default.equal(results.tokens.getItemAt(15).type, 7 /* Identifier */);
- assert_1.default.equal(results.tokens.getItemAt(16).type, 2 /* NewLine */);
- assert_1.default.equal(results.tokens.getItemAt(17).type, 1 /* EndOfStream */);
+ assert_1.default.equal(results.tokens.getItemAt(15).type, 7 /* TokenType.Identifier */);
+ assert_1.default.equal(results.tokens.getItemAt(16).type, 2 /* TokenType.NewLine */);
+ assert_1.default.equal(results.tokens.getItemAt(17).type, 1 /* TokenType.EndOfStream */);
  });
  test('IndentDedentParen', () => {
  const t = new tokenizer_1.Tokenizer();
@@ -320,28 +324,28 @@ test('IndentDedentParen', () => {
  assert_1.default.equal(results.tokens.count, 8 + _implicitTokenCount);
  // Test that indent and dedent tokens are suppressed within
  // a parenthetical clause.
- assert_1.default.equal(results.tokens.getItemAt(0).type, 7 /* Identifier */);
- assert_1.default.equal(results.tokens.getItemAt(1).type, 13 /* OpenParenthesis */);
- assert_1.default.equal(results.tokens.getItemAt(2).type, 7 /* Identifier */);
- assert_1.default.equal(results.tokens.getItemAt(3).type, 14 /* CloseParenthesis */);
- assert_1.default.equal(results.tokens.getItemAt(4).type, 2 /* NewLine */);
- assert_1.default.equal(results.tokens.getItemAt(5).type, 3 /* Indent */);
- assert_1.default.equal(results.tokens.getItemAt(6).type, 7 /* Identifier */);
- assert_1.default.equal(results.tokens.getItemAt(7).type, 2 /* NewLine */);
- assert_1.default.equal(results.tokens.getItemAt(8).type, 4 /* Dedent */);
- assert_1.default.equal(results.tokens.getItemAt(9).type, 1 /* EndOfStream */);
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 7 /* TokenType.Identifier */);
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 13 /* TokenType.OpenParenthesis */);
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 7 /* TokenType.Identifier */);
+ assert_1.default.equal(results.tokens.getItemAt(3).type, 14 /* TokenType.CloseParenthesis */);
+ assert_1.default.equal(results.tokens.getItemAt(4).type, 2 /* TokenType.NewLine */);
+ assert_1.default.equal(results.tokens.getItemAt(5).type, 3 /* TokenType.Indent */);
+ assert_1.default.equal(results.tokens.getItemAt(6).type, 7 /* TokenType.Identifier */);
+ assert_1.default.equal(results.tokens.getItemAt(7).type, 2 /* TokenType.NewLine */);
+ assert_1.default.equal(results.tokens.getItemAt(8).type, 4 /* TokenType.Dedent */);
+ assert_1.default.equal(results.tokens.getItemAt(9).type, 1 /* TokenType.EndOfStream */);
  });
  test('Strings: simple', () => {
  const t = new tokenizer_1.Tokenizer();
  const results = t.tokenize(' "a"');
  assert_1.default.equal(results.tokens.count, 3 + _implicitTokenCount);
- assert_1.default.equal(results.tokens.getItemAt(0).type, 3 /* Indent */);
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 3 /* TokenType.Indent */);
  const stringToken = results.tokens.getItemAt(1);
- assert_1.default.equal(stringToken.type, 5 /* String */);
+ assert_1.default.equal(stringToken.type, 5 /* TokenType.String */);
  assert_1.default.equal(stringToken.length, 3);
  assert_1.default.equal(stringToken.escapedValue, 'a');
- assert_1.default.equal(stringToken.flags, 2 /* DoubleQuote */);
- assert_1.default.equal(results.tokens.getItemAt(2).type, 2 /* NewLine */);
+ assert_1.default.equal(stringToken.flags, 2 /* StringTokenFlags.DoubleQuote */);
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 2 /* TokenType.NewLine */);
  });
  test('Strings: unclosed', () => {
  const t = new tokenizer_1.Tokenizer();
@@ -352,13 +356,13 @@ test('Strings: unclosed', () => {
  [10, 18],
  [29, 10],
  ];
- assert_1.default.equal(results.tokens.getItemAt(0).type, 3 /* Indent */);
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 3 /* TokenType.Indent */);
  for (let i = 0; i < ranges.length; i++) {
  assert_1.default.equal(results.tokens.getItemAt(i + 1).start, ranges[i][0]);
  assert_1.default.equal(results.tokens.getItemAt(i + 1).length, ranges[i][1]);
- assert_1.default.equal(results.tokens.getItemAt(i + 1).type, 5 /* String */);
+ assert_1.default.equal(results.tokens.getItemAt(i + 1).type, 5 /* TokenType.String */);
  }
- assert_1.default.equal(results.tokens.getItemAt(5).type, 4 /* Dedent */);
+ assert_1.default.equal(results.tokens.getItemAt(5).type, 4 /* TokenType.Dedent */);
  });
  test('Strings: escaped across multiple lines', () => {
  const t = new tokenizer_1.Tokenizer();
@@ -368,13 +372,13 @@ test('Strings: escaped across multiple lines', () => {
  [1, 6],
  [8, 7],
  ];
- assert_1.default.equal(results.tokens.getItemAt(0).type, 3 /* Indent */);
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 3 /* TokenType.Indent */);
  for (let i = 0; i < ranges.length; i++) {
  assert_1.default.equal(results.tokens.getItemAt(i + 1).start, ranges[i][0]);
  assert_1.default.equal(results.tokens.getItemAt(i + 1).length, ranges[i][1]);
- assert_1.default.equal(results.tokens.getItemAt(i + 1).type, 5 /* String */);
+ assert_1.default.equal(results.tokens.getItemAt(i + 1).type, 5 /* TokenType.String */);
  }
- assert_1.default.equal(results.tokens.getItemAt(5).type, 1 /* EndOfStream */);
+ assert_1.default.equal(results.tokens.getItemAt(5).type, 1 /* TokenType.EndOfStream */);
  });
  test('Strings: block next to regular, double-quoted', () => {
  const t = new tokenizer_1.Tokenizer();
@@ -387,7 +391,7 @@ test('Strings: block next to regular, double-quoted', () => {
  for (let i = 0; i < ranges.length; i++) {
  assert_1.default.equal(results.tokens.getItemAt(i).start, ranges[i][0]);
  assert_1.default.equal(results.tokens.getItemAt(i).length, ranges[i][1]);
- assert_1.default.equal(results.tokens.getItemAt(i).type, 5 /* String */);
+ assert_1.default.equal(results.tokens.getItemAt(i).type, 5 /* TokenType.String */);
  }
  });
  test('Strings: block next to block, double-quoted', () => {
@@ -401,7 +405,7 @@ test('Strings: block next to block, double-quoted', () => {
  for (let i = 0; i < ranges.length; i++) {
  assert_1.default.equal(results.tokens.getItemAt(i).start, ranges[i][0]);
  assert_1.default.equal(results.tokens.getItemAt(i).length, ranges[i][1]);
- assert_1.default.equal(results.tokens.getItemAt(i).type, 5 /* String */);
+ assert_1.default.equal(results.tokens.getItemAt(i).type, 5 /* TokenType.String */);
  }
  });
  test('Strings: unclosed sequence of quotes', () => {
@@ -412,7 +416,7 @@ test('Strings: unclosed sequence of quotes', () => {
  for (let i = 0; i < ranges.length; i++) {
  assert_1.default.equal(results.tokens.getItemAt(i).start, ranges[i][0]);
  assert_1.default.equal(results.tokens.getItemAt(i).length, ranges[i][1]);
- assert_1.default.equal(results.tokens.getItemAt(i).type, 5 /* String */);
+ assert_1.default.equal(results.tokens.getItemAt(i).type, 5 /* TokenType.String */);
  }
  });
  test('Strings: single quote escape', () => {
@@ -420,8 +424,8 @@ test('Strings: single quote escape', () => {
  const results = t.tokenize("'\\'quoted\\''");
  assert_1.default.equal(results.tokens.count, 1 + _implicitTokenCount);
  const stringToken = results.tokens.getItemAt(0);
- assert_1.default.equal(stringToken.type, 5 /* String */);
- assert_1.default.equal(stringToken.flags, 1 /* SingleQuote */);
+ assert_1.default.equal(stringToken.type, 5 /* TokenType.String */);
+ assert_1.default.equal(stringToken.flags, 1 /* StringTokenFlags.SingleQuote */);
  assert_1.default.equal(stringToken.length, 12);
  assert_1.default.equal(stringToken.prefixLength, 0);
  assert_1.default.equal(stringToken.escapedValue, "\\'quoted\\'");
@@ -431,8 +435,8 @@ test('Strings: double quote escape', () => {
  const results = t.tokenize('"\\"quoted\\""');
  assert_1.default.equal(results.tokens.count, 1 + _implicitTokenCount);
  const stringToken = results.tokens.getItemAt(0);
- assert_1.default.equal(stringToken.type, 5 /* String */);
- assert_1.default.equal(stringToken.flags, 2 /* DoubleQuote */);
+ assert_1.default.equal(stringToken.type, 5 /* TokenType.String */);
+ assert_1.default.equal(stringToken.flags, 2 /* StringTokenFlags.DoubleQuote */);
  assert_1.default.equal(stringToken.length, 12);
  assert_1.default.equal(stringToken.escapedValue, '\\"quoted\\"');
  });
@@ -441,8 +445,8 @@ test('Strings: triplicate double quote escape', () => {
  const results = t.tokenize('"""\\"quoted\\""""');
  assert_1.default.equal(results.tokens.count, 1 + _implicitTokenCount);
  const stringToken = results.tokens.getItemAt(0);
- assert_1.default.equal(stringToken.type, 5 /* String */);
- assert_1.default.equal(stringToken.flags, 2 /* DoubleQuote */ | 4 /* Triplicate */);
+ assert_1.default.equal(stringToken.type, 5 /* TokenType.String */);
+ assert_1.default.equal(stringToken.flags, 2 /* StringTokenFlags.DoubleQuote */ | 4 /* StringTokenFlags.Triplicate */);
  assert_1.default.equal(stringToken.length, 16);
  assert_1.default.equal(stringToken.escapedValue, '\\"quoted\\"');
  });
@@ -450,43 +454,43 @@ test('Strings: single quoted f-string', () => {
  const t = new tokenizer_1.Tokenizer();
  const results = t.tokenize("a+f'quoted'");
  assert_1.default.equal(results.tokens.count, 5 + _implicitTokenCount);
- assert_1.default.equal(results.tokens.getItemAt(0).type, 7 /* Identifier */);
- assert_1.default.equal(results.tokens.getItemAt(1).type, 9 /* Operator */);
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 7 /* TokenType.Identifier */);
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 9 /* TokenType.Operator */);
  const fStringStartToken = results.tokens.getItemAt(2);
- assert_1.default.equal(fStringStartToken.type, 24 /* FStringStart */);
- assert_1.default.equal(fStringStartToken.flags, 1 /* SingleQuote */ | 64 /* Format */);
+ assert_1.default.equal(fStringStartToken.type, 24 /* TokenType.FStringStart */);
+ assert_1.default.equal(fStringStartToken.flags, 1 /* StringTokenFlags.SingleQuote */ | 64 /* StringTokenFlags.Format */);
  assert_1.default.equal(fStringStartToken.length, 2);
  const fStringMiddleToken = results.tokens.getItemAt(3);
- assert_1.default.equal(fStringMiddleToken.type, 25 /* FStringMiddle */);
- assert_1.default.equal(fStringMiddleToken.flags, 1 /* SingleQuote */ | 64 /* Format */);
+ assert_1.default.equal(fStringMiddleToken.type, 25 /* TokenType.FStringMiddle */);
+ assert_1.default.equal(fStringMiddleToken.flags, 1 /* StringTokenFlags.SingleQuote */ | 64 /* StringTokenFlags.Format */);
  assert_1.default.equal(fStringMiddleToken.length, 6);
  assert_1.default.equal(fStringMiddleToken.escapedValue, 'quoted');
  const fStringEndToken = results.tokens.getItemAt(4);
- assert_1.default.equal(fStringEndToken.type, 26 /* FStringEnd */);
- assert_1.default.equal(fStringEndToken.flags, 1 /* SingleQuote */ | 64 /* Format */);
+ assert_1.default.equal(fStringEndToken.type, 26 /* TokenType.FStringEnd */);
+ assert_1.default.equal(fStringEndToken.flags, 1 /* StringTokenFlags.SingleQuote */ | 64 /* StringTokenFlags.Format */);
  assert_1.default.equal(fStringEndToken.length, 1);
  });
  test('Strings: double quoted f-string', () => {
  const t = new tokenizer_1.Tokenizer();
  const results = t.tokenize('x(1,f"quoted")');
  assert_1.default.equal(results.tokens.count, 8 + _implicitTokenCount);
- assert_1.default.equal(results.tokens.getItemAt(0).type, 7 /* Identifier */);
- assert_1.default.equal(results.tokens.getItemAt(1).type, 13 /* OpenParenthesis */);
- assert_1.default.equal(results.tokens.getItemAt(2).type, 6 /* Number */);
- assert_1.default.equal(results.tokens.getItemAt(3).type, 12 /* Comma */);
- assert_1.default.equal(results.tokens.getItemAt(7).type, 14 /* CloseParenthesis */);
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 7 /* TokenType.Identifier */);
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 13 /* TokenType.OpenParenthesis */);
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 6 /* TokenType.Number */);
+ assert_1.default.equal(results.tokens.getItemAt(3).type, 12 /* TokenType.Comma */);
+ assert_1.default.equal(results.tokens.getItemAt(7).type, 14 /* TokenType.CloseParenthesis */);
  const fStringStartToken = results.tokens.getItemAt(4);
- assert_1.default.equal(fStringStartToken.type, 24 /* FStringStart */);
- assert_1.default.equal(fStringStartToken.flags, 2 /* DoubleQuote */ | 64 /* Format */);
+ assert_1.default.equal(fStringStartToken.type, 24 /* TokenType.FStringStart */);
+ assert_1.default.equal(fStringStartToken.flags, 2 /* StringTokenFlags.DoubleQuote */ | 64 /* StringTokenFlags.Format */);
  assert_1.default.equal(fStringStartToken.length, 2);
  const fStringMiddleToken = results.tokens.getItemAt(5);
- assert_1.default.equal(fStringMiddleToken.type, 25 /* FStringMiddle */);
- assert_1.default.equal(fStringMiddleToken.flags, 2 /* DoubleQuote */ | 64 /* Format */);
+ assert_1.default.equal(fStringMiddleToken.type, 25 /* TokenType.FStringMiddle */);
+ assert_1.default.equal(fStringMiddleToken.flags, 2 /* StringTokenFlags.DoubleQuote */ | 64 /* StringTokenFlags.Format */);
  assert_1.default.equal(fStringMiddleToken.length, 6);
  assert_1.default.equal(fStringMiddleToken.escapedValue, 'quoted');
  const fStringEndToken = results.tokens.getItemAt(6);
- assert_1.default.equal(fStringEndToken.type, 26 /* FStringEnd */);
- assert_1.default.equal(fStringEndToken.flags, 2 /* DoubleQuote */ | 64 /* Format */);
+ assert_1.default.equal(fStringEndToken.type, 26 /* TokenType.FStringEnd */);
+ assert_1.default.equal(fStringEndToken.flags, 2 /* StringTokenFlags.DoubleQuote */ | 64 /* StringTokenFlags.Format */);
  assert_1.default.equal(fStringEndToken.length, 1);
  });
  test('Strings: single quoted multiline f-string', () => {
@@ -494,17 +498,17 @@ test('Strings: single quoted multiline f-string', () => {
494
498
  const results = t.tokenize("f'''quoted'''");
495
499
  assert_1.default.equal(results.tokens.count, 3 + _implicitTokenCount);
496
500
  const fStringStartToken = results.tokens.getItemAt(0);
497
- assert_1.default.equal(fStringStartToken.type, 24 /* FStringStart */);
498
- assert_1.default.equal(fStringStartToken.flags, 1 /* SingleQuote */ | 4 /* Triplicate */ | 64 /* Format */);
501
+ assert_1.default.equal(fStringStartToken.type, 24 /* TokenType.FStringStart */);
502
+ assert_1.default.equal(fStringStartToken.flags, 1 /* StringTokenFlags.SingleQuote */ | 4 /* StringTokenFlags.Triplicate */ | 64 /* StringTokenFlags.Format */);
499
503
  assert_1.default.equal(fStringStartToken.length, 4);
500
504
  const fStringMiddleToken = results.tokens.getItemAt(1);
501
- assert_1.default.equal(fStringMiddleToken.type, 25 /* FStringMiddle */);
502
- assert_1.default.equal(fStringMiddleToken.flags, 1 /* SingleQuote */ | 4 /* Triplicate */ | 64 /* Format */);
505
+ assert_1.default.equal(fStringMiddleToken.type, 25 /* TokenType.FStringMiddle */);
506
+ assert_1.default.equal(fStringMiddleToken.flags, 1 /* StringTokenFlags.SingleQuote */ | 4 /* StringTokenFlags.Triplicate */ | 64 /* StringTokenFlags.Format */);
503
507
  assert_1.default.equal(fStringMiddleToken.length, 6);
504
508
  assert_1.default.equal(fStringMiddleToken.escapedValue, 'quoted');
505
509
  const fStringEndToken = results.tokens.getItemAt(2);
506
- assert_1.default.equal(fStringEndToken.type, 26 /* FStringEnd */);
507
- assert_1.default.equal(fStringEndToken.flags, 1 /* SingleQuote */ | 4 /* Triplicate */ | 64 /* Format */);
510
+ assert_1.default.equal(fStringEndToken.type, 26 /* TokenType.FStringEnd */);
511
+ assert_1.default.equal(fStringEndToken.flags, 1 /* StringTokenFlags.SingleQuote */ | 4 /* StringTokenFlags.Triplicate */ | 64 /* StringTokenFlags.Format */);
508
512
  assert_1.default.equal(fStringEndToken.length, 3);
509
513
  });
510
514
  test('Strings: double quoted multiline f-string', () => {
@@ -512,17 +516,17 @@ test('Strings: double quoted multiline f-string', () => {
512
516
  const results = t.tokenize('f"""quoted """');
513
517
  assert_1.default.equal(results.tokens.count, 3 + _implicitTokenCount);
514
518
  const fStringStartToken = results.tokens.getItemAt(0);
515
- assert_1.default.equal(fStringStartToken.type, 24 /* FStringStart */);
516
- assert_1.default.equal(fStringStartToken.flags, 2 /* DoubleQuote */ | 4 /* Triplicate */ | 64 /* Format */);
519
+ assert_1.default.equal(fStringStartToken.type, 24 /* TokenType.FStringStart */);
520
+ assert_1.default.equal(fStringStartToken.flags, 2 /* StringTokenFlags.DoubleQuote */ | 4 /* StringTokenFlags.Triplicate */ | 64 /* StringTokenFlags.Format */);
517
521
  assert_1.default.equal(fStringStartToken.length, 4);
518
522
  const fStringMiddleToken = results.tokens.getItemAt(1);
519
- assert_1.default.equal(fStringMiddleToken.type, 25 /* FStringMiddle */);
520
- assert_1.default.equal(fStringMiddleToken.flags, 2 /* DoubleQuote */ | 4 /* Triplicate */ | 64 /* Format */);
523
+ assert_1.default.equal(fStringMiddleToken.type, 25 /* TokenType.FStringMiddle */);
524
+ assert_1.default.equal(fStringMiddleToken.flags, 2 /* StringTokenFlags.DoubleQuote */ | 4 /* StringTokenFlags.Triplicate */ | 64 /* StringTokenFlags.Format */);
521
525
  assert_1.default.equal(fStringMiddleToken.length, 7);
522
526
  assert_1.default.equal(fStringMiddleToken.escapedValue, 'quoted ');
523
527
  const fStringEndToken = results.tokens.getItemAt(2);
524
- assert_1.default.equal(fStringEndToken.type, 26 /* FStringEnd */);
525
- assert_1.default.equal(fStringEndToken.flags, 2 /* DoubleQuote */ | 4 /* Triplicate */ | 64 /* Format */);
528
+ assert_1.default.equal(fStringEndToken.type, 26 /* TokenType.FStringEnd */);
529
+ assert_1.default.equal(fStringEndToken.flags, 2 /* StringTokenFlags.DoubleQuote */ | 4 /* StringTokenFlags.Triplicate */ | 64 /* StringTokenFlags.Format */);
526
530
  assert_1.default.equal(fStringEndToken.length, 3);
527
531
  });
528
532
  test('Strings: f-string with single right brace', () => {
@@ -530,253 +534,253 @@ test('Strings: f-string with single right brace', () => {
530
534
  const results = t.tokenize("f'hello}'");
531
535
  assert_1.default.equal(results.tokens.count, 4 + _implicitTokenCount);
532
536
  const fStringStartToken = results.tokens.getItemAt(0);
533
- assert_1.default.equal(fStringStartToken.type, 24 /* FStringStart */);
537
+ assert_1.default.equal(fStringStartToken.type, 24 /* TokenType.FStringStart */);
534
538
  assert_1.default.equal(fStringStartToken.length, 2);
535
- assert_1.default.equal(fStringStartToken.flags, 1 /* SingleQuote */ | 64 /* Format */);
539
+ assert_1.default.equal(fStringStartToken.flags, 1 /* StringTokenFlags.SingleQuote */ | 64 /* StringTokenFlags.Format */);
536
540
  const fStringMiddleToken = results.tokens.getItemAt(1);
537
- assert_1.default.equal(fStringMiddleToken.type, 25 /* FStringMiddle */);
541
+ assert_1.default.equal(fStringMiddleToken.type, 25 /* TokenType.FStringMiddle */);
538
542
  assert_1.default.equal(fStringMiddleToken.length, 5);
539
- assert_1.default.equal(fStringMiddleToken.flags, 1 /* SingleQuote */ | 64 /* Format */ | 256 /* ReplacementFieldEnd */);
543
+ assert_1.default.equal(fStringMiddleToken.flags, 1 /* StringTokenFlags.SingleQuote */ | 64 /* StringTokenFlags.Format */ | 256 /* StringTokenFlags.ReplacementFieldEnd */);
540
544
  const braceToken = results.tokens.getItemAt(2).type;
541
- assert_1.default.equal(braceToken, 18 /* CloseCurlyBrace */);
545
+ assert_1.default.equal(braceToken, 18 /* TokenType.CloseCurlyBrace */);
542
546
  const fStringEndToken = results.tokens.getItemAt(3);
543
- assert_1.default.equal(fStringEndToken.type, 26 /* FStringEnd */);
544
- assert_1.default.equal(fStringEndToken.flags, 1 /* SingleQuote */ | 64 /* Format */);
547
+ assert_1.default.equal(fStringEndToken.type, 26 /* TokenType.FStringEnd */);
548
+ assert_1.default.equal(fStringEndToken.flags, 1 /* StringTokenFlags.SingleQuote */ | 64 /* StringTokenFlags.Format */);
545
549
  assert_1.default.equal(fStringEndToken.length, 1);
546
550
  });
547
551
  test('Strings: f-string with backslash escape', () => {
548
552
  const t = new tokenizer_1.Tokenizer();
549
553
  const results = t.tokenize(`f'\\\\'`);
550
554
  assert_1.default.equal(results.tokens.count, 3 + _implicitTokenCount);
551
- assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
555
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* TokenType.FStringStart */);
552
556
  const fStringMiddleToken = results.tokens.getItemAt(1);
553
- assert_1.default.equal(fStringMiddleToken.type, 25 /* FStringMiddle */);
557
+ assert_1.default.equal(fStringMiddleToken.type, 25 /* TokenType.FStringMiddle */);
554
558
  assert_1.default.equal(fStringMiddleToken.length, 2);
555
- assert_1.default.equal(results.tokens.getItemAt(2).type, 26 /* FStringEnd */);
559
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 26 /* TokenType.FStringEnd */);
556
560
  });
557
561
  test('Strings: f-string with new line escape', () => {
558
562
  const t = new tokenizer_1.Tokenizer();
559
563
  const results = t.tokenize(`f'x \\\ny'`);
560
564
  assert_1.default.equal(results.tokens.count, 3 + _implicitTokenCount);
561
- assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
562
- assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* FStringMiddle */);
563
- assert_1.default.equal(results.tokens.getItemAt(2).type, 26 /* FStringEnd */);
565
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* TokenType.FStringStart */);
566
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* TokenType.FStringMiddle */);
567
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 26 /* TokenType.FStringEnd */);
564
568
  });
565
569
  test('Strings: f-string with escape in expression', () => {
566
570
  const t = new tokenizer_1.Tokenizer();
567
571
  const results = t.tokenize(`f'hello { "\\t" }'`);
568
572
  assert_1.default.equal(results.tokens.count, 6 + _implicitTokenCount);
569
- assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
570
- assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* FStringMiddle */);
571
- assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* OpenCurlyBrace */);
572
- assert_1.default.equal(results.tokens.getItemAt(3).type, 5 /* String */);
573
- assert_1.default.equal(results.tokens.getItemAt(4).type, 18 /* CloseCurlyBrace */);
574
- assert_1.default.equal(results.tokens.getItemAt(5).type, 26 /* FStringEnd */);
573
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* TokenType.FStringStart */);
574
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* TokenType.FStringMiddle */);
575
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* TokenType.OpenCurlyBrace */);
576
+ assert_1.default.equal(results.tokens.getItemAt(3).type, 5 /* TokenType.String */);
577
+ assert_1.default.equal(results.tokens.getItemAt(4).type, 18 /* TokenType.CloseCurlyBrace */);
578
+ assert_1.default.equal(results.tokens.getItemAt(5).type, 26 /* TokenType.FStringEnd */);
575
579
  });
576
580
  test('Strings: f-string with escape in format string 1', () => {
577
581
  const t = new tokenizer_1.Tokenizer();
578
582
  const results = t.tokenize("f'he\\{ 1 }lo'");
579
583
  assert_1.default.equal(results.tokens.count, 7 + _implicitTokenCount);
580
- assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
584
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* TokenType.FStringStart */);
581
585
  const middleFString = results.tokens.getItemAt(1);
582
- assert_1.default.equal(middleFString.type, 25 /* FStringMiddle */);
586
+ assert_1.default.equal(middleFString.type, 25 /* TokenType.FStringMiddle */);
583
587
  assert_1.default.equal(middleFString.escapedValue.length, 3);
584
- assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* OpenCurlyBrace */);
585
- assert_1.default.equal(results.tokens.getItemAt(3).type, 6 /* Number */);
586
- assert_1.default.equal(results.tokens.getItemAt(4).type, 18 /* CloseCurlyBrace */);
587
- assert_1.default.equal(results.tokens.getItemAt(5).type, 25 /* FStringMiddle */);
588
- assert_1.default.equal(results.tokens.getItemAt(6).type, 26 /* FStringEnd */);
588
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* TokenType.OpenCurlyBrace */);
589
+ assert_1.default.equal(results.tokens.getItemAt(3).type, 6 /* TokenType.Number */);
590
+ assert_1.default.equal(results.tokens.getItemAt(4).type, 18 /* TokenType.CloseCurlyBrace */);
591
+ assert_1.default.equal(results.tokens.getItemAt(5).type, 25 /* TokenType.FStringMiddle */);
592
+ assert_1.default.equal(results.tokens.getItemAt(6).type, 26 /* TokenType.FStringEnd */);
589
593
  });
590
594
  test('Strings: f-string with escape in format string 2', () => {
591
595
  const t = new tokenizer_1.Tokenizer();
592
596
  const results = t.tokenize(`f"'{{\\"{0}\\": {0}}}'"`);
593
597
  assert_1.default.equal(results.tokens.count, 11 + _implicitTokenCount);
594
- assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
598
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* TokenType.FStringStart */);
595
599
  const middleFString = results.tokens.getItemAt(1);
596
- assert_1.default.equal(middleFString.type, 25 /* FStringMiddle */);
600
+ assert_1.default.equal(middleFString.type, 25 /* TokenType.FStringMiddle */);
597
601
  assert_1.default.equal(middleFString.escapedValue.length, 5);
598
- assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* OpenCurlyBrace */);
599
- assert_1.default.equal(results.tokens.getItemAt(3).type, 6 /* Number */);
600
- assert_1.default.equal(results.tokens.getItemAt(4).type, 18 /* CloseCurlyBrace */);
601
- assert_1.default.equal(results.tokens.getItemAt(5).type, 25 /* FStringMiddle */);
602
- assert_1.default.equal(results.tokens.getItemAt(6).type, 17 /* OpenCurlyBrace */);
603
- assert_1.default.equal(results.tokens.getItemAt(7).type, 6 /* Number */);
604
- assert_1.default.equal(results.tokens.getItemAt(8).type, 18 /* CloseCurlyBrace */);
605
- assert_1.default.equal(results.tokens.getItemAt(9).type, 25 /* FStringMiddle */);
606
- assert_1.default.equal(results.tokens.getItemAt(10).type, 26 /* FStringEnd */);
602
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* TokenType.OpenCurlyBrace */);
603
+ assert_1.default.equal(results.tokens.getItemAt(3).type, 6 /* TokenType.Number */);
604
+ assert_1.default.equal(results.tokens.getItemAt(4).type, 18 /* TokenType.CloseCurlyBrace */);
605
+ assert_1.default.equal(results.tokens.getItemAt(5).type, 25 /* TokenType.FStringMiddle */);
606
+ assert_1.default.equal(results.tokens.getItemAt(6).type, 17 /* TokenType.OpenCurlyBrace */);
607
+ assert_1.default.equal(results.tokens.getItemAt(7).type, 6 /* TokenType.Number */);
608
+ assert_1.default.equal(results.tokens.getItemAt(8).type, 18 /* TokenType.CloseCurlyBrace */);
609
+ assert_1.default.equal(results.tokens.getItemAt(9).type, 25 /* TokenType.FStringMiddle */);
610
+ assert_1.default.equal(results.tokens.getItemAt(10).type, 26 /* TokenType.FStringEnd */);
607
611
  });
608
612
  test('Strings: f-string with double brace', () => {
609
613
  const t = new tokenizer_1.Tokenizer();
610
614
  const results = t.tokenize(`f"hello {{{0==0}}}"`);
611
615
  assert_1.default.equal(results.tokens.count, 9 + _implicitTokenCount);
612
- assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
613
- assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* FStringMiddle */);
614
- assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* OpenCurlyBrace */);
615
- assert_1.default.equal(results.tokens.getItemAt(3).type, 6 /* Number */);
616
- assert_1.default.equal(results.tokens.getItemAt(4).type, 9 /* Operator */);
617
- assert_1.default.equal(results.tokens.getItemAt(5).type, 6 /* Number */);
618
- assert_1.default.equal(results.tokens.getItemAt(6).type, 18 /* CloseCurlyBrace */);
619
- assert_1.default.equal(results.tokens.getItemAt(7).type, 25 /* FStringMiddle */);
620
- assert_1.default.equal(results.tokens.getItemAt(8).type, 26 /* FStringEnd */);
616
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* TokenType.FStringStart */);
617
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* TokenType.FStringMiddle */);
618
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* TokenType.OpenCurlyBrace */);
619
+ assert_1.default.equal(results.tokens.getItemAt(3).type, 6 /* TokenType.Number */);
620
+ assert_1.default.equal(results.tokens.getItemAt(4).type, 9 /* TokenType.Operator */);
621
+ assert_1.default.equal(results.tokens.getItemAt(5).type, 6 /* TokenType.Number */);
622
+ assert_1.default.equal(results.tokens.getItemAt(6).type, 18 /* TokenType.CloseCurlyBrace */);
623
+ assert_1.default.equal(results.tokens.getItemAt(7).type, 25 /* TokenType.FStringMiddle */);
624
+ assert_1.default.equal(results.tokens.getItemAt(8).type, 26 /* TokenType.FStringEnd */);
621
625
  });
622
626
  test('Strings: f-string with walrus operator', () => {
623
627
  const t = new tokenizer_1.Tokenizer();
624
628
  const results = t.tokenize(`f"{(x:=0)}"`);
625
629
  assert_1.default.equal(results.tokens.count, 9 + _implicitTokenCount);
626
- assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
627
- assert_1.default.equal(results.tokens.getItemAt(1).type, 17 /* OpenCurlyBrace */);
628
- assert_1.default.equal(results.tokens.getItemAt(2).type, 13 /* OpenParenthesis */);
629
- assert_1.default.equal(results.tokens.getItemAt(3).type, 7 /* Identifier */);
630
- assert_1.default.equal(results.tokens.getItemAt(4).type, 9 /* Operator */);
631
- assert_1.default.equal(results.tokens.getItemAt(5).type, 6 /* Number */);
632
- assert_1.default.equal(results.tokens.getItemAt(6).type, 14 /* CloseParenthesis */);
633
- assert_1.default.equal(results.tokens.getItemAt(7).type, 18 /* CloseCurlyBrace */);
634
- assert_1.default.equal(results.tokens.getItemAt(8).type, 26 /* FStringEnd */);
630
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* TokenType.FStringStart */);
631
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 17 /* TokenType.OpenCurlyBrace */);
632
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 13 /* TokenType.OpenParenthesis */);
633
+ assert_1.default.equal(results.tokens.getItemAt(3).type, 7 /* TokenType.Identifier */);
634
+ assert_1.default.equal(results.tokens.getItemAt(4).type, 9 /* TokenType.Operator */);
635
+ assert_1.default.equal(results.tokens.getItemAt(5).type, 6 /* TokenType.Number */);
636
+ assert_1.default.equal(results.tokens.getItemAt(6).type, 14 /* TokenType.CloseParenthesis */);
637
+ assert_1.default.equal(results.tokens.getItemAt(7).type, 18 /* TokenType.CloseCurlyBrace */);
638
+ assert_1.default.equal(results.tokens.getItemAt(8).type, 26 /* TokenType.FStringEnd */);
635
639
  });
636
640
  test('Strings: f-string with single right brace', () => {
637
641
  const t = new tokenizer_1.Tokenizer();
638
642
  const results = t.tokenize(`f"}"`);
639
643
  assert_1.default.equal(results.tokens.count, 3 + _implicitTokenCount);
640
- assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
641
- assert_1.default.equal(results.tokens.getItemAt(1).type, 18 /* CloseCurlyBrace */);
642
- assert_1.default.equal(results.tokens.getItemAt(2).type, 26 /* FStringEnd */);
644
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* TokenType.FStringStart */);
645
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 18 /* TokenType.CloseCurlyBrace */);
646
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 26 /* TokenType.FStringEnd */);
643
647
  });
644
648
  test('Strings: f-string with comment', () => {
645
649
  const t = new tokenizer_1.Tokenizer();
646
650
  const results = t.tokenize(`f'''hello{\nx # comment\n}'''`);
647
651
  assert_1.default.equal(results.tokens.count, 6 + _implicitTokenCount);
648
- assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
649
- assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* FStringMiddle */);
650
- assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* OpenCurlyBrace */);
651
- assert_1.default.equal(results.tokens.getItemAt(3).type, 7 /* Identifier */);
652
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* TokenType.FStringStart */);
653
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* TokenType.FStringMiddle */);
654
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* TokenType.OpenCurlyBrace */);
655
+ assert_1.default.equal(results.tokens.getItemAt(3).type, 7 /* TokenType.Identifier */);
652
656
  const closeBraceToken = results.tokens.getItemAt(4);
653
- assert_1.default.equal(closeBraceToken.type, 18 /* CloseCurlyBrace */);
657
+ assert_1.default.equal(closeBraceToken.type, 18 /* TokenType.CloseCurlyBrace */);
654
658
  assert_1.default.deepEqual(closeBraceToken.comments, [
655
- { type: 0 /* Regular */, value: ' comment', start: 14, length: 8 },
659
+ { type: 0 /* CommentType.Regular */, value: ' comment', start: 14, length: 8 },
656
660
  ]);
657
- assert_1.default.equal(results.tokens.getItemAt(5).type, 26 /* FStringEnd */);
661
+ assert_1.default.equal(results.tokens.getItemAt(5).type, 26 /* TokenType.FStringEnd */);
658
662
  });
659
663
  test('Strings: f-string with unterminated expression', () => {
660
664
  const t = new tokenizer_1.Tokenizer();
661
665
  const results = t.tokenize("f'hello { a'");
662
666
  assert_1.default.equal(results.tokens.count, 5 + _implicitTokenCount);
663
- assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
664
- assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* FStringMiddle */);
665
- assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* OpenCurlyBrace */);
666
- assert_1.default.equal(results.tokens.getItemAt(3).type, 7 /* Identifier */);
667
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* TokenType.FStringStart */);
668
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* TokenType.FStringMiddle */);
669
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* TokenType.OpenCurlyBrace */);
670
+ assert_1.default.equal(results.tokens.getItemAt(3).type, 7 /* TokenType.Identifier */);
667
671
  const fStringEnd = results.tokens.getItemAt(4);
668
- assert_1.default.equal(fStringEnd.type, 26 /* FStringEnd */);
669
- assert_1.default.equal(fStringEnd.flags, 64 /* Format */ | 1 /* SingleQuote */);
672
+ assert_1.default.equal(fStringEnd.type, 26 /* TokenType.FStringEnd */);
673
+ assert_1.default.equal(fStringEnd.flags, 64 /* StringTokenFlags.Format */ | 1 /* StringTokenFlags.SingleQuote */);
670
674
  });
671
675
  test('Strings: f-string with replacement field', () => {
672
676
  const t = new tokenizer_1.Tokenizer();
673
677
  const results = t.tokenize("f'hello { a + b}'");
674
678
  assert_1.default.equal(results.tokens.count, 8 + _implicitTokenCount);
675
- assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
676
- assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* FStringMiddle */);
677
- assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* OpenCurlyBrace */);
678
- assert_1.default.equal(results.tokens.getItemAt(3).type, 7 /* Identifier */);
679
- assert_1.default.equal(results.tokens.getItemAt(4).type, 9 /* Operator */);
680
- assert_1.default.equal(results.tokens.getItemAt(5).type, 7 /* Identifier */);
681
- assert_1.default.equal(results.tokens.getItemAt(6).type, 18 /* CloseCurlyBrace */);
682
- assert_1.default.equal(results.tokens.getItemAt(7).type, 26 /* FStringEnd */);
679
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* TokenType.FStringStart */);
680
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* TokenType.FStringMiddle */);
681
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* TokenType.OpenCurlyBrace */);
682
+ assert_1.default.equal(results.tokens.getItemAt(3).type, 7 /* TokenType.Identifier */);
683
+ assert_1.default.equal(results.tokens.getItemAt(4).type, 9 /* TokenType.Operator */);
684
+ assert_1.default.equal(results.tokens.getItemAt(5).type, 7 /* TokenType.Identifier */);
685
+ assert_1.default.equal(results.tokens.getItemAt(6).type, 18 /* TokenType.CloseCurlyBrace */);
686
+ assert_1.default.equal(results.tokens.getItemAt(7).type, 26 /* TokenType.FStringEnd */);
683
687
  });
684
688
  test('Strings: f-string with format specifier', () => {
685
689
  const t = new tokenizer_1.Tokenizer();
686
690
  const results = t.tokenize("f'hello { a ! b}'");
687
691
  assert_1.default.equal(results.tokens.count, 8 + _implicitTokenCount);
688
- assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
689
- assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* FStringMiddle */);
690
- assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* OpenCurlyBrace */);
691
- assert_1.default.equal(results.tokens.getItemAt(3).type, 7 /* Identifier */);
692
- assert_1.default.equal(results.tokens.getItemAt(4).type, 23 /* ExclamationMark */);
693
- assert_1.default.equal(results.tokens.getItemAt(5).type, 7 /* Identifier */);
694
- assert_1.default.equal(results.tokens.getItemAt(6).type, 18 /* CloseCurlyBrace */);
695
- assert_1.default.equal(results.tokens.getItemAt(7).type, 26 /* FStringEnd */);
692
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* TokenType.FStringStart */);
693
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* TokenType.FStringMiddle */);
694
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* TokenType.OpenCurlyBrace */);
695
+ assert_1.default.equal(results.tokens.getItemAt(3).type, 7 /* TokenType.Identifier */);
696
+ assert_1.default.equal(results.tokens.getItemAt(4).type, 23 /* TokenType.ExclamationMark */);
697
+ assert_1.default.equal(results.tokens.getItemAt(5).type, 7 /* TokenType.Identifier */);
698
+ assert_1.default.equal(results.tokens.getItemAt(6).type, 18 /* TokenType.CloseCurlyBrace */);
699
+ assert_1.default.equal(results.tokens.getItemAt(7).type, 26 /* TokenType.FStringEnd */);
696
700
  });
697
701
  test('Strings: f-string with debug format specifier', () => {
698
702
  const t = new tokenizer_1.Tokenizer();
699
703
  const results = t.tokenize("f'hello { a =}'");
700
704
  assert_1.default.equal(results.tokens.count, 7 + _implicitTokenCount);
701
- assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
702
- assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* FStringMiddle */);
703
- assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* OpenCurlyBrace */);
704
- assert_1.default.equal(results.tokens.getItemAt(3).type, 7 /* Identifier */);
705
- assert_1.default.equal(results.tokens.getItemAt(4).type, 9 /* Operator */);
706
- assert_1.default.equal(results.tokens.getItemAt(5).type, 18 /* CloseCurlyBrace */);
707
- assert_1.default.equal(results.tokens.getItemAt(6).type, 26 /* FStringEnd */);
705
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* TokenType.FStringStart */);
706
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* TokenType.FStringMiddle */);
707
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* TokenType.OpenCurlyBrace */);
708
+ assert_1.default.equal(results.tokens.getItemAt(3).type, 7 /* TokenType.Identifier */);
709
+ assert_1.default.equal(results.tokens.getItemAt(4).type, 9 /* TokenType.Operator */);
710
+ assert_1.default.equal(results.tokens.getItemAt(5).type, 18 /* TokenType.CloseCurlyBrace */);
711
+ assert_1.default.equal(results.tokens.getItemAt(6).type, 26 /* TokenType.FStringEnd */);
708
712
  });
709
713
  test('Strings: nested f-string', () => {
710
714
  const t = new tokenizer_1.Tokenizer();
711
715
  const results = t.tokenize("f'{f'{a}'}'");
712
716
  assert_1.default.equal(results.tokens.count, 9 + _implicitTokenCount);
713
- assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
714
- assert_1.default.equal(results.tokens.getItemAt(1).type, 17 /* OpenCurlyBrace */);
715
- assert_1.default.equal(results.tokens.getItemAt(2).type, 24 /* FStringStart */);
716
- assert_1.default.equal(results.tokens.getItemAt(3).type, 17 /* OpenCurlyBrace */);
717
- assert_1.default.equal(results.tokens.getItemAt(4).type, 7 /* Identifier */);
718
- assert_1.default.equal(results.tokens.getItemAt(5).type, 18 /* CloseCurlyBrace */);
719
- assert_1.default.equal(results.tokens.getItemAt(6).type, 26 /* FStringEnd */);
720
- assert_1.default.equal(results.tokens.getItemAt(7).type, 18 /* CloseCurlyBrace */);
721
- assert_1.default.equal(results.tokens.getItemAt(8).type, 26 /* FStringEnd */);
717
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* TokenType.FStringStart */);
718
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 17 /* TokenType.OpenCurlyBrace */);
719
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 24 /* TokenType.FStringStart */);
720
+ assert_1.default.equal(results.tokens.getItemAt(3).type, 17 /* TokenType.OpenCurlyBrace */);
721
+ assert_1.default.equal(results.tokens.getItemAt(4).type, 7 /* TokenType.Identifier */);
722
+ assert_1.default.equal(results.tokens.getItemAt(5).type, 18 /* TokenType.CloseCurlyBrace */);
723
+ assert_1.default.equal(results.tokens.getItemAt(6).type, 26 /* TokenType.FStringEnd */);
724
+ assert_1.default.equal(results.tokens.getItemAt(7).type, 18 /* TokenType.CloseCurlyBrace */);
725
+ assert_1.default.equal(results.tokens.getItemAt(8).type, 26 /* TokenType.FStringEnd */);
722
726
  });
723
727
  test('Strings: nested f-string formats 1', () => {
724
728
  const t = new tokenizer_1.Tokenizer();
725
729
  const results = t.tokenize("f'{a:x{{b}+:x{c}+}}'");
726
730
  assert_1.default.equal(results.tokens.count, 19 + _implicitTokenCount);
727
- assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
728
- assert_1.default.equal(results.tokens.getItemAt(1).type, 17 /* OpenCurlyBrace */);
729
- assert_1.default.equal(results.tokens.getItemAt(2).type, 7 /* Identifier */);
730
- assert_1.default.equal(results.tokens.getItemAt(3).type, 10 /* Colon */);
731
- assert_1.default.equal(results.tokens.getItemAt(4).type, 25 /* FStringMiddle */);
732
- assert_1.default.equal(results.tokens.getItemAt(5).type, 17 /* OpenCurlyBrace */);
733
- assert_1.default.equal(results.tokens.getItemAt(6).type, 17 /* OpenCurlyBrace */);
734
- assert_1.default.equal(results.tokens.getItemAt(7).type, 7 /* Identifier */);
735
- assert_1.default.equal(results.tokens.getItemAt(8).type, 18 /* CloseCurlyBrace */);
736
- assert_1.default.equal(results.tokens.getItemAt(9).type, 9 /* Operator */);
737
- assert_1.default.equal(results.tokens.getItemAt(10).type, 10 /* Colon */);
738
- assert_1.default.equal(results.tokens.getItemAt(11).type, 25 /* FStringMiddle */);
739
- assert_1.default.equal(results.tokens.getItemAt(12).type, 17 /* OpenCurlyBrace */);
740
- assert_1.default.equal(results.tokens.getItemAt(13).type, 7 /* Identifier */);
741
- assert_1.default.equal(results.tokens.getItemAt(14).type, 18 /* CloseCurlyBrace */);
742
- assert_1.default.equal(results.tokens.getItemAt(15).type, 25 /* FStringMiddle */);
743
- assert_1.default.equal(results.tokens.getItemAt(16).type, 18 /* CloseCurlyBrace */);
744
- assert_1.default.equal(results.tokens.getItemAt(17).type, 18 /* CloseCurlyBrace */);
745
- assert_1.default.equal(results.tokens.getItemAt(18).type, 26 /* FStringEnd */);
731
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* TokenType.FStringStart */);
732
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 17 /* TokenType.OpenCurlyBrace */);
733
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 7 /* TokenType.Identifier */);
734
+ assert_1.default.equal(results.tokens.getItemAt(3).type, 10 /* TokenType.Colon */);
735
+ assert_1.default.equal(results.tokens.getItemAt(4).type, 25 /* TokenType.FStringMiddle */);
736
+ assert_1.default.equal(results.tokens.getItemAt(5).type, 17 /* TokenType.OpenCurlyBrace */);
737
+ assert_1.default.equal(results.tokens.getItemAt(6).type, 17 /* TokenType.OpenCurlyBrace */);
738
+ assert_1.default.equal(results.tokens.getItemAt(7).type, 7 /* TokenType.Identifier */);
739
+ assert_1.default.equal(results.tokens.getItemAt(8).type, 18 /* TokenType.CloseCurlyBrace */);
740
+ assert_1.default.equal(results.tokens.getItemAt(9).type, 9 /* TokenType.Operator */);
741
+ assert_1.default.equal(results.tokens.getItemAt(10).type, 10 /* TokenType.Colon */);
742
+ assert_1.default.equal(results.tokens.getItemAt(11).type, 25 /* TokenType.FStringMiddle */);
743
+ assert_1.default.equal(results.tokens.getItemAt(12).type, 17 /* TokenType.OpenCurlyBrace */);
744
+ assert_1.default.equal(results.tokens.getItemAt(13).type, 7 /* TokenType.Identifier */);
745
+ assert_1.default.equal(results.tokens.getItemAt(14).type, 18 /* TokenType.CloseCurlyBrace */);
746
+ assert_1.default.equal(results.tokens.getItemAt(15).type, 25 /* TokenType.FStringMiddle */);
747
+ assert_1.default.equal(results.tokens.getItemAt(16).type, 18 /* TokenType.CloseCurlyBrace */);
748
+ assert_1.default.equal(results.tokens.getItemAt(17).type, 18 /* TokenType.CloseCurlyBrace */);
749
+ assert_1.default.equal(results.tokens.getItemAt(18).type, 26 /* TokenType.FStringEnd */);
746
750
  });
747
751
  test('Strings: nested f-string formats 2', () => {
748
752
  const t = new tokenizer_1.Tokenizer();
749
753
  const results = t.tokenize("f'hi{'x':*^{8:{'':}}0}'");
750
754
  assert_1.default.equal(results.tokens.count, 17 + _implicitTokenCount);
751
- assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* FStringStart */);
752
- assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* FStringMiddle */);
753
- assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* OpenCurlyBrace */);
754
- assert_1.default.equal(results.tokens.getItemAt(3).type, 5 /* String */);
755
- assert_1.default.equal(results.tokens.getItemAt(4).type, 10 /* Colon */);
756
- assert_1.default.equal(results.tokens.getItemAt(5).type, 25 /* FStringMiddle */);
757
- assert_1.default.equal(results.tokens.getItemAt(6).type, 17 /* OpenCurlyBrace */);
758
- assert_1.default.equal(results.tokens.getItemAt(7).type, 6 /* Number */);
759
- assert_1.default.equal(results.tokens.getItemAt(8).type, 10 /* Colon */);
760
- assert_1.default.equal(results.tokens.getItemAt(9).type, 17 /* OpenCurlyBrace */);
761
- assert_1.default.equal(results.tokens.getItemAt(10).type, 5 /* String */);
762
- assert_1.default.equal(results.tokens.getItemAt(11).type, 10 /* Colon */);
763
- assert_1.default.equal(results.tokens.getItemAt(12).type, 18 /* CloseCurlyBrace */);
764
- assert_1.default.equal(results.tokens.getItemAt(13).type, 18 /* CloseCurlyBrace */);
765
- assert_1.default.equal(results.tokens.getItemAt(14).type, 25 /* FStringMiddle */);
766
- assert_1.default.equal(results.tokens.getItemAt(15).type, 18 /* CloseCurlyBrace */);
767
- assert_1.default.equal(results.tokens.getItemAt(16).type, 26 /* FStringEnd */);
755
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 24 /* TokenType.FStringStart */);
756
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 25 /* TokenType.FStringMiddle */);
757
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 17 /* TokenType.OpenCurlyBrace */);
758
+ assert_1.default.equal(results.tokens.getItemAt(3).type, 5 /* TokenType.String */);
759
+ assert_1.default.equal(results.tokens.getItemAt(4).type, 10 /* TokenType.Colon */);
760
+ assert_1.default.equal(results.tokens.getItemAt(5).type, 25 /* TokenType.FStringMiddle */);
761
+ assert_1.default.equal(results.tokens.getItemAt(6).type, 17 /* TokenType.OpenCurlyBrace */);
762
+ assert_1.default.equal(results.tokens.getItemAt(7).type, 6 /* TokenType.Number */);
763
+ assert_1.default.equal(results.tokens.getItemAt(8).type, 10 /* TokenType.Colon */);
764
+ assert_1.default.equal(results.tokens.getItemAt(9).type, 17 /* TokenType.OpenCurlyBrace */);
765
+ assert_1.default.equal(results.tokens.getItemAt(10).type, 5 /* TokenType.String */);
766
+ assert_1.default.equal(results.tokens.getItemAt(11).type, 10 /* TokenType.Colon */);
767
+ assert_1.default.equal(results.tokens.getItemAt(12).type, 18 /* TokenType.CloseCurlyBrace */);
768
+ assert_1.default.equal(results.tokens.getItemAt(13).type, 18 /* TokenType.CloseCurlyBrace */);
769
+ assert_1.default.equal(results.tokens.getItemAt(14).type, 25 /* TokenType.FStringMiddle */);
770
+ assert_1.default.equal(results.tokens.getItemAt(15).type, 18 /* TokenType.CloseCurlyBrace */);
771
+ assert_1.default.equal(results.tokens.getItemAt(16).type, 26 /* TokenType.FStringEnd */);
768
772
  });
769
773
  test('Strings: escape at the end of single quoted string', () => {
770
774
  const t = new tokenizer_1.Tokenizer();
771
775
  const results = t.tokenize("'quoted\\'\nx");
772
776
  assert_1.default.equal(results.tokens.count, 3 + _implicitTokenCount);
773
777
  const stringToken = results.tokens.getItemAt(0);
774
- assert_1.default.equal(stringToken.type, 5 /* String */);
775
- assert_1.default.equal(stringToken.flags, 1 /* SingleQuote */ | 65536 /* Unterminated */);
778
+ assert_1.default.equal(stringToken.type, 5 /* TokenType.String */);
779
+ assert_1.default.equal(stringToken.flags, 1 /* StringTokenFlags.SingleQuote */ | 65536 /* StringTokenFlags.Unterminated */);
776
780
  assert_1.default.equal(stringToken.length, 9);
777
781
  assert_1.default.equal(stringToken.escapedValue, "quoted\\'");
778
- assert_1.default.equal(results.tokens.getItemAt(1).type, 2 /* NewLine */);
779
- assert_1.default.equal(results.tokens.getItemAt(2).type, 7 /* Identifier */);
782
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 2 /* TokenType.NewLine */);
783
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 7 /* TokenType.Identifier */);
780
784
  assert_1.default.equal(results.tokens.getItemAtPosition(0), 0);
781
785
  assert_1.default.equal(results.tokens.getItemAtPosition(8), 0);
782
786
  assert_1.default.equal(results.tokens.getItemAtPosition(9), 1);
@@ -790,38 +794,38 @@ test('Strings: escape at the end of double quoted string', () => {
790
794
  const results = t.tokenize('"quoted\\"\nx');
791
795
  assert_1.default.equal(results.tokens.count, 3 + _implicitTokenCount);
792
796
  const stringToken = results.tokens.getItemAt(0);
793
- assert_1.default.equal(stringToken.type, 5 /* String */);
794
- assert_1.default.equal(stringToken.flags, 2 /* DoubleQuote */ | 65536 /* Unterminated */);
797
+ assert_1.default.equal(stringToken.type, 5 /* TokenType.String */);
798
+ assert_1.default.equal(stringToken.flags, 2 /* StringTokenFlags.DoubleQuote */ | 65536 /* StringTokenFlags.Unterminated */);
795
799
  assert_1.default.equal(stringToken.length, 9);
796
800
  assert_1.default.equal(stringToken.escapedValue, 'quoted\\"');
797
- assert_1.default.equal(results.tokens.getItemAt(1).type, 2 /* NewLine */);
798
- assert_1.default.equal(results.tokens.getItemAt(2).type, 7 /* Identifier */);
801
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 2 /* TokenType.NewLine */);
802
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 7 /* TokenType.Identifier */);
799
803
  });
800
804
  test('Strings: b/u/r-string', () => {
801
805
  const t = new tokenizer_1.Tokenizer();
802
806
  const results = t.tokenize('b"b" U\'u\' bR"br" Ru\'ur\'');
803
807
  assert_1.default.equal(results.tokens.count, 4 + _implicitTokenCount);
804
808
  const stringToken0 = results.tokens.getItemAt(0);
805
- assert_1.default.equal(stringToken0.type, 5 /* String */);
806
- assert_1.default.equal(stringToken0.flags, 2 /* DoubleQuote */ | 32 /* Bytes */);
809
+ assert_1.default.equal(stringToken0.type, 5 /* TokenType.String */);
810
+ assert_1.default.equal(stringToken0.flags, 2 /* StringTokenFlags.DoubleQuote */ | 32 /* StringTokenFlags.Bytes */);
807
811
  assert_1.default.equal(stringToken0.length, 4);
808
812
  assert_1.default.equal(stringToken0.escapedValue, 'b');
809
813
  assert_1.default.equal(stringToken0.prefixLength, 1);
810
814
  const stringToken1 = results.tokens.getItemAt(1);
811
- assert_1.default.equal(stringToken1.type, 5 /* String */);
812
- assert_1.default.equal(stringToken1.flags, 1 /* SingleQuote */ | 16 /* Unicode */);
815
+ assert_1.default.equal(stringToken1.type, 5 /* TokenType.String */);
816
+ assert_1.default.equal(stringToken1.flags, 1 /* StringTokenFlags.SingleQuote */ | 16 /* StringTokenFlags.Unicode */);
813
817
  assert_1.default.equal(stringToken1.length, 4);
814
818
  assert_1.default.equal(stringToken1.escapedValue, 'u');
815
819
  assert_1.default.equal(stringToken1.prefixLength, 1);
816
820
  const stringToken2 = results.tokens.getItemAt(2);
817
- assert_1.default.equal(stringToken2.type, 5 /* String */);
818
- assert_1.default.equal(stringToken2.flags, 2 /* DoubleQuote */ | 32 /* Bytes */ | 8 /* Raw */);
821
+ assert_1.default.equal(stringToken2.type, 5 /* TokenType.String */);
822
+ assert_1.default.equal(stringToken2.flags, 2 /* StringTokenFlags.DoubleQuote */ | 32 /* StringTokenFlags.Bytes */ | 8 /* StringTokenFlags.Raw */);
819
823
  assert_1.default.equal(stringToken2.length, 6);
820
824
  assert_1.default.equal(stringToken2.escapedValue, 'br');
821
825
  assert_1.default.equal(stringToken2.prefixLength, 2);
822
826
  const stringToken3 = results.tokens.getItemAt(3);
823
- assert_1.default.equal(stringToken3.type, 5 /* String */);
824
- assert_1.default.equal(stringToken3.flags, 1 /* SingleQuote */ | 16 /* Unicode */ | 8 /* Raw */);
827
+ assert_1.default.equal(stringToken3.type, 5 /* TokenType.String */);
828
+ assert_1.default.equal(stringToken3.flags, 1 /* StringTokenFlags.SingleQuote */ | 16 /* StringTokenFlags.Unicode */ | 8 /* StringTokenFlags.Raw */);
825
829
  assert_1.default.equal(stringToken3.length, 6);
826
830
  assert_1.default.equal(stringToken3.escapedValue, 'ur');
827
831
  assert_1.default.equal(stringToken3.prefixLength, 2);
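The flags assertions in this test combine bit flags from pyright's StringTokenFlags enum. The member names come from the new inlined comments and the values from the numeric literals in this diff; the bit-shift declaration below is a reconstruction under those assumptions, not a copy of pyright's source:

    // Reconstructed from the literals and comments in these tests (assumption, not pyright source).
    const enum StringTokenFlags {
        None = 0,
        SingleQuote = 1 << 0,         // 1
        DoubleQuote = 1 << 1,         // 2
        Triplicate = 1 << 2,          // 4
        Raw = 1 << 3,                 // 8
        Unicode = 1 << 4,             // 16
        Bytes = 1 << 5,               // 32
        Format = 1 << 6,              // 64
        ReplacementFieldEnd = 1 << 8, // 256
        Unterminated = 1 << 16,       // 65536
    }

    // A raw bytes string such as bR"br" carries all three of these flags OR'ed together:
    const brFlags = StringTokenFlags.DoubleQuote | StringTokenFlags.Bytes | StringTokenFlags.Raw; // 42

This is why the expected values in the assertions are written as OR'ed literals, e.g. 2 /* StringTokenFlags.DoubleQuote */ | 32 /* StringTokenFlags.Bytes */ | 8 /* StringTokenFlags.Raw */, rather than a single magic number.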
@@ -844,14 +848,14 @@ test('Strings: bytes string with non-ASCII', () => {
844
848
  assert_1.default.equal(results.tokens.count, 2 + _implicitTokenCount);
845
849
  const stringToken0 = results.tokens.getItemAt(0);
846
850
  const unescapedValue0 = StringTokenUtils.getUnescapedString(stringToken0);
847
- assert_1.default.equal(stringToken0.type, 5 /* String */);
848
- assert_1.default.equal(stringToken0.flags, 2 /* DoubleQuote */ | 32 /* Bytes */);
851
+ assert_1.default.equal(stringToken0.type, 5 /* TokenType.String */);
852
+ assert_1.default.equal(stringToken0.flags, 2 /* StringTokenFlags.DoubleQuote */ | 32 /* StringTokenFlags.Bytes */);
849
853
  assert_1.default.equal(unescapedValue0.nonAsciiInBytes, true);
850
854
  assert_1.default.equal(stringToken0.length, 7);
851
855
  const stringToken1 = results.tokens.getItemAt(1);
852
856
  const unescapedValue1 = StringTokenUtils.getUnescapedString(stringToken1);
853
- assert_1.default.equal(stringToken1.type, 5 /* String */);
854
- assert_1.default.equal(stringToken1.flags, 1 /* SingleQuote */ | 32 /* Bytes */ | 4 /* Triplicate */);
857
+ assert_1.default.equal(stringToken1.type, 5 /* TokenType.String */);
858
+ assert_1.default.equal(stringToken1.flags, 1 /* StringTokenFlags.SingleQuote */ | 32 /* StringTokenFlags.Bytes */ | 4 /* StringTokenFlags.Triplicate */);
855
859
  assert_1.default.equal(unescapedValue1.nonAsciiInBytes, true);
856
860
  assert_1.default.equal(stringToken1.length, 11);
857
861
  });
@@ -861,15 +865,15 @@ test('Strings: raw strings with escapes', () => {
861
865
  assert_1.default.equal(results.tokens.count, 2 + _implicitTokenCount);
862
866
  const stringToken0 = results.tokens.getItemAt(0);
863
867
  const unescapedValue0 = StringTokenUtils.getUnescapedString(stringToken0);
864
- assert_1.default.equal(stringToken0.type, 5 /* String */);
865
- assert_1.default.equal(stringToken0.flags, 2 /* DoubleQuote */ | 8 /* Raw */);
868
+ assert_1.default.equal(stringToken0.type, 5 /* TokenType.String */);
869
+ assert_1.default.equal(stringToken0.flags, 2 /* StringTokenFlags.DoubleQuote */ | 8 /* StringTokenFlags.Raw */);
866
870
  assert_1.default.equal(stringToken0.length, 5);
867
871
  assert_1.default.equal(stringToken0.escapedValue, '\\"');
868
872
  assert_1.default.equal(unescapedValue0.value, '\\"');
869
873
  const stringToken1 = results.tokens.getItemAt(1);
870
874
  const unescapedValue1 = StringTokenUtils.getUnescapedString(stringToken1);
871
- assert_1.default.equal(stringToken1.type, 5 /* String */);
872
- assert_1.default.equal(stringToken1.flags, 2 /* DoubleQuote */ | 8 /* Raw */);
875
+ assert_1.default.equal(stringToken1.type, 5 /* TokenType.String */);
876
+ assert_1.default.equal(stringToken1.flags, 2 /* StringTokenFlags.DoubleQuote */ | 8 /* StringTokenFlags.Raw */);
873
877
  assert_1.default.equal(stringToken1.length, 10);
874
878
  assert_1.default.equal(stringToken1.escapedValue, '\\\r\n\\\n\\a');
875
879
  assert_1.default.equal(unescapedValue1.value, '\\\r\n\\\n\\a');
@@ -879,12 +883,12 @@ test('Strings: escape at the end of double quoted string', () => {
879
883
  const results = t.tokenize('"quoted\\"\nx');
880
884
  assert_1.default.equal(results.tokens.count, 3 + _implicitTokenCount);
881
885
  const stringToken = results.tokens.getItemAt(0);
882
- assert_1.default.equal(stringToken.type, 5 /* String */);
883
- assert_1.default.equal(stringToken.flags, 2 /* DoubleQuote */ | 65536 /* Unterminated */);
886
+ assert_1.default.equal(stringToken.type, 5 /* TokenType.String */);
887
+ assert_1.default.equal(stringToken.flags, 2 /* StringTokenFlags.DoubleQuote */ | 65536 /* StringTokenFlags.Unterminated */);
884
888
  assert_1.default.equal(stringToken.length, 9);
885
889
  assert_1.default.equal(stringToken.escapedValue, 'quoted\\"');
886
- assert_1.default.equal(results.tokens.getItemAt(1).type, 2 /* NewLine */);
887
- assert_1.default.equal(results.tokens.getItemAt(2).type, 7 /* Identifier */);
890
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 2 /* TokenType.NewLine */);
891
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 7 /* TokenType.Identifier */);
888
892
  });
889
893
  test('Strings: special escape characters', () => {
890
894
  const t = new tokenizer_1.Tokenizer();
@@ -892,8 +896,8 @@ test('Strings: special escape characters', () => {
892
896
  assert_1.default.equal(results.tokens.count, 1 + _implicitTokenCount);
893
897
  const stringToken = results.tokens.getItemAt(0);
894
898
  const unescapedValue = StringTokenUtils.getUnescapedString(stringToken);
895
- assert_1.default.equal(stringToken.type, 5 /* String */);
896
- assert_1.default.equal(stringToken.flags, 2 /* DoubleQuote */);
899
+ assert_1.default.equal(stringToken.type, 5 /* TokenType.String */);
900
+ assert_1.default.equal(stringToken.flags, 2 /* StringTokenFlags.DoubleQuote */);
897
901
  assert_1.default.equal(stringToken.length, 18);
898
902
  assert_1.default.equal(unescapedValue.value, '\r\n\u0007\v\t\b\f\\');
899
903
  assert_1.default.equal(results.tokens.getItemAtPosition(0), 0);
@@ -908,17 +912,17 @@ test('Strings: invalid escape characters', () => {
908
912
  assert_1.default.equal(results.tokens.count, 1 + _implicitTokenCount);
909
913
  const stringToken = results.tokens.getItemAt(0);
910
914
  const unescapedValue = StringTokenUtils.getUnescapedString(stringToken);
911
- assert_1.default.equal(stringToken.type, 5 /* String */);
912
- assert_1.default.equal(stringToken.flags, 2 /* DoubleQuote */);
915
+ assert_1.default.equal(stringToken.type, 5 /* TokenType.String */);
916
+ assert_1.default.equal(stringToken.flags, 2 /* StringTokenFlags.DoubleQuote */);
913
917
  assert_1.default.equal(stringToken.length, 8);
914
918
  assert_1.default.equal(stringToken.escapedValue, '\\d \\ ');
915
919
  assert_1.default.equal(unescapedValue.unescapeErrors.length, 2);
916
920
  assert_1.default.equal(unescapedValue.unescapeErrors[0].offset, 0);
917
921
  assert_1.default.equal(unescapedValue.unescapeErrors[0].length, 2);
918
- assert_1.default.equal(unescapedValue.unescapeErrors[0].errorType, 0 /* InvalidEscapeSequence */);
922
+ assert_1.default.equal(unescapedValue.unescapeErrors[0].errorType, 0 /* StringTokenUtils.UnescapeErrorType.InvalidEscapeSequence */);
919
923
  assert_1.default.equal(unescapedValue.unescapeErrors[1].offset, 4);
920
924
  assert_1.default.equal(unescapedValue.unescapeErrors[1].length, 2);
921
- assert_1.default.equal(unescapedValue.unescapeErrors[1].errorType, 0 /* InvalidEscapeSequence */);
925
+ assert_1.default.equal(unescapedValue.unescapeErrors[1].errorType, 0 /* StringTokenUtils.UnescapeErrorType.InvalidEscapeSequence */);
922
926
  });
923
927
  test('Strings: good hex escapes', () => {
924
928
  const t = new tokenizer_1.Tokenizer();
@@ -926,22 +930,22 @@ test('Strings: good hex escapes', () => {
926
930
  assert_1.default.equal(results.tokens.count, 3 + _implicitTokenCount);
927
931
  const stringToken0 = results.tokens.getItemAt(0);
928
932
  const unescapedValue0 = StringTokenUtils.getUnescapedString(stringToken0);
929
- assert_1.default.equal(stringToken0.type, 5 /* String */);
930
- assert_1.default.equal(stringToken0.flags, 2 /* DoubleQuote */);
933
+ assert_1.default.equal(stringToken0.type, 5 /* TokenType.String */);
934
+ assert_1.default.equal(stringToken0.flags, 2 /* StringTokenFlags.DoubleQuote */);
931
935
  assert_1.default.equal(stringToken0.length, 6);
932
936
  assert_1.default.equal(stringToken0.escapedValue, '\\x4d');
933
937
  assert_1.default.equal(unescapedValue0.value, 'M');
934
938
  const stringToken1 = results.tokens.getItemAt(1);
935
939
  const unescapedValue1 = StringTokenUtils.getUnescapedString(stringToken1);
936
- assert_1.default.equal(stringToken1.type, 5 /* String */);
937
- assert_1.default.equal(stringToken1.flags, 2 /* DoubleQuote */);
940
+ assert_1.default.equal(stringToken1.type, 5 /* TokenType.String */);
941
+ assert_1.default.equal(stringToken1.flags, 2 /* StringTokenFlags.DoubleQuote */);
938
942
  assert_1.default.equal(stringToken1.length, 8);
939
943
  assert_1.default.equal(stringToken1.escapedValue, '\\u006b');
940
944
  assert_1.default.equal(unescapedValue1.value, 'k');
941
945
  const stringToken2 = results.tokens.getItemAt(2);
942
946
  const unescapedValue2 = StringTokenUtils.getUnescapedString(stringToken2);
943
- assert_1.default.equal(stringToken2.type, 5 /* String */);
944
- assert_1.default.equal(stringToken2.flags, 2 /* DoubleQuote */);
947
+ assert_1.default.equal(stringToken2.type, 5 /* TokenType.String */);
948
+ assert_1.default.equal(stringToken2.flags, 2 /* StringTokenFlags.DoubleQuote */);
945
949
  assert_1.default.equal(stringToken2.length, 12);
946
950
  assert_1.default.equal(stringToken2.escapedValue, '\\U0000006F');
947
951
  assert_1.default.equal(unescapedValue2.value, 'o');
@@ -952,22 +956,22 @@ test('Strings: bad hex escapes', () => {
952
956
  assert_1.default.equal(results.tokens.count, 3 + _implicitTokenCount);
953
957
  const stringToken0 = results.tokens.getItemAt(0);
954
958
  const unescapedValue0 = StringTokenUtils.getUnescapedString(stringToken0);
955
- assert_1.default.equal(stringToken0.type, 5 /* String */);
956
- assert_1.default.equal(stringToken0.flags, 2 /* DoubleQuote */);
959
+ assert_1.default.equal(stringToken0.type, 5 /* TokenType.String */);
960
+ assert_1.default.equal(stringToken0.flags, 2 /* StringTokenFlags.DoubleQuote */);
957
961
  assert_1.default.equal(unescapedValue0.unescapeErrors.length, 1);
958
962
  assert_1.default.equal(stringToken0.length, 6);
959
963
  assert_1.default.equal(unescapedValue0.value, '\\x4g');
960
964
  const stringToken1 = results.tokens.getItemAt(1);
961
965
  const unescapedValue1 = StringTokenUtils.getUnescapedString(stringToken1);
962
- assert_1.default.equal(stringToken1.type, 5 /* String */);
963
- assert_1.default.equal(stringToken1.flags, 2 /* DoubleQuote */);
966
+ assert_1.default.equal(stringToken1.type, 5 /* TokenType.String */);
967
+ assert_1.default.equal(stringToken1.flags, 2 /* StringTokenFlags.DoubleQuote */);
964
968
  assert_1.default.equal(unescapedValue1.unescapeErrors.length, 1);
965
969
  assert_1.default.equal(stringToken1.length, 7);
966
970
  assert_1.default.equal(unescapedValue1.value, '\\u006');
967
971
  const stringToken2 = results.tokens.getItemAt(2);
968
972
  const unescapedValue2 = StringTokenUtils.getUnescapedString(stringToken2);
969
- assert_1.default.equal(stringToken2.type, 5 /* String */);
970
- assert_1.default.equal(stringToken2.flags, 2 /* DoubleQuote */);
973
+ assert_1.default.equal(stringToken2.type, 5 /* TokenType.String */);
974
+ assert_1.default.equal(stringToken2.flags, 2 /* StringTokenFlags.DoubleQuote */);
971
975
  assert_1.default.equal(unescapedValue2.unescapeErrors.length, 1);
972
976
  assert_1.default.equal(stringToken2.length, 12);
973
977
  assert_1.default.equal(unescapedValue2.value, '\\U0000006m');
@@ -978,15 +982,15 @@ test('Strings: good name escapes', () => {
978
982
  assert_1.default.equal(results.tokens.count, 2 + _implicitTokenCount);
979
983
  const stringToken0 = results.tokens.getItemAt(0);
980
984
  const unescapedValue0 = StringTokenUtils.getUnescapedString(stringToken0);
981
- assert_1.default.equal(stringToken0.type, 5 /* String */);
982
- assert_1.default.equal(stringToken0.flags, 2 /* DoubleQuote */);
985
+ assert_1.default.equal(stringToken0.type, 5 /* TokenType.String */);
986
+ assert_1.default.equal(stringToken0.flags, 2 /* StringTokenFlags.DoubleQuote */);
983
987
  assert_1.default.equal(stringToken0.length, 23);
984
988
  assert_1.default.equal(stringToken0.escapedValue, '\\N{caret escape blah}');
985
989
  assert_1.default.equal(unescapedValue0.value, '-');
986
990
  const stringToken1 = results.tokens.getItemAt(1);
987
991
  const unescapedValue1 = StringTokenUtils.getUnescapedString(stringToken1);
988
- assert_1.default.equal(stringToken1.type, 5 /* String */);
989
- assert_1.default.equal(stringToken1.flags, 2 /* DoubleQuote */);
992
+ assert_1.default.equal(stringToken1.type, 5 /* TokenType.String */);
993
+ assert_1.default.equal(stringToken1.flags, 2 /* StringTokenFlags.DoubleQuote */);
990
994
  assert_1.default.equal(stringToken1.length, 10);
991
995
  assert_1.default.equal(stringToken1.escapedValue, 'a\\N{A9}a');
992
996
  assert_1.default.equal(unescapedValue1.value, 'a-a');
@@ -997,16 +1001,16 @@ test('Strings: bad name escapes', () => {
997
1001
  assert_1.default.equal(results.tokens.count, 2 + _implicitTokenCount);
998
1002
  const stringToken0 = results.tokens.getItemAt(0);
999
1003
  const unescapedValue0 = StringTokenUtils.getUnescapedString(stringToken0);
1000
- assert_1.default.equal(stringToken0.type, 5 /* String */);
1001
- assert_1.default.equal(stringToken0.flags, 2 /* DoubleQuote */);
1004
+ assert_1.default.equal(stringToken0.type, 5 /* TokenType.String */);
1005
+ assert_1.default.equal(stringToken0.flags, 2 /* StringTokenFlags.DoubleQuote */);
1002
1006
  assert_1.default.equal(unescapedValue0.unescapeErrors.length, 1);
1003
1007
  assert_1.default.equal(stringToken0.length, 10);
1004
1008
  assert_1.default.equal(stringToken0.escapedValue, '\\N{caret');
1005
1009
  assert_1.default.equal(unescapedValue0.value, '\\N{caret');
1006
1010
  const stringToken1 = results.tokens.getItemAt(1);
1007
1011
  const unescapedValue1 = StringTokenUtils.getUnescapedString(stringToken1);
1008
- assert_1.default.equal(stringToken1.type, 5 /* String */);
1009
- assert_1.default.equal(stringToken1.flags, 2 /* DoubleQuote */);
1012
+ assert_1.default.equal(stringToken1.type, 5 /* TokenType.String */);
1013
+ assert_1.default.equal(stringToken1.flags, 2 /* StringTokenFlags.DoubleQuote */);
1010
1014
  assert_1.default.equal(unescapedValue1.unescapeErrors.length, 1);
1011
1015
  assert_1.default.equal(stringToken1.length, 9);
1012
1016
  assert_1.default.equal(stringToken1.escapedValue, '\\N{.A9}');
@@ -1016,76 +1020,76 @@ test('Comments', () => {
  const t = new tokenizer_1.Tokenizer();
  const results = t.tokenize(' #co"""mment1\n\t\n#x\'y2 ');
  assert_1.default.equal(results.tokens.count, 1 + _implicitTokenCountNoImplicitNewLine);
- assert_1.default.equal(results.tokens.getItemAt(0).type, 2 /* NewLine */);
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 2 /* TokenType.NewLine */);
  });
  test('Period to operator token', () => {
  const t = new tokenizer_1.Tokenizer();
  const results = t.tokenize('x.y');
  assert_1.default.equal(results.tokens.count, 3 + _implicitTokenCount);
- assert_1.default.equal(results.tokens.getItemAt(0).type, 7 /* Identifier */);
- assert_1.default.equal(results.tokens.getItemAt(1).type, 20 /* Dot */);
- assert_1.default.equal(results.tokens.getItemAt(2).type, 7 /* Identifier */);
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 7 /* TokenType.Identifier */);
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 20 /* TokenType.Dot */);
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 7 /* TokenType.Identifier */);
  });
  test('@ to operator token', () => {
  const t = new tokenizer_1.Tokenizer();
  const results = t.tokenize('@x');
  assert_1.default.equal(results.tokens.count, 2 + _implicitTokenCount);
- assert_1.default.equal(results.tokens.getItemAt(0).type, 9 /* Operator */);
- assert_1.default.equal(results.tokens.getItemAt(1).type, 7 /* Identifier */);
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 9 /* TokenType.Operator */);
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 7 /* TokenType.Identifier */);
  });
  test('Unknown token', () => {
  const t = new tokenizer_1.Tokenizer();
  const results = t.tokenize('`$');
  assert_1.default.equal(results.tokens.count, 2 + _implicitTokenCount);
- assert_1.default.equal(results.tokens.getItemAt(0).type, 22 /* Backtick */);
- assert_1.default.equal(results.tokens.getItemAt(1).type, 0 /* Invalid */);
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 22 /* TokenType.Backtick */);
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 0 /* TokenType.Invalid */);
  });
  test('Hex number', () => {
  const t = new tokenizer_1.Tokenizer();
  const results = t.tokenize('1 0X2 0xFe_Ab 0x');
  assert_1.default.equal(results.tokens.count, 5 + _implicitTokenCount);
- assert_1.default.equal(results.tokens.getItemAt(0).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(0).length, 1);
  assert_1.default.equal(results.tokens.getItemAt(0).value, 1);
  assert_1.default.equal(results.tokens.getItemAt(0).isInteger, true);
- assert_1.default.equal(results.tokens.getItemAt(1).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(1).length, 3);
  assert_1.default.equal(results.tokens.getItemAt(1).value, 2);
  assert_1.default.equal(results.tokens.getItemAt(1).isInteger, true);
- assert_1.default.equal(results.tokens.getItemAt(2).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(2).length, 7);
  assert_1.default.equal(results.tokens.getItemAt(2).value, 0xfeab);
  assert_1.default.equal(results.tokens.getItemAt(2).isInteger, true);
- assert_1.default.equal(results.tokens.getItemAt(3).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(3).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(3).length, 1);
- assert_1.default.equal(results.tokens.getItemAt(4).type, 7 /* Identifier */);
+ assert_1.default.equal(results.tokens.getItemAt(4).type, 7 /* TokenType.Identifier */);
  assert_1.default.equal(results.tokens.getItemAt(4).length, 1);
  });
  test('Binary number', () => {
  const t = new tokenizer_1.Tokenizer();
  const results = t.tokenize('1 0B1 0b010 0b3 0b');
  assert_1.default.equal(results.tokens.count, 7 + _implicitTokenCount);
- assert_1.default.equal(results.tokens.getItemAt(0).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(0).length, 1);
  assert_1.default.equal(results.tokens.getItemAt(0).value, 1);
  assert_1.default.equal(results.tokens.getItemAt(0).isInteger, true);
- assert_1.default.equal(results.tokens.getItemAt(1).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(1).length, 3);
  assert_1.default.equal(results.tokens.getItemAt(1).value, 1);
  assert_1.default.equal(results.tokens.getItemAt(1).isInteger, true);
- assert_1.default.equal(results.tokens.getItemAt(2).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(2).length, 5);
  assert_1.default.equal(results.tokens.getItemAt(2).value, 2);
  assert_1.default.equal(results.tokens.getItemAt(2).isInteger, true);
- assert_1.default.equal(results.tokens.getItemAt(3).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(3).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(3).length, 1);
  assert_1.default.equal(results.tokens.getItemAt(3).value, 0);
  assert_1.default.equal(results.tokens.getItemAt(3).isInteger, true);
- assert_1.default.equal(results.tokens.getItemAt(4).type, 7 /* Identifier */);
+ assert_1.default.equal(results.tokens.getItemAt(4).type, 7 /* TokenType.Identifier */);
  assert_1.default.equal(results.tokens.getItemAt(4).length, 2);
- assert_1.default.equal(results.tokens.getItemAt(5).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(5).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(5).length, 1);
- assert_1.default.equal(results.tokens.getItemAt(6).type, 7 /* Identifier */);
+ assert_1.default.equal(results.tokens.getItemAt(6).type, 7 /* TokenType.Identifier */);
  assert_1.default.equal(results.tokens.getItemAt(6).length, 1);
  assert_1.default.equal(results.tokens.getItemAtPosition(0), 0);
  assert_1.default.equal(results.tokens.getItemAtPosition(1), 0);
@@ -1106,36 +1110,36 @@ test('Octal number', () => {
  const t = new tokenizer_1.Tokenizer();
  const results = t.tokenize('1 0o4 0O0_7_7 -0o200 0o9 0oO');
  assert_1.default.equal(results.tokens.count, 9 + _implicitTokenCount);
- assert_1.default.equal(results.tokens.getItemAt(0).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(0).length, 1);
  assert_1.default.equal(results.tokens.getItemAt(0).value, 1);
  assert_1.default.equal(results.tokens.getItemAt(0).isInteger, true);
- assert_1.default.equal(results.tokens.getItemAt(1).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(1).length, 3);
  assert_1.default.equal(results.tokens.getItemAt(1).value, 4);
  assert_1.default.equal(results.tokens.getItemAt(1).isInteger, true);
- assert_1.default.equal(results.tokens.getItemAt(2).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(2).length, 7);
  assert_1.default.equal(results.tokens.getItemAt(2).value, 0o77);
  assert_1.default.equal(results.tokens.getItemAt(2).isInteger, true);
- assert_1.default.equal(results.tokens.getItemAt(3).type, 9 /* Operator */);
+ assert_1.default.equal(results.tokens.getItemAt(3).type, 9 /* TokenType.Operator */);
  assert_1.default.equal(results.tokens.getItemAt(3).length, 1);
- assert_1.default.equal(results.tokens.getItemAt(4).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(4).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(4).length, 5);
  assert_1.default.equal(results.tokens.getItemAt(4).value, 0o200);
  assert_1.default.equal(results.tokens.getItemAt(4).isInteger, true);
- assert_1.default.equal(results.tokens.getItemAt(5).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(5).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(5).length, 1);
  assert_1.default.equal(results.tokens.getItemAt(5).value, 0);
  assert_1.default.equal(results.tokens.getItemAt(5).isInteger, true);
- assert_1.default.equal(results.tokens.getItemAt(6).type, 7 /* Identifier */);
+ assert_1.default.equal(results.tokens.getItemAt(6).type, 7 /* TokenType.Identifier */);
  assert_1.default.equal(results.tokens.getItemAt(6).length, 2);
  assert_1.default.equal(results.tokens.getItemAt(6).value, 'o9');
- assert_1.default.equal(results.tokens.getItemAt(7).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(7).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(7).length, 1);
  assert_1.default.equal(results.tokens.getItemAt(7).value, 0);
  assert_1.default.equal(results.tokens.getItemAt(7).isInteger, true);
- assert_1.default.equal(results.tokens.getItemAt(8).type, 7 /* Identifier */);
+ assert_1.default.equal(results.tokens.getItemAt(8).type, 7 /* TokenType.Identifier */);
  assert_1.default.equal(results.tokens.getItemAt(8).length, 2);
  assert_1.default.equal(results.tokens.getItemAt(8).value, 'oO');
  });
@@ -1143,17 +1147,17 @@ test('Decimal number', () => {
  const t = new tokenizer_1.Tokenizer();
  const results = t.tokenize('-2147483647 ++2147483647');
  assert_1.default.equal(results.tokens.count, 5 + _implicitTokenCount);
- assert_1.default.equal(results.tokens.getItemAt(0).type, 9 /* Operator */);
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 9 /* TokenType.Operator */);
  assert_1.default.equal(results.tokens.getItemAt(0).length, 1);
- assert_1.default.equal(results.tokens.getItemAt(1).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(1).length, 10);
  assert_1.default.equal(results.tokens.getItemAt(1).value, 2147483647);
  assert_1.default.equal(results.tokens.getItemAt(1).isInteger, true);
- assert_1.default.equal(results.tokens.getItemAt(2).type, 9 /* Operator */);
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 9 /* TokenType.Operator */);
  assert_1.default.equal(results.tokens.getItemAt(2).length, 1);
- assert_1.default.equal(results.tokens.getItemAt(3).type, 9 /* Operator */);
+ assert_1.default.equal(results.tokens.getItemAt(3).type, 9 /* TokenType.Operator */);
  assert_1.default.equal(results.tokens.getItemAt(3).length, 1);
- assert_1.default.equal(results.tokens.getItemAt(4).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(4).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(4).length, 10);
  assert_1.default.equal(results.tokens.getItemAt(4).value, 2147483647);
  assert_1.default.equal(results.tokens.getItemAt(4).isInteger, true);
@@ -1172,46 +1176,46 @@ test('Decimal number operator', () => {
  const t = new tokenizer_1.Tokenizer();
  const results = t.tokenize('a[: -1]');
  assert_1.default.equal(results.tokens.count, 6 + _implicitTokenCount);
- assert_1.default.equal(results.tokens.getItemAt(4).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(4).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(4).length, 1);
  });
  test('Floating point number', () => {
  const t = new tokenizer_1.Tokenizer();
  const results = t.tokenize('3.0 .2 ++.3e+12 --.4e1 1e-4 0.01 01.0');
  assert_1.default.equal(results.tokens.count, 11 + _implicitTokenCount);
- assert_1.default.equal(results.tokens.getItemAt(0).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(0).value, 3);
  assert_1.default.equal(results.tokens.getItemAt(0).isInteger, false);
  assert_1.default.equal(results.tokens.getItemAt(0).length, 3);
- assert_1.default.equal(results.tokens.getItemAt(1).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(1).value, 0.2);
  assert_1.default.equal(results.tokens.getItemAt(1).isInteger, false);
  assert_1.default.equal(results.tokens.getItemAt(1).length, 2);
- assert_1.default.equal(results.tokens.getItemAt(2).type, 9 /* Operator */);
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 9 /* TokenType.Operator */);
  assert_1.default.equal(results.tokens.getItemAt(2).length, 1);
- assert_1.default.equal(results.tokens.getItemAt(3).type, 9 /* Operator */);
+ assert_1.default.equal(results.tokens.getItemAt(3).type, 9 /* TokenType.Operator */);
  assert_1.default.equal(results.tokens.getItemAt(3).length, 1);
- assert_1.default.equal(results.tokens.getItemAt(4).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(4).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(4).value, 0.3e12);
  assert_1.default.equal(results.tokens.getItemAt(4).isInteger, false);
  assert_1.default.equal(results.tokens.getItemAt(4).length, 6);
- assert_1.default.equal(results.tokens.getItemAt(5).type, 9 /* Operator */);
+ assert_1.default.equal(results.tokens.getItemAt(5).type, 9 /* TokenType.Operator */);
  assert_1.default.equal(results.tokens.getItemAt(5).length, 1);
- assert_1.default.equal(results.tokens.getItemAt(6).type, 9 /* Operator */);
+ assert_1.default.equal(results.tokens.getItemAt(6).type, 9 /* TokenType.Operator */);
  assert_1.default.equal(results.tokens.getItemAt(6).length, 1);
- assert_1.default.equal(results.tokens.getItemAt(7).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(7).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(7).value, 0.4e1);
  assert_1.default.equal(results.tokens.getItemAt(7).isInteger, false);
  assert_1.default.equal(results.tokens.getItemAt(7).length, 4);
- assert_1.default.equal(results.tokens.getItemAt(8).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(8).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(8).value, 1e-4);
  assert_1.default.equal(results.tokens.getItemAt(8).isInteger, false);
  assert_1.default.equal(results.tokens.getItemAt(8).length, 4);
- assert_1.default.equal(results.tokens.getItemAt(9).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(9).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(9).value, 0.01);
  assert_1.default.equal(results.tokens.getItemAt(9).isInteger, false);
  assert_1.default.equal(results.tokens.getItemAt(9).length, 4);
- assert_1.default.equal(results.tokens.getItemAt(10).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(10).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(10).value, 1.0);
  assert_1.default.equal(results.tokens.getItemAt(10).isInteger, false);
  assert_1.default.equal(results.tokens.getItemAt(10).length, 4);
@@ -1220,28 +1224,28 @@ test('Floating point numbers with parens', () => {
  const t = new tokenizer_1.Tokenizer();
  const results = t.tokenize('(3.0) (.2) (+.3e+12, .4e1; 0)');
  assert_1.default.equal(results.tokens.count, 14 + _implicitTokenCount);
- assert_1.default.equal(results.tokens.getItemAt(1).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(1).length, 3);
- assert_1.default.equal(results.tokens.getItemAt(4).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(4).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(4).length, 2);
- assert_1.default.equal(results.tokens.getItemAt(8).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(8).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(8).length, 6);
- assert_1.default.equal(results.tokens.getItemAt(10).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(10).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(10).length, 4);
- assert_1.default.equal(results.tokens.getItemAt(12).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(12).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(12).length, 1);
  });
  test('Floating point numbers with operators', () => {
  const t = new tokenizer_1.Tokenizer();
  const results = t.tokenize('88.9/100.0*4.0-2.0,');
  assert_1.default.equal(results.tokens.count, 8 + _implicitTokenCount);
- assert_1.default.equal(results.tokens.getItemAt(0).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(0).length, 4);
- assert_1.default.equal(results.tokens.getItemAt(2).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(2).length, 5);
- assert_1.default.equal(results.tokens.getItemAt(4).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(4).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(4).length, 3);
- assert_1.default.equal(results.tokens.getItemAt(6).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(6).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(6).length, 3);
  assert_1.default.equal(results.tokens.getItemAtPosition(0), 0);
  assert_1.default.equal(results.tokens.getItemAtPosition(3), 0);
@@ -1264,19 +1268,19 @@ test('Imaginary numbers', () => {
  const results = t.tokenize('88.9j/100.0J*4.0e-5j-2.0j,');
  assert_1.default.equal(results.tokens.count, 8 + _implicitTokenCount);
  const token0 = results.tokens.getItemAt(0);
- assert_1.default.equal(token0.type, 6 /* Number */);
+ assert_1.default.equal(token0.type, 6 /* TokenType.Number */);
  assert_1.default.equal(token0.length, 5);
  (0, assert_1.default)(token0.isImaginary);
  const token2 = results.tokens.getItemAt(2);
- assert_1.default.equal(token2.type, 6 /* Number */);
+ assert_1.default.equal(token2.type, 6 /* TokenType.Number */);
  assert_1.default.equal(token2.length, 6);
  (0, assert_1.default)(token2.isImaginary);
  const token4 = results.tokens.getItemAt(4);
- assert_1.default.equal(token4.type, 6 /* Number */);
+ assert_1.default.equal(token4.type, 6 /* TokenType.Number */);
  assert_1.default.equal(token4.length, 7);
  (0, assert_1.default)(token4.isImaginary);
  const token6 = results.tokens.getItemAt(6);
- assert_1.default.equal(token6.type, 6 /* Number */);
+ assert_1.default.equal(token6.type, 6 /* TokenType.Number */);
  assert_1.default.equal(token6.length, 4);
  (0, assert_1.default)(token6.isImaginary);
  });
@@ -1287,7 +1291,7 @@ test('Underscore numbers', () => {
  const isIntegers = [true, true, false, true, false, true];
  assert_1.default.equal(results.tokens.count, 6 + _implicitTokenCount);
  for (let i = 0; i < lengths.length; i++) {
- assert_1.default.equal(results.tokens.getItemAt(i).type, 6 /* Number */);
+ assert_1.default.equal(results.tokens.getItemAt(i).type, 6 /* TokenType.Number */);
  assert_1.default.equal(results.tokens.getItemAt(i).length, lengths[i]);
  assert_1.default.equal(results.tokens.getItemAt(i).isInteger, isIntegers[i]);
  }
@@ -1296,13 +1300,13 @@ test('Simple expression, leading minus', () => {
  const t = new tokenizer_1.Tokenizer();
  const results = t.tokenize('x == -y');
  assert_1.default.equal(results.tokens.count, 4 + _implicitTokenCount);
- assert_1.default.equal(results.tokens.getItemAt(0).type, 7 /* Identifier */);
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 7 /* TokenType.Identifier */);
  assert_1.default.equal(results.tokens.getItemAt(0).length, 1);
- assert_1.default.equal(results.tokens.getItemAt(1).type, 9 /* Operator */);
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 9 /* TokenType.Operator */);
  assert_1.default.equal(results.tokens.getItemAt(1).length, 2);
- assert_1.default.equal(results.tokens.getItemAt(2).type, 9 /* Operator */);
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 9 /* TokenType.Operator */);
  assert_1.default.equal(results.tokens.getItemAt(2).length, 1);
- assert_1.default.equal(results.tokens.getItemAt(3).type, 7 /* Identifier */);
+ assert_1.default.equal(results.tokens.getItemAt(3).type, 7 /* TokenType.Identifier */);
  assert_1.default.equal(results.tokens.getItemAt(3).length, 1);
  });
  test('Operators', () => {
@@ -1316,45 +1320,45 @@ test('Operators', () => {
  const results = new tokenizer_1.Tokenizer().tokenize(text);
  const lengths = [1, 2, 3, 2, 2, 1, 2, 3, 2, 2, 1, 1, 1, 1, 1, 2, 1, 2, 2, 3, 2, 2, 2, 2, 3, 1, 2, 1, 2, 1, 2, 2, 2];
  const operatorTypes = [
- 20 /* LessThan */,
- 17 /* LeftShift */,
- 18 /* LeftShiftEqual */,
- 12 /* Equals */,
- 28 /* NotEquals */,
- 15 /* GreaterThan */,
- 31 /* RightShift */,
- 32 /* RightShiftEqual */,
- 16 /* GreaterThanOrEqual */,
- 21 /* LessThanOrEqual */,
- 0 /* Add */,
- 33 /* Subtract */,
- 5 /* BitwiseInvert */,
- 24 /* Mod */,
- 26 /* Multiply */,
- 29 /* Power */,
- 10 /* Divide */,
- 13 /* FloorDivide */,
- 11 /* DivideEqual */,
- 14 /* FloorDivideEqual */,
- 27 /* MultiplyEqual */,
- 1 /* AddEqual */,
- 34 /* SubtractEqual */,
- 25 /* ModEqual */,
- 30 /* PowerEqual */,
- 3 /* BitwiseAnd */,
- 4 /* BitwiseAndEqual */,
- 6 /* BitwiseOr */,
- 7 /* BitwiseOrEqual */,
- 8 /* BitwiseXor */,
- 9 /* BitwiseXorEqual */,
- 35 /* Walrus */,
- 19 /* LessOrGreaterThan */,
+ 20 /* OperatorType.LessThan */,
+ 17 /* OperatorType.LeftShift */,
+ 18 /* OperatorType.LeftShiftEqual */,
+ 12 /* OperatorType.Equals */,
+ 28 /* OperatorType.NotEquals */,
+ 15 /* OperatorType.GreaterThan */,
+ 31 /* OperatorType.RightShift */,
+ 32 /* OperatorType.RightShiftEqual */,
+ 16 /* OperatorType.GreaterThanOrEqual */,
+ 21 /* OperatorType.LessThanOrEqual */,
+ 0 /* OperatorType.Add */,
+ 33 /* OperatorType.Subtract */,
+ 5 /* OperatorType.BitwiseInvert */,
+ 24 /* OperatorType.Mod */,
+ 26 /* OperatorType.Multiply */,
+ 29 /* OperatorType.Power */,
+ 10 /* OperatorType.Divide */,
+ 13 /* OperatorType.FloorDivide */,
+ 11 /* OperatorType.DivideEqual */,
+ 14 /* OperatorType.FloorDivideEqual */,
+ 27 /* OperatorType.MultiplyEqual */,
+ 1 /* OperatorType.AddEqual */,
+ 34 /* OperatorType.SubtractEqual */,
+ 25 /* OperatorType.ModEqual */,
+ 30 /* OperatorType.PowerEqual */,
+ 3 /* OperatorType.BitwiseAnd */,
+ 4 /* OperatorType.BitwiseAndEqual */,
+ 6 /* OperatorType.BitwiseOr */,
+ 7 /* OperatorType.BitwiseOrEqual */,
+ 8 /* OperatorType.BitwiseXor */,
+ 9 /* OperatorType.BitwiseXorEqual */,
+ 35 /* OperatorType.Walrus */,
+ 19 /* OperatorType.LessOrGreaterThan */,
  ];
  assert_1.default.equal(results.tokens.count - _implicitTokenCount, lengths.length);
  assert_1.default.equal(results.tokens.count - _implicitTokenCount, operatorTypes.length);
  for (let i = 0; i < lengths.length; i++) {
  const t = results.tokens.getItemAt(i);
- assert_1.default.equal(t.type, 9 /* Operator */, `${t.type} at ${i} is not an operator`);
+ assert_1.default.equal(t.type, 9 /* TokenType.Operator */, `${t.type} at ${i} is not an operator`);
  assert_1.default.equal(t.operatorType, operatorTypes[i]);
  assert_1.default.equal(t.length, lengths[i], `Length ${t.length} at ${i} (text ${text.substr(t.start, t.length)}), expected ${lengths[i]}`);
  }
@@ -1363,13 +1367,13 @@ test('Identifiers', () => {
  const t = new tokenizer_1.Tokenizer();
  const results = t.tokenize('and __and __and__ and__');
  assert_1.default.equal(results.tokens.count, 4 + _implicitTokenCount);
- assert_1.default.equal(results.tokens.getItemAt(0).type, 8 /* Keyword */);
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 8 /* TokenType.Keyword */);
  assert_1.default.equal(results.tokens.getItemAt(0).length, 3);
- assert_1.default.equal(results.tokens.getItemAt(1).type, 7 /* Identifier */);
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 7 /* TokenType.Identifier */);
  assert_1.default.equal(results.tokens.getItemAt(1).length, 5);
- assert_1.default.equal(results.tokens.getItemAt(2).type, 7 /* Identifier */);
+ assert_1.default.equal(results.tokens.getItemAt(2).type, 7 /* TokenType.Identifier */);
  assert_1.default.equal(results.tokens.getItemAt(2).length, 7);
- assert_1.default.equal(results.tokens.getItemAt(3).type, 7 /* Identifier */);
+ assert_1.default.equal(results.tokens.getItemAt(3).type, 7 /* TokenType.Identifier */);
  assert_1.default.equal(results.tokens.getItemAt(3).length, 5);
  assert_1.default.equal(results.tokens.getItemAtPosition(0), 0);
  assert_1.default.equal(results.tokens.getItemAtPosition(3), 0);
@@ -1402,15 +1406,15 @@ test('Comments1', () => {
  const results = t.tokenize('# hello\n# good bye\n\n\n""" test """ # another\n\n\npass');
  assert_1.default.equal(results.tokens.count, 4 + _implicitTokenCount);
  const token0 = results.tokens.getItemAt(0);
- assert_1.default.equal(token0.type, 2 /* NewLine */);
+ assert_1.default.equal(token0.type, 2 /* TokenType.NewLine */);
  assert_1.default.equal(token0.comments.length, 1);
  assert_1.default.equal(token0.comments[0].value, ' hello');
  const token1 = results.tokens.getItemAt(1);
- assert_1.default.equal(token1.type, 5 /* String */);
+ assert_1.default.equal(token1.type, 5 /* TokenType.String */);
  assert_1.default.equal(token1.comments.length, 1);
  assert_1.default.equal(token1.comments[0].value, ' good bye');
  const token2 = results.tokens.getItemAt(2);
- assert_1.default.equal(token2.type, 2 /* NewLine */);
+ assert_1.default.equal(token2.type, 2 /* TokenType.NewLine */);
  assert_1.default.equal(token2.comments.length, 1);
  assert_1.default.equal(token2.comments[0].value, ' another');
  assert_1.default.equal(results.tokens.getItemAtPosition(0), -1);
@@ -1432,19 +1436,19 @@ test('Identifiers1', () => {
  assert_1.default.equal(results.tokens.count, 5 + _implicitTokenCount);
  // Korean (Hangul)
  const token0 = results.tokens.getItemAt(0);
- assert_1.default.equal(token0.type, 7 /* Identifier */);
+ assert_1.default.equal(token0.type, 7 /* TokenType.Identifier */);
  // Japanese
  const token1 = results.tokens.getItemAt(1);
- assert_1.default.equal(token1.type, 7 /* Identifier */);
+ assert_1.default.equal(token1.type, 7 /* TokenType.Identifier */);
  // Russian (Cyrillic)
  const token2 = results.tokens.getItemAt(2);
- assert_1.default.equal(token2.type, 7 /* Identifier */);
+ assert_1.default.equal(token2.type, 7 /* TokenType.Identifier */);
  // Hungarian
  const token3 = results.tokens.getItemAt(3);
- assert_1.default.equal(token3.type, 7 /* Identifier */);
+ assert_1.default.equal(token3.type, 7 /* TokenType.Identifier */);
  // Chinese
  const token4 = results.tokens.getItemAt(4);
- assert_1.default.equal(token4.type, 7 /* Identifier */);
+ assert_1.default.equal(token4.type, 7 /* TokenType.Identifier */);
  });
  test('TypeIgnoreAll1', () => {
  const t = new tokenizer_1.Tokenizer();
@@ -1495,9 +1499,9 @@ test('Constructor', () => {
  const t = new tokenizer_1.Tokenizer();
  const results = t.tokenize('def constructor');
  assert_1.default.equal(results.tokens.count, 2 + _implicitTokenCount);
- assert_1.default.equal(results.tokens.getItemAt(0).type, 8 /* Keyword */);
+ assert_1.default.equal(results.tokens.getItemAt(0).type, 8 /* TokenType.Keyword */);
  assert_1.default.equal(results.tokens.getItemAt(0).length, 3);
- assert_1.default.equal(results.tokens.getItemAt(1).type, 7 /* Identifier */);
+ assert_1.default.equal(results.tokens.getItemAt(1).type, 7 /* TokenType.Identifier */);
  assert_1.default.equal(results.tokens.getItemAt(1).length, 11);
  });
  test('Normalization', () => {
@@ -1505,11 +1509,11 @@ test('Normalization', () => {
  const results = t.tokenize('ℝ 𝕽');
  assert_1.default.equal(results.tokens.count, 2 + _implicitTokenCount);
  let idToken = results.tokens.getItemAt(0);
- assert_1.default.equal(idToken.type, 7 /* Identifier */);
+ assert_1.default.equal(idToken.type, 7 /* TokenType.Identifier */);
  assert_1.default.equal(idToken.length, 1);
  assert_1.default.equal(idToken.value, 'R');
  idToken = results.tokens.getItemAt(1);
- assert_1.default.equal(idToken.type, 7 /* Identifier */);
+ assert_1.default.equal(idToken.type, 7 /* TokenType.Identifier */);
  assert_1.default.equal(idToken.length, 2);
  assert_1.default.equal(idToken.value, 'R');
  });
@@ -1518,11 +1522,11 @@ test('Last empty line', () => {
  const results = t.tokenize('\r\n');
  assert_1.default.equal(results.tokens.count, _implicitTokenCount);
  const newLineToken = results.tokens.getItemAt(0);
- assert_1.default.equal(newLineToken.type, 2 /* NewLine */);
+ assert_1.default.equal(newLineToken.type, 2 /* TokenType.NewLine */);
  assert_1.default.equal(newLineToken.length, 2);
- assert_1.default.equal(newLineToken.newLineType, 2 /* CarriageReturnLineFeed */);
+ assert_1.default.equal(newLineToken.newLineType, 2 /* NewLineType.CarriageReturnLineFeed */);
  const eofToken = results.tokens.getItemAt(1);
- assert_1.default.equal(eofToken.type, 1 /* EndOfStream */);
+ assert_1.default.equal(eofToken.type, 1 /* TokenType.EndOfStream */);
  assert_1.default.equal(eofToken.length, 0);
  });
  //# sourceMappingURL=tokenizer.test.js.map