@loopstack/loopstack-studio 0.25.0 → 0.25.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/components/ai-elements/message.js +2 -2
- package/dist/components/ai-elements/reasoning.js +9 -9
- package/dist/loopstack-studio.css +1 -1
- package/dist/node_modules/@chevrotain/gast/lib/src/helpers.js +2 -5
- package/dist/node_modules/@chevrotain/gast/lib/src/model.js +69 -72
- package/dist/node_modules/@mermaid-js/parser/dist/chunks/mermaid-parser.core/{architecture-7HQA4BMR.js → architecture-YZFGNWBL.js} +2 -2
- package/dist/node_modules/@mermaid-js/parser/dist/chunks/mermaid-parser.core/{chunk-TQ3KTPDO.js → chunk-2KRD3SAO.js} +1 -1
- package/dist/node_modules/@mermaid-js/parser/dist/chunks/mermaid-parser.core/{chunk-4F5CHEZ2.js → chunk-67CJDMHE.js} +1 -1
- package/dist/node_modules/@mermaid-js/parser/dist/chunks/mermaid-parser.core/{chunk-UMXZTB3W.js → chunk-7N4EOEYR.js} +5 -2
- package/dist/node_modules/@mermaid-js/parser/dist/chunks/mermaid-parser.core/{chunk-PL6DKKU2.js → chunk-AA7GKIK3.js} +1 -1
- package/dist/node_modules/@mermaid-js/parser/dist/chunks/mermaid-parser.core/chunk-CIAEETIT.js +23 -0
- package/dist/node_modules/@mermaid-js/parser/dist/chunks/mermaid-parser.core/{chunk-SJTYNZTY.js → chunk-FOC6F5B3.js} +1 -1
- package/dist/node_modules/@mermaid-js/parser/dist/chunks/mermaid-parser.core/chunk-K5T4RW27.js +1197 -0
- package/dist/node_modules/@mermaid-js/parser/dist/chunks/mermaid-parser.core/{chunk-FRFDVMJY.js → chunk-KGLVRYIC.js} +1 -1
- package/dist/node_modules/@mermaid-js/parser/dist/chunks/mermaid-parser.core/{chunk-B2363JML.js → chunk-LIHQZDEY.js} +1 -1
- package/dist/node_modules/@mermaid-js/parser/dist/chunks/mermaid-parser.core/chunk-ORNJ4GCN.js +29 -0
- package/dist/node_modules/@mermaid-js/parser/dist/chunks/mermaid-parser.core/{gitGraph-G5XIXVHT.js → gitGraph-7Q5UKJZL.js} +2 -2
- package/dist/node_modules/@mermaid-js/parser/dist/chunks/mermaid-parser.core/info-OMHHGYJF.js +3 -0
- package/dist/node_modules/@mermaid-js/parser/dist/chunks/mermaid-parser.core/packet-4T2RLAQJ.js +3 -0
- package/dist/node_modules/@mermaid-js/parser/dist/chunks/mermaid-parser.core/pie-ZZUOXDRM.js +3 -0
- package/dist/node_modules/@mermaid-js/parser/dist/chunks/mermaid-parser.core/radar-PYXPWWZC.js +3 -0
- package/dist/node_modules/@mermaid-js/parser/dist/chunks/mermaid-parser.core/treeView-SZITEDCU.js +3 -0
- package/dist/node_modules/@mermaid-js/parser/dist/chunks/mermaid-parser.core/treemap-W4RFUUIX.js +3 -0
- package/dist/node_modules/@mermaid-js/parser/dist/chunks/mermaid-parser.core/wardley-RL74JXVD.js +3 -0
- package/dist/node_modules/@mermaid-js/parser/dist/mermaid-parser.core.js +27 -17
- package/dist/node_modules/@upsetjs/venn.js/build/venn.esm.js +903 -0
- package/dist/node_modules/@xyflow/react/dist/esm/index.js +76 -75
- package/dist/node_modules/@xyflow/system/dist/esm/index.js +30 -28
- package/dist/node_modules/chevrotain/lib/src/parse/cst/cst_visitor.js +31 -40
- package/dist/node_modules/chevrotain/lib/src/parse/errors_public.js +23 -26
- package/dist/node_modules/chevrotain/lib/src/parse/exceptions_public.js +12 -13
- package/dist/node_modules/chevrotain/lib/src/parse/grammar/checks.js +181 -205
- package/dist/node_modules/chevrotain/lib/src/parse/grammar/first.js +11 -13
- package/dist/node_modules/chevrotain/lib/src/parse/grammar/follow.js +12 -13
- package/dist/node_modules/chevrotain/lib/src/parse/grammar/gast/gast_resolver_public.js +8 -9
- package/dist/node_modules/chevrotain/lib/src/parse/grammar/interpreter.js +176 -183
- package/dist/node_modules/chevrotain/lib/src/parse/grammar/llk_lookahead.js +17 -19
- package/dist/node_modules/chevrotain/lib/src/parse/grammar/lookahead.js +153 -160
- package/dist/node_modules/chevrotain/lib/src/parse/grammar/resolver.js +10 -12
- package/dist/node_modules/chevrotain/lib/src/parse/grammar/rest.js +36 -38
- package/dist/node_modules/chevrotain/lib/src/parse/parser/parser.js +37 -45
- package/dist/node_modules/chevrotain/lib/src/parse/parser/traits/error_handler.js +12 -14
- package/dist/node_modules/chevrotain/lib/src/parse/parser/traits/gast_recorder.js +80 -86
- package/dist/node_modules/chevrotain/lib/src/parse/parser/traits/lexer_adapter.js +6 -2
- package/dist/node_modules/chevrotain/lib/src/parse/parser/traits/looksahead.js +39 -41
- package/dist/node_modules/chevrotain/lib/src/parse/parser/traits/perf_tracer.js +7 -8
- package/dist/node_modules/chevrotain/lib/src/parse/parser/traits/recognizer_api.js +69 -70
- package/dist/node_modules/chevrotain/lib/src/parse/parser/traits/recognizer_engine.js +215 -205
- package/dist/node_modules/chevrotain/lib/src/parse/parser/traits/recoverable.js +76 -76
- package/dist/node_modules/chevrotain/lib/src/parse/parser/traits/tree_builder.js +29 -39
- package/dist/node_modules/chevrotain/lib/src/scan/lexer.js +252 -274
- package/dist/node_modules/chevrotain/lib/src/scan/lexer_public.js +93 -106
- package/dist/node_modules/chevrotain/lib/src/scan/reg_exp.js +61 -61
- package/dist/node_modules/chevrotain/lib/src/scan/tokens.js +31 -41
- package/dist/node_modules/chevrotain/lib/src/scan/tokens_public.js +15 -18
- package/dist/node_modules/chevrotain-allstar/lib/all-star-lookahead.js +226 -226
- package/dist/node_modules/chevrotain-allstar/lib/atn.js +17 -17
- package/dist/node_modules/cytoscape/dist/cytoscape.esm.js +17 -14
- package/dist/node_modules/dagre-d3-es/src/dagre/greedy-fas.js +19 -19
- package/dist/node_modules/dagre-d3-es/src/dagre/layout.js +48 -48
- package/dist/node_modules/dagre-d3-es/src/dagre/nesting-graph.js +9 -9
- package/dist/node_modules/dagre-d3-es/src/dagre/order/cross-count.js +3 -3
- package/dist/node_modules/dagre-d3-es/src/dagre/order/index.js +11 -11
- package/dist/node_modules/dagre-d3-es/src/dagre/order/init-order.js +4 -4
- package/dist/node_modules/dagre-d3-es/src/dagre/order/resolve-conflicts.js +4 -4
- package/dist/node_modules/dagre-d3-es/src/dagre/order/sort-subgraph.js +8 -8
- package/dist/node_modules/dagre-d3-es/src/dagre/order/sort.js +5 -5
- package/dist/node_modules/dagre-d3-es/src/dagre/position/bk.js +61 -61
- package/dist/node_modules/dagre-d3-es/src/dagre/position/index.js +1 -1
- package/dist/node_modules/dagre-d3-es/src/dagre/rank/network-simplex.js +25 -25
- package/dist/node_modules/dagre-d3-es/src/dagre/rank/util.js +8 -8
- package/dist/node_modules/dagre-d3-es/src/dagre/util.js +48 -48
- package/dist/node_modules/dagre-d3-es/src/graphlib/graph.js +109 -109
- package/dist/node_modules/dagre-d3-es/src/graphlib/json.js +17 -17
- package/dist/node_modules/dompurify/dist/purify.es.js +67 -67
- package/dist/node_modules/katex/dist/katex.js +1256 -1209
- package/dist/node_modules/khroma/dist/methods/transparentize.js +3 -0
- package/dist/node_modules/lodash-es/_baseClone.js +2 -2
- package/dist/node_modules/lodash-es/_baseMatchesProperty.js +6 -6
- package/dist/node_modules/lodash-es/_baseToString.js +1 -1
- package/dist/node_modules/lodash-es/_baseUniq.js +1 -1
- package/dist/node_modules/lodash-es/_createSet.js +1 -1
- package/dist/node_modules/lodash-es/_getAllKeys.js +2 -2
- package/dist/node_modules/lodash-es/_getAllKeysIn.js +1 -1
- package/dist/node_modules/lodash-es/findIndex.js +2 -2
- package/dist/node_modules/lodash-es/flatMap.js +1 -1
- package/dist/node_modules/lodash-es/forEach.js +1 -1
- package/dist/node_modules/lodash-es/max.js +1 -1
- package/dist/node_modules/lodash-es/min.js +1 -1
- package/dist/node_modules/lodash-es/minBy.js +1 -1
- package/dist/node_modules/lodash-es/reduce.js +1 -1
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/architectureDiagram-Q4EWVU46.js +691 -0
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/{blockDiagram-VD42YOAC.js → blockDiagram-DXYQGD6D.js} +183 -157
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/{c4Diagram-YG6GDRKO.js → c4Diagram-AHTNJAMY.js} +65 -63
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/chunk-336JU56O.js +47 -0
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/{chunk-EXTU4WIE.js → chunk-426QAEUC.js} +1 -1
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/{chunk-B4BG7PRW.js → chunk-4TB4RGXK.js} +533 -393
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/chunk-5FUZZQ4R.js +3638 -0
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/{chunk-S3R3BYOJ.js → chunk-5PVQY5BW.js} +1 -1
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/{chunk-HN2XXSSU.js → chunk-BSJP7CBP.js} +20 -14
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/{chunk-QN33PNHL.js → chunk-EDXVE4YY.js} +1 -1
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/chunk-ENJZ2VHE.js +568 -0
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/chunk-ICPOFSXX.js +2320 -0
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/{chunk-DI55MBZ5.js → chunk-OYMX7WX6.js} +35 -21
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/chunk-U2HBQHQK.js +272 -0
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/{chunk-ATLVNIR6.js → chunk-X2U36JSP.js} +1 -1
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/{chunk-MI3HLSF2.js → chunk-XPW4576I.js} +11 -7
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/chunk-YZCP3GAM.js +60 -0
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/chunk-ZZ45TVLE.js +30 -0
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/{classDiagram-2ON5EDUG.js → classDiagram-6PBFFD2Q.js} +12 -11
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/{classDiagram-v2-WZHVMYZB.js → classDiagram-v2-HSJHXN6E.js} +12 -11
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/{dagre-6UL2VRFP.js → dagre-KV5264BT.js} +8 -8
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/diagram-5BDNPKRD.js +99 -0
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/{diagram-PSM6KHXK.js → diagram-G4DWMVQ6.js} +20 -23
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/diagram-MMDJMWI5.js +211 -0
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/{diagram-S2PKOQOG.js → diagram-TYMM5635.js} +4 -4
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/{erDiagram-Q2GNP2WA.js → erDiagram-SMLLAGMA.js} +486 -386
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/{flowDiagram-NV44I4VS.js → flowDiagram-DWJPFMVM.js} +915 -898
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/{ganttDiagram-JELNMOA3.js → ganttDiagram-T4ZO3ILL.js} +59 -29
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/gitGraphDiagram-UUTBAWPF.js +728 -0
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/infoDiagram-42DDH7IO.js +17 -0
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/ishikawaDiagram-UXIWVN3A.js +714 -0
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/{journeyDiagram-XKPGCS4Q.js → journeyDiagram-VCZTEJTY.js} +32 -32
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/{kanban-definition-3W4ZIXB7.js → kanban-definition-6JOO6SKY.js} +13 -9
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/{mindmap-definition-VGOIOE7T.js → mindmap-definition-QFDTVHPH.js} +105 -59
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/pieDiagram-DEJITSTG.js +117 -0
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/{quadrantDiagram-AYHSOK5B.js → quadrantDiagram-34T5L4WZ.js} +1 -1
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/{requirementDiagram-UZGBJVZJ.js → requirementDiagram-MS252O5E.js} +49 -19
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/{sankeyDiagram-TZEHDZUN.js → sankeyDiagram-XADWPNL6.js} +1 -1
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/sequenceDiagram-FGHM5R23.js +4155 -0
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/{stateDiagram-FKZM4ZOC.js → stateDiagram-FHFEXIEX.js} +12 -12
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/{stateDiagram-v2-4FDKWEC3.js → stateDiagram-v2-QKLJ7IA2.js} +11 -11
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/timeline-definition-GMOUNBTQ.js +1071 -0
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/vennDiagram-DHZGUBPP.js +959 -0
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/wardleyDiagram-NUSXRM2D.js +574 -0
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/{xychartDiagram-PRI3JC2R.js → xychartDiagram-5P7HB3ND.js} +38 -35
- package/dist/node_modules/mermaid/dist/mermaid.core.js +235 -192
- package/dist/node_modules/nanoid/url-alphabet/index.js +1 -1
- package/dist/node_modules/remend/dist/index.js +333 -264
- package/dist/node_modules/streamdown/dist/chunk-BO2N2NFS.js +2498 -0
- package/dist/node_modules/streamdown/dist/highlighted-body-OFNGDK62.js +35 -0
- package/dist/node_modules/streamdown/dist/index.js +1 -1
- package/dist/node_modules/streamdown/dist/mermaid-GHXKKRXX.js +3 -0
- package/dist/node_modules/streamdown/node_modules/marked/lib/marked.esm.js +672 -667
- package/dist/pages/DashboardPage.js +3 -3
- package/package.json +2 -2
- package/dist/node_modules/@mermaid-js/parser/dist/chunks/mermaid-parser.core/chunk-TCCFYFTB.js +0 -787
- package/dist/node_modules/@mermaid-js/parser/dist/chunks/mermaid-parser.core/info-VBDWY6EO.js +0 -3
- package/dist/node_modules/@mermaid-js/parser/dist/chunks/mermaid-parser.core/packet-DYOGHKS2.js +0 -3
- package/dist/node_modules/@mermaid-js/parser/dist/chunks/mermaid-parser.core/pie-VRWISCQL.js +0 -3
- package/dist/node_modules/@mermaid-js/parser/dist/chunks/mermaid-parser.core/radar-ZZBFDIW7.js +0 -3
- package/dist/node_modules/@mermaid-js/parser/dist/chunks/mermaid-parser.core/treemap-GDKQZRPO.js +0 -3
- package/dist/node_modules/chevrotain/lib/src/parse/parser/traits/context_assist.js +0 -16
- package/dist/node_modules/lodash-es/_arrayAggregator.js +0 -9
- package/dist/node_modules/lodash-es/_arrayEvery.js +0 -6
- package/dist/node_modules/lodash-es/_baseAggregator.js +0 -8
- package/dist/node_modules/lodash-es/_baseDifference.js +0 -22
- package/dist/node_modules/lodash-es/_baseEvery.js +0 -9
- package/dist/node_modules/lodash-es/_baseIsRegExp.js +0 -8
- package/dist/node_modules/lodash-es/_baseSlice.js +0 -8
- package/dist/node_modules/lodash-es/_baseSome.js +0 -9
- package/dist/node_modules/lodash-es/_createAggregator.js +0 -12
- package/dist/node_modules/lodash-es/assign.js +0 -14
- package/dist/node_modules/lodash-es/compact.js +0 -9
- package/dist/node_modules/lodash-es/difference.js +0 -8
- package/dist/node_modules/lodash-es/drop.js +0 -8
- package/dist/node_modules/lodash-es/dropRight.js +0 -8
- package/dist/node_modules/lodash-es/every.js +0 -11
- package/dist/node_modules/lodash-es/groupBy.js +0 -6
- package/dist/node_modules/lodash-es/head.js +0 -5
- package/dist/node_modules/lodash-es/includes.js +0 -13
- package/dist/node_modules/lodash-es/indexOf.js +0 -11
- package/dist/node_modules/lodash-es/isRegExp.js +0 -5
- package/dist/node_modules/lodash-es/negate.js +0 -16
- package/dist/node_modules/lodash-es/pickBy.js +0 -15
- package/dist/node_modules/lodash-es/reject.js +0 -10
- package/dist/node_modules/lodash-es/some.js +0 -11
- package/dist/node_modules/lodash-es/uniq.js +0 -6
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/architectureDiagram-VXUJARFQ.js +0 -673
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/chunk-ABZYJK2D.js +0 -1547
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/chunk-CVBHYZKI.js +0 -10
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/chunk-DR5Q36YT.js +0 -135
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/chunk-JA3XYJ7Z.js +0 -247
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/chunk-JZLCHNYA.js +0 -3516
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/chunk-N4CR4FBY.js +0 -39
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/chunk-QXUST7PY.js +0 -497
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/chunk-TZMSLE5B.js +0 -55
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/diagram-QEK2KX5R.js +0 -211
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/gitGraphDiagram-V2S2FVAM.js +0 -621
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/infoDiagram-HS3SLOUP.js +0 -18
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/pieDiagram-ADFJNKIX.js +0 -117
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/sequenceDiagram-WL72ISMW.js +0 -3560
- package/dist/node_modules/mermaid/dist/chunks/mermaid.core/timeline-definition-IT6M3QCI.js +0 -833
- package/dist/node_modules/streamdown/dist/chunk-RLXIAIE6.js +0 -2189
- package/dist/node_modules/streamdown/dist/highlighted-body-B3W2YXNL.js +0 -33
- package/dist/node_modules/streamdown/dist/mermaid-3ZIDBTTL.js +0 -3
|
@@ -1,57 +1,34 @@
|
|
|
1
|
-
import isFunction_default from "../../../../lodash-es/isFunction.js";
|
|
2
|
-
import isArray_default from "../../../../lodash-es/isArray.js";
|
|
3
|
-
import isEmpty_default from "../../../../lodash-es/isEmpty.js";
|
|
4
1
|
import { BaseRegExpVisitor } from "../../../../@chevrotain/regexp-to-ast/lib/src/base-regexp-visitor.js";
|
|
5
2
|
import "../../../../@chevrotain/regexp-to-ast/lib/src/api.js";
|
|
6
|
-
import keys_default from "../../../../lodash-es/keys.js";
|
|
7
|
-
import flatten_default from "../../../../lodash-es/flatten.js";
|
|
8
|
-
import compact_default from "../../../../lodash-es/compact.js";
|
|
9
|
-
import defaults_default from "../../../../lodash-es/defaults.js";
|
|
10
|
-
import difference_default from "../../../../lodash-es/difference.js";
|
|
11
|
-
import forEach_default from "../../../../lodash-es/forEach.js";
|
|
12
|
-
import filter_default from "../../../../lodash-es/filter.js";
|
|
13
|
-
import find_default from "../../../../lodash-es/find.js";
|
|
14
|
-
import head_default from "../../../../lodash-es/head.js";
|
|
15
|
-
import map_default from "../../../../lodash-es/map.js";
|
|
16
|
-
import has_default from "../../../../lodash-es/has.js";
|
|
17
|
-
import isString_default from "../../../../lodash-es/isString.js";
|
|
18
|
-
import values_default from "../../../../lodash-es/values.js";
|
|
19
|
-
import includes_default from "../../../../lodash-es/includes.js";
|
|
20
|
-
import indexOf_default from "../../../../lodash-es/indexOf.js";
|
|
21
|
-
import isRegExp_default from "../../../../lodash-es/isRegExp.js";
|
|
22
|
-
import isUndefined_default from "../../../../lodash-es/isUndefined.js";
|
|
23
|
-
import reduce_default from "../../../../lodash-es/reduce.js";
|
|
24
|
-
import reject_default from "../../../../lodash-es/reject.js";
|
|
25
3
|
import { PRINT_ERROR } from "../../../../@chevrotain/utils/lib/src/print.js";
|
|
26
4
|
import { getRegExpAst } from "./reg_exp_parser.js";
|
|
27
5
|
import { canMatchCharCode, failedOptimizationPrefixMsg, getOptimizedStartCodesIndices } from "./reg_exp.js";
|
|
28
6
|
import { Lexer, LexerDefinitionErrorType } from "./lexer_public.js";
|
|
29
7
|
var PATTERN = "PATTERN";
|
|
30
8
|
const DEFAULT_MODE = "defaultMode";
|
|
31
|
-
function analyzeTokenTypes(
|
|
32
|
-
|
|
33
|
-
debug: !1,
|
|
9
|
+
function analyzeTokenTypes(e, S) {
|
|
10
|
+
S = Object.assign({
|
|
34
11
|
safeMode: !1,
|
|
35
12
|
positionTracking: "full",
|
|
36
13
|
lineTerminatorCharacters: ["\r", "\n"],
|
|
37
|
-
tracer: (
|
|
38
|
-
});
|
|
39
|
-
let
|
|
40
|
-
|
|
14
|
+
tracer: (e, x) => x()
|
|
15
|
+
}, S);
|
|
16
|
+
let D = S.tracer;
|
|
17
|
+
D("initCharCodeToOptimizedIndexMap", () => {
|
|
41
18
|
initCharCodeToOptimizedIndexMap();
|
|
42
19
|
});
|
|
43
|
-
let
|
|
44
|
-
|
|
45
|
-
|
|
20
|
+
let O;
|
|
21
|
+
D("Reject Lexer.NA", () => {
|
|
22
|
+
O = e.filter((e) => e[PATTERN] !== Lexer.NA);
|
|
46
23
|
});
|
|
47
|
-
let
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
let
|
|
24
|
+
let k = !1, A;
|
|
25
|
+
D("Transform Patterns", () => {
|
|
26
|
+
k = !1, A = O.map((e) => {
|
|
27
|
+
let x = e[PATTERN];
|
|
51
28
|
/* istanbul ignore else */
|
|
52
|
-
if (
|
|
53
|
-
let
|
|
54
|
-
return
|
|
29
|
+
if (x instanceof RegExp) {
|
|
30
|
+
let e = x.source;
|
|
31
|
+
return e.length === 1 && e !== "^" && e !== "$" && e !== "." && !x.ignoreCase ? e : e.length === 2 && e[0] === "\\" && ![
|
|
55
32
|
"d",
|
|
56
33
|
"D",
|
|
57
34
|
"s",
|
|
@@ -68,256 +45,256 @@ function analyzeTokenTypes(F, I) {
|
|
|
68
45
|
"v",
|
|
69
46
|
"w",
|
|
70
47
|
"W"
|
|
71
|
-
]
|
|
72
|
-
} else if (
|
|
73
|
-
else if (typeof
|
|
74
|
-
else if (typeof
|
|
75
|
-
if (
|
|
48
|
+
].includes(e[1]) ? e[1] : addStickyFlag(x);
|
|
49
|
+
} else if (typeof x == "function") return k = !0, { exec: x };
|
|
50
|
+
else if (typeof x == "object") return k = !0, x;
|
|
51
|
+
else if (typeof x == "string") {
|
|
52
|
+
if (x.length === 1) return x;
|
|
76
53
|
{
|
|
77
|
-
let
|
|
78
|
-
return addStickyFlag(new RegExp(
|
|
54
|
+
let e = x.replace(/[\\^$.*+?()[\]{}|]/g, "\\$&");
|
|
55
|
+
return addStickyFlag(new RegExp(e));
|
|
79
56
|
}
|
|
80
57
|
} else throw Error("non exhaustive match");
|
|
81
58
|
});
|
|
82
59
|
});
|
|
83
|
-
let
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
let
|
|
87
|
-
if (
|
|
88
|
-
if (
|
|
89
|
-
if (
|
|
60
|
+
let j, M, N, P, F;
|
|
61
|
+
D("misc mapping", () => {
|
|
62
|
+
j = O.map((e) => e.tokenTypeIdx), M = O.map((e) => {
|
|
63
|
+
let x = e.GROUP;
|
|
64
|
+
if (x !== Lexer.SKIPPED) {
|
|
65
|
+
if (typeof x == "string") return x;
|
|
66
|
+
if (x === void 0) return !1;
|
|
90
67
|
throw Error("non exhaustive match");
|
|
91
68
|
}
|
|
92
|
-
}),
|
|
93
|
-
let
|
|
94
|
-
if (
|
|
95
|
-
}),
|
|
69
|
+
}), N = O.map((e) => {
|
|
70
|
+
let x = e.LONGER_ALT;
|
|
71
|
+
if (x) return Array.isArray(x) ? x.map((e) => O.indexOf(e)) : [O.indexOf(x)];
|
|
72
|
+
}), P = O.map((e) => e.PUSH_MODE), F = O.map((e) => Object.hasOwn(e, "POP_MODE"));
|
|
96
73
|
});
|
|
97
|
-
let
|
|
98
|
-
|
|
99
|
-
let
|
|
100
|
-
|
|
74
|
+
let I;
|
|
75
|
+
D("Line Terminator Handling", () => {
|
|
76
|
+
let e = getCharCodes(S.lineTerminatorCharacters);
|
|
77
|
+
I = O.map((e) => !1), S.positionTracking !== "onlyOffset" && (I = O.map((x) => Object.hasOwn(x, "LINE_BREAKS") ? !!x.LINE_BREAKS : checkLineBreaksIssues(x, e) === !1 && canMatchCharCode(e, x.PATTERN)));
|
|
101
78
|
});
|
|
102
|
-
let
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
let
|
|
106
|
-
return
|
|
107
|
-
}, {}),
|
|
108
|
-
pattern:
|
|
109
|
-
longerAlt:
|
|
110
|
-
canLineTerminator:
|
|
111
|
-
isCustom:
|
|
112
|
-
short:
|
|
113
|
-
group:
|
|
114
|
-
push:
|
|
115
|
-
pop:
|
|
116
|
-
tokenTypeIdx:
|
|
117
|
-
tokenType:
|
|
79
|
+
let L, R, z, B;
|
|
80
|
+
D("Misc Mapping #2", () => {
|
|
81
|
+
L = O.map(isCustomPattern), R = A.map(isShortPattern), z = O.reduce((e, x) => {
|
|
82
|
+
let S = x.GROUP;
|
|
83
|
+
return typeof S == "string" && S !== Lexer.SKIPPED && (e[S] = []), e;
|
|
84
|
+
}, {}), B = A.map((e, x) => ({
|
|
85
|
+
pattern: A[x],
|
|
86
|
+
longerAlt: N[x],
|
|
87
|
+
canLineTerminator: I[x],
|
|
88
|
+
isCustom: L[x],
|
|
89
|
+
short: R[x],
|
|
90
|
+
group: M[x],
|
|
91
|
+
push: P[x],
|
|
92
|
+
pop: F[x],
|
|
93
|
+
tokenTypeIdx: j[x],
|
|
94
|
+
tokenType: O[x]
|
|
118
95
|
}));
|
|
119
96
|
});
|
|
120
|
-
let
|
|
121
|
-
return
|
|
122
|
-
|
|
123
|
-
if (typeof
|
|
124
|
-
else if (
|
|
125
|
-
let
|
|
126
|
-
|
|
127
|
-
let
|
|
97
|
+
let V = !0, H = [];
|
|
98
|
+
return S.safeMode || D("First Char Optimization", () => {
|
|
99
|
+
H = O.reduce((e, C, E) => {
|
|
100
|
+
if (typeof C.PATTERN == "string") addToMapOfArrays(e, charCodeToOptimizedIndex(C.PATTERN.charCodeAt(0)), B[E]);
|
|
101
|
+
else if (Array.isArray(C.START_CHARS_HINT)) {
|
|
102
|
+
let x;
|
|
103
|
+
C.START_CHARS_HINT.forEach((S) => {
|
|
104
|
+
let C = charCodeToOptimizedIndex(typeof S == "string" ? S.charCodeAt(0) : S);
|
|
128
105
|
/* istanbul ignore else */
|
|
129
|
-
|
|
106
|
+
x !== C && (x = C, addToMapOfArrays(e, C, B[E]));
|
|
130
107
|
});
|
|
131
|
-
} else if (
|
|
108
|
+
} else if (C.PATTERN instanceof RegExp) if (C.PATTERN.unicode) V = !1, S.ensureOptimizations && PRINT_ERROR(`${failedOptimizationPrefixMsg}\tUnable to analyze < ${C.PATTERN.toString()} > pattern.\n The regexp unicode flag is not currently supported by the regexp-to-ast library.
|
|
132
109
|
This will disable the lexer's first char optimizations.
|
|
133
110
|
For details See: https://chevrotain.io/docs/guide/resolving_lexer_errors.html#UNICODE_OPTIMIZE`);
|
|
134
111
|
else {
|
|
135
|
-
let
|
|
136
|
-
|
|
137
|
-
addToMapOfArrays(
|
|
112
|
+
let x = getOptimizedStartCodesIndices(C.PATTERN, S.ensureOptimizations);
|
|
113
|
+
x.length === 0 && (V = !1), x.forEach((x) => {
|
|
114
|
+
addToMapOfArrays(e, x, B[E]);
|
|
138
115
|
});
|
|
139
116
|
}
|
|
140
|
-
else
|
|
141
|
-
For details See: https://chevrotain.io/docs/guide/resolving_lexer_errors.html#CUSTOM_OPTIMIZE`),
|
|
142
|
-
return
|
|
117
|
+
else S.ensureOptimizations && PRINT_ERROR(`${failedOptimizationPrefixMsg}\tTokenType: <${C.name}> is using a custom token pattern without providing <start_chars_hint> parameter.\n This will disable the lexer's first char optimizations.
|
|
118
|
+
For details See: https://chevrotain.io/docs/guide/resolving_lexer_errors.html#CUSTOM_OPTIMIZE`), V = !1;
|
|
119
|
+
return e;
|
|
143
120
|
}, []);
|
|
144
121
|
}), {
|
|
145
|
-
emptyGroups:
|
|
146
|
-
patternIdxToConfig:
|
|
147
|
-
charCodeToPatternIdxToConfig:
|
|
148
|
-
hasCustom:
|
|
149
|
-
canBeOptimized:
|
|
122
|
+
emptyGroups: z,
|
|
123
|
+
patternIdxToConfig: B,
|
|
124
|
+
charCodeToPatternIdxToConfig: H,
|
|
125
|
+
hasCustom: k,
|
|
126
|
+
canBeOptimized: V
|
|
150
127
|
};
|
|
151
128
|
}
|
|
152
|
-
function validatePatterns(
|
|
153
|
-
let
|
|
154
|
-
|
|
155
|
-
let
|
|
156
|
-
return
|
|
129
|
+
function validatePatterns(e, x) {
|
|
130
|
+
let S = [], C = findMissingPatterns(e);
|
|
131
|
+
S = S.concat(C.errors);
|
|
132
|
+
let w = findInvalidPatterns(C.valid), T = w.valid;
|
|
133
|
+
return S = S.concat(w.errors), S = S.concat(validateRegExpPattern(T)), S = S.concat(findInvalidGroupType(T)), S = S.concat(findModesThatDoNotExist(T, x)), S = S.concat(findUnreachablePatterns(T)), S;
|
|
157
134
|
}
|
|
158
|
-
function validateRegExpPattern(
|
|
159
|
-
let
|
|
160
|
-
return
|
|
135
|
+
function validateRegExpPattern(e) {
|
|
136
|
+
let x = [], S = e.filter((e) => e[PATTERN] instanceof RegExp);
|
|
137
|
+
return x = x.concat(findEndOfInputAnchor(S)), x = x.concat(findStartOfInputAnchor(S)), x = x.concat(findUnsupportedFlags(S)), x = x.concat(findDuplicatePatterns(S)), x = x.concat(findEmptyMatchRegExps(S)), x;
|
|
161
138
|
}
|
|
162
|
-
function findMissingPatterns(
|
|
163
|
-
let
|
|
139
|
+
function findMissingPatterns(e) {
|
|
140
|
+
let x = e.filter((e) => !Object.hasOwn(e, PATTERN));
|
|
164
141
|
return {
|
|
165
|
-
errors:
|
|
166
|
-
message: "Token Type: ->" +
|
|
142
|
+
errors: x.map((e) => ({
|
|
143
|
+
message: "Token Type: ->" + e.name + "<- missing static 'PATTERN' property",
|
|
167
144
|
type: LexerDefinitionErrorType.MISSING_PATTERN,
|
|
168
|
-
tokenTypes: [
|
|
145
|
+
tokenTypes: [e]
|
|
169
146
|
})),
|
|
170
|
-
valid:
|
|
147
|
+
valid: e.filter((e) => !x.includes(e))
|
|
171
148
|
};
|
|
172
149
|
}
|
|
173
|
-
function findInvalidPatterns(
|
|
174
|
-
let
|
|
175
|
-
let
|
|
176
|
-
return !
|
|
150
|
+
function findInvalidPatterns(e) {
|
|
151
|
+
let x = e.filter((e) => {
|
|
152
|
+
let x = e[PATTERN];
|
|
153
|
+
return !(x instanceof RegExp) && typeof x != "function" && !Object.hasOwn(x, "exec") && typeof x != "string";
|
|
177
154
|
});
|
|
178
155
|
return {
|
|
179
|
-
errors:
|
|
180
|
-
message: "Token Type: ->" +
|
|
156
|
+
errors: x.map((e) => ({
|
|
157
|
+
message: "Token Type: ->" + e.name + "<- static 'PATTERN' can only be a RegExp, a Function matching the {CustomPatternMatcherFunc} type or an Object matching the {ICustomPattern} interface.",
|
|
181
158
|
type: LexerDefinitionErrorType.INVALID_PATTERN,
|
|
182
|
-
tokenTypes: [
|
|
159
|
+
tokenTypes: [e]
|
|
183
160
|
})),
|
|
184
|
-
valid:
|
|
161
|
+
valid: e.filter((e) => !x.includes(e))
|
|
185
162
|
};
|
|
186
163
|
}
|
|
187
164
|
var end_of_input = /[^\\][$]/;
|
|
188
|
-
function findEndOfInputAnchor(
|
|
189
|
-
class
|
|
165
|
+
function findEndOfInputAnchor(x) {
|
|
166
|
+
class C extends BaseRegExpVisitor {
|
|
190
167
|
constructor() {
|
|
191
168
|
super(...arguments), this.found = !1;
|
|
192
169
|
}
|
|
193
|
-
visitEndAnchor(
|
|
170
|
+
visitEndAnchor(e) {
|
|
194
171
|
this.found = !0;
|
|
195
172
|
}
|
|
196
173
|
}
|
|
197
|
-
return
|
|
198
|
-
let
|
|
174
|
+
return x.filter((e) => {
|
|
175
|
+
let x = e.PATTERN;
|
|
199
176
|
try {
|
|
200
|
-
let
|
|
201
|
-
return
|
|
177
|
+
let e = getRegExpAst(x), w = new C();
|
|
178
|
+
return w.visit(e), w.found;
|
|
202
179
|
} catch {
|
|
203
180
|
/* istanbul ignore next - cannot ensure an error in regexp-to-ast*/
|
|
204
|
-
return end_of_input.test(
|
|
181
|
+
return end_of_input.test(x.source);
|
|
205
182
|
}
|
|
206
|
-
})
|
|
207
|
-
message: "Unexpected RegExp Anchor Error:\n Token Type: ->" +
|
|
183
|
+
}).map((e) => ({
|
|
184
|
+
message: "Unexpected RegExp Anchor Error:\n Token Type: ->" + e.name + "<- static 'PATTERN' cannot contain end of input anchor '$'\n See chevrotain.io/docs/guide/resolving_lexer_errors.html#ANCHORS for details.",
|
|
208
185
|
type: LexerDefinitionErrorType.EOI_ANCHOR_FOUND,
|
|
209
|
-
tokenTypes: [
|
|
186
|
+
tokenTypes: [e]
|
|
210
187
|
}));
|
|
211
188
|
}
|
|
212
|
-
function findEmptyMatchRegExps(
|
|
213
|
-
return
|
|
214
|
-
message: "Token Type: ->" +
|
|
189
|
+
function findEmptyMatchRegExps(e) {
|
|
190
|
+
return e.filter((e) => e.PATTERN.test("")).map((e) => ({
|
|
191
|
+
message: "Token Type: ->" + e.name + "<- static 'PATTERN' must not match an empty string",
|
|
215
192
|
type: LexerDefinitionErrorType.EMPTY_MATCH_PATTERN,
|
|
216
|
-
tokenTypes: [
|
|
193
|
+
tokenTypes: [e]
|
|
217
194
|
}));
|
|
218
195
|
}
|
|
219
196
|
var start_of_input = /[^\\[][\^]|^\^/;
|
|
220
|
-
function findStartOfInputAnchor(
|
|
221
|
-
class
|
|
197
|
+
function findStartOfInputAnchor(x) {
|
|
198
|
+
class C extends BaseRegExpVisitor {
|
|
222
199
|
constructor() {
|
|
223
200
|
super(...arguments), this.found = !1;
|
|
224
201
|
}
|
|
225
|
-
visitStartAnchor(
|
|
202
|
+
visitStartAnchor(e) {
|
|
226
203
|
this.found = !0;
|
|
227
204
|
}
|
|
228
205
|
}
|
|
229
|
-
return
|
|
230
|
-
let
|
|
206
|
+
return x.filter((e) => {
|
|
207
|
+
let x = e.PATTERN;
|
|
231
208
|
try {
|
|
232
|
-
let
|
|
233
|
-
return
|
|
209
|
+
let e = getRegExpAst(x), w = new C();
|
|
210
|
+
return w.visit(e), w.found;
|
|
234
211
|
} catch {
|
|
235
212
|
/* istanbul ignore next - cannot ensure an error in regexp-to-ast*/
|
|
236
|
-
return start_of_input.test(
|
|
213
|
+
return start_of_input.test(x.source);
|
|
237
214
|
}
|
|
238
|
-
})
|
|
239
|
-
message: "Unexpected RegExp Anchor Error:\n Token Type: ->" +
|
|
215
|
+
}).map((e) => ({
|
|
216
|
+
message: "Unexpected RegExp Anchor Error:\n Token Type: ->" + e.name + "<- static 'PATTERN' cannot contain start of input anchor '^'\n See https://chevrotain.io/docs/guide/resolving_lexer_errors.html#ANCHORS for details.",
|
|
240
217
|
type: LexerDefinitionErrorType.SOI_ANCHOR_FOUND,
|
|
241
|
-
tokenTypes: [
|
|
218
|
+
tokenTypes: [e]
|
|
242
219
|
}));
|
|
243
220
|
}
|
|
244
|
-
// Returns a lexer-definition error for every token type whose RegExp PATTERN
// carries the unsupported 'g' (global) or 'm' (multiline) flag.
function findUnsupportedFlags(e) {
  const hasBadFlags = (tokType) => {
    const pattern = tokType[PATTERN];
    return pattern instanceof RegExp && (pattern.multiline || pattern.global);
  };
  return e.filter(hasBadFlags).map((tokType) => ({
    message: "Token Type: ->" + tokType.name + "<- static 'PATTERN' may NOT contain global('g') or multiline('m')",
    type: LexerDefinitionErrorType.UNSUPPORTED_FLAGS_FOUND,
    tokenTypes: [tokType]
  }));
}
|
|
254
|
-
// Groups token types that share the exact same RegExp pattern source
// (patterns equal to Lexer.NA are ignored) and reports one
// DUPLICATE_PATTERNS_FOUND error per group of two or more.
function findDuplicatePatterns(e) {
  // x accumulates token types already assigned to some group, so each
  // duplicate is only counted once; S maps every token type to its group.
  let x = [], S = e.map((S) => e.reduce((e, C) => S.PATTERN.source === C.PATTERN.source && !x.includes(C) && C.PATTERN !== Lexer.NA ? (x.push(C), e.push(C), e) : e, []));
  return S = S.filter(Boolean), S.filter((e) => e.length > 1).map((e) => {
    // Names of all token types participating in this duplicate group.
    let x = e.map((e) => e.name);
    return {
      message: `The same RegExp pattern ->${e[0].PATTERN}<-has been used in all of the following Token Types: ${x.join(", ")} <-`,
      type: LexerDefinitionErrorType.DUPLICATE_PATTERNS_FOUND,
      tokenTypes: e
    };
  });
}
|
|
265
|
-
// Returns a lexer-definition error for every token type whose own 'GROUP'
// property is neither Lexer.SKIPPED, Lexer.NA, nor a string.
function findInvalidGroupType(e) {
  const invalid = [];
  for (const tokType of e) {
    if (!Object.hasOwn(tokType, "GROUP")) continue;
    const group = tokType.GROUP;
    if (group === Lexer.SKIPPED || group === Lexer.NA || typeof group == "string") continue;
    invalid.push(tokType);
  }
  return invalid.map((tokType) => ({
    message: "Token Type: ->" + tokType.name + "<- static 'GROUP' can only be Lexer.SKIPPED/Lexer.NA/A String",
    type: LexerDefinitionErrorType.INVALID_GROUP_TYPE_FOUND,
    tokenTypes: [tokType]
  }));
}
|
|
276
|
-
// Returns a lexer-definition error for every token type whose PUSH_MODE
// names a lexer mode that is not present in the list of valid modes (x).
function findModesThatDoNotExist(e, x) {
  const unknownMode = (tokType) =>
    tokType.PUSH_MODE !== void 0 && !x.includes(tokType.PUSH_MODE);
  return e.filter(unknownMode).map((tokType) => ({
    message: `Token Type: ->${tokType.name}<- static 'PUSH_MODE' value cannot refer to a Lexer Mode ->${tokType.PUSH_MODE}<-which does not exist`,
    type: LexerDefinitionErrorType.PUSH_MODE_DOES_NOT_EXIST,
    tokenTypes: [tokType]
  }));
}
|
|
283
|
-
// Reports token types that can never be matched because an earlier token
// type in the definition order already matches their (effectively literal)
// pattern. Only string patterns and metachar-free RegExps are considered
// as candidate "literal" patterns.
function findUnreachablePatterns(e) {
  // x collects the resulting errors; S collects {str, idx, tokenType} for
  // every pattern that behaves like a plain string literal.
  let x = [], S = e.reduce((e, x, S) => {
    let C = x.PATTERN;
    return C === Lexer.NA || (typeof C == "string" ? e.push({
      str: C,
      idx: S,
      tokenType: x
    }) : C instanceof RegExp && noMetaChar(C) && e.push({
      str: C.source,
      idx: S,
      tokenType: x
    })), e;
  }, []);
  // For each token type, flag every literal candidate that appears LATER
  // (C < w) yet would already be consumed by this token's pattern.
  return e.forEach((e, C) => {
    S.forEach(({ str: S, idx: w, tokenType: T }) => {
      if (C < w && tryToMatchStrToPattern(S, e.PATTERN)) {
        let S = `Token: ->${T.name}<- can never be matched.\nBecause it appears AFTER the Token Type ->${e.name}<-in the lexer's definition.\nSee https://chevrotain.io/docs/guide/resolving_lexer_errors.html#UNREACHABLE`;
        x.push({
          message: S,
          type: LexerDefinitionErrorType.UNREACHABLE_PATTERN,
          tokenTypes: [e, T]
        });
      }
    });
  }), x;
}
|
|
309
|
-
// Checks whether the literal string e would be matched (at offset 0) by the
// given pattern, which may be a RegExp, a custom matcher function, an
// object exposing an exec method, or a plain string.
function tryToMatchStrToPattern(e, x) {
  if (x instanceof RegExp) {
    // Look-ahead/behind makes a naive first-match comparison unreliable.
    if (usesLookAheadOrBehind(x)) return !1;
    const match = x.exec(e);
    return match !== null && match.index === 0;
  }
  if (typeof x == "function") return x(e, 0, [], {});
  if (Object.hasOwn(x, "exec")) return x.exec(e, 0, [], {});
  if (typeof x == "string") return x === e;
  throw Error("non exhaustive match");
}
|
|
319
|
-
function noMetaChar(
|
|
320
|
-
return
|
|
296
|
+
function noMetaChar(e) {
|
|
297
|
+
return [
|
|
321
298
|
".",
|
|
322
299
|
"\\",
|
|
323
300
|
"[",
|
|
@@ -331,129 +308,130 @@ function noMetaChar(i) {
|
|
|
331
308
|
"*",
|
|
332
309
|
"+",
|
|
333
310
|
"{"
|
|
334
|
-
]
|
|
311
|
+
].find((x) => e.source.indexOf(x) !== -1) === void 0;
|
|
335
312
|
}
|
|
336
|
-
// True when the RegExp source contains any look-around group opener:
// (?= look-ahead, (?! negative look-ahead, (?<= look-behind, (?<! negative.
function usesLookAheadOrBehind(e) {
  const src = e.source;
  return /(\(\?=)|(\(\?!)|(\(\?<=)|(\(\?<!)/.test(src);
}
|
|
339
|
-
// Rebuilds the RegExp with the sticky ('y') flag, preserving only the
// ignoreCase flag from the original pattern.
function addStickyFlag(e) {
  return new RegExp(e.source, e.ignoreCase ? "iy" : "y");
}
|
|
343
|
-
// Validates a multi-mode lexer definition object (e): presence of
// defaultMode and modes, that defaultMode names an existing mode, that no
// mode contains an undefined token type, and that every LONGER_ALT target
// lives in the same mode as its token.
// NOTE(review): parameters x and S are not used in this body — presumably
// kept for call-site compatibility; verify against callers.
function performRuntimeChecks(e, x, S) {
  // C accumulates all definition errors found.
  let C = [];
  return Object.hasOwn(e, "defaultMode") || C.push({
    message: "A MultiMode Lexer cannot be initialized without a <" + DEFAULT_MODE + "> property in its definition\n",
    type: LexerDefinitionErrorType.MULTI_MODE_LEXER_WITHOUT_DEFAULT_MODE
  }), Object.hasOwn(e, "modes") || C.push({
    message: "A MultiMode Lexer cannot be initialized without a <modes> property in its definition\n",
    type: LexerDefinitionErrorType.MULTI_MODE_LEXER_WITHOUT_MODES_PROPERTY
  }), Object.hasOwn(e, "modes") && Object.hasOwn(e, "defaultMode") && !Object.hasOwn(e.modes, e.defaultMode) && C.push({
    message: `A MultiMode Lexer cannot be initialized with a ${DEFAULT_MODE}: <${e.defaultMode}>which does not exist\n`,
    type: LexerDefinitionErrorType.MULTI_MODE_LEXER_DEFAULT_MODE_VALUE_DOES_NOT_EXIST
  }), Object.hasOwn(e, "modes") && Object.keys(e.modes).forEach((x) => {
    // S here is the list of token types belonging to mode x.
    let S = e.modes[x];
    S.forEach((e, w) => {
      e === void 0 ? C.push({
        message: `A Lexer cannot be initialized using an undefined Token Type. Mode:<${x}> at index: <${w}>\n`,
        type: LexerDefinitionErrorType.LEXER_DEFINITION_CANNOT_CONTAIN_UNDEFINED
      }) : Object.hasOwn(e, "LONGER_ALT") && (Array.isArray(e.LONGER_ALT) ? e.LONGER_ALT : [e.LONGER_ALT]).forEach((w) => {
        // Every longer_alt must also be registered in the same mode.
        w !== void 0 && !S.includes(w) && C.push({
          message: `A MultiMode Lexer cannot be initialized with a longer_alt <${w.name}> on token <${e.name}> outside of mode <${x}>\n`,
          type: LexerDefinitionErrorType.MULTI_MODE_LEXER_LONGER_ALT_NOT_IN_CURRENT_MODE
        });
      });
    });
  }), C;
}
|
|
368
|
-
// Produces lexer-definition WARNINGS about line-terminator tracking.
// e: lexer definition (modes read); x: gate flag — when truthy, per-token
// line-break issues are checked and a global "no LINE_BREAKS" warning may
// be emitted; S: configured line-terminator characters (chars or codes).
function performWarningRuntimeChecks(e, x, S) {
  // w: collected warnings; T: set once any token is known to match a line
  // terminator; O: all concrete token types across modes (Lexer.NA patterns
  // excluded); k: S normalized to numeric char codes.
  let w = [], T = !1, O = Object.values(e.modes || {}).flat().filter(Boolean).filter((e) => e[PATTERN] !== Lexer.NA), k = getCharCodes(S);
  return x && O.forEach((e) => {
    let x = checkLineBreaksIssues(e, k);
    if (x !== !1) {
      // A concrete issue was identified for this token type.
      let S = {
        message: buildLineBreakIssueMessage(e, x),
        type: x.issue,
        tokenType: e
      };
      w.push(S);
    } else Object.hasOwn(e, "LINE_BREAKS") ? e.LINE_BREAKS === !0 && (T = !0) : canMatchCharCode(k, e.PATTERN) && (T = !0);
  }), x && !T && w.push({
    message: "Warning: No LINE_BREAKS Found.\n This Lexer has been defined to track line and column information,\n But none of the Token Types can be identified as matching a line terminator.\n See https://chevrotain.io/docs/guide/resolving_lexer_errors.html#LINE_BREAKS \n for details.",
    type: LexerDefinitionErrorType.NO_LINE_BREAKS_FLAGS
  }), w;
}
|
|
385
|
-
// Produces a shallow copy of the groups map where every (array) value is
// replaced by a fresh empty array; any non-array value is a defect.
function cloneEmptyGroups(e) {
  const emptied = {};
  for (const key of Object.keys(e)) {
    const value = e[key];
    /* istanbul ignore else */
    if (!Array.isArray(value)) throw Error("non exhaustive match");
    emptied[key] = [];
  }
  return emptied;
}
|
|
394
|
-
// True when the token type's PATTERN is a custom matcher (a function or an
// object exposing exec); false for plain RegExp or string patterns.
function isCustomPattern(e) {
  const pattern = e.PATTERN;
  /* istanbul ignore else */
  if (pattern instanceof RegExp) return !1;
  if (typeof pattern == "function") return !0;
  if (Object.hasOwn(pattern, "exec")) return !0;
  if (typeof pattern == "string") return !1;
  throw Error("non exhaustive match");
}
|
|
402
|
-
// For a single-character string pattern, returns its char code (used as a
// fast-path dispatch key); returns false for every other pattern kind.
function isShortPattern(e) {
  if (typeof e == "string" && e.length === 1) {
    return e.charCodeAt(0);
  }
  return !1;
}
|
|
405
383
|
// A RegExp-like stateful tester for line terminators: scans forward from
// this.lastIndex for '\n' (10) or '\r' (13), treating '\r\n' as a single
// terminator, and advances lastIndex past the match like a sticky regex.
const LineTerminatorOptimizedTester = {
  test: function(text) {
    const length = text.length;
    for (let i = this.lastIndex; i < length; i++) {
      const code = text.charCodeAt(i);
      if (code === 10) {
        this.lastIndex = i + 1;
        return !0;
      }
      if (code === 13) {
        // Consume the '\n' of a '\r\n' pair together with the '\r'.
        this.lastIndex = text.charCodeAt(i + 1) === 10 ? i + 2 : i + 1;
        return !0;
      }
    }
    return !1;
  },
  lastIndex: 0
};
|
|
417
|
-
// Determines whether a token type's line-break behavior can be analyzed.
// Returns false when there is no issue (explicit LINE_BREAKS, analyzable
// RegExp, or plain string pattern); otherwise returns an {issue, errMsg?}
// descriptor. x is the list of line-terminator char codes.
function checkLineBreaksIssues(e, x) {
  // An explicit LINE_BREAKS property overrides any analysis.
  if (Object.hasOwn(e, "LINE_BREAKS")) return !1;
  if (e.PATTERN instanceof RegExp) {
    try {
      // Probe only — the result is ignored; we only care whether the
      // analysis itself throws.
      canMatchCharCode(x, e.PATTERN);
    } catch (e) {
      /* istanbul ignore next - to test this we would have to mock <canMatchCharCode> to throw an error */
      return {
        issue: LexerDefinitionErrorType.IDENTIFY_TERMINATOR,
        errMsg: e.message
      };
    }
    return !1;
  } else if (typeof e.PATTERN == "string") return !1;
  else if (isCustomPattern(e)) return { issue: LexerDefinitionErrorType.CUSTOM_LINE_BREAK };
  else throw Error("non exhaustive match");
}
|
|
434
|
-
// Formats the warning message for a line-break analysis issue descriptor
// (as produced by checkLineBreaksIssues) for token type e.
function buildLineBreakIssueMessage(e, x) {
  const { issue } = x;
  /* istanbul ignore else */
  if (issue === LexerDefinitionErrorType.IDENTIFY_TERMINATOR) {
    return `Warning: unable to identify line terminator usage in pattern.
\tThe problem is in the <${e.name}> Token Type\n\t Root cause: ${x.errMsg}.\n For details See: https://chevrotain.io/docs/guide/resolving_lexer_errors.html#IDENTIFY_TERMINATOR`;
  }
  if (issue === LexerDefinitionErrorType.CUSTOM_LINE_BREAK) {
    return `Warning: A Custom Token Pattern should specify the <line_breaks> option.
\tThe problem is in the <${e.name}> Token Type\n For details See: https://chevrotain.io/docs/guide/resolving_lexer_errors.html#CUSTOM_LINE_BREAK`;
  }
  throw Error("non exhaustive match");
}
|
|
442
|
-
// Normalizes a mixed list of single characters and numeric char codes into
// numeric char codes (string entries are converted via charCodeAt(0)).
function getCharCodes(e) {
  return e.map((item) => {
    if (typeof item == "string") return item.charCodeAt(0);
    return item;
  });
}
|
|
445
|
-
// Appends S to the array stored under key x in map e, creating the array
// on first insertion. Mutates e in place; returns nothing.
function addToMapOfArrays(e, x, S) {
  if (e[x] === void 0) {
    e[x] = [S];
  } else {
    e[x].push(S);
  }
}
|
|
448
426
|
// Char codes below this threshold map to themselves in the optimized index.
const minOptimizationVal = 256;
// Lazily built lookup table (see initCharCodeToOptimizedIndexMap) mapping
// every UTF-16 code unit (0..65535) to a compact optimized index.
var charCodeToOptimizedIdxMap = [];
|
|
450
|
-
// Maps a char code to its optimized index: codes below 256 are identity;
// higher codes go through the precomputed charCodeToOptimizedIdxMap table.
function charCodeToOptimizedIndex(e) {
  if (e < 256) {
    return e;
  }
  return charCodeToOptimizedIdxMap[e];
}
|
|
453
431
|
// Builds charCodeToOptimizedIdxMap on first call (idempotent thereafter):
// codes 0..255 map to themselves, higher codes are compressed into
// buckets of width 255 starting at index 256.
function initCharCodeToOptimizedIndexMap() {
  if (charCodeToOptimizedIdxMap.length !== 0) {
    return;
  }
  charCodeToOptimizedIdxMap = Array(65536);
  for (let code = 0; code < 65536; code++) {
    charCodeToOptimizedIdxMap[code] = code > 255 ? 255 + ~~(code / 255) : code;
  }
}
|
|
459
437
|
export { DEFAULT_MODE, LineTerminatorOptimizedTester, analyzeTokenTypes, charCodeToOptimizedIndex, cloneEmptyGroups, minOptimizationVal, performRuntimeChecks, performWarningRuntimeChecks, validatePatterns };
|