flowquery 1.0.3 → 1.0.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (168)
  1. package/dist/compute/runner.d.ts +81 -0
  2. package/dist/compute/runner.d.ts.map +1 -0
  3. package/dist/index.browser.d.ts +10 -0
  4. package/dist/index.browser.d.ts.map +1 -0
  5. package/dist/index.d.ts +9 -0
  6. package/dist/index.d.ts.map +1 -0
  7. package/dist/index.node.d.ts +14 -0
  8. package/dist/index.node.d.ts.map +1 -0
  9. package/dist/io/command_line.d.ts +28 -0
  10. package/dist/io/command_line.d.ts.map +1 -0
  11. package/dist/parsing/alias.d.ts +10 -0
  12. package/dist/parsing/alias.d.ts.map +1 -0
  13. package/dist/parsing/alias_option.d.ts +6 -0
  14. package/dist/parsing/alias_option.d.ts.map +1 -0
  15. package/dist/parsing/ast_node.d.ts +100 -0
  16. package/dist/parsing/ast_node.d.ts.map +1 -0
  17. package/dist/parsing/base_parser.d.ts +49 -0
  18. package/dist/parsing/base_parser.d.ts.map +1 -0
  19. package/dist/parsing/components/csv.d.ts +6 -0
  20. package/dist/parsing/components/csv.d.ts.map +1 -0
  21. package/dist/parsing/components/from.d.ts +7 -0
  22. package/dist/parsing/components/from.d.ts.map +1 -0
  23. package/dist/parsing/components/headers.d.ts +7 -0
  24. package/dist/parsing/components/headers.d.ts.map +1 -0
  25. package/dist/parsing/components/json.d.ts +6 -0
  26. package/dist/parsing/components/json.d.ts.map +1 -0
  27. package/dist/parsing/components/null.d.ts +6 -0
  28. package/dist/parsing/components/null.d.ts.map +1 -0
  29. package/dist/parsing/components/post.d.ts +6 -0
  30. package/dist/parsing/components/post.d.ts.map +1 -0
  31. package/dist/parsing/components/text.d.ts +6 -0
  32. package/dist/parsing/components/text.d.ts.map +1 -0
  33. package/dist/parsing/context.d.ts +38 -0
  34. package/dist/parsing/context.d.ts.map +1 -0
  35. package/dist/parsing/data_structures/associative_array.d.ts +28 -0
  36. package/dist/parsing/data_structures/associative_array.d.ts.map +1 -0
  37. package/dist/parsing/data_structures/json_array.d.ts +26 -0
  38. package/dist/parsing/data_structures/json_array.d.ts.map +1 -0
  39. package/dist/parsing/data_structures/key_value_pair.d.ts +25 -0
  40. package/dist/parsing/data_structures/key_value_pair.d.ts.map +1 -0
  41. package/dist/parsing/data_structures/lookup.d.ts +25 -0
  42. package/dist/parsing/data_structures/lookup.d.ts.map +1 -0
  43. package/dist/parsing/data_structures/range_lookup.d.ts +14 -0
  44. package/dist/parsing/data_structures/range_lookup.d.ts.map +1 -0
  45. package/dist/parsing/expressions/expression.d.ts +53 -0
  46. package/dist/parsing/expressions/expression.d.ts.map +1 -0
  47. package/dist/parsing/expressions/f_string.d.ts +21 -0
  48. package/dist/parsing/expressions/f_string.d.ts.map +1 -0
  49. package/dist/parsing/expressions/identifier.d.ts +17 -0
  50. package/dist/parsing/expressions/identifier.d.ts.map +1 -0
  51. package/dist/parsing/expressions/number.d.ts +25 -0
  52. package/dist/parsing/expressions/number.d.ts.map +1 -0
  53. package/dist/parsing/expressions/operator.d.ts +79 -0
  54. package/dist/parsing/expressions/operator.d.ts.map +1 -0
  55. package/dist/parsing/expressions/reference.d.ts +29 -0
  56. package/dist/parsing/expressions/reference.d.ts.map +1 -0
  57. package/dist/parsing/expressions/string.d.ts +23 -0
  58. package/dist/parsing/expressions/string.d.ts.map +1 -0
  59. package/dist/parsing/functions/aggregate_function.d.ts +42 -0
  60. package/dist/parsing/functions/aggregate_function.d.ts.map +1 -0
  61. package/dist/parsing/functions/async_function.d.ts +52 -0
  62. package/dist/parsing/functions/async_function.d.ts.map +1 -0
  63. package/dist/parsing/functions/avg.d.ts +15 -0
  64. package/dist/parsing/functions/avg.d.ts.map +1 -0
  65. package/dist/parsing/functions/collect.d.ts +21 -0
  66. package/dist/parsing/functions/collect.d.ts.map +1 -0
  67. package/dist/parsing/functions/function.d.ts +36 -0
  68. package/dist/parsing/functions/function.d.ts.map +1 -0
  69. package/dist/parsing/functions/function_factory.d.ts +151 -0
  70. package/dist/parsing/functions/function_factory.d.ts.map +1 -0
  71. package/dist/parsing/functions/function_metadata.d.ts +83 -0
  72. package/dist/parsing/functions/function_metadata.d.ts.map +1 -0
  73. package/dist/parsing/functions/functions.d.ts +21 -0
  74. package/dist/parsing/functions/functions.d.ts.map +1 -0
  75. package/dist/parsing/functions/join.d.ts +9 -0
  76. package/dist/parsing/functions/join.d.ts.map +1 -0
  77. package/dist/parsing/functions/predicate_function.d.ts +18 -0
  78. package/dist/parsing/functions/predicate_function.d.ts.map +1 -0
  79. package/dist/parsing/functions/predicate_function_factory.d.ts +6 -0
  80. package/dist/parsing/functions/predicate_function_factory.d.ts.map +1 -0
  81. package/dist/parsing/functions/predicate_sum.d.ts +7 -0
  82. package/dist/parsing/functions/predicate_sum.d.ts.map +1 -0
  83. package/dist/parsing/functions/rand.d.ts +7 -0
  84. package/dist/parsing/functions/rand.d.ts.map +1 -0
  85. package/dist/parsing/functions/range.d.ts +7 -0
  86. package/dist/parsing/functions/range.d.ts.map +1 -0
  87. package/dist/parsing/functions/reducer_element.d.ts +6 -0
  88. package/dist/parsing/functions/reducer_element.d.ts.map +1 -0
  89. package/dist/parsing/functions/replace.d.ts +7 -0
  90. package/dist/parsing/functions/replace.d.ts.map +1 -0
  91. package/dist/parsing/functions/round.d.ts +7 -0
  92. package/dist/parsing/functions/round.d.ts.map +1 -0
  93. package/dist/parsing/functions/size.d.ts +7 -0
  94. package/dist/parsing/functions/size.d.ts.map +1 -0
  95. package/dist/parsing/functions/split.d.ts +9 -0
  96. package/dist/parsing/functions/split.d.ts.map +1 -0
  97. package/dist/parsing/functions/stringify.d.ts +9 -0
  98. package/dist/parsing/functions/stringify.d.ts.map +1 -0
  99. package/dist/parsing/functions/sum.d.ts +14 -0
  100. package/dist/parsing/functions/sum.d.ts.map +1 -0
  101. package/dist/parsing/functions/to_json.d.ts +7 -0
  102. package/dist/parsing/functions/to_json.d.ts.map +1 -0
  103. package/dist/parsing/functions/value_holder.d.ts +8 -0
  104. package/dist/parsing/functions/value_holder.d.ts.map +1 -0
  105. package/dist/parsing/logic/case.d.ts +7 -0
  106. package/dist/parsing/logic/case.d.ts.map +1 -0
  107. package/dist/parsing/logic/else.d.ts +7 -0
  108. package/dist/parsing/logic/else.d.ts.map +1 -0
  109. package/dist/parsing/logic/end.d.ts +6 -0
  110. package/dist/parsing/logic/end.d.ts.map +1 -0
  111. package/dist/parsing/logic/then.d.ts +7 -0
  112. package/dist/parsing/logic/then.d.ts.map +1 -0
  113. package/dist/parsing/logic/when.d.ts +7 -0
  114. package/dist/parsing/logic/when.d.ts.map +1 -0
  115. package/dist/parsing/operations/aggregated_return.d.ts +8 -0
  116. package/dist/parsing/operations/aggregated_return.d.ts.map +1 -0
  117. package/dist/parsing/operations/aggregated_with.d.ts +8 -0
  118. package/dist/parsing/operations/aggregated_with.d.ts.map +1 -0
  119. package/dist/parsing/operations/group_by.d.ts +35 -0
  120. package/dist/parsing/operations/group_by.d.ts.map +1 -0
  121. package/dist/parsing/operations/limit.d.ts +10 -0
  122. package/dist/parsing/operations/limit.d.ts.map +1 -0
  123. package/dist/parsing/operations/load.d.ts +48 -0
  124. package/dist/parsing/operations/load.d.ts.map +1 -0
  125. package/dist/parsing/operations/operation.d.ts +39 -0
  126. package/dist/parsing/operations/operation.d.ts.map +1 -0
  127. package/dist/parsing/operations/projection.d.ts +8 -0
  128. package/dist/parsing/operations/projection.d.ts.map +1 -0
  129. package/dist/parsing/operations/return.d.ts +23 -0
  130. package/dist/parsing/operations/return.d.ts.map +1 -0
  131. package/dist/parsing/operations/unwind.d.ts +12 -0
  132. package/dist/parsing/operations/unwind.d.ts.map +1 -0
  133. package/dist/parsing/operations/where.d.ts +26 -0
  134. package/dist/parsing/operations/where.d.ts.map +1 -0
  135. package/dist/parsing/operations/with.d.ts +17 -0
  136. package/dist/parsing/operations/with.d.ts.map +1 -0
  137. package/dist/parsing/parser.d.ts +66 -0
  138. package/dist/parsing/parser.d.ts.map +1 -0
  139. package/dist/parsing/parser.js +1 -1
  140. package/dist/parsing/token_to_node.d.ts +7 -0
  141. package/dist/parsing/token_to_node.d.ts.map +1 -0
  142. package/dist/tokenization/keyword.d.ts +43 -0
  143. package/dist/tokenization/keyword.d.ts.map +1 -0
  144. package/dist/tokenization/operator.d.ts +22 -0
  145. package/dist/tokenization/operator.d.ts.map +1 -0
  146. package/dist/tokenization/string_walker.d.ts +55 -0
  147. package/dist/tokenization/string_walker.d.ts.map +1 -0
  148. package/dist/tokenization/symbol.d.ts +15 -0
  149. package/dist/tokenization/symbol.d.ts.map +1 -0
  150. package/dist/tokenization/token.d.ts +176 -0
  151. package/dist/tokenization/token.d.ts.map +1 -0
  152. package/dist/tokenization/token_mapper.d.ts +40 -0
  153. package/dist/tokenization/token_mapper.d.ts.map +1 -0
  154. package/dist/tokenization/token_type.d.ts +16 -0
  155. package/dist/tokenization/token_type.d.ts.map +1 -0
  156. package/dist/tokenization/tokenizer.d.ts +45 -0
  157. package/dist/tokenization/tokenizer.d.ts.map +1 -0
  158. package/dist/tokenization/trie.d.ts +41 -0
  159. package/dist/tokenization/trie.d.ts.map +1 -0
  160. package/dist/utils/object_utils.d.ts +15 -0
  161. package/dist/utils/object_utils.d.ts.map +1 -0
  162. package/dist/utils/string_utils.d.ts +51 -0
  163. package/dist/utils/string_utils.d.ts.map +1 -0
  164. package/misc/apps/RAG/package.json +1 -1
  165. package/misc/apps/RAG/src/index.ts +4 -4
  166. package/package.json +1 -1
  167. package/src/parsing/parser.ts +1 -1
  168. package/tsconfig.json +3 -1
@@ -0,0 +1 @@
+ {"version":3,"file":"unwind.d.ts","sourceRoot":"","sources":["../../../src/parsing/operations/unwind.ts"],"names":[],"mappings":"AAAA,OAAO,SAAS,MAAM,aAAa,CAAC;AACpC,OAAO,UAAU,MAAM,2BAA2B,CAAC;AAEnD,cAAM,MAAO,SAAQ,SAAS;IAC1B,OAAO,CAAC,MAAM,CAAM;gBACR,UAAU,EAAE,UAAU;IAIlC,IAAW,UAAU,IAAI,UAAU,CAElC;IACD,IAAW,EAAE,IAAI,MAAM,CAEtB;IACY,GAAG,IAAI,OAAO,CAAC,IAAI,CAAC;IAW1B,KAAK,IAAI,GAAG;CAGtB;AAED,eAAe,MAAM,CAAC"}
@@ -0,0 +1,26 @@
+ import Operation from "./operation";
+ import Expression from "../expressions/expression";
+ /**
+ * Represents a WHERE operation that filters data based on a condition.
+ *
+ * The WHERE operation evaluates a boolean expression and only continues
+ * execution to the next operation if the condition is true.
+ *
+ * @example
+ * ```typescript
+ * // RETURN x WHERE x > 0
+ * ```
+ */
+ declare class Where extends Operation {
+ /**
+ * Creates a new WHERE operation with the given condition.
+ *
+ * @param expression - The boolean expression to evaluate
+ */
+ constructor(expression: Expression);
+ get expression(): Expression;
+ run(): Promise<void>;
+ value(): any;
+ }
+ export default Where;
+ //# sourceMappingURL=where.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"where.d.ts","sourceRoot":"","sources":["../../../src/parsing/operations/where.ts"],"names":[],"mappings":"AAAA,OAAO,SAAS,MAAM,aAAa,CAAC;AACpC,OAAO,UAAU,MAAM,2BAA2B,CAAC;AAEnD;;;;;;;;;;GAUG;AACH,cAAM,KAAM,SAAQ,SAAS;IACzB;;;;OAIG;gBACS,UAAU,EAAE,UAAU;IAIlC,IAAW,UAAU,IAAI,UAAU,CAElC;IACY,GAAG,IAAI,OAAO,CAAC,IAAI,CAAC;IAK1B,KAAK,IAAI,GAAG;CAGtB;AAED,eAAe,KAAK,CAAC"}
@@ -0,0 +1,17 @@
+ import Projection from "./projection";
+ /**
+ * Represents a WITH operation that defines variables or intermediate results.
+ *
+ * The WITH operation creates named expressions that can be referenced later in the query.
+ * It passes control to the next operation in the chain.
+ *
+ * @example
+ * ```typescript
+ * // WITH x = 1, y = 2 RETURN x + y
+ * ```
+ */
+ declare class With extends Projection {
+ run(): Promise<void>;
+ }
+ export default With;
+ //# sourceMappingURL=with.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"with.d.ts","sourceRoot":"","sources":["../../../src/parsing/operations/with.ts"],"names":[],"mappings":"AAAA,OAAO,UAAU,MAAM,cAAc,CAAC;AAEtC;;;;;;;;;;GAUG;AACH,cAAM,IAAK,SAAQ,UAAU;IACZ,GAAG,IAAI,OAAO,CAAC,IAAI,CAAC;CAGpC;AAED,eAAe,IAAI,CAAC"}
@@ -0,0 +1,66 @@
+ import ASTNode from "./ast_node";
+ import BaseParser from "./base_parser";
+ /**
+ * Main parser for FlowQuery statements.
+ *
+ * Parses FlowQuery declarative query language statements into an Abstract Syntax Tree (AST).
+ * Supports operations like WITH, UNWIND, RETURN, LOAD, WHERE, and LIMIT, along with
+ * expressions, functions, data structures, and logical constructs.
+ *
+ * @example
+ * ```typescript
+ * const parser = new Parser();
+ * const ast = parser.parse("unwind [1, 2, 3, 4] as num return num");
+ * ```
+ */
+ declare class Parser extends BaseParser {
+ private variables;
+ private context;
+ private _returns;
+ /**
+ * Parses a FlowQuery statement into an Abstract Syntax Tree.
+ *
+ * @param statement - The FlowQuery statement to parse
+ * @returns The root AST node containing the parsed structure
+ * @throws {Error} If the statement is malformed or contains syntax errors
+ *
+ * @example
+ * ```typescript
+ * const ast = parser.parse("LOAD JSON FROM 'https://api.adviceslip.com/advice' AS data RETURN data");
+ * ```
+ */
+ parse(statement: string): ASTNode;
+ private parseOperation;
+ private parseWith;
+ private parseUnwind;
+ private parseReturn;
+ private parseWhere;
+ private parseLoad;
+ private parseLimit;
+ private parseExpressions;
+ private parseExpression;
+ private parseLookup;
+ private parseCase;
+ private parseWhen;
+ private parseThen;
+ private parseElse;
+ private parseAlias;
+ private parseFunction;
+ /**
+ * Parses an async function call for use in LOAD operations.
+ * Only matches if the identifier is registered as an async data provider.
+ *
+ * @returns An AsyncFunction node if a registered async function is found, otherwise null
+ */
+ private parseAsyncFunction;
+ private parsePredicateFunction;
+ private parseFString;
+ private parseJSON;
+ private parseAssociativeArray;
+ private parseJSONArray;
+ private expectAndSkipWhitespaceAndComments;
+ private skipWhitespaceAndComments;
+ private expectPreviousTokenToBeWhitespaceOrComment;
+ }
+ export default Parser;
+ //# sourceMappingURL=parser.d.ts.map
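
A minimal usage sketch of the Parser declared above, based only on the .d.ts shown in this diff; the deep dist import path is an assumption and may not be the package's intended entry point:

```typescript
// Illustrative sketch only; assumes this dist path resolves at runtime.
import Parser from "flowquery/dist/parsing/parser";

const parser = new Parser();
// parse() returns the root ASTNode and throws on malformed statements.
const ast = parser.parse("unwind [1, 2, 3, 4] as num return num");
console.log(ast);
```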
@@ -0,0 +1 @@
+ {"version":3,"file":"parser.d.ts","sourceRoot":"","sources":["../../src/parsing/parser.ts"],"names":[],"mappings":"AAEA,OAAO,OAAO,MAAM,YAAY,CAAC;AACjC,OAAO,UAAU,MAAM,eAAe,CAAC;AAuCvC;;;;;;;;;;;;GAYG;AACH,cAAM,MAAO,SAAQ,UAAU;IAC3B,OAAO,CAAC,SAAS,CAAmC;IACpD,OAAO,CAAC,OAAO,CAA0B;IACzC,OAAO,CAAC,QAAQ,CAAa;IAE7B;;;;;;;;;;;OAWG;IACI,KAAK,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO;IA6CxC,OAAO,CAAC,cAAc;IAStB,OAAO,CAAC,SAAS;IAgBjB,OAAO,CAAC,WAAW;IAyBnB,OAAO,CAAC,WAAW;IAiBnB,OAAO,CAAC,UAAU;IAiBlB,OAAO,CAAC,SAAS;IAoEjB,OAAO,CAAC,UAAU;IAgBlB,OAAO,CAAE,gBAAgB;IA6BzB,OAAO,CAAC,eAAe;IAiFvB,OAAO,CAAC,WAAW;IAkDnB,OAAO,CAAC,SAAS;IAyCjB,OAAO,CAAC,SAAS;IAejB,OAAO,CAAC,SAAS;IAejB,OAAO,CAAC,SAAS;IAejB,OAAO,CAAC,UAAU;IAgBlB,OAAO,CAAC,aAAa;IAiCrB;;;;;OAKG;IACH,OAAO,CAAC,kBAAkB;IA0B1B,OAAO,CAAC,sBAAsB;IA0D9B,OAAO,CAAC,YAAY;IA4BpB,OAAO,CAAC,SAAS;IAiBjB,OAAO,CAAC,qBAAqB;IAuC7B,OAAO,CAAC,cAAc;IAyBtB,OAAO,CAAC,kCAAkC;IAO1C,OAAO,CAAC,yBAAyB;IASjC,OAAO,CAAC,0CAA0C;CAKrD;AAED,eAAe,MAAM,CAAC"}
@@ -52,7 +52,7 @@ const null_1 = __importDefault(require("./components/null"));
  * @example
  * ```typescript
  * const parser = new Parser();
- * const ast = parser.parse("unwind [1, 2, 3, 4, 5] as num return num");
+ * const ast = parser.parse("unwind [1, 2, 3, 4] as num return num");
  * ```
  */
  class Parser extends base_parser_1.default {
@@ -0,0 +1,7 @@
+ import Token from '../tokenization/token';
+ import ASTNode from './ast_node';
+ declare class TokenToNode {
+ static convert(token: Token): ASTNode;
+ }
+ export default TokenToNode;
+ //# sourceMappingURL=token_to_node.d.ts.map
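
A small sketch of the static convert API declared above; the dist import paths are assumptions:

```typescript
// Illustrative sketch only; assumes these dist paths resolve.
import TokenToNode from "flowquery/dist/parsing/token_to_node";
import Token from "flowquery/dist/tokenization/token";

// Static Token -> ASTNode conversion, per the declaration above.
const node = TokenToNode.convert(Token.NUMBER("42"));
console.log(node);
```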
@@ -0,0 +1 @@
+ {"version":3,"file":"token_to_node.d.ts","sourceRoot":"","sources":["../../src/parsing/token_to_node.ts"],"names":[],"mappings":"AAIA,OAAO,KAAK,MAAM,uBAAuB,CAAC;AAC1C,OAAO,OAAO,MAAM,YAAY,CAAC;AASjC,cAAM,WAAW;WACC,OAAO,CAAC,KAAK,EAAE,KAAK,GAAG,OAAO;CAyE/C;AAED,eAAe,WAAW,CAAC"}
@@ -0,0 +1,43 @@
+ declare enum Keyword {
+ RETURN = "RETURN",
+ MATCH = "MATCH",
+ WHERE = "WHERE",
+ CREATE = "CREATE",
+ MERGE = "MERGE",
+ DELETE = "DELETE",
+ DETACH = "DETACH",
+ SET = "SET",
+ REMOVE = "REMOVE",
+ FOREACH = "FOREACH",
+ WITH = "WITH",
+ CALL = "CALL",
+ YIELD = "YIELD",
+ LOAD = "LOAD",
+ HEADERS = "HEADERS",
+ POST = "POST",
+ FROM = "FROM",
+ CSV = "CSV",
+ JSON = "JSON",
+ TEXT = "TEXT",
+ AS = "AS",
+ UNWIND = "UNWIND",
+ SUM = "SUM",
+ COLLECT = "COLLECT",
+ DISTINCT = "DISTINCT",
+ ORDER = "ORDER",
+ BY = "BY",
+ ASC = "ASC",
+ DESC = "DESC",
+ SKIP = "SKIP",
+ LIMIT = "LIMIT",
+ EOF = "EOF",
+ CASE = "CASE",
+ WHEN = "WHEN",
+ THEN = "THEN",
+ ELSE = "ELSE",
+ END = "END",
+ NULL = "NULL",
+ IN = "IN"
+ }
+ export default Keyword;
+ //# sourceMappingURL=keyword.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"keyword.d.ts","sourceRoot":"","sources":["../../src/tokenization/keyword.ts"],"names":[],"mappings":"AAAA,aAAK,OAAO;IACR,MAAM,WAAW;IACjB,KAAK,UAAU;IACf,KAAK,UAAU;IACf,MAAM,WAAW;IACjB,KAAK,UAAU;IACf,MAAM,WAAW;IACjB,MAAM,WAAW;IACjB,GAAG,QAAQ;IACX,MAAM,WAAW;IACjB,OAAO,YAAY;IACnB,IAAI,SAAS;IACb,IAAI,SAAS;IACb,KAAK,UAAU;IACf,IAAI,SAAS;IACb,OAAO,YAAY;IACnB,IAAI,SAAS;IACb,IAAI,SAAS;IACb,GAAG,QAAQ;IACX,IAAI,SAAS;IACb,IAAI,SAAS;IACb,EAAE,OAAO;IACT,MAAM,WAAW;IACjB,GAAG,QAAQ;IACX,OAAO,YAAY;IACnB,QAAQ,aAAa;IACrB,KAAK,UAAU;IACf,EAAE,OAAO;IACT,GAAG,QAAQ;IACX,IAAI,SAAS;IACb,IAAI,SAAS;IACb,KAAK,UAAU;IACf,GAAG,QAAQ;IACX,IAAI,SAAS;IACb,IAAI,SAAS;IACb,IAAI,SAAS;IACb,IAAI,SAAS;IACb,GAAG,QAAQ;IACX,IAAI,SAAS;IACb,EAAE,OAAO;CACZ;AAED,eAAe,OAAO,CAAC"}
@@ -0,0 +1,22 @@
+ declare enum Operator {
+ ADD = "+",
+ SUBTRACT = "-",
+ MULTIPLY = "*",
+ DIVIDE = "/",
+ MODULO = "%",
+ EXPONENT = "^",
+ EQUALS = "=",
+ NOT_EQUALS = "<>",
+ LESS_THAN = "<",
+ LESS_THAN_OR_EQUAL = "<=",
+ GREATER_THAN = ">",
+ GREATER_THAN_OR_EQUAL = ">=",
+ IS = "IS",
+ AND = "AND",
+ OR = "OR",
+ NOT = "NOT",
+ IN = "IN",
+ PIPE = "|"
+ }
+ export default Operator;
+ //# sourceMappingURL=operator.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"operator.d.ts","sourceRoot":"","sources":["../../src/tokenization/operator.ts"],"names":[],"mappings":"AAAA,aAAK,QAAQ;IAET,GAAG,MAAM;IACT,QAAQ,MAAM;IACd,QAAQ,MAAM;IACd,MAAM,MAAM;IACZ,MAAM,MAAM;IACZ,QAAQ,MAAM;IAEd,MAAM,MAAM;IACZ,UAAU,OAAO;IACjB,SAAS,MAAM;IACf,kBAAkB,OAAO;IACzB,YAAY,MAAM;IAClB,qBAAqB,OAAO;IAC5B,EAAE,OAAO;IAET,GAAG,QAAQ;IACX,EAAE,OAAO;IACT,GAAG,QAAQ;IACX,EAAE,OAAO;IACT,IAAI,MAAM;CACb;AAED,eAAe,QAAQ,CAAC"}
@@ -0,0 +1,55 @@
+ /**
+ * Utility class for walking through a string character by character during tokenization.
+ *
+ * Provides methods to check for specific character patterns, move through the string,
+ * and extract substrings. Used by the Tokenizer to process input text.
+ *
+ * @example
+ * ```typescript
+ * const walker = new StringWalker("WITH x as variable");
+ * while (!walker.isAtEnd) {
+ * // Process characters
+ * }
+ * ```
+ */
+ declare class StringWalker {
+ private _position;
+ private readonly text;
+ /**
+ * Creates a new StringWalker for the given text.
+ *
+ * @param text - The input text to walk through
+ */
+ constructor(text: string);
+ get position(): number;
+ get currentChar(): string;
+ get nextChar(): string;
+ get previousChar(): string;
+ get isAtEnd(): boolean;
+ getString(startPosition: number): string;
+ getRemainingString(): string;
+ checkForSingleComment(): boolean;
+ checkForMultiLineComment(): boolean;
+ singleLineCommentStart(): boolean;
+ multiLineCommentStart(): boolean;
+ multiLineCommentEnd(): boolean;
+ newLine(): boolean;
+ escaped(char: string): boolean;
+ escapedBrace(): boolean;
+ openingBrace(): boolean;
+ closingBrace(): boolean;
+ checkForUnderScore(): boolean;
+ checkForLetter(): boolean;
+ checkForDigit(): boolean;
+ checkForQuote(): string | null;
+ checkForString(value: string): boolean;
+ checkForWhitespace(): boolean;
+ checkForFStringStart(): boolean;
+ moveNext(): void;
+ moveBy(steps: number): void;
+ movePrevious(): void;
+ is_word(word: string | null): boolean;
+ word_continuation(word: string): boolean;
+ }
+ export default StringWalker;
+ //# sourceMappingURL=string_walker.d.ts.map
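
A minimal sketch of the StringWalker API declared above; the dist import path is an assumption, and it is assumed here that the check* helpers inspect the current character without consuming it:

```typescript
// Illustrative sketch only; assumes this dist path resolves.
import StringWalker from "flowquery/dist/tokenization/string_walker";

const walker = new StringWalker("WITH x = 42");
let digits = 0;
while (!walker.isAtEnd) {
    if (walker.checkForDigit()) digits++; // declared predicate helper
    walker.moveNext();                    // advance one character
}
console.log(digits, walker.position);
```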
@@ -0,0 +1 @@
+ {"version":3,"file":"string_walker.d.ts","sourceRoot":"","sources":["../../src/tokenization/string_walker.ts"],"names":[],"mappings":"AAEA;;;;;;;;;;;;;GAaG;AACH,cAAM,YAAY;IACd,OAAO,CAAC,SAAS,CAAS;IAC1B,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAS;IAE9B;;;;OAIG;gBACS,IAAI,EAAE,MAAM;IAKxB,IAAW,QAAQ,IAAI,MAAM,CAE5B;IAED,IAAW,WAAW,IAAI,MAAM,CAE/B;IAED,IAAW,QAAQ,IAAI,MAAM,CAE5B;IAED,IAAW,YAAY,IAAI,MAAM,CAEhC;IAED,IAAW,OAAO,IAAI,OAAO,CAE5B;IAEM,SAAS,CAAC,aAAa,EAAE,MAAM,GAAG,MAAM;IAIxC,kBAAkB,IAAI,MAAM;IAI5B,qBAAqB,IAAI,OAAO;IAUhC,wBAAwB,IAAI,OAAO;IAcnC,sBAAsB,IAAI,OAAO;IAIjC,qBAAqB,IAAI,OAAO;IAIhC,mBAAmB,IAAI,OAAO;IAI9B,OAAO,IAAI,OAAO;IAOlB,OAAO,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO;IAI9B,YAAY,IAAI,OAAO;IAIvB,YAAY,IAAI,OAAO;IAIvB,YAAY,IAAI,OAAO;IAIvB,kBAAkB,IAAI,OAAO;IAQ7B,cAAc,IAAI,OAAO;IAQzB,aAAa,IAAI,OAAO;IAQxB,aAAa,IAAI,MAAM,GAAG,IAAI;IAS9B,cAAc,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO;IAUtC,kBAAkB,IAAI,OAAO;IAI7B,oBAAoB,IAAI,OAAO;IAI/B,QAAQ,IAAI,IAAI;IAIhB,MAAM,CAAC,KAAK,EAAE,MAAM,GAAG,IAAI;IAI3B,YAAY,IAAI,IAAI;IAIpB,OAAO,CAAC,IAAI,EAAE,MAAM,GAAG,IAAI,GAAG,OAAO;IAOrC,iBAAiB,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO;CAIlD;AAED,eAAe,YAAY,CAAC"}
@@ -0,0 +1,15 @@
+ declare enum Symbol {
+ LEFT_PARENTHESIS = "(",
+ RIGHT_PARENTHESIS = ")",
+ COMMA = ",",
+ DOT = ".",
+ COLON = ":",
+ WHITESPACE = "",
+ OPENING_BRACE = "{",
+ CLOSING_BRACE = "}",
+ OPENING_BRACKET = "[",
+ CLOSING_BRACKET = "]",
+ BACKTICK = "`"
+ }
+ export default Symbol;
+ //# sourceMappingURL=symbol.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"symbol.d.ts","sourceRoot":"","sources":["../../src/tokenization/symbol.ts"],"names":[],"mappings":"AAAA,aAAK,MAAM;IACP,gBAAgB,MAAM;IACtB,iBAAiB,MAAM;IACvB,KAAK,MAAM;IACX,GAAG,MAAM;IACT,KAAK,MAAM;IACX,UAAU,KAAK;IACf,aAAa,MAAM;IACnB,aAAa,MAAM;IACnB,eAAe,MAAM;IACrB,eAAe,MAAM;IACrB,QAAQ,MAAM;CACjB;AAED,eAAe,MAAM,CAAC"}
@@ -0,0 +1,176 @@
+ import TokenType from "./token_type";
+ import ASTNode from "../parsing/ast_node";
+ /**
+ * Represents a single token in the FlowQuery language.
+ *
+ * Tokens are the atomic units of lexical analysis, produced by the tokenizer
+ * and consumed by the parser. Each token has a type (keyword, operator, identifier, etc.)
+ * and an optional value.
+ *
+ * @example
+ * ```typescript
+ * const withToken = Token.WITH;
+ * const identToken = Token.IDENTIFIER("myVar");
+ * const numToken = Token.NUMBER("42");
+ * ```
+ */
+ declare class Token {
+ private _position;
+ private _type;
+ private _value;
+ private _case_sensitive_value;
+ private _can_be_identifier;
+ /**
+ * Creates a new Token instance.
+ *
+ * @param type - The type of the token
+ * @param value - The optional value associated with the token
+ */
+ constructor(type: TokenType, value?: string | null);
+ /**
+ * Checks if this token equals another token.
+ *
+ * @param other - The token to compare against
+ * @returns True if tokens are equal, false otherwise
+ */
+ equals(other: Token): boolean;
+ set position(position: number);
+ get position(): number;
+ get type(): TokenType;
+ get value(): string | null;
+ set case_sensitive_value(value: string);
+ get can_be_identifier(): boolean;
+ get node(): ASTNode;
+ toString(): string;
+ static COMMENT(comment: string): Token;
+ isComment(): boolean;
+ static IDENTIFIER(value: string): Token;
+ isIdentifier(): boolean;
+ static STRING(value: string, quoteChar?: string): Token;
+ isString(): boolean;
+ static BACKTICK_STRING(value: string, quoteChar?: string): Token;
+ static F_STRING(value: string, quoteChar?: string): Token;
+ isFString(): boolean;
+ static NUMBER(value: string): Token;
+ isNumber(): boolean;
+ static get LEFT_PARENTHESIS(): Token;
+ isLeftParenthesis(): boolean;
+ static get RIGHT_PARENTHESIS(): Token;
+ isRightParenthesis(): boolean;
+ static get COMMA(): Token;
+ isComma(): boolean;
+ static get DOT(): Token;
+ isDot(): boolean;
+ static get COLON(): Token;
+ isColon(): boolean;
+ static get OPENING_BRACE(): Token;
+ isOpeningBrace(): boolean;
+ static get CLOSING_BRACE(): Token;
+ isClosingBrace(): boolean;
+ static get OPENING_BRACKET(): Token;
+ isOpeningBracket(): boolean;
+ static get CLOSING_BRACKET(): Token;
+ isClosingBracket(): boolean;
+ static get WHITESPACE(): Token;
+ isWhitespace(): boolean;
+ isOperator(): boolean;
+ isUnaryOperator(): boolean;
+ static get ADD(): Token;
+ isAdd(): boolean;
+ static get SUBTRACT(): Token;
+ isSubtract(): boolean;
+ isNegation(): boolean;
+ static get MULTIPLY(): Token;
+ isMultiply(): boolean;
+ static get DIVIDE(): Token;
+ isDivide(): boolean;
+ static get EXPONENT(): Token;
+ isExponent(): boolean;
+ static get MODULO(): Token;
+ isModulo(): boolean;
+ static get EQUALS(): Token;
+ isEquals(): boolean;
+ static get NOT_EQUALS(): Token;
+ isNotEquals(): boolean;
+ static get LESS_THAN(): Token;
+ isLessThan(): boolean;
+ static get LESS_THAN_OR_EQUAL(): Token;
+ isLessThanOrEqual(): boolean;
+ static get GREATER_THAN(): Token;
+ isGreaterThan(): boolean;
+ static get GREATER_THAN_OR_EQUAL(): Token;
+ isGreaterThanOrEqual(): boolean;
+ static get AND(): Token;
+ isAnd(): boolean;
+ static get OR(): Token;
+ isOr(): boolean;
+ static get NOT(): Token;
+ isNot(): boolean;
+ static get IS(): Token;
+ isIs(): boolean;
+ isKeyword(): boolean;
+ static get WITH(): Token;
+ isWith(): boolean;
+ static get RETURN(): Token;
+ isReturn(): boolean;
+ static get LOAD(): Token;
+ isLoad(): boolean;
+ static get JSON(): Token;
+ isJSON(): boolean;
+ static get CSV(): Token;
+ isCSV(): boolean;
+ static get TEXT(): Token;
+ isText(): boolean;
+ static get FROM(): Token;
+ isFrom(): boolean;
+ static get HEADERS(): Token;
+ isHeaders(): boolean;
+ static get POST(): Token;
+ isPost(): boolean;
+ static get UNWIND(): Token;
+ isUnwind(): boolean;
+ static get MATCH(): Token;
+ isMatch(): boolean;
+ static get AS(): Token;
+ isAs(): boolean;
+ static get WHERE(): Token;
+ isWhere(): boolean;
+ static get MERGE(): Token;
+ isMerge(): boolean;
+ static get CREATE(): Token;
+ isCreate(): boolean;
+ static get DELETE(): Token;
+ isDelete(): boolean;
+ static get SET(): Token;
+ isSet(): boolean;
+ static get REMOVE(): Token;
+ isRemove(): boolean;
+ static get CASE(): Token;
+ isCase(): boolean;
+ static get WHEN(): Token;
+ isWhen(): boolean;
+ static get THEN(): Token;
+ isThen(): boolean;
+ static get ELSE(): Token;
+ isElse(): boolean;
+ static get END(): Token;
+ isEnd(): boolean;
+ static get NULL(): Token;
+ isNull(): boolean;
+ static get IN(): Token;
+ isIn(): boolean;
+ static get PIPE(): Token;
+ isPipe(): boolean;
+ static get DISTINCT(): Token;
+ isDistinct(): boolean;
+ static get LIMIT(): Token;
+ isLimit(): boolean;
+ static get EOF(): Token;
+ isEOF(): boolean;
+ isOperand(): boolean;
+ isWhitespaceOrComment(): boolean;
+ isSymbol(): boolean;
+ static method(name: string): Token | undefined;
+ }
+ export default Token;
+ //# sourceMappingURL=token.d.ts.map
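
A minimal sketch of the Token API declared above; the dist import path is an assumption:

```typescript
// Illustrative sketch only; assumes this dist path resolves.
import Token from "flowquery/dist/tokenization/token";

const tokens = [Token.WITH, Token.IDENTIFIER("myVar"), Token.EQUALS, Token.NUMBER("42")];
for (const token of tokens) {
    // Static factories/getters build tokens; instance predicates classify them.
    console.log(token.toString(), token.isKeyword(), token.isOperand());
}
```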
@@ -0,0 +1 @@
+ {"version":3,"file":"token.d.ts","sourceRoot":"","sources":["../../src/tokenization/token.ts"],"names":[],"mappings":"AAAA,OAAO,SAAS,MAAM,cAAc,CAAC;AAKrC,OAAO,OAAO,MAAM,qBAAqB,CAAC;AAG1C;;;;;;;;;;;;;GAaG;AACH,cAAM,KAAK;IACP,OAAO,CAAC,SAAS,CAAc;IAC/B,OAAO,CAAC,KAAK,CAAY;IACzB,OAAO,CAAC,MAAM,CAAgB;IAC9B,OAAO,CAAC,qBAAqB,CAAuB;IACpD,OAAO,CAAC,kBAAkB,CAAkB;IAE5C;;;;;OAKG;gBACS,IAAI,EAAE,SAAS,EAAE,KAAK,GAAE,MAAM,GAAG,IAAW;IAMxD;;;;;OAKG;IACI,MAAM,CAAC,KAAK,EAAE,KAAK,GAAG,OAAO;IAOpC,IAAW,QAAQ,CAAC,QAAQ,EAAE,MAAM,EAEnC;IAED,IAAW,QAAQ,IAAI,MAAM,CAE5B;IAED,IAAW,IAAI,IAAI,SAAS,CAE3B;IAED,IAAW,KAAK,IAAI,MAAM,GAAG,IAAI,CAEhC;IAED,IAAW,oBAAoB,CAAC,KAAK,EAAE,MAAM,EAE5C;IAED,IAAW,iBAAiB,IAAI,OAAO,CAEtC;IAED,IAAW,IAAI,IAAI,OAAO,CAEzB;IAEM,QAAQ,IAAI,MAAM;WAMX,OAAO,CAAC,OAAO,EAAE,MAAM,GAAG,KAAK;IAItC,SAAS,IAAI,OAAO;WAMb,UAAU,CAAC,KAAK,EAAE,MAAM,GAAG,KAAK;IAIvC,YAAY,IAAI,OAAO;WAMhB,MAAM,CAAC,KAAK,EAAE,MAAM,EAAE,SAAS,GAAE,MAAY,GAAG,KAAK;IAM5D,QAAQ,IAAI,OAAO;WAIZ,eAAe,CAAC,KAAK,EAAE,MAAM,EAAE,SAAS,GAAE,MAAY,GAAG,KAAK;WAM9D,QAAQ,CAAC,KAAK,EAAE,MAAM,EAAE,SAAS,GAAE,MAAY,GAAG,KAAK;IAO9D,SAAS,IAAI,OAAO;WAMb,MAAM,CAAC,KAAK,EAAE,MAAM,GAAG,KAAK;IAInC,QAAQ,IAAI,OAAO;IAM1B,WAAkB,gBAAgB,IAAI,KAAK,CAE1C;IAEM,iBAAiB,IAAI,OAAO;IAInC,WAAkB,iBAAiB,IAAI,KAAK,CAE3C;IAEM,kBAAkB,IAAI,OAAO;IAIpC,WAAkB,KAAK,IAAI,KAAK,CAE/B;IAEM,OAAO,IAAI,OAAO;IAIzB,WAAkB,GAAG,IAAI,KAAK,CAE7B;IAEM,KAAK,IAAI,OAAO;IAIvB,WAAkB,KAAK,IAAI,KAAK,CAE/B;IAEM,OAAO,IAAI,OAAO;IAIzB,WAAkB,aAAa,IAAI,KAAK,CAEvC;IAEM,cAAc,IAAI,OAAO;IAIhC,WAAkB,aAAa,IAAI,KAAK,CAEvC;IAEM,cAAc,IAAI,OAAO;IAIhC,WAAkB,eAAe,IAAI,KAAK,CAEzC;IAEM,gBAAgB,IAAI,OAAO;IAIlC,WAAkB,eAAe,IAAI,KAAK,CAEzC;IAEM,gBAAgB,IAAI,OAAO;IAMlC,WAAkB,UAAU,IAAI,KAAK,CAEpC;IAEM,YAAY,IAAI,OAAO;IAMvB,UAAU,IAAI,OAAO;IAIrB,eAAe,IAAI,OAAO;IAIjC,WAAkB,GAAG,IAAI,KAAK,CAE7B;IAEM,KAAK,IAAI,OAAO;IAIvB,WAAkB,QAAQ,IAAI,KAAK,CAElC;IAEM,UAAU,IAAI,OAAO;IAIrB,UAAU,IAAI,OAAO;IAI5B,WAAkB,QAAQ,IAAI,KAAK,CAElC;IAEM,UAAU,IAAI,OAAO;IAI5B,WAAkB,MAAM,IAAI,KAAK,CAEhC;IAEM,QAAQ,IAAI,OAAO;IAI1B,WAAkB,QAAQ,IAAI,KAAK,CAElC;IAEM,UAAU,IAAI,OAAO;IAI5B,WAAkB,MAAM,IAAI,KAAK,CAEhC;IAEM,QAAQ,IAAI,OAAO;IAI1B,WAAkB,MAAM,IAAI,KAAK,CAEhC;IAEM,QAAQ,IAAI,OAAO;IAI1B,WAAkB,UAAU,IAAI,KAAK,CAEpC;IAEM,WAAW,IAAI,OAAO;IAI7B,WAAkB,SAAS,IAAI,KAAK,CAEnC;IAEM,UAAU,IAAI,OAAO;IAI5B,WAAkB,kBAAkB,IAAI,KAAK,CAE5C;IAEM,iBAAiB,IAAI,OAAO;IAInC,WAAkB,YAAY,IAAI,KAAK,CAEtC;IAEM,aAAa,IAAI,OAAO;IAI/B,WAAkB,qBAAqB,IAAI,KAAK,CAE/C;IAEM,oBAAoB,IAAI,OAAO;IAItC,WAAkB,GAAG,IAAI,KAAK,CAE7B;IAEM,KAAK,IAAI,OAAO;IAIvB,WAAkB,EAAE,IAAI,KAAK,CAE5B;IAEM,IAAI,IAAI,OAAO;IAItB,WAAkB,GAAG,IAAI,KAAK,CAE7B;IAEM,KAAK,IAAI,OAAO;IAIvB,WAAkB,EAAE,IAAI,KAAK,CAE5B;IAEM,IAAI,IAAI,OAAO;IAMf,SAAS,IAAI,OAAO;IAI3B,WAAkB,IAAI,IAAI,KAAK,CAE9B;IAEM,MAAM,IAAI,OAAO;IAIxB,WAAkB,MAAM,IAAI,KAAK,CAEhC;IAEM,QAAQ,IAAI,OAAO;IAI1B,WAAkB,IAAI,IAAI,KAAK,CAE9B;IAEM,MAAM,IAAI,OAAO;IAIxB,WAAkB,IAAI,IAAI,KAAK,CAE9B;IAEM,MAAM,IAAI,OAAO;IAIxB,WAAkB,GAAG,IAAI,KAAK,CAE7B;IAEM,KAAK,IAAI,OAAO;IAIvB,WAAkB,IAAI,IAAI,KAAK,CAE9B;IAEM,MAAM,IAAI,OAAO;IAIxB,WAAkB,IAAI,IAAI,KAAK,CAE9B;IAEM,MAAM,IAAI,OAAO;IAIxB,WAAkB,OAAO,IAAI,KAAK,CAEjC;IAEM,SAAS,IAAI,OAAO;IAI3B,WAAkB,IAAI,IAAI,KAAK,CAE9B;IAEM,MAAM,IAAI,OAAO;IAIxB,WAAkB,MAAM,IAAI,KAAK,CAEhC;IAEM,QAAQ,IAAI,OAAO;IAI1B,WAAkB,KAAK,IAAI,KAAK,CAE/B;IAEM,OAAO,IAAI,OAAO;IAIzB,WAAkB,EAAE,IAAI,KAAK,CAE5B;IAEM,IAAI,IAAI,OAAO;IAItB,WAAkB,KAAK,IAAI,KAAK,CAE/B;IAEM,OAAO,IAAI,OAAO;IAIzB,WAAkB,KAAK,IAAI,KAAK,CAE/B;IAEM,OAAO,IAAI,OAAO;IAIzB,WAAkB,MAAM,IAAI,KAAK,CAEhC;IAEM,QAAQ,IAAI,OAAO;IAI1B,WAAkB,MAAM,IAAI,KAAK,CAEhC;IAEM,QAAQ,IAAI,OAAO;IAI1B,WAAkB,GAAG,IAAI,KAAK,CAE7B;IAEM,KAAK,IAAI,OAAO;IAIvB,WAAkB,MAAM
,IAAI,KAAK,CAEhC;IAEM,QAAQ,IAAI,OAAO;IAI1B,WAAkB,IAAI,IAAI,KAAK,CAE9B;IAEM,MAAM,IAAI,OAAO;IAIxB,WAAkB,IAAI,IAAI,KAAK,CAE9B;IAEM,MAAM,IAAI,OAAO;IAIxB,WAAkB,IAAI,IAAI,KAAK,CAE9B;IAEM,MAAM,IAAI,OAAO;IAIxB,WAAkB,IAAI,IAAI,KAAK,CAE9B;IAEM,MAAM,IAAI,OAAO;IAIxB,WAAkB,GAAG,IAAI,KAAK,CAE7B;IAEM,KAAK,IAAI,OAAO;IAIvB,WAAkB,IAAI,IAAI,KAAK,CAE9B;IAEM,MAAM,IAAI,OAAO;IAIxB,WAAkB,EAAE,IAAI,KAAK,CAE5B;IAEM,IAAI,IAAI,OAAO;IAItB,WAAkB,IAAI,IAAI,KAAK,CAE9B;IAEM,MAAM,IAAI,OAAO;IAIxB,WAAkB,QAAQ,IAAI,KAAK,CAElC;IAEM,UAAU,IAAI,OAAO;IAI5B,WAAkB,KAAK,IAAI,KAAK,CAE/B;IAEM,OAAO,IAAI,OAAO;IAOzB,WAAkB,GAAG,IAAI,KAAK,CAE7B;IAEM,KAAK,IAAI,OAAO;IAMhB,SAAS,IAAI,OAAO;IAIpB,qBAAqB,IAAI,OAAO;IAIhC,QAAQ,IAAI,OAAO;WAKZ,MAAM,CAAC,IAAI,EAAE,MAAM,GAAG,KAAK,GAAG,SAAS;CAGxD;AAED,eAAe,KAAK,CAAC"}
@@ -0,0 +1,40 @@
+ import Token from "./token";
+ /**
+ * Maps string values to tokens using a Trie for efficient lookup.
+ *
+ * Takes an enum of keywords, operators, or symbols and builds a trie
+ * for fast token matching during tokenization.
+ *
+ * @example
+ * ```typescript
+ * const mapper = new TokenMapper(Keyword);
+ * const token = mapper.map("WITH");
+ * ```
+ */
+ declare class TokenMapper {
+ private _enum;
+ private _trie;
+ /**
+ * Creates a TokenMapper from an enum of token values.
+ *
+ * @param _enum - An enum object containing token values
+ */
+ constructor(_enum: {
+ [key: string]: any;
+ });
+ /**
+ * Maps a string value to its corresponding token.
+ *
+ * @param value - The string value to map
+ * @returns The matched token, or undefined if no match found
+ */
+ map(value: string): Token | undefined;
+ /**
+ * Gets the last matched string from the most recent map operation.
+ *
+ * @returns The last found string, or null if no match
+ */
+ get last_found(): string | null;
+ }
+ export default TokenMapper;
+ //# sourceMappingURL=token_mapper.d.ts.map
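
A minimal sketch of the TokenMapper API declared above, using the Keyword enum shown earlier in this diff; the dist import paths are assumptions:

```typescript
// Illustrative sketch only; assumes these dist paths resolve.
import TokenMapper from "flowquery/dist/tokenization/token_mapper";
import Keyword from "flowquery/dist/tokenization/keyword";

const mapper = new TokenMapper(Keyword);   // builds a trie over the enum values
const token = mapper.map("WITH");          // Token | undefined
console.log(token?.toString(), mapper.last_found);
```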
@@ -0,0 +1 @@
+ {"version":3,"file":"token_mapper.d.ts","sourceRoot":"","sources":["../../src/tokenization/token_mapper.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,MAAM,SAAS,CAAC;AAG5B;;;;;;;;;;;GAWG;AACH,cAAM,WAAW;IAQD,OAAO,CAAC,KAAK;IAPzB,OAAO,CAAC,KAAK,CAAoB;IAEjC;;;;OAIG;gBACiB,KAAK,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE;IASjD;;;;;OAKG;IACI,GAAG,CAAC,KAAK,EAAE,MAAM,GAAG,KAAK,GAAG,SAAS;IAI5C;;;;OAIG;IACH,IAAW,UAAU,IAAI,MAAM,GAAG,IAAI,CAErC;CACJ;AAED,eAAe,WAAW,CAAC"}
@@ -0,0 +1,16 @@
+ declare enum TokenType {
+ KEYWORD = "KEYWORD",
+ OPERATOR = "OPERATOR",
+ UNARY_OPERATOR = "UNARY_OPERATOR",
+ IDENTIFIER = "IDENTIFIER",
+ STRING = "STRING",
+ F_STRING = "F-STRING",
+ BACKTICK_STRING = "BACKTICK_STRING",
+ NUMBER = "NUMBER",
+ SYMBOL = "SYMBOL",
+ WHITESPACE = "WHITESPACE",
+ COMMENT = "COMMENT",
+ EOF = "EOF"
+ }
+ export default TokenType;
+ //# sourceMappingURL=token_type.d.ts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"token_type.d.ts","sourceRoot":"","sources":["../../src/tokenization/token_type.ts"],"names":[],"mappings":"AAAA,aAAK,SAAS;IACV,OAAO,YAAY;IACnB,QAAQ,aAAa;IACrB,cAAc,mBAAmB;IACjC,UAAU,eAAe;IACzB,MAAM,WAAW;IACjB,QAAQ,aAAa;IACrB,eAAe,oBAAoB;IACnC,MAAM,WAAW;IACjB,MAAM,WAAW;IACjB,UAAU,eAAe;IACzB,OAAO,YAAY;IACnB,GAAG,QAAQ;CACd;AACD,eAAe,SAAS,CAAC"}
@@ -0,0 +1,45 @@
+ import Token from './token';
+ /**
+ * Tokenizes FlowQuery input strings into a sequence of tokens.
+ *
+ * The tokenizer performs lexical analysis, breaking down the input text into
+ * meaningful tokens such as keywords, identifiers, operators, strings, numbers,
+ * and symbols. It handles comments, whitespace, and f-strings.
+ *
+ * @example
+ * ```typescript
+ * const tokenizer = new Tokenizer("WITH x = 1 RETURN x");
+ * const tokens = tokenizer.tokenize();
+ * ```
+ */
+ declare class Tokenizer {
+ private walker;
+ private keywords;
+ private symbols;
+ private operators;
+ /**
+ * Creates a new Tokenizer instance for the given input.
+ *
+ * @param input - The FlowQuery input string to tokenize
+ */
+ constructor(input: string);
+ /**
+ * Tokenizes the input string into an array of tokens.
+ *
+ * @returns An array of Token objects representing the tokenized input
+ * @throws {Error} If an unrecognized token is encountered
+ */
+ tokenize(): Token[];
+ private getLastNonWhitespaceOrNonCommentToken;
+ private getNextToken;
+ comment(): Token | null;
+ private identifier;
+ private string;
+ private f_string;
+ private whitespace;
+ private number;
+ private lookup;
+ private skipMinus;
+ }
+ export default Tokenizer;
+ //# sourceMappingURL=tokenizer.d.ts.map
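
A minimal sketch of the Tokenizer API declared above; the dist import path is an assumption:

```typescript
// Illustrative sketch only; assumes this dist path resolves.
import Tokenizer from "flowquery/dist/tokenization/tokenizer";

const tokenizer = new Tokenizer("WITH x = 1 RETURN x");
const tokens = tokenizer.tokenize();       // throws on unrecognized input
for (const token of tokens) {
    console.log(token.toString());
}
```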
@@ -0,0 +1 @@
+ {"version":3,"file":"tokenizer.d.ts","sourceRoot":"","sources":["../../src/tokenization/tokenizer.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,MAAM,SAAS,CAAC;AAO5B;;;;;;;;;;;;GAYG;AACH,cAAM,SAAS;IACX,OAAO,CAAC,MAAM,CAAe;IAC7B,OAAO,CAAC,QAAQ,CAAyC;IACzD,OAAO,CAAC,OAAO,CAAwC;IACvD,OAAO,CAAC,SAAS,CAA0C;IAE3D;;;;OAIG;gBACS,KAAK,EAAE,MAAM;IAIzB;;;;;OAKG;IACI,QAAQ,IAAI,KAAK,EAAE;IAgB1B,OAAO,CAAC,qCAAqC;IAU7C,OAAO,CAAC,YAAY;IAgBb,OAAO,IAAI,KAAK,GAAG,IAAI;IAS9B,OAAO,CAAC,UAAU;IAWlB,OAAO,CAAC,MAAM;IAwBd,OAAO,CAAE,QAAQ;IA6CjB,OAAO,CAAC,UAAU;IASlB,OAAO,CAAC,MAAM;IAiBd,OAAO,CAAC,MAAM;IAkBd,OAAO,CAAC,SAAS;CASpB;AAED,eAAe,SAAS,CAAC"}
@@ -0,0 +1,41 @@
+ import Token from "./token";
+ /**
+ * Trie (prefix tree) data structure for efficient keyword and operator lookup.
+ *
+ * Used during tokenization to quickly match input strings against known keywords
+ * and operators. Supports case-insensitive matching and tracks the longest match found.
+ *
+ * @example
+ * ```typescript
+ * const trie = new Trie();
+ * trie.insert(Token.WITH);
+ * const found = trie.find("WITH");
+ * ```
+ */
+ declare class Trie {
+ private _root;
+ private _max_length;
+ private _last_found;
+ /**
+ * Inserts a token into the trie.
+ *
+ * @param token - The token to insert
+ * @throws {Error} If the token value is null or empty
+ */
+ insert(token: Token): void;
+ /**
+ * Finds a token by searching for the longest matching prefix in the trie.
+ *
+ * @param value - The string value to search for
+ * @returns The token if found, undefined otherwise
+ */
+ find(value: string): Token | undefined;
+ /**
+ * Gets the last matched string from the most recent find operation.
+ *
+ * @returns The last found string, or null if no match was found
+ */
+ get last_found(): string | null;
+ }
+ export default Trie;
+ //# sourceMappingURL=trie.d.ts.map
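
A minimal sketch of the Trie API declared above; the dist import paths are assumptions:

```typescript
// Illustrative sketch only; assumes these dist paths resolve.
import Trie from "flowquery/dist/tokenization/trie";
import Token from "flowquery/dist/tokenization/token";

const trie = new Trie();
trie.insert(Token.WITH);
trie.insert(Token.WHERE);
const found = trie.find("WITH");           // longest-prefix, case-insensitive match
console.log(found?.toString(), trie.last_found);
```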
@@ -0,0 +1 @@
+ {"version":3,"file":"trie.d.ts","sourceRoot":"","sources":["../../src/tokenization/trie.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,MAAM,SAAS,CAAC;AAqC5B;;;;;;;;;;;;GAYG;AACH,cAAM,IAAI;IACN,OAAO,CAAC,KAAK,CAAoB;IACjC,OAAO,CAAC,WAAW,CAAa;IAChC,OAAO,CAAC,WAAW,CAAuB;IAE1C;;;;;OAKG;IACI,MAAM,CAAC,KAAK,EAAE,KAAK,GAAG,IAAI;IAcjC;;;;;OAKG;IACI,IAAI,CAAC,KAAK,EAAE,MAAM,GAAG,KAAK,GAAG,SAAS;IAyB7C;;;;OAIG;IACH,IAAW,UAAU,IAAI,MAAM,GAAG,IAAI,CAErC;CACJ;AAED,eAAe,IAAI,CAAC"}