flowquery 1.0.20 → 1.0.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/workflows/release.yml +2 -2
- package/.husky/pre-commit +26 -0
- package/dist/flowquery.min.js +1 -1
- package/dist/graph/data.d.ts +5 -4
- package/dist/graph/data.d.ts.map +1 -1
- package/dist/graph/data.js +35 -19
- package/dist/graph/data.js.map +1 -1
- package/dist/graph/node.d.ts +2 -0
- package/dist/graph/node.d.ts.map +1 -1
- package/dist/graph/node.js +23 -0
- package/dist/graph/node.js.map +1 -1
- package/dist/graph/node_data.js +1 -1
- package/dist/graph/node_data.js.map +1 -1
- package/dist/graph/relationship.d.ts +6 -1
- package/dist/graph/relationship.d.ts.map +1 -1
- package/dist/graph/relationship.js +38 -7
- package/dist/graph/relationship.js.map +1 -1
- package/dist/graph/relationship_data.d.ts +2 -0
- package/dist/graph/relationship_data.d.ts.map +1 -1
- package/dist/graph/relationship_data.js +8 -1
- package/dist/graph/relationship_data.js.map +1 -1
- package/dist/graph/relationship_match_collector.js +2 -2
- package/dist/graph/relationship_match_collector.js.map +1 -1
- package/dist/graph/relationship_reference.d.ts.map +1 -1
- package/dist/graph/relationship_reference.js +2 -1
- package/dist/graph/relationship_reference.js.map +1 -1
- package/dist/index.d.ts +7 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +4 -4
- package/dist/index.js.map +1 -1
- package/dist/parsing/parser.d.ts +1 -0
- package/dist/parsing/parser.d.ts.map +1 -1
- package/dist/parsing/parser.js +47 -0
- package/dist/parsing/parser.js.map +1 -1
- package/docs/flowquery.min.js +1 -1
- package/flowquery-py/notebooks/TestFlowQuery.ipynb +1 -1
- package/flowquery-py/pyproject.toml +45 -2
- package/flowquery-py/src/__init__.py +5 -5
- package/flowquery-py/src/compute/runner.py +14 -10
- package/flowquery-py/src/extensibility.py +8 -8
- package/flowquery-py/src/graph/__init__.py +7 -7
- package/flowquery-py/src/graph/data.py +36 -19
- package/flowquery-py/src/graph/database.py +10 -20
- package/flowquery-py/src/graph/node.py +50 -19
- package/flowquery-py/src/graph/node_data.py +1 -1
- package/flowquery-py/src/graph/node_reference.py +10 -11
- package/flowquery-py/src/graph/pattern.py +23 -36
- package/flowquery-py/src/graph/pattern_expression.py +13 -11
- package/flowquery-py/src/graph/patterns.py +2 -2
- package/flowquery-py/src/graph/physical_node.py +4 -3
- package/flowquery-py/src/graph/physical_relationship.py +5 -5
- package/flowquery-py/src/graph/relationship.py +56 -15
- package/flowquery-py/src/graph/relationship_data.py +7 -2
- package/flowquery-py/src/graph/relationship_match_collector.py +15 -10
- package/flowquery-py/src/graph/relationship_reference.py +4 -4
- package/flowquery-py/src/io/command_line.py +13 -14
- package/flowquery-py/src/parsing/__init__.py +2 -2
- package/flowquery-py/src/parsing/alias_option.py +1 -1
- package/flowquery-py/src/parsing/ast_node.py +21 -20
- package/flowquery-py/src/parsing/base_parser.py +7 -7
- package/flowquery-py/src/parsing/components/__init__.py +3 -3
- package/flowquery-py/src/parsing/components/from_.py +3 -1
- package/flowquery-py/src/parsing/components/headers.py +2 -2
- package/flowquery-py/src/parsing/components/null.py +2 -2
- package/flowquery-py/src/parsing/context.py +7 -7
- package/flowquery-py/src/parsing/data_structures/associative_array.py +7 -7
- package/flowquery-py/src/parsing/data_structures/json_array.py +3 -3
- package/flowquery-py/src/parsing/data_structures/key_value_pair.py +4 -4
- package/flowquery-py/src/parsing/data_structures/lookup.py +2 -2
- package/flowquery-py/src/parsing/data_structures/range_lookup.py +2 -2
- package/flowquery-py/src/parsing/expressions/__init__.py +16 -16
- package/flowquery-py/src/parsing/expressions/expression.py +16 -13
- package/flowquery-py/src/parsing/expressions/expression_map.py +9 -9
- package/flowquery-py/src/parsing/expressions/f_string.py +3 -3
- package/flowquery-py/src/parsing/expressions/identifier.py +4 -3
- package/flowquery-py/src/parsing/expressions/number.py +3 -3
- package/flowquery-py/src/parsing/expressions/operator.py +16 -16
- package/flowquery-py/src/parsing/expressions/reference.py +3 -3
- package/flowquery-py/src/parsing/expressions/string.py +2 -2
- package/flowquery-py/src/parsing/functions/__init__.py +17 -17
- package/flowquery-py/src/parsing/functions/aggregate_function.py +8 -8
- package/flowquery-py/src/parsing/functions/async_function.py +12 -9
- package/flowquery-py/src/parsing/functions/avg.py +4 -4
- package/flowquery-py/src/parsing/functions/collect.py +6 -6
- package/flowquery-py/src/parsing/functions/function.py +6 -6
- package/flowquery-py/src/parsing/functions/function_factory.py +31 -34
- package/flowquery-py/src/parsing/functions/function_metadata.py +10 -11
- package/flowquery-py/src/parsing/functions/functions.py +14 -6
- package/flowquery-py/src/parsing/functions/join.py +3 -3
- package/flowquery-py/src/parsing/functions/keys.py +3 -3
- package/flowquery-py/src/parsing/functions/predicate_function.py +8 -7
- package/flowquery-py/src/parsing/functions/predicate_sum.py +12 -7
- package/flowquery-py/src/parsing/functions/rand.py +2 -2
- package/flowquery-py/src/parsing/functions/range_.py +9 -4
- package/flowquery-py/src/parsing/functions/replace.py +2 -2
- package/flowquery-py/src/parsing/functions/round_.py +2 -2
- package/flowquery-py/src/parsing/functions/size.py +2 -2
- package/flowquery-py/src/parsing/functions/split.py +9 -4
- package/flowquery-py/src/parsing/functions/stringify.py +3 -3
- package/flowquery-py/src/parsing/functions/sum.py +4 -4
- package/flowquery-py/src/parsing/functions/to_json.py +2 -2
- package/flowquery-py/src/parsing/functions/type_.py +3 -3
- package/flowquery-py/src/parsing/functions/value_holder.py +1 -1
- package/flowquery-py/src/parsing/logic/__init__.py +2 -2
- package/flowquery-py/src/parsing/logic/case.py +0 -1
- package/flowquery-py/src/parsing/logic/when.py +3 -1
- package/flowquery-py/src/parsing/operations/__init__.py +10 -10
- package/flowquery-py/src/parsing/operations/aggregated_return.py +3 -5
- package/flowquery-py/src/parsing/operations/aggregated_with.py +4 -4
- package/flowquery-py/src/parsing/operations/call.py +6 -7
- package/flowquery-py/src/parsing/operations/create_node.py +5 -4
- package/flowquery-py/src/parsing/operations/create_relationship.py +5 -4
- package/flowquery-py/src/parsing/operations/group_by.py +18 -16
- package/flowquery-py/src/parsing/operations/load.py +21 -19
- package/flowquery-py/src/parsing/operations/match.py +8 -7
- package/flowquery-py/src/parsing/operations/operation.py +3 -3
- package/flowquery-py/src/parsing/operations/projection.py +6 -6
- package/flowquery-py/src/parsing/operations/return_op.py +9 -5
- package/flowquery-py/src/parsing/operations/unwind.py +3 -2
- package/flowquery-py/src/parsing/operations/where.py +9 -7
- package/flowquery-py/src/parsing/operations/with_op.py +2 -2
- package/flowquery-py/src/parsing/parser.py +104 -57
- package/flowquery-py/src/parsing/token_to_node.py +2 -2
- package/flowquery-py/src/tokenization/__init__.py +4 -4
- package/flowquery-py/src/tokenization/keyword.py +1 -1
- package/flowquery-py/src/tokenization/operator.py +1 -1
- package/flowquery-py/src/tokenization/string_walker.py +4 -4
- package/flowquery-py/src/tokenization/symbol.py +1 -1
- package/flowquery-py/src/tokenization/token.py +11 -11
- package/flowquery-py/src/tokenization/token_mapper.py +10 -9
- package/flowquery-py/src/tokenization/token_type.py +1 -1
- package/flowquery-py/src/tokenization/tokenizer.py +19 -19
- package/flowquery-py/src/tokenization/trie.py +18 -17
- package/flowquery-py/src/utils/__init__.py +1 -1
- package/flowquery-py/src/utils/object_utils.py +3 -3
- package/flowquery-py/src/utils/string_utils.py +12 -12
- package/flowquery-py/tests/compute/test_runner.py +205 -1
- package/flowquery-py/tests/parsing/test_parser.py +41 -0
- package/flowquery-vscode/flowQueryEngine/flowquery.min.js +1 -1
- package/package.json +1 -1
- package/src/graph/data.ts +35 -19
- package/src/graph/node.ts +23 -0
- package/src/graph/node_data.ts +1 -1
- package/src/graph/relationship.ts +37 -5
- package/src/graph/relationship_data.ts +8 -1
- package/src/graph/relationship_match_collector.ts +1 -1
- package/src/graph/relationship_reference.ts +2 -1
- package/src/index.ts +1 -0
- package/src/parsing/parser.ts +47 -0
- package/tests/compute/runner.test.ts +178 -0
- package/tests/parsing/parser.test.ts +32 -0
package/flowquery-py/src/parsing/parser.py

@@ -1,18 +1,26 @@
 """Main parser for FlowQuery statements."""

-
+import sys
+from typing import Dict, Iterator, List, Optional, Tuple, cast

+from ..graph.hops import Hops
+from ..graph.node import Node
+from ..graph.node_reference import NodeReference
+from ..graph.pattern import Pattern
+from ..graph.pattern_expression import PatternExpression
+from ..graph.relationship import Relationship
+from ..graph.relationship_reference import RelationshipReference
 from ..tokenization.token import Token
 from ..utils.object_utils import ObjectUtils
 from .alias import Alias
 from .alias_option import AliasOption
 from .ast_node import ASTNode
 from .base_parser import BaseParser
-from .context import Context
 from .components.from_ import From
 from .components.headers import Headers
 from .components.null import Null
 from .components.post import Post
+from .context import Context
 from .data_structures.associative_array import AssociativeArray
 from .data_structures.json_array import JSONArray
 from .data_structures.key_value_pair import KeyValuePair
@@ -30,12 +38,14 @@ from .functions.function import Function
 from .functions.function_factory import FunctionFactory
 from .functions.predicate_function import PredicateFunction
 from .logic.case import Case
-from .logic.when import When
-from .logic.then import Then
 from .logic.else_ import Else
+from .logic.then import Then
+from .logic.when import When
 from .operations.aggregated_return import AggregatedReturn
 from .operations.aggregated_with import AggregatedWith
 from .operations.call import Call
+from .operations.create_node import CreateNode
+from .operations.create_relationship import CreateRelationship
 from .operations.limit import Limit
 from .operations.load import Load
 from .operations.match import Match
@@ -44,22 +54,15 @@ from .operations.return_op import Return
 from .operations.unwind import Unwind
 from .operations.where import Where
 from .operations.with_op import With
-from ..graph.node import Node
-from ..graph.node_reference import NodeReference
-from ..graph.pattern import Pattern
-from ..graph.pattern_expression import PatternExpression
-from ..graph.relationship import Relationship
-from .operations.create_node import CreateNode
-from .operations.create_relationship import CreateRelationship


 class Parser(BaseParser):
     """Main parser for FlowQuery statements.
-
+
     Parses FlowQuery declarative query language statements into an Abstract Syntax Tree (AST).
     Supports operations like WITH, UNWIND, RETURN, LOAD, WHERE, and LIMIT, along with
     expressions, functions, data structures, and logical constructs.
-
+
     Example:
         parser = Parser()
         ast = parser.parse("unwind [1, 2, 3, 4, 5] as num return num")
@@ -73,13 +76,13 @@ class Parser(BaseParser):

     def parse(self, statement: str) -> ASTNode:
         """Parses a FlowQuery statement into an Abstract Syntax Tree.
-
+
         Args:
             statement: The FlowQuery statement to parse
-
+
         Returns:
             The root AST node containing the parsed structure
-
+
         Raises:
             ValueError: If the statement is malformed or contains syntax errors
         """
@@ -90,32 +93,32 @@ class Parser(BaseParser):
         root = ASTNode()
         previous: Optional[Operation] = None
         operation: Optional[Operation] = None
-
+
         while not self.token.is_eof():
             if root.child_count() > 0:
                 self._expect_and_skip_whitespace_and_comments()
             else:
                 self._skip_whitespace_and_comments()
-
+
             operation = self._parse_operation()
             if operation is None and not is_sub_query:
                 raise ValueError("Expected one of WITH, UNWIND, RETURN, LOAD, OR CALL")
             elif operation is None and is_sub_query:
                 return root
-
+
             if self._returns > 1:
                 raise ValueError("Only one RETURN statement is allowed")
-
+
             if isinstance(previous, Call) and not previous.has_yield:
                 raise ValueError(
                     "CALL operations must have a YIELD clause unless they are the last operation"
                 )
-
+
             if previous is not None:
                 previous.add_sibling(operation)
             else:
                 root.add_child(operation)
-
+
             where = self._parse_where()
             if where is not None:
                 if isinstance(operation, Return):
@@ -123,17 +126,17 @@ class Parser(BaseParser):
                 else:
                     operation.add_sibling(where)
                 operation = where
-
+
             limit = self._parse_limit()
             if limit is not None:
                 operation.add_sibling(limit)
                 operation = limit
-
+
             previous = operation
-
+
         if not isinstance(operation, (Return, Call, CreateNode, CreateRelationship)):
             raise ValueError("Last statement must be a RETURN, WHERE, CALL, or CREATE statement")
-
+
         return root

     def _parse_operation(self) -> Optional[Operation]:
@@ -156,7 +159,7 @@ class Parser(BaseParser):
         if len(expressions) == 0:
             raise ValueError("Expected expression")
         if any(expr.has_reducers() for expr in expressions):
-            return AggregatedWith(expressions)
+            return AggregatedWith(expressions)  # type: ignore[return-value]
         return With(expressions)

     def _parse_unwind(self) -> Optional[Unwind]:
@@ -228,7 +231,7 @@ class Parser(BaseParser):
         self._expect_and_skip_whitespace_and_comments()
         from_node = From()
         load.add_child(from_node)
-
+
         # Check if source is async function
         async_func = self._parse_async_function()
         if async_func is not None:
@@ -238,7 +241,7 @@ class Parser(BaseParser):
             if expression is None:
                 raise ValueError("Expected expression or async function")
             from_node.add_child(expression)
-
+
         self._expect_and_skip_whitespace_and_comments()
         if self.token.is_headers():
             headers = Headers()
@@ -250,7 +253,7 @@ class Parser(BaseParser):
                 headers.add_child(header)
             load.add_child(headers)
             self._expect_and_skip_whitespace_and_comments()
-
+
         if self.token.is_post():
             post = Post()
             self.set_next_token()
@@ -261,7 +264,7 @@ class Parser(BaseParser):
             post.add_child(payload)
             load.add_child(post)
             self._expect_and_skip_whitespace_and_comments()
-
+
         alias = self._parse_alias()
         if alias is not None:
             load.add_child(alias)
@@ -288,7 +291,7 @@ class Parser(BaseParser):
         expressions = list(self._parse_expressions(AliasOption.OPTIONAL))
         if len(expressions) == 0:
             raise ValueError("Expected at least one expression")
-        call.yielded = expressions
+        call.yielded = expressions  # type: ignore[assignment]
         return call

     def _parse_match(self) -> Optional[Match]:
@@ -311,11 +314,11 @@ class Parser(BaseParser):
             raise ValueError("Expected VIRTUAL")
         self.set_next_token()
         self._expect_and_skip_whitespace_and_comments()
-
+
         node = self._parse_node()
         if node is None:
             raise ValueError("Expected node definition")
-
+
         relationship: Optional[Relationship] = None
         if self.token.is_subtract() and self.peek() and self.peek().is_opening_bracket():
             self.set_next_token()  # skip -
@@ -341,17 +344,17 @@ class Parser(BaseParser):
             raise ValueError("Expected target node definition")
         relationship = Relationship()
         relationship.type = rel_type
-
+
         self._expect_and_skip_whitespace_and_comments()
         if not self.token.is_as():
             raise ValueError("Expected AS")
         self.set_next_token()
         self._expect_and_skip_whitespace_and_comments()
-
+
         query = self._parse_sub_query()
         if query is None:
             raise ValueError("Expected sub-query")
-
+
         if relationship is not None:
             return CreateRelationship(relationship, query)
         else:
@@ -416,7 +419,7 @@ class Parser(BaseParser):

     def _parse_pattern_expression(self) -> Optional[PatternExpression]:
         """Parse a pattern expression for WHERE clauses.
-
+
         PatternExpression is used to test if a graph pattern exists.
         It must start with a NodeReference (referencing an existing variable).
         """
@@ -459,17 +462,17 @@ class Parser(BaseParser):
             raise ValueError("Expected node label identifier")
         if self.token.is_colon() and peek is not None and peek.is_identifier():
             self.set_next_token()
-            label = self.token.value
+            label = cast(str, self.token.value)  # Guaranteed by is_identifier check
             self.set_next_token()
         self._skip_whitespace_and_comments()
         node = Node()
         node.label = label
+        node.properties = dict(self._parse_properties())
         if label is not None and identifier is not None:
             node.identifier = identifier
             self._variables[identifier] = node
         elif identifier is not None:
             reference = self._variables.get(identifier)
-            from ..graph.node_reference import NodeReference
             if reference is None or not isinstance(reference, Node):
                 raise ValueError(f"Undefined node reference: {identifier}")
             node = NodeReference(node, reference)
@@ -479,7 +482,9 @@ class Parser(BaseParser):
         return node

     def _parse_relationship(self) -> Optional[Relationship]:
+        direction = "right"
         if self.token.is_less_than() and self.peek() is not None and self.peek().is_subtract():
+            direction = "left"
             self.set_next_token()
             self.set_next_token()
         elif self.token.is_subtract():
@@ -501,6 +506,7 @@ class Parser(BaseParser):
         rel_type: str = self.token.value or ""
         self.set_next_token()
         hops = self._parse_relationship_hops()
+        properties: Dict[str, Expression] = dict(self._parse_properties())
         if not self.token.is_closing_bracket():
             raise ValueError("Expected closing bracket for relationship definition")
         self.set_next_token()
@@ -510,12 +516,13 @@ class Parser(BaseParser):
         if self.token.is_greater_than():
             self.set_next_token()
         relationship = Relationship()
+        relationship.direction = direction
+        relationship.properties = properties
         if rel_type is not None and variable is not None:
             relationship.identifier = variable
             self._variables[variable] = relationship
         elif variable is not None:
             reference = self._variables.get(variable)
-            from ..graph.relationship_reference import RelationshipReference
             if reference is None or not isinstance(reference, Relationship):
                 raise ValueError(f"Undefined relationship reference: {variable}")
             relationship = RelationshipReference(relationship, reference)
@@ -524,9 +531,40 @@ class Parser(BaseParser):
             relationship.type = rel_type
         return relationship

-    def
-
-
+    def _parse_properties(self) -> Iterator[Tuple[str, Expression]]:
+        parts: int = 0
+        while True:
+            self._skip_whitespace_and_comments()
+            if not self.token.is_opening_brace() and parts == 0:
+                return
+            elif not self.token.is_opening_brace() and parts > 0:
+                raise ValueError("Expected opening brace")
+            self.set_next_token()
+            self._skip_whitespace_and_comments()
+            if not self.token.is_identifier():
+                raise ValueError("Expected identifier")
+            key: str = self.token.value or ""
+            self.set_next_token()
+            self._skip_whitespace_and_comments()
+            if not self.token.is_colon():
+                raise ValueError("Expected colon")
+            self.set_next_token()
+            self._skip_whitespace_and_comments()
+            expression = self._parse_expression()
+            if expression is None:
+                raise ValueError("Expected expression")
+            self._skip_whitespace_and_comments()
+            if not self.token.is_closing_brace():
+                raise ValueError("Expected closing brace")
+            self.set_next_token()
+            yield (key, expression)
+            self._skip_whitespace_and_comments()
+            if not self.token.is_comma():
+                break
+            self.set_next_token()
+            parts += 1
+
+    def _parse_relationship_hops(self) -> Optional[Hops]:
         if not self.token.is_multiply():
             return None
         hops = Hops()
@@ -572,10 +610,11 @@ class Parser(BaseParser):
             alias = self._parse_alias()
             if isinstance(expression.first_child(), Reference) and alias is None:
                 reference = expression.first_child()
+                assert isinstance(reference, Reference)  # For type narrowing
                 expression.set_alias(reference.identifier)
                 self._variables[reference.identifier] = expression
-            elif (alias_option == AliasOption.REQUIRED and
-                  alias is None and
+            elif (alias_option == AliasOption.REQUIRED and
+                  alias is None and
                   not isinstance(expression.first_child(), Reference)):
                 raise ValueError("Alias required")
             elif alias_option == AliasOption.NOT_ALLOWED and alias is not None:
@@ -607,7 +646,15 @@ class Parser(BaseParser):
             lookup = self._parse_lookup(func)
             expression.add_node(lookup)
             return True
-        elif
+        elif (
+            self.token.is_left_parenthesis()
+            and self.peek() is not None
+            and (
+                self.peek().is_identifier()
+                or self.peek().is_colon()
+                or self.peek().is_right_parenthesis()
+            )
+        ):
             # Possible graph pattern expression
             pattern = self._parse_pattern_expression()
             if pattern is not None:
@@ -675,7 +722,7 @@ class Parser(BaseParser):
             else:
                 break
             self.set_next_token()
-
+
         if expression.nodes_added():
             expression.finish()
             return expression
@@ -683,7 +730,7 @@ class Parser(BaseParser):

     def _parse_lookup(self, node: ASTNode) -> ASTNode:
         variable = node
-        lookup = None
+        lookup: Lookup | RangeLookup | None = None
         while True:
             if self.token.is_dot():
                 self.set_next_token()
@@ -870,30 +917,30 @@ class Parser(BaseParser):
         name = self.token.value or ""
         if not self.peek() or not self.peek().is_left_parenthesis():
             return None
-
+
         try:
             func = FunctionFactory.create(name)
         except ValueError:
             raise ValueError(f"Unknown function: {name}")
-
+
         # Check for nested aggregate functions
         if isinstance(func, AggregateFunction) and self._context.contains_type(AggregateFunction):
             raise ValueError("Aggregate functions cannot be nested")
-
+
         self._context.push(func)
         self.set_next_token()  # skip function name
         self.set_next_token()  # skip left parenthesis
         self._skip_whitespace_and_comments()
-
+
         # Check for DISTINCT keyword
         if self.token.is_distinct():
             func.distinct = True
             self.set_next_token()
             self._expect_and_skip_whitespace_and_comments()
-
+
         params = list(self._parse_function_parameters())
         func.parameters = params
-
+
         if not self.token.is_right_parenthesis():
             raise ValueError("Expected right parenthesis")
         self.set_next_token()
@@ -910,11 +957,11 @@ class Parser(BaseParser):
         if not self.token.is_left_parenthesis():
             raise ValueError("Expected left parenthesis")
         self.set_next_token()
-
+
         func = FunctionFactory.create_async(name)
         params = list(self._parse_function_parameters())
         func.parameters = params
-
+
         if not self.token.is_right_parenthesis():
             raise ValueError("Expected right parenthesis")
         self.set_next_token()
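Taken together, the parser.py changes above do two things: relationship patterns now record a direction ("left" when written <-[...]-, otherwise "right"), and both node and relationship patterns accept an inline property map parsed by the new _parse_properties generator (an identifier, a colon, and an expression inside braces). A minimal usage sketch follows; the import path and the match/property query text are assumptions inferred from the grammar above, while the unwind example is taken verbatim from the Parser docstring.

    # Sketch only. The import path is an assumption; in the repository the
    # class lives at package/flowquery-py/src/parsing/parser.py.
    from src.parsing.parser import Parser

    parser = Parser()

    # Verbatim example from the Parser docstring:
    ast = parser.parse("unwind [1, 2, 3, 4, 5] as num return num")

    # Hypothetical query exercising the new direction and {key: expression}
    # property parsing; this surface syntax is inferred from _parse_node,
    # _parse_relationship, and _parse_properties, not from the package's tests.
    ast = parser.parse(
        'match (p:Person {name: "Ada"})-[k:KNOWS {since: 1999}]->(q:Person) return q'
    )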
package/flowquery-py/src/parsing/token_to_node.py

@@ -9,7 +9,6 @@ from .components.text import Text
 from .expressions.boolean import Boolean
 from .expressions.identifier import Identifier
 from .expressions.number import Number
-from .expressions.string import String
 from .expressions.operator import (
     Add,
     And,
@@ -28,6 +27,7 @@ from .expressions.operator import (
     Power,
     Subtract,
 )
+from .expressions.string import String
 from .logic.else_ import Else
 from .logic.end import End
 from .logic.then import Then
@@ -103,7 +103,7 @@ class TokenToNode:
         elif token.is_null():
             return Null()
         elif token.is_boolean():
-            return Boolean(token.value)
+            return Boolean(token.value or "")
         else:
             raise ValueError("Unknown token")
         return ASTNode()
package/flowquery-py/src/tokenization/__init__.py

@@ -1,13 +1,13 @@
 """Tokenization module for FlowQuery."""

-from .tokenizer import Tokenizer
-from .token import Token
-from .token_type import TokenType
 from .keyword import Keyword
 from .operator import Operator
+from .string_walker import StringWalker
 from .symbol import Symbol
+from .token import Token
 from .token_mapper import TokenMapper
-from .
+from .token_type import TokenType
+from .tokenizer import Tokenizer
 from .trie import Trie

 __all__ = [
package/flowquery-py/src/tokenization/string_walker.py

@@ -5,10 +5,10 @@ from ..utils.string_utils import StringUtils

 class StringWalker:
     """Utility class for walking through a string character by character during tokenization.
-
+
     Provides methods to check for specific character patterns, move through the string,
     and extract substrings. Used by the Tokenizer to process input text.
-
+
     Example:
         walker = StringWalker("WITH x as variable")
         while not walker.is_at_end:
@@ -17,7 +17,7 @@ class StringWalker:

     def __init__(self, text: str):
         """Creates a new StringWalker for the given text.
-
+
         Args:
             text: The input text to walk through
         """
@@ -89,7 +89,7 @@ class StringWalker:
         return self.current_char == '\\' and self.next_char == char

     def escaped_brace(self) -> bool:
-        return ((self.current_char == '{' and self.next_char == '{') or
+        return ((self.current_char == '{' and self.next_char == '{') or
                 (self.current_char == '}' and self.next_char == '}'))

     def opening_brace(self) -> bool:
package/flowquery-py/src/tokenization/token.py

@@ -1,25 +1,24 @@
 """Represents a single token in the FlowQuery language."""

 from __future__ import annotations
-from typing import TYPE_CHECKING, Optional, Any

-from
+from typing import Optional
+
+from ..parsing.ast_node import ASTNode
+from ..utils.string_utils import StringUtils
 from .keyword import Keyword
 from .operator import Operator
 from .symbol import Symbol
-from
-
-if TYPE_CHECKING:
-    from ..parsing.ast_node import ASTNode
+from .token_type import TokenType


 class Token:
     """Represents a single token in the FlowQuery language.
-
+
     Tokens are the atomic units of lexical analysis, produced by the tokenizer
     and consumed by the parser. Each token has a type (keyword, operator, identifier, etc.)
     and an optional value.
-
+
     Example:
         with_token = Token.WITH()
         ident_token = Token.IDENTIFIER("myVar")
@@ -28,7 +27,7 @@ class Token:

     def __init__(self, type_: TokenType, value: Optional[str] = None):
         """Creates a new Token instance.
-
+
         Args:
             type_: The type of the token
             value: The optional value associated with the token
@@ -41,10 +40,10 @@ class Token:

     def equals(self, other: Token) -> bool:
         """Checks if this token equals another token.
-
+
         Args:
             other: The token to compare against
-
+
         Returns:
             True if tokens are equal, False otherwise
         """
@@ -82,6 +81,7 @@ class Token:

     @property
     def node(self) -> ASTNode:
+        # Import at runtime to avoid circular dependency
         from ..parsing.token_to_node import TokenToNode
         return TokenToNode.convert(self)

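The token.py hunk above also documents the deferred-import pattern: TokenToNode is imported inside the node property rather than at module level, so the tokenization package can hand tokens to the parsing package without creating an import cycle. A generic sketch of the pattern, with illustrative module names that are not part of flowquery:

    # token_module.py (illustrative)
    class Token:
        @property
        def node(self):
            # Import at call time: converter_module imports token_module at
            # module level, so a top-level import here would be circular.
            from converter_module import convert  # illustrative name
            return convert(self)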
package/flowquery-py/src/tokenization/token_mapper.py

@@ -1,6 +1,7 @@
 """Maps string values to tokens using a Trie for efficient lookup."""

-from
+from enum import Enum
+from typing import Optional, Type

 from .token import Token
 from .trie import Trie
@@ -8,24 +9,24 @@ from .trie import Trie

 class TokenMapper:
     """Maps string values to tokens using a Trie for efficient lookup.
-
+
     Takes an enum of keywords, operators, or symbols and builds a trie
     for fast token matching during tokenization.
-
+
     Example:
         mapper = TokenMapper(Keyword)
         token = mapper.map("WITH")
     """

-    def __init__(self, enum_class):
+    def __init__(self, enum_class: Type[Enum]) -> None:
         """Creates a TokenMapper from an enum of token values.
-
+
         Args:
             enum_class: An enum class containing token values
         """
         self._trie = Trie()
         self._enum = enum_class
-
+
         for member in enum_class:
             token = Token.method(member.name)
             if token is not None and token.value is not None:
@@ -33,10 +34,10 @@ class TokenMapper:

     def map(self, value: str) -> Optional[Token]:
         """Maps a string value to its corresponding token.
-
+
         Args:
             value: The string value to map
-
+
         Returns:
             The matched token, or None if no match found
         """
@@ -45,7 +46,7 @@ class TokenMapper:
     @property
     def last_found(self) -> Optional[str]:
         """Gets the last matched string from the most recent map operation.
-
+
         Returns:
             The last found string, or None if no match
         """