flowquery 1.0.22 → 1.0.24
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/workflows/python-publish.yml +0 -5
- package/dist/flowquery.min.js +1 -1
- package/dist/parsing/parser.js +10 -10
- package/dist/parsing/parser.js.map +1 -1
- package/dist/tokenization/token.d.ts +2 -0
- package/dist/tokenization/token.d.ts.map +1 -1
- package/dist/tokenization/token.js +12 -0
- package/dist/tokenization/token.js.map +1 -1
- package/docs/flowquery.min.js +1 -1
- package/flowquery-py/pyproject.toml +1 -1
- package/flowquery-py/src/parsing/parser.py +11 -11
- package/flowquery-py/src/tokenization/token.py +18 -0
- package/flowquery-py/tests/compute/test_runner.py +60 -1
- package/flowquery-py/tests/parsing/test_parser.py +46 -0
- package/flowquery-py/tests/tokenization/test_tokenizer.py +34 -0
- package/flowquery-vscode/flowQueryEngine/flowquery.min.js +1 -1
- package/package.json +1 -1
- package/src/parsing/parser.ts +10 -10
- package/src/tokenization/token.ts +16 -0
- package/tests/compute/runner.test.ts +56 -0
- package/tests/parsing/parser.test.ts +45 -0
- package/tests/tokenization/tokenizer.test.ts +34 -0
|
@@ -326,7 +326,7 @@ class Parser(BaseParser):
|
|
|
326
326
|
if not self.token.is_colon():
|
|
327
327
|
raise ValueError("Expected ':' for relationship type")
|
|
328
328
|
self.set_next_token()
|
|
329
|
-
if not self.token.is_identifier():
|
|
329
|
+
if not self.token.is_identifier_or_keyword():
|
|
330
330
|
raise ValueError("Expected relationship type identifier")
|
|
331
331
|
rel_type = self.token.value or ""
|
|
332
332
|
self.set_next_token()
|
|
@@ -450,17 +450,17 @@ class Parser(BaseParser):
|
|
|
450
450
|
self.set_next_token()
|
|
451
451
|
self._skip_whitespace_and_comments()
|
|
452
452
|
identifier: Optional[str] = None
|
|
453
|
-
if self.token.is_identifier():
|
|
453
|
+
if self.token.is_identifier_or_keyword():
|
|
454
454
|
identifier = self.token.value
|
|
455
455
|
self.set_next_token()
|
|
456
456
|
self._skip_whitespace_and_comments()
|
|
457
457
|
label: Optional[str] = None
|
|
458
458
|
peek = self.peek()
|
|
459
|
-
if not self.token.is_colon() and peek is not None and peek.is_identifier():
|
|
459
|
+
if not self.token.is_colon() and peek is not None and peek.is_identifier_or_keyword():
|
|
460
460
|
raise ValueError("Expected ':' for node label")
|
|
461
|
-
if self.token.is_colon() and (peek is None or not peek.is_identifier()):
|
|
461
|
+
if self.token.is_colon() and (peek is None or not peek.is_identifier_or_keyword()):
|
|
462
462
|
raise ValueError("Expected node label identifier")
|
|
463
|
-
if self.token.is_colon() and peek is not None and peek.is_identifier():
|
|
463
|
+
if self.token.is_colon() and peek is not None and peek.is_identifier_or_keyword():
|
|
464
464
|
self.set_next_token()
|
|
465
465
|
label = cast(str, self.token.value) # Guaranteed by is_identifier check
|
|
466
466
|
self.set_next_token()
|
|
@@ -495,13 +495,13 @@ class Parser(BaseParser):
|
|
|
495
495
|
return None
|
|
496
496
|
self.set_next_token()
|
|
497
497
|
variable: Optional[str] = None
|
|
498
|
-
if self.token.is_identifier():
|
|
498
|
+
if self.token.is_identifier_or_keyword():
|
|
499
499
|
variable = self.token.value
|
|
500
500
|
self.set_next_token()
|
|
501
501
|
if not self.token.is_colon():
|
|
502
502
|
raise ValueError("Expected ':' for relationship type")
|
|
503
503
|
self.set_next_token()
|
|
504
|
-
if not self.token.is_identifier():
|
|
504
|
+
if not self.token.is_identifier_or_keyword():
|
|
505
505
|
raise ValueError("Expected relationship type identifier")
|
|
506
506
|
rel_type: str = self.token.value or ""
|
|
507
507
|
self.set_next_token()
|
|
@@ -633,14 +633,14 @@ class Parser(BaseParser):
|
|
|
633
633
|
def _parse_operand(self, expression: Expression) -> bool:
|
|
634
634
|
"""Parse a single operand (without operators). Returns True if an operand was parsed."""
|
|
635
635
|
self._skip_whitespace_and_comments()
|
|
636
|
-
if self.token.is_identifier() and (self.peek() is None or not self.peek().is_left_parenthesis()):
|
|
636
|
+
if self.token.is_identifier_or_keyword() and (self.peek() is None or not self.peek().is_left_parenthesis()):
|
|
637
637
|
identifier = self.token.value or ""
|
|
638
638
|
reference = Reference(identifier, self._variables.get(identifier))
|
|
639
639
|
self.set_next_token()
|
|
640
640
|
lookup = self._parse_lookup(reference)
|
|
641
641
|
expression.add_node(lookup)
|
|
642
642
|
return True
|
|
643
|
-
elif self.token.is_identifier() and self.peek() is not None and self.peek().is_left_parenthesis():
|
|
643
|
+
elif self.token.is_identifier_or_keyword() and self.peek() is not None and self.peek().is_left_parenthesis():
|
|
644
644
|
func = self._parse_predicate_function() or self._parse_function()
|
|
645
645
|
if func is not None:
|
|
646
646
|
lookup = self._parse_lookup(func)
|
|
@@ -650,7 +650,7 @@ class Parser(BaseParser):
|
|
|
650
650
|
self.token.is_left_parenthesis()
|
|
651
651
|
and self.peek() is not None
|
|
652
652
|
and (
|
|
653
|
-
self.peek().is_identifier()
|
|
653
|
+
self.peek().is_identifier_or_keyword()
|
|
654
654
|
or self.peek().is_colon()
|
|
655
655
|
or self.peek().is_right_parenthesis()
|
|
656
656
|
)
|
|
@@ -847,7 +847,7 @@ class Parser(BaseParser):
|
|
|
847
847
|
self._expect_previous_token_to_be_whitespace_or_comment()
|
|
848
848
|
self.set_next_token()
|
|
849
849
|
self._expect_and_skip_whitespace_and_comments()
|
|
850
|
-
if not self.token.is_identifier():
|
|
850
|
+
if not self.token.is_identifier_or_keyword():
|
|
851
851
|
raise ValueError("Expected identifier")
|
|
852
852
|
alias = Alias(self.token.value or "")
|
|
853
853
|
self.set_next_token()
|
|
@@ -106,6 +106,24 @@ class Token:
|
|
|
106
106
|
def is_identifier(self) -> bool:
|
|
107
107
|
return self._type == TokenType.IDENTIFIER or self._type == TokenType.BACKTICK_STRING
|
|
108
108
|
|
|
109
|
+
def is_keyword_that_cannot_be_identifier(self) -> bool:
|
|
110
|
+
"""Returns True for keywords that have special expression-level roles
|
|
111
|
+
and should not be treated as identifiers (NULL, CASE, WHEN, THEN, ELSE, END)."""
|
|
112
|
+
return self.is_keyword() and (
|
|
113
|
+
self.is_null()
|
|
114
|
+
or self.is_case()
|
|
115
|
+
or self.is_when()
|
|
116
|
+
or self.is_then()
|
|
117
|
+
or self.is_else()
|
|
118
|
+
or self.is_end()
|
|
119
|
+
)
|
|
120
|
+
|
|
121
|
+
def is_identifier_or_keyword(self) -> bool:
|
|
122
|
+
"""Returns True if the token is an identifier or a keyword that can be used as an identifier."""
|
|
123
|
+
return self.is_identifier() or (
|
|
124
|
+
self.is_keyword() and not self.is_keyword_that_cannot_be_identifier()
|
|
125
|
+
)
|
|
126
|
+
|
|
109
127
|
# String token
|
|
110
128
|
|
|
111
129
|
@staticmethod
|
|
@@ -1584,4 +1584,63 @@ class TestRunner:
|
|
|
1584
1584
|
assert chases["sample"] is not None
|
|
1585
1585
|
assert "left_id" not in chases["sample"]
|
|
1586
1586
|
assert "right_id" not in chases["sample"]
|
|
1587
|
-
assert "speed" in chases["sample"]
|
|
1587
|
+
assert "speed" in chases["sample"]
|
|
1588
|
+
|
|
1589
|
+
@pytest.mark.asyncio
|
|
1590
|
+
async def test_reserved_keywords_as_identifiers(self):
|
|
1591
|
+
"""Test reserved keywords as identifiers."""
|
|
1592
|
+
runner = Runner("""
|
|
1593
|
+
WITH 1 AS return
|
|
1594
|
+
RETURN return
|
|
1595
|
+
""")
|
|
1596
|
+
await runner.run()
|
|
1597
|
+
results = runner.results
|
|
1598
|
+
assert len(results) == 1
|
|
1599
|
+
assert results[0]["return"] == 1
|
|
1600
|
+
|
|
1601
|
+
@pytest.mark.asyncio
|
|
1602
|
+
async def test_reserved_keywords_as_parts_of_identifiers(self):
|
|
1603
|
+
"""Test reserved keywords as parts of identifiers."""
|
|
1604
|
+
runner = Runner("""
|
|
1605
|
+
unwind [
|
|
1606
|
+
{from: "Alice", to: "Bob", organizer: "Charlie"},
|
|
1607
|
+
{from: "Bob", to: "Charlie", organizer: "Alice"},
|
|
1608
|
+
{from: "Charlie", to: "Alice", organizer: "Bob"}
|
|
1609
|
+
] as data
|
|
1610
|
+
return data.from as from, data.to as to, data.organizer as organizer
|
|
1611
|
+
""")
|
|
1612
|
+
await runner.run()
|
|
1613
|
+
results = runner.results
|
|
1614
|
+
assert len(results) == 3
|
|
1615
|
+
assert results[0] == {"from": "Alice", "to": "Bob", "organizer": "Charlie"}
|
|
1616
|
+
assert results[1] == {"from": "Bob", "to": "Charlie", "organizer": "Alice"}
|
|
1617
|
+
assert results[2] == {"from": "Charlie", "to": "Alice", "organizer": "Bob"}
|
|
1618
|
+
|
|
1619
|
+
@pytest.mark.asyncio
|
|
1620
|
+
async def test_reserved_keywords_as_relationship_types_and_labels(self):
|
|
1621
|
+
"""Test reserved keywords as relationship types and labels."""
|
|
1622
|
+
await Runner("""
|
|
1623
|
+
CREATE VIRTUAL (:Return) AS {
|
|
1624
|
+
unwind [
|
|
1625
|
+
{id: 1, name: 'Node 1'},
|
|
1626
|
+
{id: 2, name: 'Node 2'}
|
|
1627
|
+
] as record
|
|
1628
|
+
RETURN record.id as id, record.name as name
|
|
1629
|
+
}
|
|
1630
|
+
""").run()
|
|
1631
|
+
await Runner("""
|
|
1632
|
+
CREATE VIRTUAL (:Return)-[:With]-(:Return) AS {
|
|
1633
|
+
unwind [
|
|
1634
|
+
{left_id: 1, right_id: 2}
|
|
1635
|
+
] as record
|
|
1636
|
+
RETURN record.left_id as left_id, record.right_id as right_id
|
|
1637
|
+
}
|
|
1638
|
+
""").run()
|
|
1639
|
+
runner = Runner("""
|
|
1640
|
+
MATCH (a:Return)-[:With]->(b:Return)
|
|
1641
|
+
RETURN a.name AS name1, b.name AS name2
|
|
1642
|
+
""")
|
|
1643
|
+
await runner.run()
|
|
1644
|
+
results = runner.results
|
|
1645
|
+
assert len(results) == 1
|
|
1646
|
+
assert results[0] == {"name1": "Node 1", "name2": "Node 2"}
|
|
@@ -288,6 +288,43 @@ class TestParser:
|
|
|
288
288
|
_return = ast.first_child()
|
|
289
289
|
assert _return.first_child().value() == 2
|
|
290
290
|
|
|
291
|
+
def test_lookup_with_reserved_keyword_property_names(self):
|
|
292
|
+
"""Test lookup with reserved keyword property names like end, null, case."""
|
|
293
|
+
parser = Parser()
|
|
294
|
+
ast = parser.parse("with {end: 1, null: 2, case: 3} as x return x.end, x.null, x.case")
|
|
295
|
+
expected = (
|
|
296
|
+
"ASTNode\n"
|
|
297
|
+
"- With\n"
|
|
298
|
+
"-- Expression (x)\n"
|
|
299
|
+
"--- AssociativeArray\n"
|
|
300
|
+
"---- KeyValuePair\n"
|
|
301
|
+
"----- String (end)\n"
|
|
302
|
+
"----- Expression\n"
|
|
303
|
+
"------ Number (1)\n"
|
|
304
|
+
"---- KeyValuePair\n"
|
|
305
|
+
"----- String (null)\n"
|
|
306
|
+
"----- Expression\n"
|
|
307
|
+
"------ Number (2)\n"
|
|
308
|
+
"---- KeyValuePair\n"
|
|
309
|
+
"----- String (case)\n"
|
|
310
|
+
"----- Expression\n"
|
|
311
|
+
"------ Number (3)\n"
|
|
312
|
+
"- Return\n"
|
|
313
|
+
"-- Expression\n"
|
|
314
|
+
"--- Lookup\n"
|
|
315
|
+
"---- Identifier (end)\n"
|
|
316
|
+
"---- Reference (x)\n"
|
|
317
|
+
"-- Expression\n"
|
|
318
|
+
"--- Lookup\n"
|
|
319
|
+
"---- Identifier (null)\n"
|
|
320
|
+
"---- Reference (x)\n"
|
|
321
|
+
"-- Expression\n"
|
|
322
|
+
"--- Lookup\n"
|
|
323
|
+
"---- Identifier (case)\n"
|
|
324
|
+
"---- Reference (x)"
|
|
325
|
+
)
|
|
326
|
+
assert ast.print() == expected
|
|
327
|
+
|
|
291
328
|
def test_load_with_post(self):
|
|
292
329
|
"""Test load with post."""
|
|
293
330
|
parser = Parser()
|
|
@@ -719,3 +756,12 @@ class TestParser:
|
|
|
719
756
|
assert isinstance(relationship, Relationship)
|
|
720
757
|
assert relationship.properties.get("since") is not None
|
|
721
758
|
assert relationship.properties["since"].value() == 2022
|
|
759
|
+
|
|
760
|
+
def test_case_statement_with_keywords_as_identifiers(self):
|
|
761
|
+
"""Test that CASE/WHEN/THEN/ELSE/END are not treated as identifiers."""
|
|
762
|
+
parser = Parser()
|
|
763
|
+
ast = parser.parse("RETURN CASE WHEN 1 THEN 2 ELSE 3 END")
|
|
764
|
+
assert "Case" in ast.print()
|
|
765
|
+
assert "When" in ast.print()
|
|
766
|
+
assert "Then" in ast.print()
|
|
767
|
+
assert "Else" in ast.print()
|
|
@@ -162,3 +162,37 @@ class TestTokenizer:
|
|
|
162
162
|
tokens = tokenizer.tokenize()
|
|
163
163
|
assert tokens is not None
|
|
164
164
|
assert len(tokens) > 0
|
|
165
|
+
|
|
166
|
+
def test_reserved_keywords_as_identifiers(self):
|
|
167
|
+
"""Test reserved keywords as identifiers."""
|
|
168
|
+
tokenizer = Tokenizer("""
|
|
169
|
+
WITH 1 AS return
|
|
170
|
+
RETURN return
|
|
171
|
+
""")
|
|
172
|
+
tokens = tokenizer.tokenize()
|
|
173
|
+
assert tokens is not None
|
|
174
|
+
assert len(tokens) > 0
|
|
175
|
+
|
|
176
|
+
def test_reserved_keywords_as_part_of_identifiers(self):
|
|
177
|
+
"""Test reserved keywords as part of identifiers."""
|
|
178
|
+
tokenizer = Tokenizer("""
|
|
179
|
+
unwind [
|
|
180
|
+
{from: "Alice", to: "Bob", organizer: "Charlie"},
|
|
181
|
+
{from: "Bob", to: "Charlie", organizer: "Alice"},
|
|
182
|
+
{from: "Charlie", to: "Alice", organizer: "Bob"}
|
|
183
|
+
] as data
|
|
184
|
+
return data.from, data.to
|
|
185
|
+
""")
|
|
186
|
+
tokens = tokenizer.tokenize()
|
|
187
|
+
assert tokens is not None
|
|
188
|
+
assert len(tokens) > 0
|
|
189
|
+
|
|
190
|
+
def test_reserved_keywords_as_relationship_types_and_labels(self):
|
|
191
|
+
"""Test reserved keywords as relationship types and labels."""
|
|
192
|
+
tokenizer = Tokenizer("""
|
|
193
|
+
MATCH (a:RETURN)-[r:WITH]->(b:RETURN)
|
|
194
|
+
RETURN a, b
|
|
195
|
+
""")
|
|
196
|
+
tokens = tokenizer.tokenize()
|
|
197
|
+
assert tokens is not None
|
|
198
|
+
assert len(tokens) > 0
|