flowquery 1.0.20 → 1.0.21

This diff compares publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
Files changed (151)
  1. package/.github/workflows/release.yml +2 -2
  2. package/.husky/pre-commit +26 -0
  3. package/dist/flowquery.min.js +1 -1
  4. package/dist/graph/data.d.ts +5 -4
  5. package/dist/graph/data.d.ts.map +1 -1
  6. package/dist/graph/data.js +35 -19
  7. package/dist/graph/data.js.map +1 -1
  8. package/dist/graph/node.d.ts +2 -0
  9. package/dist/graph/node.d.ts.map +1 -1
  10. package/dist/graph/node.js +23 -0
  11. package/dist/graph/node.js.map +1 -1
  12. package/dist/graph/node_data.js +1 -1
  13. package/dist/graph/node_data.js.map +1 -1
  14. package/dist/graph/relationship.d.ts +6 -1
  15. package/dist/graph/relationship.d.ts.map +1 -1
  16. package/dist/graph/relationship.js +38 -7
  17. package/dist/graph/relationship.js.map +1 -1
  18. package/dist/graph/relationship_data.d.ts +2 -0
  19. package/dist/graph/relationship_data.d.ts.map +1 -1
  20. package/dist/graph/relationship_data.js +8 -1
  21. package/dist/graph/relationship_data.js.map +1 -1
  22. package/dist/graph/relationship_match_collector.js +2 -2
  23. package/dist/graph/relationship_match_collector.js.map +1 -1
  24. package/dist/graph/relationship_reference.d.ts.map +1 -1
  25. package/dist/graph/relationship_reference.js +2 -1
  26. package/dist/graph/relationship_reference.js.map +1 -1
  27. package/dist/index.d.ts +7 -0
  28. package/dist/index.d.ts.map +1 -1
  29. package/dist/index.js +4 -4
  30. package/dist/index.js.map +1 -1
  31. package/dist/parsing/parser.d.ts +1 -0
  32. package/dist/parsing/parser.d.ts.map +1 -1
  33. package/dist/parsing/parser.js +47 -0
  34. package/dist/parsing/parser.js.map +1 -1
  35. package/docs/flowquery.min.js +1 -1
  36. package/flowquery-py/notebooks/TestFlowQuery.ipynb +1 -1
  37. package/flowquery-py/pyproject.toml +45 -2
  38. package/flowquery-py/src/__init__.py +5 -5
  39. package/flowquery-py/src/compute/runner.py +14 -10
  40. package/flowquery-py/src/extensibility.py +8 -8
  41. package/flowquery-py/src/graph/__init__.py +7 -7
  42. package/flowquery-py/src/graph/data.py +36 -19
  43. package/flowquery-py/src/graph/database.py +10 -20
  44. package/flowquery-py/src/graph/node.py +50 -19
  45. package/flowquery-py/src/graph/node_data.py +1 -1
  46. package/flowquery-py/src/graph/node_reference.py +10 -11
  47. package/flowquery-py/src/graph/pattern.py +23 -36
  48. package/flowquery-py/src/graph/pattern_expression.py +13 -11
  49. package/flowquery-py/src/graph/patterns.py +2 -2
  50. package/flowquery-py/src/graph/physical_node.py +4 -3
  51. package/flowquery-py/src/graph/physical_relationship.py +5 -5
  52. package/flowquery-py/src/graph/relationship.py +56 -15
  53. package/flowquery-py/src/graph/relationship_data.py +7 -2
  54. package/flowquery-py/src/graph/relationship_match_collector.py +15 -10
  55. package/flowquery-py/src/graph/relationship_reference.py +4 -4
  56. package/flowquery-py/src/io/command_line.py +13 -14
  57. package/flowquery-py/src/parsing/__init__.py +2 -2
  58. package/flowquery-py/src/parsing/alias_option.py +1 -1
  59. package/flowquery-py/src/parsing/ast_node.py +21 -20
  60. package/flowquery-py/src/parsing/base_parser.py +7 -7
  61. package/flowquery-py/src/parsing/components/__init__.py +3 -3
  62. package/flowquery-py/src/parsing/components/from_.py +3 -1
  63. package/flowquery-py/src/parsing/components/headers.py +2 -2
  64. package/flowquery-py/src/parsing/components/null.py +2 -2
  65. package/flowquery-py/src/parsing/context.py +7 -7
  66. package/flowquery-py/src/parsing/data_structures/associative_array.py +7 -7
  67. package/flowquery-py/src/parsing/data_structures/json_array.py +3 -3
  68. package/flowquery-py/src/parsing/data_structures/key_value_pair.py +4 -4
  69. package/flowquery-py/src/parsing/data_structures/lookup.py +2 -2
  70. package/flowquery-py/src/parsing/data_structures/range_lookup.py +2 -2
  71. package/flowquery-py/src/parsing/expressions/__init__.py +16 -16
  72. package/flowquery-py/src/parsing/expressions/expression.py +16 -13
  73. package/flowquery-py/src/parsing/expressions/expression_map.py +9 -9
  74. package/flowquery-py/src/parsing/expressions/f_string.py +3 -3
  75. package/flowquery-py/src/parsing/expressions/identifier.py +4 -3
  76. package/flowquery-py/src/parsing/expressions/number.py +3 -3
  77. package/flowquery-py/src/parsing/expressions/operator.py +16 -16
  78. package/flowquery-py/src/parsing/expressions/reference.py +3 -3
  79. package/flowquery-py/src/parsing/expressions/string.py +2 -2
  80. package/flowquery-py/src/parsing/functions/__init__.py +17 -17
  81. package/flowquery-py/src/parsing/functions/aggregate_function.py +8 -8
  82. package/flowquery-py/src/parsing/functions/async_function.py +12 -9
  83. package/flowquery-py/src/parsing/functions/avg.py +4 -4
  84. package/flowquery-py/src/parsing/functions/collect.py +6 -6
  85. package/flowquery-py/src/parsing/functions/function.py +6 -6
  86. package/flowquery-py/src/parsing/functions/function_factory.py +31 -34
  87. package/flowquery-py/src/parsing/functions/function_metadata.py +10 -11
  88. package/flowquery-py/src/parsing/functions/functions.py +14 -6
  89. package/flowquery-py/src/parsing/functions/join.py +3 -3
  90. package/flowquery-py/src/parsing/functions/keys.py +3 -3
  91. package/flowquery-py/src/parsing/functions/predicate_function.py +8 -7
  92. package/flowquery-py/src/parsing/functions/predicate_sum.py +12 -7
  93. package/flowquery-py/src/parsing/functions/rand.py +2 -2
  94. package/flowquery-py/src/parsing/functions/range_.py +9 -4
  95. package/flowquery-py/src/parsing/functions/replace.py +2 -2
  96. package/flowquery-py/src/parsing/functions/round_.py +2 -2
  97. package/flowquery-py/src/parsing/functions/size.py +2 -2
  98. package/flowquery-py/src/parsing/functions/split.py +9 -4
  99. package/flowquery-py/src/parsing/functions/stringify.py +3 -3
  100. package/flowquery-py/src/parsing/functions/sum.py +4 -4
  101. package/flowquery-py/src/parsing/functions/to_json.py +2 -2
  102. package/flowquery-py/src/parsing/functions/type_.py +3 -3
  103. package/flowquery-py/src/parsing/functions/value_holder.py +1 -1
  104. package/flowquery-py/src/parsing/logic/__init__.py +2 -2
  105. package/flowquery-py/src/parsing/logic/case.py +0 -1
  106. package/flowquery-py/src/parsing/logic/when.py +3 -1
  107. package/flowquery-py/src/parsing/operations/__init__.py +10 -10
  108. package/flowquery-py/src/parsing/operations/aggregated_return.py +3 -5
  109. package/flowquery-py/src/parsing/operations/aggregated_with.py +4 -4
  110. package/flowquery-py/src/parsing/operations/call.py +6 -7
  111. package/flowquery-py/src/parsing/operations/create_node.py +5 -4
  112. package/flowquery-py/src/parsing/operations/create_relationship.py +5 -4
  113. package/flowquery-py/src/parsing/operations/group_by.py +18 -16
  114. package/flowquery-py/src/parsing/operations/load.py +21 -19
  115. package/flowquery-py/src/parsing/operations/match.py +8 -7
  116. package/flowquery-py/src/parsing/operations/operation.py +3 -3
  117. package/flowquery-py/src/parsing/operations/projection.py +6 -6
  118. package/flowquery-py/src/parsing/operations/return_op.py +9 -5
  119. package/flowquery-py/src/parsing/operations/unwind.py +3 -2
  120. package/flowquery-py/src/parsing/operations/where.py +9 -7
  121. package/flowquery-py/src/parsing/operations/with_op.py +2 -2
  122. package/flowquery-py/src/parsing/parser.py +104 -57
  123. package/flowquery-py/src/parsing/token_to_node.py +2 -2
  124. package/flowquery-py/src/tokenization/__init__.py +4 -4
  125. package/flowquery-py/src/tokenization/keyword.py +1 -1
  126. package/flowquery-py/src/tokenization/operator.py +1 -1
  127. package/flowquery-py/src/tokenization/string_walker.py +4 -4
  128. package/flowquery-py/src/tokenization/symbol.py +1 -1
  129. package/flowquery-py/src/tokenization/token.py +11 -11
  130. package/flowquery-py/src/tokenization/token_mapper.py +10 -9
  131. package/flowquery-py/src/tokenization/token_type.py +1 -1
  132. package/flowquery-py/src/tokenization/tokenizer.py +19 -19
  133. package/flowquery-py/src/tokenization/trie.py +18 -17
  134. package/flowquery-py/src/utils/__init__.py +1 -1
  135. package/flowquery-py/src/utils/object_utils.py +3 -3
  136. package/flowquery-py/src/utils/string_utils.py +12 -12
  137. package/flowquery-py/tests/compute/test_runner.py +205 -1
  138. package/flowquery-py/tests/parsing/test_parser.py +41 -0
  139. package/flowquery-vscode/flowQueryEngine/flowquery.min.js +1 -1
  140. package/package.json +1 -1
  141. package/src/graph/data.ts +35 -19
  142. package/src/graph/node.ts +23 -0
  143. package/src/graph/node_data.ts +1 -1
  144. package/src/graph/relationship.ts +37 -5
  145. package/src/graph/relationship_data.ts +8 -1
  146. package/src/graph/relationship_match_collector.ts +1 -1
  147. package/src/graph/relationship_reference.ts +2 -1
  148. package/src/index.ts +1 -0
  149. package/src/parsing/parser.ts +47 -0
  150. package/tests/compute/runner.test.ts +178 -0
  151. package/tests/parsing/parser.test.ts +32 -0
package/flowquery-py/src/tokenization/tokenizer.py +19 -19
@@ -1,6 +1,6 @@
 """Tokenizes FlowQuery input strings into a sequence of tokens."""
 
-from typing import List, Optional, Iterator, Callable
+from typing import Callable, Iterator, List, Optional
 
 from ..utils.string_utils import StringUtils
 from .keyword import Keyword
@@ -13,11 +13,11 @@ from .token_mapper import TokenMapper
 
 class Tokenizer:
     """Tokenizes FlowQuery input strings into a sequence of tokens.
-
+
     The tokenizer performs lexical analysis, breaking down the input text into
     meaningful tokens such as keywords, identifiers, operators, strings, numbers,
     and symbols. It handles comments, whitespace, and f-strings.
-
+
     Example:
         tokenizer = Tokenizer("WITH x = 1 RETURN x")
         tokens = tokenizer.tokenize()
@@ -25,7 +25,7 @@ class Tokenizer:
 
     def __init__(self, input_: str):
         """Creates a new Tokenizer instance for the given input.
-
+
         Args:
             input_: The FlowQuery input string to tokenize
         """
@@ -36,16 +36,16 @@ class Tokenizer:
 
     def tokenize(self) -> List[Token]:
         """Tokenizes the input string into an array of tokens.
-
+
         Returns:
             An array of Token objects representing the tokenized input
-
+
         Raises:
             ValueError: If an unrecognized token is encountered
         """
         tokens: List[Token] = []
         last: Optional[Token] = None
-
+
         while not self._walker.is_at_end:
             tokens.extend(self._f_string())
             last = self._get_last_non_whitespace_or_non_comment_token(tokens) or last
@@ -54,7 +54,7 @@ class Tokenizer:
                 raise ValueError(f"Unrecognized token at position {self._walker.position}")
             token.position = self._walker.position
             tokens.append(token)
-
+
         return tokens
 
     def _get_last_non_whitespace_or_non_comment_token(self, tokens: List[Token]) -> Optional[Token]:
@@ -97,9 +97,9 @@ class Tokenizer:
     def _identifier(self) -> Optional[Token]:
         start_position = self._walker.position
         if self._walker.check_for_under_score() or self._walker.check_for_letter():
-            while (not self._walker.is_at_end and
-                   (self._walker.check_for_letter() or
-                    self._walker.check_for_digit() or
+            while (not self._walker.is_at_end and
+                   (self._walker.check_for_letter() or
+                    self._walker.check_for_digit() or
                     self._walker.check_for_under_score())):
                 pass
             return Token.IDENTIFIER(self._walker.get_string(start_position))
@@ -110,7 +110,7 @@ class Tokenizer:
         quote_char = self._walker.check_for_quote()
         if quote_char is None:
             return None
-
+
         while not self._walker.is_at_end:
             if self._walker.escaped(quote_char):
                 self._walker.move_next()
@@ -122,32 +122,32 @@ class Tokenizer:
                     return Token.BACKTICK_STRING(value, quote_char)
                 return Token.STRING(value, quote_char)
             self._walker.move_next()
-
+
         raise ValueError(f"Unterminated string at position {start_position}")
 
     def _f_string(self) -> Iterator[Token]:
         if not self._walker.check_for_f_string_start():
             return
-
+
         self._walker.move_next() # skip the f
         position = self._walker.position
         quote_char = self._walker.check_for_quote()
         if quote_char is None:
             return
-
+
         while not self._walker.is_at_end:
             if self._walker.escaped(quote_char) or self._walker.escaped_brace():
                 self._walker.move_next()
                 self._walker.move_next()
                 continue
-
+
             if self._walker.opening_brace():
                 yield Token.F_STRING(self._walker.get_string(position), quote_char)
                 position = self._walker.position
                 yield Token.OPENING_BRACE()
                 self._walker.move_next() # skip the opening brace
                 position = self._walker.position
-
+
                 while not self._walker.is_at_end and not self._walker.closing_brace():
                     token = self._get_next_token()
                     if token is not None:
@@ -159,11 +159,11 @@ class Tokenizer:
                 self._walker.move_next() # skip the closing brace
                 position = self._walker.position
                 break
-
+
             if self._walker.check_for_string(quote_char):
                 yield Token.F_STRING(self._walker.get_string(position), quote_char)
                 return
-
+
             self._walker.move_next()
 
     def _whitespace(self) -> Optional[Token]:
package/flowquery-py/src/tokenization/trie.py +18 -17
@@ -1,6 +1,7 @@
 """Trie (prefix tree) data structure for efficient keyword and operator lookup."""
 
 from __future__ import annotations
+
 from typing import TYPE_CHECKING, Optional
 
 if TYPE_CHECKING:
@@ -9,12 +10,12 @@ if TYPE_CHECKING:
 
 class TrieNode:
     """Represents a node in a Trie data structure.
-
+
     Each node can have children nodes (one per character) and may contain a token
     if the path to this node represents a complete word.
     """
 
-    def __init__(self):
+    def __init__(self) -> None:
         self._children: dict[str, TrieNode] = {}
         self._token: Optional[Token] = None
 
@@ -43,59 +44,59 @@ class TrieNode:
 
 class Trie:
     """Trie (prefix tree) data structure for efficient keyword and operator lookup.
-
+
     Used during tokenization to quickly match input strings against known keywords
     and operators. Supports case-insensitive matching and tracks the longest match found.
-
+
     Example:
         trie = Trie()
         trie.insert(Token.WITH)
         found = trie.find("WITH")
     """
 
-    def __init__(self):
+    def __init__(self) -> None:
         self._root = TrieNode()
         self._max_length = 0
         self._last_found: Optional[str] = None
 
     def insert(self, token: Token) -> None:
         """Inserts a token into the trie.
-
+
         Args:
             token: The token to insert
-
+
         Raises:
             ValueError: If the token value is None or empty
         """
         if token.value is None or len(token.value) == 0:
             raise ValueError("Token value cannot be null or empty")
-
+
         current_node = self._root
         for char in token.value:
             current_node = current_node.map(char.lower())
-
+
         if len(token.value) > self._max_length:
             self._max_length = len(token.value)
-
+
         current_node.token = token
 
     def find(self, value: str) -> Optional[Token]:
         """Finds a token by searching for the longest matching prefix in the trie.
-
+
         Args:
             value: The string value to search for
-
+
         Returns:
             The token if found, None otherwise
         """
         if len(value) == 0:
             return None
-
+
         index = 0
         current: Optional[TrieNode] = None
         found: Optional[Token] = None
         self._last_found = None
-
+
         while True:
             next_node = (current or self._root).retrieve(value[index].lower())
             if next_node is None:
@@ -107,17 +108,17 @@ class Trie:
             index += 1
             if index >= len(value) or index > self._max_length:
                 break
-
+
             if current is not None and current.is_end_of_word():
                 found = current.token
                 self._last_found = value[:index]
-
+
         return found
 
     @property
     def last_found(self) -> Optional[str]:
         """Gets the last matched string from the most recent find operation.
-
+
         Returns:
             The last found string, or None if no match was found
         """
package/flowquery-py/src/utils/__init__.py +1 -1
@@ -1,6 +1,6 @@
 """Utils module for FlowQuery."""
 
-from .string_utils import StringUtils
 from .object_utils import ObjectUtils
+from .string_utils import StringUtils
 
 __all__ = ["StringUtils", "ObjectUtils"]
package/flowquery-py/src/utils/object_utils.py +3 -3
@@ -7,13 +7,13 @@ class ObjectUtils:
     """Utility class for object-related operations."""
 
     @staticmethod
-    def is_instance_of_any(obj: Any, classes: List[Type]) -> bool:
+    def is_instance_of_any(obj: Any, classes: List[Type[Any]]) -> bool:
         """Checks if an object is an instance of any of the provided classes.
-
+
         Args:
             obj: The object to check
             classes: Array of class constructors to test against
-
+
         Returns:
             True if the object is an instance of any class, False otherwise
         """
package/flowquery-py/src/utils/string_utils.py +12 -12
@@ -3,11 +3,11 @@
 
 class StringUtils:
     """Utility class for string manipulation and validation.
-
+
     Provides methods for handling quoted strings, comments, escape sequences,
     and identifier validation.
     """
-
+
     quotes = ['"', "'", '`']
     letters = 'abcdefghijklmnopqrstuvwxyz'
     digits = '0123456789'
@@ -17,10 +17,10 @@ class StringUtils:
     @staticmethod
     def unquote(s: str) -> str:
         """Removes surrounding quotes from a string.
-
+
         Args:
             s: The string to unquote
-
+
         Returns:
             The unquoted string
         """
@@ -41,10 +41,10 @@ class StringUtils:
     @staticmethod
     def uncomment(s: str) -> str:
         """Removes comment markers from a string.
-
+
         Args:
             s: The comment string
-
+
         Returns:
             The string without comment markers
         """
@@ -59,11 +59,11 @@ class StringUtils:
     @staticmethod
     def remove_escaped_quotes(s: str, quote_char: str) -> str:
         """Removes escape sequences before quotes in a string.
-
+
         Args:
             s: The string to process
             quote_char: The quote character that was escaped
-
+
         Returns:
             The string with escape sequences removed
         """
@@ -79,10 +79,10 @@ class StringUtils:
     @staticmethod
     def remove_escaped_braces(s: str) -> str:
         """Removes escaped braces ({{ and }}) from f-strings.
-
+
         Args:
             s: The string to process
-
+
         Returns:
             The string with escaped braces resolved
         """
@@ -98,10 +98,10 @@ class StringUtils:
     @staticmethod
     def can_be_identifier(s: str) -> bool:
         """Checks if a string is a valid identifier.
-
+
         Args:
             s: The string to validate
-
+
         Returns:
             True if the string can be used as an identifier, false otherwise
         """
package/flowquery-py/tests/compute/test_runner.py +205 -1
@@ -1335,4 +1335,208 @@ class TestRunner:
         results = match.results
         # With * meaning 0+ hops, Employee 1 (CEO) also matches itself (zero-hop)
         # Employee 1→1 (zero-hop), 2→1, 3→2→1, 4→2→1 = 4 results
-        assert len(results) == 4
+        assert len(results) == 4
+
+    @pytest.mark.asyncio
+    async def test_match_with_leftward_relationship_direction(self):
+        """Test match with leftward relationship direction."""
+        await Runner(
+            """
+            CREATE VIRTUAL (:DirPerson) AS {
+                unwind [
+                    {id: 1, name: 'Person 1'},
+                    {id: 2, name: 'Person 2'},
+                    {id: 3, name: 'Person 3'}
+                ] as record
+                RETURN record.id as id, record.name as name
+            }
+            """
+        ).run()
+        await Runner(
+            """
+            CREATE VIRTUAL (:DirPerson)-[:REPORTS_TO]-(:DirPerson) AS {
+                unwind [
+                    {left_id: 2, right_id: 1},
+                    {left_id: 3, right_id: 1}
+                ] as record
+                RETURN record.left_id as left_id, record.right_id as right_id
+            }
+            """
+        ).run()
+        # Rightward: left_id -> right_id (2->1, 3->1)
+        right_match = Runner(
+            """
+            MATCH (a:DirPerson)-[:REPORTS_TO]->(b:DirPerson)
+            RETURN a.name AS employee, b.name AS manager
+            """
+        )
+        await right_match.run()
+        right_results = right_match.results
+        assert len(right_results) == 2
+        assert right_results[0] == {"employee": "Person 2", "manager": "Person 1"}
+        assert right_results[1] == {"employee": "Person 3", "manager": "Person 1"}
+
+        # Leftward: right_id -> left_id (1->2, 1->3) - reverse traversal
+        left_match = Runner(
+            """
+            MATCH (m:DirPerson)<-[:REPORTS_TO]-(e:DirPerson)
+            RETURN m.name AS manager, e.name AS employee
+            """
+        )
+        await left_match.run()
+        left_results = left_match.results
+        assert len(left_results) == 2
+        assert left_results[0] == {"manager": "Person 1", "employee": "Person 2"}
+        assert left_results[1] == {"manager": "Person 1", "employee": "Person 3"}
+
+    @pytest.mark.asyncio
+    async def test_match_with_leftward_direction_swapped_data(self):
+        """Test match with leftward direction produces same results as rightward with swapped data."""
+        await Runner(
+            """
+            CREATE VIRTUAL (:DirCity) AS {
+                unwind [
+                    {id: 1, name: 'New York'},
+                    {id: 2, name: 'Boston'},
+                    {id: 3, name: 'Chicago'}
+                ] as record
+                RETURN record.id as id, record.name as name
+            }
+            """
+        ).run()
+        await Runner(
+            """
+            CREATE VIRTUAL (:DirCity)-[:ROUTE]-(:DirCity) AS {
+                unwind [
+                    {left_id: 1, right_id: 2},
+                    {left_id: 1, right_id: 3}
+                ] as record
+                RETURN record.left_id as left_id, record.right_id as right_id
+            }
+            """
+        ).run()
+        # Leftward from destination: find where right_id matches, follow left_id
+        match = Runner(
+            """
+            MATCH (dest:DirCity)<-[:ROUTE]-(origin:DirCity)
+            RETURN dest.name AS destination, origin.name AS origin
+            """
+        )
+        await match.run()
+        results = match.results
+        assert len(results) == 2
+        assert results[0] == {"destination": "Boston", "origin": "New York"}
+        assert results[1] == {"destination": "Chicago", "origin": "New York"}
+
+    @pytest.mark.asyncio
+    async def test_match_with_leftward_variable_length(self):
+        """Test match with leftward variable-length relationships."""
+        await Runner(
+            """
+            CREATE VIRTUAL (:DirVarPerson) AS {
+                unwind [
+                    {id: 1, name: 'Person 1'},
+                    {id: 2, name: 'Person 2'},
+                    {id: 3, name: 'Person 3'}
+                ] as record
+                RETURN record.id as id, record.name as name
+            }
+            """
+        ).run()
+        await Runner(
+            """
+            CREATE VIRTUAL (:DirVarPerson)-[:MANAGES]-(:DirVarPerson) AS {
+                unwind [
+                    {left_id: 1, right_id: 2},
+                    {left_id: 2, right_id: 3}
+                ] as record
+                RETURN record.left_id as left_id, record.right_id as right_id
+            }
+            """
+        ).run()
+        # Leftward variable-length: traverse from right_id to left_id
+        match = Runner(
+            """
+            MATCH (a:DirVarPerson)<-[:MANAGES*]-(b:DirVarPerson)
+            RETURN a.name AS name1, b.name AS name2
+            """
+        )
+        await match.run()
+        results = match.results
+        # Leftward indexes on right_id. find(id) looks up right_id=id, follows left_id.
+        # Person 1: zero-hop only (no right_id=1)
+        # Person 2: zero-hop, then left_id=1 (1 hop)
+        # Person 3: zero-hop, then left_id=2 (1 hop), then left_id=1 (2 hops)
+        assert len(results) == 6
+        assert results[0] == {"name1": "Person 1", "name2": "Person 1"}
+        assert results[1] == {"name1": "Person 2", "name2": "Person 2"}
+        assert results[2] == {"name1": "Person 2", "name2": "Person 1"}
+        assert results[3] == {"name1": "Person 3", "name2": "Person 3"}
+        assert results[4] == {"name1": "Person 3", "name2": "Person 2"}
+        assert results[5] == {"name1": "Person 3", "name2": "Person 1"}
+
+    @pytest.mark.asyncio
+    async def test_match_with_leftward_double_graph_pattern(self):
+        """Test match with leftward double graph pattern."""
+        await Runner(
+            """
+            CREATE VIRTUAL (:DirDoublePerson) AS {
+                unwind [
+                    {id: 1, name: 'Person 1'},
+                    {id: 2, name: 'Person 2'},
+                    {id: 3, name: 'Person 3'},
+                    {id: 4, name: 'Person 4'}
+                ] as record
+                RETURN record.id as id, record.name as name
+            }
+            """
+        ).run()
+        await Runner(
+            """
+            CREATE VIRTUAL (:DirDoublePerson)-[:KNOWS]-(:DirDoublePerson) AS {
+                unwind [
+                    {left_id: 1, right_id: 2},
+                    {left_id: 2, right_id: 3},
+                    {left_id: 3, right_id: 4}
+                ] as record
+                RETURN record.left_id as left_id, record.right_id as right_id
+            }
+            """
+        ).run()
+        # Leftward chain: (c)<-[:KNOWS]-(b)<-[:KNOWS]-(a)
+        match = Runner(
+            """
+            MATCH (c:DirDoublePerson)<-[:KNOWS]-(b:DirDoublePerson)<-[:KNOWS]-(a:DirDoublePerson)
+            RETURN a.name AS name1, b.name AS name2, c.name AS name3
+            """
+        )
+        await match.run()
+        results = match.results
+        assert len(results) == 2
+        assert results[0] == {"name1": "Person 1", "name2": "Person 2", "name3": "Person 3"}
+        assert results[1] == {"name1": "Person 2", "name2": "Person 3", "name3": "Person 4"}
+
+    async def test_match_with_constraints(self):
+        await Runner(
+            """
+            CREATE VIRTUAL (:ConstraintEmployee) AS {
+                unwind [
+                    {id: 1, name: 'Employee 1'},
+                    {id: 2, name: 'Employee 2'},
+                    {id: 3, name: 'Employee 3'},
+                    {id: 4, name: 'Employee 4'}
+                ] as record
+                RETURN record.id as id, record.name as name
+            }
+            """
+        ).run()
+        match = Runner(
+            """
+            match (e:ConstraintEmployee{name:'Employee 1'})
+            return e.name as name
+            """
+        )
+        await match.run()
+        results = match.results
+        assert len(results) == 1
+        assert results[0]["name"] == "Employee 1"
package/flowquery-py/tests/parsing/test_parser.py +41 -0
@@ -5,6 +5,9 @@ from typing import AsyncIterator
 from flowquery.parsing.parser import Parser
 from flowquery.parsing.functions.async_function import AsyncFunction
 from flowquery.parsing.functions.function_metadata import FunctionDef
+from flowquery.parsing.operations.match import Match
+from flowquery.graph.node import Node
+from flowquery.graph.relationship import Relationship
 
 
 # Test async function for CALL operation parsing test
@@ -678,3 +681,41 @@ class TestParser:
         parser = Parser()
         with pytest.raises(Exception, match="PatternExpression must contain at least one NodeReference"):
             parser.parse("MATCH (a:Person) WHERE (:Person)-[:KNOWS]->(:Person) RETURN a")
+
+    def test_node_with_properties(self):
+        """Test node with properties."""
+        parser = Parser()
+        ast = parser.parse("MATCH (a:Person{value: 'hello'}) return a")
+        expected = (
+            "ASTNode\n"
+            "- Match\n"
+            "- Return\n"
+            "-- Expression (a)\n"
+            "--- Reference (a)"
+        )
+        assert ast.print() == expected
+        match_op = ast.first_child()
+        assert isinstance(match_op, Match)
+        node = match_op.patterns[0].chain[0]
+        assert isinstance(node, Node)
+        assert node.properties.get("value") is not None
+        assert node.properties["value"].value() == "hello"
+
+    def test_relationship_with_properties(self):
+        """Test relationship with properties."""
+        parser = Parser()
+        ast = parser.parse("MATCH (:Person)-[r:LIKES{since: 2022}]->(:Food) return a")
+        expected = (
+            "ASTNode\n"
+            "- Match\n"
+            "- Return\n"
+            "-- Expression (a)\n"
+            "--- Reference (a)"
+        )
+        assert ast.print() == expected
+        match_op = ast.first_child()
+        assert isinstance(match_op, Match)
+        relationship = match_op.patterns[0].chain[1]
+        assert isinstance(relationship, Relationship)
+        assert relationship.properties.get("since") is not None
+        assert relationship.properties["since"].value() == 2022
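
The new runner and parser tests above exercise two behaviors added in this release: leftward relationship traversal (`<-[:TYPE]-`) and inline property constraints in MATCH patterns. The sketch below combines the two against the same kind of virtual graph the tests build. It is illustrative only: the Runner import path is inferred from the package layout (flowquery-py/src/compute/runner.py), the :City label and ROUTE data are made up, and combining a property filter with a leftward pattern in a single MATCH is not directly covered by the tests, so treat this as an assumption rather than verified behavior.

# Illustrative sketch only; mirrors the query syntax used in the tests above.
import asyncio

from flowquery.compute.runner import Runner  # assumed import path


async def main() -> None:
    # Virtual node source, as in the tests' CREATE VIRTUAL statements.
    await Runner(
        """
        CREATE VIRTUAL (:City) AS {
            unwind [
                {id: 1, name: 'New York'},
                {id: 2, name: 'Boston'}
            ] as record
            RETURN record.id as id, record.name as name
        }
        """
    ).run()
    # Virtual relationship source keyed by left_id/right_id.
    await Runner(
        """
        CREATE VIRTUAL (:City)-[:ROUTE]-(:City) AS {
            unwind [{left_id: 1, right_id: 2}] as record
            RETURN record.left_id as left_id, record.right_id as right_id
        }
        """
    ).run()
    # Leftward traversal plus an inline property constraint on the bound node.
    match = Runner(
        """
        MATCH (dest:City{name: 'Boston'})<-[:ROUTE]-(origin:City)
        RETURN origin.name AS origin, dest.name AS destination
        """
    )
    await match.run()
    print(match.results)  # e.g. [{'origin': 'New York', 'destination': 'Boston'}]


asyncio.run(main())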