omlish-0.0.0.dev116-py3-none-any.whl → omlish-0.0.0.dev118-py3-none-any.whl

@@ -249,14 +249,14 @@ class Lexer:
     }
 
     def _peek_may_be_valid_unquoted_identifier(self) -> bool:
-        if (self._position == self._length - 1):
+        if self._position == self._length - 1:
             return False
         else:
             nxt = self._chars[self._position + 1]
             return nxt in self.START_IDENTIFIER
 
     def _peek_is_next_digit(self) -> bool:
-        if (self._position == self._length - 1):
+        if self._position == self._length - 1:
             return False
         else:
             nxt = self._chars[self._position + 1]
@@ -20,14 +20,48 @@ A few notes on the implementation.
 interestingly enough, creating a token list first is actually faster than consuming from the token iterator one token
 at a time.
 """
+import dataclasses as dc
 import random
 import typing as ta
 
 from ... import check
-from . import ast
-from . import exceptions
-from . import lexer
-from . import visitor
+from .ast import AndExpression
+from .ast import Arithmetic
+from .ast import ArithmeticUnary
+from .ast import Assign
+from .ast import Comparator
+from .ast import CurrentNode
+from .ast import Expref
+from .ast import Field
+from .ast import FilterProjection
+from .ast import Flatten
+from .ast import FunctionExpression
+from .ast import Identity
+from .ast import Index
+from .ast import IndexExpression
+from .ast import KeyValPair
+from .ast import LetExpression
+from .ast import Literal
+from .ast import MultiSelectDict
+from .ast import MultiSelectList
+from .ast import Node
+from .ast import NotExpression
+from .ast import OrExpression
+from .ast import Pipe
+from .ast import Projection
+from .ast import RootNode
+from .ast import Slice
+from .ast import Subexpression
+from .ast import ValueProjection
+from .ast import VariableRef
+from .exceptions import IncompleteExpressionError
+from .exceptions import LexerError
+from .exceptions import ParseError
+from .lexer import Lexer
+from .lexer import Token
+from .visitor import GraphvizVisitor
+from .visitor import Options
+from .visitor import TreeInterpreter
 
 
 class Parser:
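
A minimal sketch (not part of the diff) of how one might verify the docstring's claim above that building the token list up front beats consuming the iterator one token at a time. The `omlish.specs.jmespath` import path and the `tokenize(expr, None)` call are assumptions based on usage elsewhere in this diff.

# Hypothetical micro-benchmark; import path and lexer signature are assumed.
import timeit

from omlish.specs.jmespath.lexer import Lexer  # assumed location of the vendored lexer

EXPR = 'foo.bar[0].baz'

def list_first() -> None:
    # Materialize all tokens, then index into the list.
    tokens = list(Lexer().tokenize(EXPR, None))
    i = 0
    while tokens[i]['type'] != 'eof':
        i += 1

def one_at_a_time() -> None:
    # Pull tokens lazily through the iterator protocol.
    for tok in Lexer().tokenize(EXPR, None):
        if tok['type'] == 'eof':
            break

print('list first:   ', timeit.timeit(list_first, number=10_000))
print('one at a time:', timeit.timeit(one_at_a_time, number=10_000))
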
@@ -83,12 +117,12 @@ class Parser:
     def __init__(self, lookahead: int = 2) -> None:
         super().__init__()
 
-        self._tokenizer: ta.Iterable[lexer.Token] | None = None
-        self._tokens: list[lexer.Token | None] = [None] * lookahead
+        self._tokenizer: ta.Iterable[Token] | None = None
+        self._tokens: list[Token | None] = [None] * lookahead
         self._buffer_size = lookahead
         self._index = 0
 
-    def parse(self, expression: str, options: visitor.Options | None = None) -> 'ParsedResult':
+    def parse(self, expression: str, options: Options | None = None) -> 'ParsedResult':
         cached = self._CACHE.get(expression)
         if cached is not None:
             return cached
@@ -101,24 +135,24 @@ class Parser:
 
         return parsed_result
 
-    def _do_parse(self, expression: str, options: visitor.Options | None = None) -> 'ParsedResult':
+    def _do_parse(self, expression: str, options: Options | None = None) -> 'ParsedResult':
         try:
             return self._parse(expression, options)
 
-        except exceptions.LexerError as e:
+        except LexerError as e:
             e.expression = expression
             raise
 
-        except exceptions.IncompleteExpressionError as e:
+        except IncompleteExpressionError as e:
             e.set_expression(expression)
             raise
 
-        except exceptions.ParseError as e:
+        except ParseError as e:
             e.expression = expression
             raise
 
-    def _parse(self, expression: str, options: visitor.Options | None = None) -> 'ParsedResult':
-        self._tokenizer = lexer.Lexer().tokenize(expression, options)
+    def _parse(self, expression: str, options: Options | None = None) -> 'ParsedResult':
+        self._tokenizer = Lexer().tokenize(expression, options)
         self._tokens = list(self._tokenizer)
         self._index = 0
 
@@ -126,7 +160,7 @@ class Parser:
 
         if self._current_token() != 'eof':
             t = check.not_none(self._lookahead_token(0))
-            raise exceptions.ParseError(
+            raise ParseError(
                 t['start'],
                 t['value'],
                 t['type'],
@@ -135,7 +169,7 @@ class Parser:
 
         return ParsedResult(expression, parsed)
 
-    def _expression(self, binding_power: int = 0) -> ast.Node:
+    def _expression(self, binding_power: int = 0) -> Node:
         left_token = check.not_none(self._lookahead_token(0))
 
         self._advance()
@@ -166,19 +200,19 @@ class Parser:
 
         return left
 
-    def _token_nud_literal(self, token: lexer.Token) -> ast.Node:
-        return ast.literal(token['value'])
+    def _token_nud_literal(self, token: Token) -> Node:
+        return Literal(token['value'])
 
-    def _token_nud_variable(self, token: lexer.Token) -> ast.Node:
-        return ast.variable_ref(token['value'][1:])
+    def _token_nud_variable(self, token: Token) -> Node:
+        return VariableRef(token['value'][1:])
 
-    def _token_nud_unquoted_identifier(self, token: lexer.Token) -> ast.Node:
+    def _token_nud_unquoted_identifier(self, token: Token) -> Node:
         if token['value'] == 'let' and self._current_token() == 'variable':
             return self._parse_let_expression()
         else:
-            return ast.field(token['value'])
+            return Field(token['value'])
 
-    def _parse_let_expression(self) -> ast.Node:
+    def _parse_let_expression(self) -> Node:
         bindings = []
         while True:
             var_token = check.not_none(self._lookahead_token(0))
@@ -187,28 +221,28 @@ class Parser:
             self._advance()
             self._match('assign')
             assign_expr = self._expression()
-            bindings.append(ast.assign(varname, assign_expr))
+            bindings.append(Assign(varname, assign_expr))
             if self._is_in_keyword(check.not_none(self._lookahead_token(0))):
                 self._advance()
                 break
             else:
                 self._match('comma')
         expr = self._expression()
-        return ast.let_expression(bindings, expr)
+        return LetExpression(bindings, expr)
 
-    def _is_in_keyword(self, token: lexer.Token) -> bool:
+    def _is_in_keyword(self, token: Token) -> bool:
         return (
             token['type'] == 'unquoted_identifier' and
             token['value'] == 'in'
         )
 
-    def _token_nud_quoted_identifier(self, token: lexer.Token) -> ast.Node:
-        field = ast.field(token['value'])
+    def _token_nud_quoted_identifier(self, token: Token) -> Node:
+        field = Field(token['value'])
 
         # You can't have a quoted identifier as a function name.
         if self._current_token() == 'lparen':
             t = check.not_none(self._lookahead_token(0))
-            raise exceptions.ParseError(
+            raise ParseError(
                 0,
                 t['value'],
                 t['type'],
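
The comment above encodes a JMESPath rule: a quoted identifier is always a key lookup, never a function name. A hedged illustration against the module-level `compile` defined at the end of this diff (the import paths are assumptions):

# Hypothetical illustration; import paths are assumed.
from omlish.specs.jmespath import parser  # assumed location of this module
from omlish.specs.jmespath.exceptions import ParseError  # assumed location

parser.compile('abs(foo)')        # unquoted identifier: parsed as a function call

try:
    parser.compile('"abs"(foo)')  # quoted identifier: rejected as a function name
except ParseError as e:
    print('rejected:', e)
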
@@ -217,73 +251,74 @@ class Parser:
 
         return field
 
-    def _token_nud_star(self, token: lexer.Token) -> ast.Node:
-        left = ast.identity()
+    def _token_nud_star(self, token: Token) -> Node:
+        left = Identity()
+        right: Node
         if self._current_token() == 'rbracket':
-            right = ast.identity()
+            right = Identity()
         else:
             right = self._parse_projection_rhs(self.BINDING_POWER['star'])
-        return ast.value_projection(left, right)
+        return ValueProjection(left, right)
 
-    def _token_nud_filter(self, token: lexer.Token) -> ast.Node:
-        return self._token_led_filter(ast.identity())
+    def _token_nud_filter(self, token: Token) -> Node:
+        return self._token_led_filter(Identity())
 
-    def _token_nud_lbrace(self, token: lexer.Token) -> ast.Node:
+    def _token_nud_lbrace(self, token: Token) -> Node:
         return self._parse_multi_select_hash()
 
-    def _token_nud_lparen(self, token: lexer.Token) -> ast.Node:
+    def _token_nud_lparen(self, token: Token) -> Node:
         expression = self._expression()
         self._match('rparen')
         return expression
 
-    def _token_nud_minus(self, token: lexer.Token) -> ast.Node:
+    def _token_nud_minus(self, token: Token) -> Node:
         return self._parse_arithmetic_unary(token)
 
-    def _token_nud_plus(self, token: lexer.Token) -> ast.Node:
+    def _token_nud_plus(self, token: Token) -> Node:
         return self._parse_arithmetic_unary(token)
 
-    def _token_nud_flatten(self, token: lexer.Token) -> ast.Node:
-        left = ast.flatten(ast.identity())
+    def _token_nud_flatten(self, token: Token) -> Node:
+        left = Flatten(Identity())
         right = self._parse_projection_rhs(
             self.BINDING_POWER['flatten'])
-        return ast.projection(left, right)
+        return Projection(left, right)
 
-    def _token_nud_not(self, token: lexer.Token) -> ast.Node:
+    def _token_nud_not(self, token: Token) -> Node:
         expr = self._expression(self.BINDING_POWER['not'])
-        return ast.not_expression(expr)
+        return NotExpression(expr)
 
-    def _token_nud_lbracket(self, token: lexer.Token) -> ast.Node:
+    def _token_nud_lbracket(self, token: Token) -> Node:
         if self._current_token() in ['number', 'colon']:
             right = self._parse_index_expression()
             # We could optimize this and remove the identity() node. We don't really need an index_expression node, we
             # can just emit an index node here if we're not dealing with a slice.
-            return self._project_if_slice(ast.identity(), right)
+            return self._project_if_slice(Identity(), right)
 
         elif self._current_token() == 'star' and self._lookahead(1) == 'rbracket':
             self._advance()
             self._advance()
             right = self._parse_projection_rhs(self.BINDING_POWER['star'])
-            return ast.projection(ast.identity(), right)
+            return Projection(Identity(), right)
 
         else:
             return self._parse_multi_select_list()
 
-    def _parse_index_expression(self) -> ast.Node:
+    def _parse_index_expression(self) -> Node:
         # We're here:
         # [<current>
         #  ^
         #  | current token
-        if (self._lookahead(0) == 'colon' or self._lookahead(1) == 'colon'):
+        if self._lookahead(0) == 'colon' or self._lookahead(1) == 'colon':
             return self._parse_slice_expression()
 
         else:
             # Parse the syntax [number]
-            node = ast.index(check.not_none(self._lookahead_token(0))['value'])
+            node = Index(check.not_none(self._lookahead_token(0))['value'])
             self._advance()
             self._match('rbracket')
             return node
 
-    def _parse_slice_expression(self) -> ast.Node:
+    def _parse_slice_expression(self) -> Node:
         # [start:end:step]
         # Where start, end, and step are optional. The last colon is optional as well.
         parts = [None, None, None]
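
The `[start:end:step]` grammar noted above follows Python slice semantics, with each omitted part left as None in `parts` before being handed to `Slice(*parts)`. A quick sketch (the import path is an assumption):

# Hypothetical slice examples; the import path is assumed.
from omlish.specs.jmespath import parser  # assumed location of this module

data = [0, 1, 2, 3, 4]

assert parser.search('[1:3]', data) == [1, 2]          # parts = [1, 3, None]
assert parser.search('[:2]', data) == [0, 1]           # parts = [None, 2, None]
assert parser.search('[::2]', data) == [0, 2, 4]       # parts = [None, None, 2]
assert parser.search('[::-1]', data) == [4, 3, 2, 1, 0]
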
@@ -306,60 +341,59 @@ class Parser:
             current_token = self._current_token()
 
         self._match('rbracket')
-        return ast.slice(*parts)
+        return Slice(*parts)
 
-    def _token_nud_current(self, token: lexer.Token) -> ast.Node:
-        return ast.current_node()
+    def _token_nud_current(self, token: Token) -> Node:
+        return CurrentNode()
 
-    def _token_nud_root(self, token: lexer.Token) -> ast.Node:
-        return ast.root_node()
+    def _token_nud_root(self, token: Token) -> Node:
+        return RootNode()
 
-    def _token_nud_expref(self, token: lexer.Token) -> ast.Node:
+    def _token_nud_expref(self, token: Token) -> Node:
         expression = self._expression(self.BINDING_POWER['expref'])
-        return ast.expref(expression)
+        return Expref(expression)
 
-    def _token_led_dot(self, left: ast.Node) -> ast.Node:
+    def _token_led_dot(self, left: Node) -> Node:
         if self._current_token() != 'star':
             right = self._parse_dot_rhs(self.BINDING_POWER['dot'])
-            if left['type'] == 'subexpression':
-                left['children'].append(right)
-                return left
+            if isinstance(left, Subexpression):
+                return dc.replace(left, children_nodes=[*left.children_nodes, right])
 
             else:
-                return ast.subexpression([left, right])
+                return Subexpression([left, right])
 
         else:
             # We're creating a projection.
             self._advance()
             right = self._parse_projection_rhs(self.BINDING_POWER['dot'])
-            return ast.value_projection(left, right)
+            return ValueProjection(left, right)
 
-    def _token_led_pipe(self, left: ast.Node) -> ast.Node:
+    def _token_led_pipe(self, left: Node) -> Node:
         right = self._expression(self.BINDING_POWER['pipe'])
-        return ast.pipe(left, right)
+        return Pipe(left, right)
 
-    def _token_led_or(self, left: ast.Node) -> ast.Node:
+    def _token_led_or(self, left: Node) -> Node:
         right = self._expression(self.BINDING_POWER['or'])
-        return ast.or_expression(left, right)
+        return OrExpression(left, right)
 
-    def _token_led_and(self, left: ast.Node) -> ast.Node:
+    def _token_led_and(self, left: Node) -> Node:
         right = self._expression(self.BINDING_POWER['and'])
-        return ast.and_expression(left, right)
+        return AndExpression(left, right)
 
-    def _token_led_lparen(self, left: ast.Node) -> ast.Node:
-        if left['type'] != 'field':
+    def _token_led_lparen(self, left: Node) -> Node:
+        if not isinstance(left, Field):
             # 0 - first func arg or closing paren.
             # -1 - '(' token
             # -2 - invalid function "name".
             prev_t = check.not_none(self._lookahead_token(-2))
-            raise exceptions.ParseError(
+            raise ParseError(
                 prev_t['start'],
                 prev_t['value'],
                 prev_t['type'],
                 f"Invalid function name '{prev_t['value']}'",
             )
 
-        name = left['value']
+        name = left.name
         args = []
         while self._current_token() != 'rparen':
             expression = self._expression()
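
The change from `left['children'].append(right)` to `dc.replace(...)` above reflects the wider move in this diff from dict-shaped AST nodes to dataclasses: rather than mutating a child list in place, the parser now returns an updated copy. A self-contained sketch of the pattern, using a toy stand-in for the package's real `Subexpression`:

# Toy stand-in illustrating the dc.replace() update pattern used above;
# the real Subexpression lives in the package's .ast module.
import dataclasses as dc

@dc.dataclass(frozen=True)
class Subexpression:
    children_nodes: list  # field name matches the diff

left = Subexpression(children_nodes=['a', 'b'])
updated = dc.replace(left, children_nodes=[*left.children_nodes, 'c'])

assert left.children_nodes == ['a', 'b']          # original node is untouched
assert updated.children_nodes == ['a', 'b', 'c']  # copy carries the appended child
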
@@ -368,72 +402,72 @@ class Parser:
             args.append(expression)
         self._match('rparen')
 
-        function_node = ast.function_expression(name, args)
+        function_node = FunctionExpression(name, args)
         return function_node
 
-    def _token_led_filter(self, left: ast.Node) -> ast.Node:
+    def _token_led_filter(self, left: Node) -> Node:
         # Filters are projections.
         condition = self._expression(0)
         self._match('rbracket')
+        right: Node
         if self._current_token() == 'flatten':
-            right = ast.identity()
+            right = Identity()
         else:
             right = self._parse_projection_rhs(self.BINDING_POWER['filter'])
-        return ast.filter_projection(left, right, condition)
+        return FilterProjection(left, right, condition)
 
-    def _token_led_eq(self, left: ast.Node) -> ast.Node:
+    def _token_led_eq(self, left: Node) -> Node:
         return self._parse_comparator(left, 'eq')
 
-    def _token_led_ne(self, left: ast.Node) -> ast.Node:
+    def _token_led_ne(self, left: Node) -> Node:
         return self._parse_comparator(left, 'ne')
 
-    def _token_led_gt(self, left: ast.Node) -> ast.Node:
+    def _token_led_gt(self, left: Node) -> Node:
         return self._parse_comparator(left, 'gt')
 
-    def _token_led_gte(self, left: ast.Node) -> ast.Node:
+    def _token_led_gte(self, left: Node) -> Node:
         return self._parse_comparator(left, 'gte')
 
-    def _token_led_lt(self, left: ast.Node) -> ast.Node:
+    def _token_led_lt(self, left: Node) -> Node:
         return self._parse_comparator(left, 'lt')
 
-    def _token_led_lte(self, left: ast.Node) -> ast.Node:
+    def _token_led_lte(self, left: Node) -> Node:
         return self._parse_comparator(left, 'lte')
 
-    def _token_led_div(self, left: ast.Node) -> ast.Node:
+    def _token_led_div(self, left: Node) -> Node:
         return self._parse_arithmetic(left, 'div')
 
-    def _token_led_divide(self, left: ast.Node) -> ast.Node:
+    def _token_led_divide(self, left: Node) -> Node:
         return self._parse_arithmetic(left, 'divide')
 
-    def _token_led_minus(self, left: ast.Node) -> ast.Node:
+    def _token_led_minus(self, left: Node) -> Node:
         return self._parse_arithmetic(left, 'minus')
 
-    def _token_led_modulo(self, left: ast.Node) -> ast.Node:
+    def _token_led_modulo(self, left: Node) -> Node:
         return self._parse_arithmetic(left, 'modulo')
 
-    def _token_led_multiply(self, left: ast.Node) -> ast.Node:
+    def _token_led_multiply(self, left: Node) -> Node:
         return self._parse_arithmetic(left, 'multiply')
 
-    def _token_led_plus(self, left: ast.Node) -> ast.Node:
+    def _token_led_plus(self, left: Node) -> Node:
         return self._parse_arithmetic(left, 'plus')
 
-    def _token_led_star(self, left: ast.Node) -> ast.Node:
+    def _token_led_star(self, left: Node) -> Node:
         return self._parse_arithmetic(left, 'multiply')
 
-    def _token_led_flatten(self, left: ast.Node) -> ast.Node:
-        left = ast.flatten(left)
+    def _token_led_flatten(self, left: Node) -> Node:
+        left = Flatten(left)
         right = self._parse_projection_rhs(self.BINDING_POWER['flatten'])
-        return ast.projection(left, right)
+        return Projection(left, right)
 
-    def _token_led_lbracket(self, left: ast.Node) -> ast.Node:
+    def _token_led_lbracket(self, left: Node) -> Node:
         token = check.not_none(self._lookahead_token(0))
         if token['type'] in ['number', 'colon']:
             right = self._parse_index_expression()
-            if left['type'] == 'index_expression':
+            if isinstance(left, IndexExpression):
                 # Optimization: if the left node is an index expr, we can avoid creating another node and instead just
                 # add the right node as a child of the left.
-                left['children'].append(right)
-                return left
+                return dc.replace(left, nodes=[*left.nodes, right])
 
             else:
                 return self._project_if_slice(left, right)
@@ -443,32 +477,32 @@ class Parser:
             self._match('star')
             self._match('rbracket')
             right = self._parse_projection_rhs(self.BINDING_POWER['star'])
-            return ast.projection(left, right)
+            return Projection(left, right)
 
-    def _project_if_slice(self, left: ast.Node, right: ast.Node) -> ast.Node:
-        index_expr = ast.index_expression([left, right])
-        if right['type'] == 'slice':
-            return ast.projection(
+    def _project_if_slice(self, left: Node, right: Node) -> Node:
+        index_expr = IndexExpression([left, right])
+        if isinstance(right, Slice):
+            return Projection(
                 index_expr,
                 self._parse_projection_rhs(self.BINDING_POWER['star']),
             )
         else:
             return index_expr
 
-    def _parse_comparator(self, left: ast.Node, comparator: str) -> ast.Node:
+    def _parse_comparator(self, left: Node, comparator: str) -> Node:
         right = self._expression(self.BINDING_POWER[comparator])
-        return ast.comparator(comparator, left, right)
+        return Comparator(comparator, left, right)
 
-    def _parse_arithmetic_unary(self, token: lexer.Token) -> ast.Node:
+    def _parse_arithmetic_unary(self, token: Token) -> Node:
         expression = self._expression(self.BINDING_POWER[token['type']])
-        return ast.arithmetic_unary(token['type'], expression)
+        return ArithmeticUnary(token['type'], expression)
 
-    def _parse_arithmetic(self, left: ast.Node, operator: str) -> ast.Node:
+    def _parse_arithmetic(self, left: Node, operator: str) -> Node:
         right = self._expression(self.BINDING_POWER[operator])
-        return ast.arithmetic(operator, left, right)
+        return Arithmetic(operator, left, right)
 
-    def _parse_multi_select_list(self) -> ast.Node:
-        expressions: list[ast.Node] = []
+    def _parse_multi_select_list(self) -> Node:
+        expressions: list[Node] = []
         while True:
             expression = self._expression()
             expressions.append(expression)
@@ -477,10 +511,10 @@ class Parser:
             else:
                 self._match('comma')
         self._match('rbracket')
-        return ast.multi_select_list(expressions)
+        return MultiSelectList(expressions)
 
-    def _parse_multi_select_hash(self) -> ast.Node:
-        pairs = []
+    def _parse_multi_select_hash(self) -> Node:
+        pairs: list[KeyValPair] = []
         while True:
             key_token = check.not_none(self._lookahead_token(0))
 
@@ -491,7 +525,7 @@ class Parser:
             self._match('colon')
             value = self._expression(0)
 
-            node = ast.key_val_pair(key_name=key_name, node=value)
+            node = KeyValPair(key_name=key_name, node=value)
 
             pairs.append(node)
             if self._current_token() == 'comma':
@@ -501,13 +535,15 @@ class Parser:
                 self._match('rbrace')
                 break
 
-        return ast.multi_select_dict(nodes=pairs)
+        return MultiSelectDict(nodes=pairs)
+
+    def _parse_projection_rhs(self, binding_power: int) -> Node:
+        right: Node
 
-    def _parse_projection_rhs(self, binding_power: int) -> ast.Node:
         # Parse the right hand side of the projection.
         if self.BINDING_POWER[self._current_token()] < self._PROJECTION_STOP:
             # BP of 10 are all the tokens that stop a projection.
-            right = ast.identity()
+            right = Identity()
 
         elif self._current_token() == 'lbracket':
             right = self._expression(binding_power)
@@ -524,7 +560,7 @@ class Parser:
 
         return right
 
-    def _parse_dot_rhs(self, binding_power: int) -> ast.Node:
+    def _parse_dot_rhs(self, binding_power: int) -> Node:
         # From the grammar:
         # expression '.' ( identifier /
         #                  multi-select-list /
@@ -553,9 +589,9 @@ class Parser:
             self._raise_parse_error_for_token(t, msg)
             raise RuntimeError  # noqa
 
-    def _error_nud_token(self, token: lexer.Token) -> ta.NoReturn:
+    def _error_nud_token(self, token: Token) -> ta.NoReturn:
         if token['type'] == 'eof':
-            raise exceptions.IncompleteExpressionError(
+            raise IncompleteExpressionError(
                 token['start'],
                 token['value'],
                 token['type'],
@@ -563,7 +599,7 @@ class Parser:
 
         self._raise_parse_error_for_token(token, 'invalid token')
 
-    def _error_led_token(self, token: lexer.Token) -> ta.NoReturn:
+    def _error_led_token(self, token: Token) -> ta.NoReturn:
         self._raise_parse_error_for_token(token, 'invalid token')
 
     def _match(self, token_type: str | None = None) -> None:
@@ -588,14 +624,14 @@ class Parser:
     def _lookahead(self, number: int) -> str:
         return check.not_none(self._tokens[self._index + number])['type']
 
-    def _lookahead_token(self, number: int) -> lexer.Token | None:
+    def _lookahead_token(self, number: int) -> Token | None:
         return self._tokens[self._index + number]
 
-    def _raise_parse_error_for_token(self, token: lexer.Token, reason: str) -> ta.NoReturn:
+    def _raise_parse_error_for_token(self, token: Token, reason: str) -> ta.NoReturn:
         lex_position = token['start']
         actual_value = token['value']
         actual_type = token['type']
-        raise exceptions.ParseError(
+        raise ParseError(
             lex_position,
             actual_value,
             actual_type,
@@ -607,14 +643,14 @@ class Parser:
         actual_value = token['value']
         actual_type = token['type']
         if actual_type == 'eof':
-            raise exceptions.IncompleteExpressionError(
+            raise IncompleteExpressionError(
                 lex_position,
                 actual_value,
                 actual_type,
             )
 
         message = f'Expecting: {expected_type}, got: {actual_type}'
-        raise exceptions.ParseError(
+        raise ParseError(
             lex_position,
             actual_value,
             actual_type,
@@ -627,21 +663,21 @@ class Parser:
             self._CACHE.pop(key, None)
 
     @classmethod
-    def purge(cls):
+    def purge(cls) -> None:
         """Clear the expression compilation cache."""
 
         cls._CACHE.clear()
 
 
 class ParsedResult:
-    def __init__(self, expression: str, parsed: ast.Node) -> None:
+    def __init__(self, expression: str, parsed: Node) -> None:
         super().__init__()
 
         self.expression = expression
         self.parsed = parsed
 
-    def search(self, value: ta.Any, options: visitor.Options | None = None) -> ta.Any:
-        evaluator = visitor.TreeInterpreter(options)
+    def search(self, value: ta.Any, options: Options | None = None) -> ta.Any:
+        evaluator = TreeInterpreter(options)
         return evaluator.evaluate(self.parsed, value)
 
     def _render_dot_file(self) -> str:
@@ -653,7 +689,7 @@ class ParsedResult:
         the public supported API. Use at your own risk.
         """
 
-        renderer = visitor.GraphvizVisitor()
+        renderer = GraphvizVisitor()
         contents = renderer.visit(self.parsed)
         return contents
 
@@ -661,9 +697,9 @@ class ParsedResult:
         return repr(self.parsed)
 
 
-def compile(expression: str, options: visitor.Options | None = None) -> ParsedResult:  # noqa
+def compile(expression: str, options: Options | None = None) -> ParsedResult:  # noqa
     return Parser().parse(expression, options=options)
 
 
-def search(expression: str, data: ta.Any, options: visitor.Options | None = None) -> ta.Any:
+def search(expression: str, data: ta.Any, options: Options | None = None) -> ta.Any:
     return compile(expression, options).search(data, options=options)
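
For reference, a short usage sketch of the module-level `compile` and `search` helpers above; the import path is an assumption about where this vendored JMESPath port lives.

# Hypothetical usage; the import path is assumed.
from omlish.specs.jmespath import parser

data = {'foo': {'bar': [{'baz': 1}, {'baz': 2}]}}

compiled = parser.compile('foo.bar[*].baz')  # parse once, reuse the ParsedResult
assert compiled.search(data) == [1, 2]

assert parser.search('foo.bar[0].baz', data) == 1  # one-shot convenience wrapper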