pbi_parsers-0.7.8-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (78)
  1. pbi_parsers/__init__.py +9 -0
  2. pbi_parsers/base/__init__.py +7 -0
  3. pbi_parsers/base/lexer.py +127 -0
  4. pbi_parsers/base/tokens.py +61 -0
  5. pbi_parsers/dax/__init__.py +22 -0
  6. pbi_parsers/dax/exprs/__init__.py +107 -0
  7. pbi_parsers/dax/exprs/_base.py +46 -0
  8. pbi_parsers/dax/exprs/_utils.py +45 -0
  9. pbi_parsers/dax/exprs/add_sub.py +73 -0
  10. pbi_parsers/dax/exprs/add_sub_unary.py +72 -0
  11. pbi_parsers/dax/exprs/array.py +75 -0
  12. pbi_parsers/dax/exprs/column.py +56 -0
  13. pbi_parsers/dax/exprs/comparison.py +76 -0
  14. pbi_parsers/dax/exprs/concatenation.py +73 -0
  15. pbi_parsers/dax/exprs/div_mul.py +75 -0
  16. pbi_parsers/dax/exprs/exponent.py +67 -0
  17. pbi_parsers/dax/exprs/function.py +102 -0
  18. pbi_parsers/dax/exprs/hierarchy.py +68 -0
  19. pbi_parsers/dax/exprs/identifier.py +46 -0
  20. pbi_parsers/dax/exprs/ins.py +67 -0
  21. pbi_parsers/dax/exprs/keyword.py +60 -0
  22. pbi_parsers/dax/exprs/literal_number.py +46 -0
  23. pbi_parsers/dax/exprs/literal_string.py +45 -0
  24. pbi_parsers/dax/exprs/logical.py +76 -0
  25. pbi_parsers/dax/exprs/measure.py +44 -0
  26. pbi_parsers/dax/exprs/none.py +30 -0
  27. pbi_parsers/dax/exprs/parens.py +61 -0
  28. pbi_parsers/dax/exprs/returns.py +76 -0
  29. pbi_parsers/dax/exprs/table.py +51 -0
  30. pbi_parsers/dax/exprs/variable.py +68 -0
  31. pbi_parsers/dax/formatter.py +215 -0
  32. pbi_parsers/dax/lexer.py +222 -0
  33. pbi_parsers/dax/main.py +63 -0
  34. pbi_parsers/dax/parser.py +66 -0
  35. pbi_parsers/dax/tokens.py +54 -0
  36. pbi_parsers/dax/utils.py +120 -0
  37. pbi_parsers/pq/__init__.py +17 -0
  38. pbi_parsers/pq/exprs/__init__.py +98 -0
  39. pbi_parsers/pq/exprs/_base.py +33 -0
  40. pbi_parsers/pq/exprs/_utils.py +31 -0
  41. pbi_parsers/pq/exprs/add_sub.py +59 -0
  42. pbi_parsers/pq/exprs/add_sub_unary.py +57 -0
  43. pbi_parsers/pq/exprs/and_or_expr.py +60 -0
  44. pbi_parsers/pq/exprs/array.py +53 -0
  45. pbi_parsers/pq/exprs/arrow.py +50 -0
  46. pbi_parsers/pq/exprs/column.py +42 -0
  47. pbi_parsers/pq/exprs/comparison.py +62 -0
  48. pbi_parsers/pq/exprs/concatenation.py +61 -0
  49. pbi_parsers/pq/exprs/div_mul.py +59 -0
  50. pbi_parsers/pq/exprs/each.py +41 -0
  51. pbi_parsers/pq/exprs/ellipsis_expr.py +28 -0
  52. pbi_parsers/pq/exprs/function.py +63 -0
  53. pbi_parsers/pq/exprs/identifier.py +77 -0
  54. pbi_parsers/pq/exprs/if_expr.py +70 -0
  55. pbi_parsers/pq/exprs/is_expr.py +54 -0
  56. pbi_parsers/pq/exprs/keyword.py +40 -0
  57. pbi_parsers/pq/exprs/literal_number.py +31 -0
  58. pbi_parsers/pq/exprs/literal_string.py +31 -0
  59. pbi_parsers/pq/exprs/meta.py +54 -0
  60. pbi_parsers/pq/exprs/negation.py +52 -0
  61. pbi_parsers/pq/exprs/none.py +22 -0
  62. pbi_parsers/pq/exprs/not_expr.py +39 -0
  63. pbi_parsers/pq/exprs/parens.py +43 -0
  64. pbi_parsers/pq/exprs/record.py +58 -0
  65. pbi_parsers/pq/exprs/row.py +54 -0
  66. pbi_parsers/pq/exprs/row_index.py +57 -0
  67. pbi_parsers/pq/exprs/statement.py +67 -0
  68. pbi_parsers/pq/exprs/try_expr.py +55 -0
  69. pbi_parsers/pq/exprs/type_expr.py +78 -0
  70. pbi_parsers/pq/exprs/variable.py +52 -0
  71. pbi_parsers/pq/formatter.py +13 -0
  72. pbi_parsers/pq/lexer.py +219 -0
  73. pbi_parsers/pq/main.py +63 -0
  74. pbi_parsers/pq/parser.py +65 -0
  75. pbi_parsers/pq/tokens.py +81 -0
  76. pbi_parsers-0.7.8.dist-info/METADATA +66 -0
  77. pbi_parsers-0.7.8.dist-info/RECORD +78 -0
  78. pbi_parsers-0.7.8.dist-info/WHEEL +4 -0
pbi_parsers/pq/exprs/concatenation.py
@@ -0,0 +1,61 @@
+ import textwrap
+ from typing import TYPE_CHECKING
+
+ from pbi_parsers.pq.tokens import Token, TokenType
+
+ from ._base import Expression
+ from ._utils import lexer_reset
+
+ if TYPE_CHECKING:
+     from pbi_parsers.pq.parser import Parser
+
+
+ class ConcatenationExpression(Expression):
+     """Represents a concatenation expression."""
+
+     operator: Token
+     left: Expression
+     right: Expression
+
+     def __init__(self, operator: Token, left: Expression, right: Expression) -> None:
+         self.operator = operator
+         self.left = left
+         self.right = right
+
+     @classmethod
+     @lexer_reset
+     def match(cls, parser: "Parser") -> "ConcatenationExpression | None":
+         from . import EXPRESSION_HIERARCHY, any_expression_match  # noqa: PLC0415
+
+         skip_index = EXPRESSION_HIERARCHY.index(ConcatenationExpression)
+
+         left_term = any_expression_match(parser=parser, skip_first=skip_index + 1)
+         operator = parser.consume()
+
+         if not left_term:
+             return None
+         if operator.tok_type != TokenType.CONCATENATION_OPERATOR:
+             return None
+
+         right_term = any_expression_match(parser=parser, skip_first=skip_index)
+         if right_term is None:
+             msg = f"Expected a right term after operator {operator.text}, found: {parser.peek()}"
+             raise ValueError(msg)
+         return ConcatenationExpression(
+             operator=operator,
+             left=left_term,
+             right=right_term,
+         )
+
+     def pprint(self) -> str:
+         left_str = textwrap.indent(self.left.pprint(), " " * 10).lstrip()
+         right_str = textwrap.indent(self.right.pprint(), " " * 10).lstrip()
+         return f"""
+ Concat (
+     left: {left_str},
+     right: {right_str}
+ )""".strip()
+
+     def children(self) -> list[Expression]:
+         """Returns a list of child expressions."""
+         return [self.left, self.right]
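
Every `match` implementation in these modules is wrapped in `lexer_reset` from `pq/exprs/_utils.py`, which is not part of the hunks shown here. The sketch below is only a guess at its contract, inferred from how the matchers use it: they `consume()` tokens freely and simply return `None` on failure, which only works if the decorator rewinds the parser to where it started. `parser.index` is the same attribute referenced in `function.py` further down; the decorator body itself is hypothetical.

    from functools import wraps

    def lexer_reset(match_func):
        """Hypothetical sketch: restore the parser position when a match attempt fails."""
        @wraps(match_func)
        def wrapper(cls, parser, *args, **kwargs):
            saved_index = parser.index      # position before any consume() calls
            result = match_func(cls, parser, *args, **kwargs)
            if result is None:              # failed match: rewind so other matchers can retry
                parser.index = saved_index
            return result
        return wrapper
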
pbi_parsers/pq/exprs/div_mul.py
@@ -0,0 +1,59 @@
+ import textwrap
+ from typing import TYPE_CHECKING
+
+ from pbi_parsers.pq.tokens import Token, TokenType
+
+ from ._base import Expression
+ from ._utils import lexer_reset
+
+ if TYPE_CHECKING:
+     from pbi_parsers.pq.parser import Parser
+
+
+ class DivMulExpression(Expression):
+     """Represents a multiplication or division expression."""
+
+     operator: Token
+     left: Expression
+     right: Expression
+
+     def __init__(self, operator: Token, left: Expression, right: Expression) -> None:
+         self.operator = operator
+         self.left = left
+         self.right = right
+
+     @classmethod
+     @lexer_reset
+     def match(cls, parser: "Parser") -> "DivMulExpression | None":
+         from . import EXPRESSION_HIERARCHY, any_expression_match  # noqa: PLC0415
+
+         skip_index = EXPRESSION_HIERARCHY.index(DivMulExpression)
+
+         left_term = any_expression_match(parser=parser, skip_first=skip_index + 1)
+         operator = parser.consume()
+
+         if not left_term:
+             return None
+         if operator.tok_type not in {TokenType.MULTIPLY_SIGN, TokenType.DIVIDE_SIGN}:
+             return None
+
+         right_term = any_expression_match(parser=parser, skip_first=skip_index)
+         if right_term is None:
+             msg = f"Expected a right term after operator {operator.text}, found: {parser.peek()}"
+             raise ValueError(msg)
+         return DivMulExpression(operator=operator, left=left_term, right=right_term)
+
+     def pprint(self) -> str:
+         op_str = "Mul" if self.operator.text == "*" else "Div"
+         left_str = textwrap.indent(self.left.pprint(), " " * 10)[10:]
+         right_str = textwrap.indent(self.right.pprint(), " " * 10)[10:]
+         return f"""
+ {op_str} (
+     operator: {self.operator.text},
+     left: {left_str},
+     right: {right_str}
+ )""".strip()
+
+     def children(self) -> list[Expression]:
+         """Returns a list of child expressions."""
+         return [self.left, self.right]
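
ConcatenationExpression and DivMulExpression (like the `is`, `meta`, and negation matchers later in this diff) follow the same precedence-climbing recipe: find their own slot in `EXPRESSION_HIERARCHY`, match the left operand only against strictly higher-precedence forms (`skip_first=skip_index + 1`, which also prevents left recursion), and match the right operand starting from their own level (`skip_first=skip_index`). `EXPRESSION_HIERARCHY` and `any_expression_match` live in `pq/exprs/__init__.py`, which is not shown here, so the toy below only illustrates the general shape with stand-in classes rather than the package's real hierarchy or tokens.

    class ToyParser:
        """Minimal stand-in for pbi_parsers.pq.parser.Parser (illustrative only)."""

        def __init__(self, tokens):
            self.tokens, self.index = tokens, 0

        def consume(self):
            tok = self.tokens[self.index] if self.index < len(self.tokens) else None
            self.index += 1
            return tok


    class Number:
        @classmethod
        def match(cls, parser):
            saved = parser.index
            tok = parser.consume()
            if isinstance(tok, int):
                return ("num", tok)
            parser.index = saved                # rewind, as @lexer_reset would
            return None


    class AddExpr:
        @classmethod
        def match(cls, parser):
            saved = parser.index
            skip_index = EXPRESSION_HIERARCHY.index(cls)
            # Left operand: strictly higher precedence, so AddExpr cannot left-recurse.
            left = any_expression_match(parser, skip_first=skip_index + 1)
            operator = parser.consume()
            if left is None or operator != "+":
                parser.index = saved
                return None
            # Right operand: same level and below, so "1 + 2 + 3" nests to the right.
            right = any_expression_match(parser, skip_first=skip_index)
            if right is None:
                raise ValueError("Expected a right term after '+'")
            return ("add", left, right)


    EXPRESSION_HIERARCHY = [AddExpr, Number]    # lowest precedence first


    def any_expression_match(parser, skip_first=0):
        """Return the result of the first expression class at or after skip_first that matches."""
        for expr_cls in EXPRESSION_HIERARCHY[skip_first:]:
            result = expr_cls.match(parser)
            if result is not None:
                return result
        return None


    print(any_expression_match(ToyParser([1, "+", 2, "+", 3])))
    # -> ('add', ('num', 1), ('add', ('num', 2), ('num', 3)))

Because the right operand re-enters at the matcher's own level, same-operator chains nest to the right in this toy; the hunks above use the identical `skip_first` arithmetic.
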
pbi_parsers/pq/exprs/each.py
@@ -0,0 +1,41 @@
+ import textwrap
+ from typing import TYPE_CHECKING
+
+ from pbi_parsers.pq.tokens import TokenType
+
+ from ._base import Expression
+ from ._utils import lexer_reset
+
+ if TYPE_CHECKING:
+     from pbi_parsers.pq.parser import Parser
+
+
+ class EachExpression(Expression):
+     each_expr: Expression
+
+     def __init__(self, each_expr: Expression) -> None:
+         self.each_expr = each_expr
+
+     def pprint(self) -> str:
+         each_expr = textwrap.indent(self.each_expr.pprint(), " " * 10)[10:]
+         return f"""
+ Each (
+     each: {each_expr},
+ )""".strip()
+
+     @classmethod
+     @lexer_reset
+     def match(cls, parser: "Parser") -> "EachExpression | None":
+         from . import any_expression_match  # noqa: PLC0415
+
+         each = parser.consume()
+         if each.tok_type != TokenType.EACH:
+             return None
+         each_expr: Expression | None = any_expression_match(parser)
+         if not each_expr:
+             return None
+         return EachExpression(each_expr=each_expr)
+
+     def children(self) -> list[Expression]:
+         """Returns a list of child expressions."""
+         return [self.each_expr]
pbi_parsers/pq/exprs/ellipsis_expr.py
@@ -0,0 +1,28 @@
+ from typing import TYPE_CHECKING
+
+ from pbi_parsers.pq.tokens import TokenType
+
+ from ._base import Expression
+ from ._utils import lexer_reset
+
+ if TYPE_CHECKING:
+     from pbi_parsers.pq.parser import Parser
+
+
+ class EllipsisExpression(Expression):
+     def __init__(self) -> None:
+         pass
+
+     def pprint(self) -> str:  # noqa: PLR6301 # This is ignored because we want to match the interface
+         return "Ellipsis ()"
+
+     @classmethod
+     @lexer_reset
+     def match(cls, parser: "Parser") -> "EllipsisExpression | None":
+         if parser.consume().tok_type != TokenType.ELLIPSIS:
+             return None
+         return EllipsisExpression()
+
+     def children(self) -> list[Expression]:  # noqa: PLR6301
+         """Returns a list of child expressions."""
+         return []
pbi_parsers/pq/exprs/function.py
@@ -0,0 +1,63 @@
+ import textwrap
+ from typing import TYPE_CHECKING
+
+ from pbi_parsers.pq.tokens import TokenType
+
+ from ._base import Expression
+ from ._utils import lexer_reset
+ from .identifier import IdentifierExpression
+ from .none import NoneExpression
+
+ if TYPE_CHECKING:
+     from pbi_parsers.pq.parser import Parser
+
+
+ class FunctionExpression(Expression):
+     name: IdentifierExpression
+     args: list[Expression]
+
+     def __init__(self, name: IdentifierExpression, args: list[Expression]) -> None:
+         self.name = name
+         self.args = args
+
+     def pprint(self) -> str:
+         args = ",\n".join(arg.pprint() for arg in self.args)
+         args = textwrap.indent(args, " " * 10)[10:]
+         return f"""
+ Function (
+     name: {self.name.pprint()},
+     args: {args}
+ ) """.strip()
+
+     @classmethod
+     @lexer_reset
+     def match(cls, parser: "Parser") -> "FunctionExpression | None":
+         from . import any_expression_match  # noqa: PLC0415
+
+         args: list[Expression] = []
+
+         name = IdentifierExpression.match(parser)
+         if name is None:
+             return None
+
+         if parser.consume().tok_type != TokenType.LEFT_PAREN:
+             return None
+
+         while not cls.match_tokens(parser, [TokenType.RIGHT_PAREN]):
+             arg = any_expression_match(parser)
+             if arg is not None:
+                 args.append(arg)
+             elif parser.peek().tok_type == TokenType.COMMA:
+                 args.append(NoneExpression())
+             else:
+                 msg = f"Unexpected token sequence: {parser.peek()}, {parser.index}"
+                 raise ValueError(msg)
+
+             if not cls.match_tokens(parser, [TokenType.RIGHT_PAREN]):
+                 assert parser.consume().tok_type == TokenType.COMMA
+         _right_paren = parser.consume()
+         return FunctionExpression(name=name, args=args)
+
+     def children(self) -> list[Expression]:
+         """Returns a list of child expressions."""
+         return [self.name, *self.args]
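
`match_tokens` comes from the shared `Expression` base in `pq/exprs/_base.py`, which this diff does not include. Its call sites here and in `literal_number.py` / `literal_string.py` (the loop condition is re-checked and the closing parenthesis is consumed separately afterwards) suggest it peeks rather than consumes; the body below is a hypothetical reconstruction under that assumption, not the package's actual implementation.

    class Expression:
        """Hypothetical sketch of the relevant part of pq/exprs/_base.py."""

        @classmethod
        def match_tokens(cls, parser, tok_types):
            # Look ahead without consuming: compare the next len(tok_types) token
            # types against tok_types, in order.
            return all(
                parser.peek(offset).tok_type == tok_type
                for offset, tok_type in enumerate(tok_types)
            )
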
pbi_parsers/pq/exprs/identifier.py
@@ -0,0 +1,77 @@
+ from typing import TYPE_CHECKING
+
+ from pbi_parsers.pq.tokens import TEXT_TOKENS, Token, TokenType
+
+ from ._base import Expression
+ from ._utils import lexer_reset
+
+ if TYPE_CHECKING:
+     from pbi_parsers.pq.parser import Parser
+
+ NAME_PARTS = (
+     TokenType.QUOTED_IDENTIFER,
+     TokenType.UNQUOTED_IDENTIFIER,
+     TokenType.HASH_IDENTIFIER,
+     *TEXT_TOKENS,
+ )
+
+
+ class IdentifierExpression(Expression):
+     name_parts: list[Token]
+
+     def __init__(self, name_parts: list[Token]) -> None:
+         self.name_parts = name_parts
+
+     def pprint(self) -> str:
+         name = ".".join(part.text for part in self.name_parts)
+         return f"Identifier ({name})"
+
+     @classmethod
+     @lexer_reset
+     def match(cls, parser: "Parser") -> "IdentifierExpression | None":
+         name_parts = [parser.consume()]
+         if (
+             name_parts[0].tok_type not in NAME_PARTS
+         ):  # TEXT_TOKENS are used to allow keywords to be used as identifiers.
+             # This requires identifiers to be matched after keywords.
+             return None
+
+         while parser.peek().tok_type == TokenType.PERIOD:
+             _period, name = parser.consume(), parser.consume()
+             if name.tok_type not in NAME_PARTS:
+                 return None
+             name_parts.append(name)
+
+         return IdentifierExpression(name_parts=name_parts)
+
+
+ class BracketedIdentifierExpression(Expression):
+     name_parts: list[Token]
+
+     def __init__(self, name_parts: list[Token]) -> None:
+         self.name_parts = name_parts
+
+     def pprint(self) -> str:
+         return f"""
+ Bracketed Identifier ({" ".join(part.text for part in self.name_parts)})""".strip()
+
+     @classmethod
+     @lexer_reset
+     def match(cls, parser: "Parser") -> "BracketedIdentifierExpression | None":
+         left_bracket = parser.consume()
+         if left_bracket.tok_type != TokenType.LEFT_BRACKET:
+             return None
+         name_parts = []
+         while parser.peek().tok_type in {
+             *NAME_PARTS,
+             TokenType.PERIOD,
+         }:  # there are cases where keywords can be used as identifiers
+             name = parser.consume()
+             name_parts.append(name)
+         right_bracket = parser.consume()
+         if right_bracket.tok_type != TokenType.RIGHT_BRACKET:
+             return None
+         return BracketedIdentifierExpression(name_parts=name_parts)
+
+     def children(self) -> list[Expression]:
+         return super().children()
pbi_parsers/pq/exprs/if_expr.py
@@ -0,0 +1,70 @@
+ import textwrap
+ from typing import TYPE_CHECKING
+
+ from pbi_parsers.pq.tokens import TokenType
+
+ from ._base import Expression
+ from ._utils import lexer_reset
+
+ if TYPE_CHECKING:
+     from pbi_parsers.pq.parser import Parser
+
+
+ class IfExpression(Expression):
+     if_expr: Expression
+     then_expr: Expression
+     else_expr: Expression
+
+     def __init__(
+         self,
+         if_expr: Expression,
+         then_expr: Expression,
+         else_expr: Expression,
+     ) -> None:
+         self.if_expr = if_expr
+         self.then_expr = then_expr
+         self.else_expr = else_expr
+
+     def pprint(self) -> str:
+         if_expr = textwrap.indent(self.if_expr.pprint(), " " * 10)[10:]
+         then_expr = textwrap.indent(self.then_expr.pprint(), " " * 10)[10:]
+         else_expr = textwrap.indent(self.else_expr.pprint(), " " * 10)[10:]
+         return f"""
+ If (
+     if: {if_expr},
+     then: {then_expr},
+     else: {else_expr}
+ )""".strip()
+
+     @classmethod
+     @lexer_reset
+     def match(cls, parser: "Parser") -> "IfExpression | None":  # noqa: PLR0911
+         from . import any_expression_match  # noqa: PLC0415
+
+         if_ = parser.consume()
+         if if_.tok_type != TokenType.IF:
+             return None
+         if_expr: Expression | None = any_expression_match(
+             parser,
+         )  # this expression can recurse
+         if not if_expr:
+             return None
+
+         then = parser.consume()
+         if then.tok_type != TokenType.THEN:
+             return None
+         then_expr = any_expression_match(parser)
+         if not then_expr:
+             return None
+
+         else_ = parser.consume()
+         if else_.tok_type != TokenType.ELSE:
+             return None
+         else_expr = any_expression_match(parser)
+         if not else_expr:
+             return None
+         return IfExpression(if_expr=if_expr, then_expr=then_expr, else_expr=else_expr)
+
+     def children(self) -> list[Expression]:
+         """Returns a list of child expressions."""
+         return [self.if_expr, self.then_expr, self.else_expr]
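
Since this file and `ellipsis_expr.py` above both expose plain constructors, a quick way to see the `pprint` tree format is to build a node by hand. The import paths below follow the module layout in the file list; the exact indentation of the printed tree depends on the f-string templates above, so the comment only shows its rough shape.

    from pbi_parsers.pq.exprs.ellipsis_expr import EllipsisExpression
    from pbi_parsers.pq.exprs.if_expr import IfExpression

    tree = IfExpression(
        if_expr=EllipsisExpression(),
        then_expr=EllipsisExpression(),
        else_expr=EllipsisExpression(),
    )
    print(tree.pprint())
    # Roughly:
    # If (
    #     if: Ellipsis (),
    #     then: Ellipsis (),
    #     else: Ellipsis ()
    # )
    print([type(child).__name__ for child in tree.children()])
    # ['EllipsisExpression', 'EllipsisExpression', 'EllipsisExpression']
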
pbi_parsers/pq/exprs/is_expr.py
@@ -0,0 +1,54 @@
+ import textwrap
+ from typing import TYPE_CHECKING
+
+ from pbi_parsers.pq.tokens import TokenType
+
+ from ._base import Expression
+ from ._utils import lexer_reset
+
+ if TYPE_CHECKING:
+     from pbi_parsers.pq.parser import Parser
+
+
+ class IsExpression(Expression):
+     """Represents an `is` type-check expression."""
+
+     left: Expression
+     right: Expression
+
+     def __init__(self, left: Expression, right: Expression) -> None:
+         self.left = left
+         self.right = right
+
+     @classmethod
+     @lexer_reset
+     def match(cls, parser: "Parser") -> "IsExpression | None":
+         from . import EXPRESSION_HIERARCHY, any_expression_match  # noqa: PLC0415
+
+         skip_index = EXPRESSION_HIERARCHY.index(IsExpression)
+
+         left_term = any_expression_match(parser=parser, skip_first=skip_index + 1)
+
+         if not left_term:
+             return None
+         if parser.consume().tok_type != TokenType.IS:
+             return None
+
+         right_term = any_expression_match(parser=parser, skip_first=skip_index)
+         if right_term is None:
+             msg = f'Expected a right term after operator "is", found: {parser.peek()}'
+             raise ValueError(msg)
+         return IsExpression(left=left_term, right=right_term)
+
+     def pprint(self) -> str:
+         left_str = textwrap.indent(self.left.pprint(), " " * 10)[10:]
+         right_str = textwrap.indent(self.right.pprint(), " " * 10)[10:]
+         return f"""
+ Is (
+     left: {left_str},
+     right: {right_str}
+ )""".strip()
+
+     def children(self) -> list[Expression]:
+         """Returns a list of child expressions."""
+         return [self.left, self.right]
pbi_parsers/pq/exprs/keyword.py
@@ -0,0 +1,40 @@
+ from typing import TYPE_CHECKING
+
+ from pbi_parsers.pq.tokens import Token, TokenType
+
+ from ._base import Expression
+ from ._utils import lexer_reset
+
+ if TYPE_CHECKING:
+     from pbi_parsers.pq.parser import Parser
+
+
+ class KeywordExpression(Expression):
+     name: Token
+
+     def __init__(self, name: Token) -> None:
+         self.name = name
+
+     def pprint(self) -> str:
+         return f"""
+ Keyword ({self.name.text})""".strip()
+
+     @classmethod
+     @lexer_reset
+     def match(cls, parser: "Parser") -> "KeywordExpression | None":
+         name = parser.consume()
+         if name.tok_type != TokenType.KEYWORD:
+             return None
+         if name.text.lower() in {"true", "false"}:
+             p1 = parser.peek()
+             p2 = parser.peek(1)
+             if p1.tok_type == TokenType.LEFT_PAREN and p2.tok_type == TokenType.RIGHT_PAREN:
+                 # This is a special case for boolean keywords with parentheses.
+                 # IDK why microsoft made TRUE() a function too
+                 parser.consume()
+                 parser.consume()
+         return KeywordExpression(name=name)
+
+     def children(self) -> list[Expression]:  # noqa: PLR6301
+         """Returns a list of child expressions."""
+         return []
pbi_parsers/pq/exprs/literal_number.py
@@ -0,0 +1,31 @@
+ from typing import TYPE_CHECKING
+
+ from pbi_parsers.pq.tokens import Token, TokenType
+
+ from ._base import Expression
+ from ._utils import lexer_reset
+
+ if TYPE_CHECKING:
+     from pbi_parsers.pq.parser import Parser
+
+
+ class LiteralNumberExpression(Expression):
+     value: Token
+
+     def __init__(self, value: Token) -> None:
+         self.value = value
+
+     def pprint(self) -> str:
+         return f"Number ({self.value.text})"
+
+     @classmethod
+     @lexer_reset
+     def match(cls, parser: "Parser") -> "LiteralNumberExpression | None":
+         if cls.match_tokens(parser, [TokenType.NUMBER_LITERAL]):
+             value = parser.consume()
+             return LiteralNumberExpression(value=value)
+         return None
+
+     def children(self) -> list[Expression]:  # noqa: PLR6301
+         """Returns a list of child expressions."""
+         return []
pbi_parsers/pq/exprs/literal_string.py
@@ -0,0 +1,31 @@
+ from typing import TYPE_CHECKING
+
+ from pbi_parsers.pq.tokens import Token, TokenType
+
+ from ._base import Expression
+ from ._utils import lexer_reset
+
+ if TYPE_CHECKING:
+     from pbi_parsers.pq.parser import Parser
+
+
+ class LiteralStringExpression(Expression):
+     value: Token
+
+     def __init__(self, value: Token) -> None:
+         self.value = value
+
+     def pprint(self) -> str:
+         return f"String ({self.value.text})"
+
+     @classmethod
+     @lexer_reset
+     def match(cls, parser: "Parser") -> "LiteralStringExpression | None":
+         if cls.match_tokens(parser, [TokenType.STRING_LITERAL]):
+             value = parser.consume()
+             return LiteralStringExpression(value=value)
+         return None
+
+     def children(self) -> list[Expression]:  # noqa: PLR6301
+         """Returns a list of child expressions."""
+         return []
pbi_parsers/pq/exprs/meta.py
@@ -0,0 +1,54 @@
+ import textwrap
+ from typing import TYPE_CHECKING
+
+ from pbi_parsers.pq.tokens import TokenType
+
+ from ._base import Expression
+ from ._utils import lexer_reset
+
+ if TYPE_CHECKING:
+     from pbi_parsers.pq.parser import Parser
+
+
+ class MetaExpression(Expression):
+     left_term: Expression
+     right_term: Expression
+
+     def __init__(self, left_term: Expression, right_term: Expression) -> None:
+         self.left_term = left_term
+         self.right_term = right_term
+
+     def pprint(self) -> str:
+         left_term = textwrap.indent(self.left_term.pprint(), " " * 10)[10:]
+         right_term = textwrap.indent(self.right_term.pprint(), " " * 10)[10:]
+         return f"""
+ Meta (
+     left: {left_term},
+     right: {right_term},
+ )""".strip()
+
+     @classmethod
+     @lexer_reset
+     def match(cls, parser: "Parser") -> "MetaExpression | None":
+         from . import EXPRESSION_HIERARCHY, any_expression_match  # noqa: PLC0415
+
+         skip_index = EXPRESSION_HIERARCHY.index(MetaExpression)
+         left_term = any_expression_match(parser=parser, skip_first=skip_index + 1)
+         if left_term is None:
+             return None
+
+         meta = parser.consume()
+         if meta.tok_type != TokenType.META:
+             return None
+
+         right_term: Expression | None = any_expression_match(
+             parser,
+         )  # this expression can recurse
+         if not right_term:
+             return None
+
+         return MetaExpression(left_term=left_term, right_term=right_term)
+
+     def children(self) -> list[Expression]:
+         """Returns a list of child expressions."""
+         return [self.left_term, self.right_term]
pbi_parsers/pq/exprs/negation.py
@@ -0,0 +1,52 @@
+ import textwrap
+ from typing import TYPE_CHECKING
+
+ from pbi_parsers.pq.tokens import TokenType
+
+ from ._base import Expression
+ from ._utils import lexer_reset
+
+ if TYPE_CHECKING:
+     from pbi_parsers.pq.parser import Parser
+
+
+ class NegationExpression(Expression):
+     """Represents a negation expression."""
+
+     number: Expression
+
+     def __init__(self, number: Expression) -> None:
+         self.number = number
+
+     @classmethod
+     @lexer_reset
+     def match(cls, parser: "Parser") -> "NegationExpression | None":
+         from . import EXPRESSION_HIERARCHY, any_expression_match  # noqa: PLC0415
+
+         skip_index = EXPRESSION_HIERARCHY.index(
+             NegationExpression,
+         )  # intentionally inclusive of self to allow chained ! prefixes
+
+         if parser.consume().tok_type != TokenType.EXCLAMATION_POINT:
+             return None
+
+         # Handle chained !!! prefixes
+         number: Expression | None = any_expression_match(
+             parser=parser,
+             skip_first=skip_index,
+         )
+         if number is None:
+             msg = f'Expected a right term after negation "!", found: {parser.peek()}'
+             raise ValueError(msg)
+         return NegationExpression(number=number)
+
+     def pprint(self) -> str:
+         number = textwrap.indent(self.number.pprint(), " " * 12).lstrip()
+         return f"""
+ Negation (
+     number: {number},
+ )""".strip()
+
+     def children(self) -> list[Expression]:
+         """Returns a list of child expressions."""
+         return [self.number]
pbi_parsers/pq/exprs/none.py
@@ -0,0 +1,22 @@
+ from typing import TYPE_CHECKING
+
+ from ._base import Expression
+
+ if TYPE_CHECKING:
+     from pbi_parsers.pq.parser import Parser
+
+
+ class NoneExpression(Expression):
+     """Represents the absence of a value, so far occurring only when an argument is skipped in a function call."""
+
+     def pprint(self) -> str:  # noqa: PLR6301
+         return "None"
+
+     @classmethod
+     def match(cls, parser: "Parser") -> "NoneExpression | None":
+         msg = "NoneExpression.match should not be called; this class is a placeholder for the absence of an expression."
+         raise NotImplementedError(msg)
+
+     def children(self) -> list[Expression]:  # noqa: PLR6301
+         """Returns a list of child expressions."""
+         return []