pbi-parsers 0.8.1__py3-none-any.whl → 0.8.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pbi_parsers/__init__.py CHANGED
@@ -1,6 +1,6 @@
  from . import dax, pq
 
- __version__ = "0.8.1"
+ __version__ = "0.8.3"
 
 
  __all__ = [
pbi_parsers/dax/tokens.py CHANGED
@@ -2,6 +2,7 @@ from dataclasses import dataclass
  from enum import Enum, auto
 
  from pbi_parsers.base import BaseToken
+ from pbi_parsers.base.tokens import TextSlice
 
 
  class TokenType(Enum):
@@ -40,6 +41,9 @@ class TokenType(Enum):
      VARIABLE = auto()
      WHITESPACE = auto()
 
+     UNKNOWN = auto()
+     """unknown is used when someone replaces a token with a str"""
+
 
  KEYWORD_MAPPING = {
      "TRUE": TokenType.TRUE,
@@ -52,3 +56,17 @@ KEYWORD_MAPPING = {
  @dataclass
  class Token(BaseToken):
      tok_type: TokenType = TokenType.EOF
+
+     @staticmethod
+     def from_str(value: str, tok_type: TokenType = TokenType.UNKNOWN) -> "Token":
+         tok_type = KEYWORD_MAPPING.get(value, tok_type)
+         return Token(
+             tok_type=tok_type,
+             text_slice=TextSlice(value, 0, len(value)),
+         )
+
+     def add_token_before(self, text: str, tok_type: TokenType) -> None:
+         super().add_token_before(text, tok_type)
+
+     def add_token_after(self, text: str, tok_type: TokenType) -> None:
+         super().add_token_after(text, tok_type)
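The new Token.from_str helper builds a standalone DAX token from a plain string: known keywords (e.g. "TRUE") are resolved through KEYWORD_MAPPING, anything else keeps the supplied tok_type or falls back to the new UNKNOWN member, while the add_token_before/add_token_after overrides simply delegate to BaseToken with the narrower DAX TokenType annotation. A minimal usage sketch; the .text accessor is assumed from its use in dax/utils.py below:

    from pbi_parsers.dax.tokens import Token, TokenType

    tok = Token.from_str("TRUE")
    assert tok.tok_type is TokenType.TRUE  # keyword lookup via KEYWORD_MAPPING

    ident = Token.from_str("[Sales]", tok_type=TokenType.BRACKETED_IDENTIFIER)
    assert ident.tok_type is TokenType.BRACKETED_IDENTIFIER
    assert ident.text == "[Sales]"  # .text assumed to expose the whole TextSlice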
pbi_parsers/dax/utils.py CHANGED
@@ -4,7 +4,7 @@ import jinja2
  from colorama import Fore, Style
 
  from .exprs import Expression
- from .tokens import Token
+ from .tokens import Token, TokenType
 
  T = TypeVar("T", bound=Expression)
 
@@ -118,3 +118,30 @@ def highlight_section(node: Expression | Token | list[Token] | list[Expression])
      position = node.position()
      full_text = node.text_slice.full_text if isinstance(node, Token) else node.full_text()
      return Context(position, full_text)
+
+
+ def get_inner_text(tok: Token) -> str:
+     """Returns the inner text of a token, stripping any surrounding quotes or brackets.
+
+     Args:
+         tok (Token): The token to extract inner text from.
+
+     Returns:
+         str: The inner text of the token.
+
+     Raises:
+         ValueError: If the token type does not support inner text extraction.
+
+     """
+     if tok.tok_type in {
+         TokenType.BRACKETED_IDENTIFIER,
+         TokenType.SINGLE_QUOTED_IDENTIFIER,
+     }:
+         return tok.text[1:-1]
+     if tok.tok_type in {
+         TokenType.STRING_LITERAL,
+         TokenType.UNQUOTED_IDENTIFIER,
+     }:
+         return tok.text
+     msg = f"Token type {tok.tok_type} does not have inner text"
+     raise ValueError(msg)
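The new get_inner_text utility pairs naturally with Token.from_str from dax/tokens.py above. A short behavioural sketch; the token values are illustrative, not taken from the package's tests:

    from pbi_parsers.dax.tokens import Token, TokenType
    from pbi_parsers.dax.utils import get_inner_text

    bracketed = Token.from_str("[Sales Amount]", tok_type=TokenType.BRACKETED_IDENTIFIER)
    print(get_inner_text(bracketed))  # "Sales Amount" -- surrounding brackets stripped

    plain = Token.from_str("Sales", tok_type=TokenType.UNQUOTED_IDENTIFIER)
    print(get_inner_text(plain))  # "Sales" -- returned unchanged

    unsupported = Token.from_str("+")  # falls back to TokenType.UNKNOWN
    # get_inner_text(unsupported) would raise ValueError: UNKNOWN has no inner text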
pbi_parsers/pq/tokens.py CHANGED
@@ -2,6 +2,7 @@ from dataclasses import dataclass
  from enum import Enum
 
  from pbi_parsers.base import BaseToken
+ from pbi_parsers.base.tokens import TextSlice
 
 
  class TokenType(Enum):
@@ -52,12 +53,27 @@ class TokenType(Enum):
      IS = 46
      AS = 47
      EXCLAMATION_POINT = 48
+     UNKNOWN = 99
+     """unknown is used when someone replaces a token with a str"""
 
 
  @dataclass
  class Token(BaseToken):
      tok_type: TokenType = TokenType.EOF
 
+     @staticmethod
+     def from_str(value: str, tok_type: TokenType = TokenType.UNKNOWN) -> "Token":
+         return Token(
+             tok_type=tok_type,
+             text_slice=TextSlice(value, 0, len(value)),
+         )
+
+     def add_token_before(self, text: str, tok_type: TokenType) -> None:
+         super().add_token_before(text, tok_type)
+
+     def add_token_after(self, text: str, tok_type: TokenType) -> None:
+         super().add_token_after(text, tok_type)
+
 
  # These are tokens that could also be used as identifiers in expressions.
  TEXT_TOKENS = (
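The Power Query Token gains the same from_str constructor and add_token_before/add_token_after overrides as the DAX token, but without a keyword lookup: the caller's tok_type, or the new UNKNOWN default, is used as-is. A small sketch under that reading of the diff:

    from pbi_parsers.pq.tokens import Token, TokenType

    tok = Token.from_str("Source")
    assert tok.tok_type is TokenType.UNKNOWN  # default when no type is supplied

    bang = Token.from_str("!", tok_type=TokenType.EXCLAMATION_POINT)
    assert bang.tok_type is TokenType.EXCLAMATION_POINT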
pbi_parsers-0.8.1.dist-info/METADATA → pbi_parsers-0.8.3.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: pbi_parsers
- Version: 0.8.1
+ Version: 0.8.3
  Summary: Power BI lexer, parsers, and formatters for DAX and M (Power Query) languages
  Project-URL: Homepage, https://github.com/douglassimonsen/pbi_parsers
  Project-URL: Documentation, https://douglassimonsen.github.io/pbi_parsers/
pbi_parsers-0.8.1.dist-info/RECORD → pbi_parsers-0.8.3.dist-info/RECORD CHANGED
@@ -1,4 +1,4 @@
- pbi_parsers/__init__.py,sha256=ljbk7gxiajzYkx2vu3YTjMDWJjYoFMUhfrwtK2iJbI4,91
+ pbi_parsers/__init__.py,sha256=bYOBpDdxjZNL99olLw0gryn94zqaskT48sEBk-mrqs4,91
  pbi_parsers/base/__init__.py,sha256=U7QpzFFD9A4wK3ZHin6xg5fPgTca0y5KC-O7nrZ-flM,115
  pbi_parsers/base/lexer.py,sha256=Rl2cWlySJblFHvwW8oMVAjnCh83Zjo4N7jW3pkRZZO0,4335
  pbi_parsers/base/tokens.py,sha256=c1PIAU5oRxZbI74SY5KT3AZ3WrcukzXtuiLPrZU4f2o,7017
@@ -7,8 +7,8 @@ pbi_parsers/dax/formatter.py,sha256=jOFnwcgQGIzsmi5sfkKoB_pFEGjDPd8E_pwMPwudmy4,
  pbi_parsers/dax/lexer.py,sha256=2_pERJSrSYd8VujOe9TxJa9R2Ex8mvP-bCotH7uVBZY,8025
  pbi_parsers/dax/main.py,sha256=FG35XCAPEooXoJShSgOnmQ0py-h_MPtOfnLpQWy61is,1657
  pbi_parsers/dax/parser.py,sha256=QLKrIBcxZ26TGhTHpeKcTGEHEHUDLC6IgpxxrdJzdek,1821
- pbi_parsers/dax/tokens.py,sha256=nY1laCbL8vwALpJ4jcd8k4lAscqOwqdw3dFuj4_KKVk,1234
- pbi_parsers/dax/utils.py,sha256=OURPa-b6Ldn0_KKXPdLIPA3Zdc12OfbbFd2X5SocCek,4402
+ pbi_parsers/dax/tokens.py,sha256=ela2bX-iXIIsEho5G07Vqsiefc4oEv7mYLJEfWD95ZI,1887
+ pbi_parsers/dax/utils.py,sha256=fyGZY_6ASLtH7Hwk89Ti0Bvu49kAmcUE3D8c7QPyzks,5160
  pbi_parsers/dax/exprs/__init__.py,sha256=OUfiXzZYp5HkTPE9x595MMxpsgG1IvsED8p8spAKZuk,3432
  pbi_parsers/dax/exprs/_base.py,sha256=bMHLICgAUOqAKl_S9d6V8kk62cqM1jynUY2-gBJlEcs,2732
  pbi_parsers/dax/exprs/_utils.py,sha256=BxxRCtsqpL9t330ZfBnkm1lq7B_ejAoG89Ot2bLYJig,2123
@@ -39,7 +39,7 @@ pbi_parsers/pq/formatter.py,sha256=gcqj_aP8o5V10ULi5hdGhy3aAOy829jTKAfzH4mZewA,3
  pbi_parsers/pq/lexer.py,sha256=YOo4chz1N06FLO7cU4-wSoemIzfwG30NeUSJhJB-yOE,8093
  pbi_parsers/pq/main.py,sha256=4k5ZT-dRv5g2jjFgL1ckSpLR36wzClxe1YjiiIiBMu8,1649
  pbi_parsers/pq/parser.py,sha256=Fy8cqAGvGv1oVg4vYWJAGHZSWimEJ3wTtL5eqIkfOA8,1885
- pbi_parsers/pq/tokens.py,sha256=tll_fijLQ2reUJZIgyTW_a6ewuxiw9dva4dH9zx4GZ0,1637
+ pbi_parsers/pq/tokens.py,sha256=vmcZcK_0E99HSR5FCk8LUrxTVT2D5ZeGX-AWPHSZdVc,2245
  pbi_parsers/pq/exprs/__init__.py,sha256=wV-G51GagUAkA6_uVjsNA5JskO2JN3xXJPjKtzCH5rU,2845
  pbi_parsers/pq/exprs/_base.py,sha256=GcfWW3rannZBvw4LyjdiGbWGJ1nctw-m5k6LGkX7Wk4,1118
  pbi_parsers/pq/exprs/_utils.py,sha256=kUCWSzCSy7HMKOWcGjmO4R1WiYHP23afXmq67A0ZAXY,1065
@@ -73,7 +73,7 @@ pbi_parsers/pq/exprs/statement.py,sha256=JSg48pGAU3Ka2pt4lzVsYlVOqGeF_ARGm8Ajf0l
  pbi_parsers/pq/exprs/try_expr.py,sha256=UcnqfA-t9S1LVrKqeNUT8n4JJcO-ZQZoJrxAdjJ-GMA,1692
  pbi_parsers/pq/exprs/type_expr.py,sha256=hH5ubrIJaxwQsopNJHUZ4ByS1rHEgv2Tf8ocYqSukXM,2570
  pbi_parsers/pq/exprs/variable.py,sha256=wp4t0QHIGA264sXnWp7XVe1H8MJzMIOaoLNBQe-dfNk,1602
- pbi_parsers-0.8.1.dist-info/METADATA,sha256=-B23BQU7BCehF7DUTBcDeD9gH8ndy730y41H3HxUicU,2906
- pbi_parsers-0.8.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- pbi_parsers-0.8.1.dist-info/licenses/LICENSE,sha256=Sn0IfXOE4B0iL9lZXmGmRuTGyJeCtefxcfws0bLjp2g,1072
- pbi_parsers-0.8.1.dist-info/RECORD,,
+ pbi_parsers-0.8.3.dist-info/METADATA,sha256=69HKbe_XOTTk3mtD8hfc--HTkZjUU1V_yBOjx9-qUdM,2906
+ pbi_parsers-0.8.3.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ pbi_parsers-0.8.3.dist-info/licenses/LICENSE,sha256=Sn0IfXOE4B0iL9lZXmGmRuTGyJeCtefxcfws0bLjp2g,1072
+ pbi_parsers-0.8.3.dist-info/RECORD,,