pbi-parsers 0.7.19__tar.gz → 0.7.21__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (122)
  1. pbi_parsers-0.7.21/.github/workflows/deploy-docs.yml +35 -0
  2. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/.github/workflows/deploy.yml +24 -0
  3. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/.gitignore +2 -1
  4. pbi_parsers-0.7.21/LICENSE +21 -0
  5. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/PKG-INFO +15 -1
  6. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/README.md +7 -0
  7. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/docs/docs/index.md +4 -2
  8. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/__init__.py +1 -1
  9. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/base/lexer.py +32 -33
  10. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/base/tokens.py +18 -13
  11. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/dax/exprs/_base.py +13 -13
  12. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/dax/exprs/_utils.py +11 -0
  13. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/dax/exprs/add_sub.py +10 -10
  14. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/dax/exprs/column.py +12 -12
  15. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/dax/exprs/div_mul.py +10 -10
  16. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/dax/exprs/function.py +31 -31
  17. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/dax/exprs/literal_number.py +9 -8
  18. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/dax/exprs/literal_string.py +8 -8
  19. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/dax/exprs/measure.py +8 -8
  20. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/dax/exprs/parens.py +12 -12
  21. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/dax/main.py +16 -16
  22. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/dax/utils.py +19 -19
  23. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pyproject.toml +10 -0
  24. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/tests/test_dax/test_exprs/test_literal_number.py +6 -0
  25. pbi_parsers-0.7.19/.github/workflows/publish-to-pypi.yml +0 -57
  26. pbi_parsers-0.7.19/deploy.py +0 -117
  27. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/docs/docs/api/dax/formatter.md +0 -0
  28. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/docs/docs/api/dax/lexer.md +0 -0
  29. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/docs/docs/api/dax/parser.md +0 -0
  30. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/docs/docs/api/pq/formatter.md +0 -0
  31. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/docs/docs/api/pq/lexer.md +0 -0
  32. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/docs/docs/api/pq/parser.md +0 -0
  33. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/docs/docs/api/shared/lexer.md +0 -0
  34. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/docs/docs/api/shared/text_slice.md +0 -0
  35. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/docs/docs/api/shared/token.md +0 -0
  36. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/docs/mkdocs.yml +0 -0
  37. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/base/__init__.py +0 -0
  38. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/dax/__init__.py +0 -0
  39. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/dax/exprs/__init__.py +0 -0
  40. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/dax/exprs/add_sub_unary.py +10 -10
  41. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/dax/exprs/array.py +13 -13
  42. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/dax/exprs/comparison.py +10 -10
  43. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/dax/exprs/concatenation.py +10 -10
  44. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/dax/exprs/exponent.py +10 -10
  45. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/dax/exprs/hierarchy.py +13 -13
  46. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/dax/exprs/identifier.py +9 -9
  47. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/dax/exprs/ins.py +10 -10
  48. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/dax/exprs/keyword.py +9 -9
  49. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/dax/exprs/logical.py +10 -10
  50. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/dax/exprs/none.py +9 -9
  51. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/dax/exprs/returns.py +17 -17
  52. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/dax/exprs/table.py +11 -11
  53. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/dax/exprs/variable.py +14 -14
  54. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/dax/formatter.py +8 -8
  55. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/dax/lexer.py +97 -97
  56. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/dax/parser.py +8 -8
  57. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/dax/tokens.py +0 -0
  58. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/__init__.py +0 -0
  59. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/exprs/__init__.py +0 -0
  60. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/exprs/_base.py +0 -0
  61. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/exprs/_utils.py +0 -0
  62. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/exprs/add_sub.py +0 -0
  63. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/exprs/add_sub_unary.py +0 -0
  64. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/exprs/and_or_expr.py +0 -0
  65. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/exprs/array.py +0 -0
  66. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/exprs/arrow.py +0 -0
  67. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/exprs/column.py +0 -0
  68. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/exprs/comparison.py +0 -0
  69. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/exprs/concatenation.py +0 -0
  70. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/exprs/div_mul.py +0 -0
  71. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/exprs/each.py +0 -0
  72. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/exprs/ellipsis_expr.py +0 -0
  73. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/exprs/function.py +0 -0
  74. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/exprs/identifier.py +0 -0
  75. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/exprs/if_expr.py +0 -0
  76. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/exprs/is_expr.py +0 -0
  77. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/exprs/keyword.py +0 -0
  78. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/exprs/literal_number.py +0 -0
  79. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/exprs/literal_string.py +0 -0
  80. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/exprs/meta.py +0 -0
  81. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/exprs/negation.py +0 -0
  82. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/exprs/none.py +0 -0
  83. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/exprs/not_expr.py +0 -0
  84. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/exprs/parens.py +0 -0
  85. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/exprs/record.py +0 -0
  86. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/exprs/row.py +0 -0
  87. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/exprs/row_index.py +0 -0
  88. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/exprs/statement.py +0 -0
  89. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/exprs/try_expr.py +0 -0
  90. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/exprs/type_expr.py +0 -0
  91. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/exprs/variable.py +0 -0
  92. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/formatter.py +0 -0
  93. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/lexer.py +0 -0
  94. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/main.py +0 -0
  95. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/parser.py +0 -0
  96. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/pbi_parsers/pq/tokens.py +0 -0
  97. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/tests/__init__.py +0 -0
  98. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/tests/test_dax/__init__.py +0 -0
  99. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/tests/test_dax/test_exprs/__init__.py +0 -0
  100. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/tests/test_dax/test_exprs/test_add_sub.py +0 -0
  101. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/tests/test_dax/test_exprs/test_add_sub_unary.py +0 -0
  102. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/tests/test_dax/test_exprs/test_array.py +0 -0
  103. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/tests/test_dax/test_exprs/test_column.py +0 -0
  104. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/tests/test_dax/test_exprs/test_comparison.py +0 -0
  105. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/tests/test_dax/test_exprs/test_concatenation.py +0 -0
  106. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/tests/test_dax/test_exprs/test_div_mul.py +0 -0
  107. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/tests/test_dax/test_exprs/test_exponent.py +0 -0
  108. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/tests/test_dax/test_exprs/test_function.py +0 -0
  109. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/tests/test_dax/test_exprs/test_hierarchy.py +0 -0
  110. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/tests/test_dax/test_exprs/test_identifier.py +0 -0
  111. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/tests/test_dax/test_exprs/test_ins.py +0 -0
  112. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/tests/test_dax/test_exprs/test_keyword.py +0 -0
  113. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/tests/test_dax/test_exprs/test_literal_string.py +0 -0
  114. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/tests/test_dax/test_exprs/test_logical.py +0 -0
  115. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/tests/test_dax/test_exprs/test_measure.py +0 -0
  116. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/tests/test_dax/test_exprs/test_parens.py +0 -0
  117. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/tests/test_dax/test_exprs/test_returns.py +0 -0
  118. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/tests/test_dax/test_exprs/test_table.py +0 -0
  119. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/tests/test_dax/test_exprs/test_variable.py +0 -0
  120. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/tests/test_dax/test_formatter/__init__.py +0 -0
  121. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/tests/test_dax/test_formatter/test_basic.py +0 -0
  122. {pbi_parsers-0.7.19 → pbi_parsers-0.7.21}/tests/test_dax/test_lexer.py +0 -0
@@ -0,0 +1,35 @@
+ # Setup: go to https://github.com/<org/username>/<repo>/settings/actions and set Workflow permissions to "Read and write permissions"
+ name: Deploy MkDocs Site
+
+ on:
+   push:
+     branches:
+       - main
+     paths:
+       - 'docs/**'
+       - '.github/workflows/deploy-docs.yml'
+
+ jobs:
+   deploy-docs:
+     runs-on: ubuntu-latest
+
+     steps:
+       - name: Checkout repository
+         uses: actions/checkout@v4
+         with:
+           fetch-depth: 0 # Fetch all history
+
+       - name: Set up Python
+         uses: actions/setup-python@v5
+         with:
+           python-version: '3.x'
+
+       - name: Install MkDocs and theme
+         run: |
+           python -m pip install .[docs]
+
+       - name: Deploy documentation to GitHub Pages
+         env:
+           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+         run: |
+           mkdocs gh-deploy --clean -f docs/mkdocs.yml
@@ -1,3 +1,4 @@
+ # Setup: go to https://github.com/<org/username>/<repo>/settings/actions and set Workflow permissions to "Read and write permissions"
  name: Tag and Release on Version Change

  on:
@@ -17,6 +18,8 @@ jobs:
      steps:
        - name: Checkout repository
          uses: actions/checkout@v4
+         with:
+           fetch-depth: 0 # Fetch all history for tags

        - name: Set up Python
          uses: actions/setup-python@v5
@@ -120,3 +123,24 @@ jobs:

        - name: Publish Package to PyPI
          uses: pypa/gh-action-pypi-publish@release/v1
+   coverage:
+     runs-on: ubuntu-latest
+     steps:
+       - uses: actions/checkout@v4
+       - name: Set up Python
+         uses: actions/setup-python@v5
+         with:
+           python-version: '3.x' # Specify your desired Python version
+       - name: Install dependencies
+         run: |
+           pip install .[dev]
+       - name: Run tests with coverage
+         run: |
+           coverage run --source=pbi_parsers -m pytest tests/
+           coverage xml
+       - name: Upload coverage to Coveralls
+         uses: coverallsapp/github-action@v2
+         with:
+           github-token: ${{ secrets.GITHUB_TOKEN }}
+           format: cobertura
+           file: coverage.xml
@@ -2,4 +2,5 @@ __pycache__
  venv
  dist
  docs/site
- .env
+ .env
+ .coverage
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2025 douglassimonsen
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
@@ -1,12 +1,19 @@
  Metadata-Version: 2.4
  Name: pbi_parsers
- Version: 0.7.19
+ Version: 0.7.21
  Summary: Power BI lexer, parsers, and formatters for DAX and M (Power Query) languages
+ Project-URL: Homepage, https://github.com/douglassimonsen/pbi_parsers
+ Project-URL: Documentation, https://douglassimonsen.github.io/pbi_parsers/
+ License-File: LICENSE
  Requires-Python: >=3.11.0
+ Requires-Dist: colorama>=0.4.6
  Requires-Dist: jinja2>=3.1.6
  Provides-Extra: dev
  Requires-Dist: build>=1.2.2; extra == 'dev'
+ Requires-Dist: coverage; extra == 'dev'
+ Requires-Dist: coveralls; extra == 'dev'
  Requires-Dist: pre-commit>=3.8.0; extra == 'dev'
+ Requires-Dist: pytest; extra == 'dev'
  Requires-Dist: ruff>=0.12.7; extra == 'dev'
  Provides-Extra: docs
  Requires-Dist: mkdocs-material>=9.6.16; extra == 'docs'
@@ -16,6 +23,13 @@ Description-Content-Type: text/markdown

  # Overview

+ [![PyPI Downloads](https://static.pepy.tech/badge/pbi-parsers)](https://pepy.tech/projects/pbi-parsers)
+ ![Python](https://img.shields.io/badge/python-3.11-blue.svg)
+ [![Coverage Status](https://coveralls.io/repos/github/douglassimonsen/pbi_parsers/badge.svg?branch=main)](https://coveralls.io/github/douglassimonsen/pbi_parsers?branch=main)
+ ![Repo Size](https://img.shields.io/github/repo-size/douglassimonsen/pbi_parsers)
+ [![FOSSA Status](https://app.fossa.com/api/projects/git%2Bgithub.com%2Fdouglassimonsen%2Fpbi_parsers.svg?type=shield&issueType=license)](https://app.fossa.com/projects/git%2Bgithub.com%2Fdouglassimonsen%2Fpbi_parsers?ref=badge_shield&issueType=license)
+ [![FOSSA Status](https://app.fossa.com/api/projects/git%2Bgithub.com%2Fdouglassimonsen%2Fpbi_parsers.svg?type=shield&issueType=security)](https://app.fossa.com/projects/git%2Bgithub.com%2Fdouglassimonsen%2Fpbi_parsers?ref=badge_shield&issueType=security)
+
  Based on [Crafting Interpreters](https://timothya.com/pdfs/crafting-interpreters.pdf). Library provides lexers, parsers, and formatters for DAX and Power Query (M) languages. Designed to support code introspection and analysis, not execution. This enables developement of [ruff](https://github.com/astral-sh/ruff)-equivalent tools for DAX and Power Query. It also enables extracting metadata from DAX and Power Query code, such PQ source types (Excel, SQL, etc.) and DAX lineage dependencies.

  For more information, see the [docs](https://douglassimonsen.github.io/pbi_parsers/)
@@ -1,5 +1,12 @@
  # Overview

+ [![PyPI Downloads](https://static.pepy.tech/badge/pbi-parsers)](https://pepy.tech/projects/pbi-parsers)
+ ![Python](https://img.shields.io/badge/python-3.11-blue.svg)
+ [![Coverage Status](https://coveralls.io/repos/github/douglassimonsen/pbi_parsers/badge.svg?branch=main)](https://coveralls.io/github/douglassimonsen/pbi_parsers?branch=main)
+ ![Repo Size](https://img.shields.io/github/repo-size/douglassimonsen/pbi_parsers)
+ [![FOSSA Status](https://app.fossa.com/api/projects/git%2Bgithub.com%2Fdouglassimonsen%2Fpbi_parsers.svg?type=shield&issueType=license)](https://app.fossa.com/projects/git%2Bgithub.com%2Fdouglassimonsen%2Fpbi_parsers?ref=badge_shield&issueType=license)
+ [![FOSSA Status](https://app.fossa.com/api/projects/git%2Bgithub.com%2Fdouglassimonsen%2Fpbi_parsers.svg?type=shield&issueType=security)](https://app.fossa.com/projects/git%2Bgithub.com%2Fdouglassimonsen%2Fpbi_parsers?ref=badge_shield&issueType=security)
+
  Based on [Crafting Interpreters](https://timothya.com/pdfs/crafting-interpreters.pdf). Library provides lexers, parsers, and formatters for DAX and Power Query (M) languages. Designed to support code introspection and analysis, not execution. This enables developement of [ruff](https://github.com/astral-sh/ruff)-equivalent tools for DAX and Power Query. It also enables extracting metadata from DAX and Power Query code, such PQ source types (Excel, SQL, etc.) and DAX lineage dependencies.

  For more information, see the [docs](https://douglassimonsen.github.io/pbi_parsers/)
@@ -1,6 +1,8 @@
  # Overview

- Based on [Crafting Interpreters](https://timothya.com/pdfs/crafting-interpreters.pdf). Library provides lexers, parsers, and formatters for DAX and Power Query (M) languages. Designed to support code introspection and analysis, not execution. This enables development of [ruff](https://github.com/astral-sh/ruff)-equivalent tools for DAX and Power Query. It also enables extracting metadata from DAX and Power Query code, such PQ source types (Excel, SQL, etc.) and DAX lineage dependencies.
+ Based on [Crafting Interpreters](https://timothya.com/pdfs/crafting-interpreters.pdf). Library provides lexers, parsers, and formatters for DAX and Power Query (M) languages. Designed to support code introspection and analysis, not execution. This enables development of [ruff](https://github.com/astral-sh/ruff)-equivalent tools for DAX and Power Query. It also enables extracting metadata from DAX and Power Query code, such PQ source types (Excel, SQL, etc.) and DAX lineage dependencies.
+
+ This library is used in [pbi_ruff](https://github.com/douglassimonsen/pbi_ruff) to provide DAX and Power Query (M) linting.

  # Installation

@@ -11,7 +13,7 @@ python -m pip install pbi_parsers
  ## Functionality

  !!! info "Rust Implementation"
-     Although the library is primarily implemented in Python, there are plans to implement a Rust version for performance and efficiency.
+     Although the library is primarily implemented in Python, there are plans to implement a Rust version for performance and additional type-safety.

  - DAX
    * [x] Lexer
@@ -1,6 +1,6 @@
  from . import dax, pq

- __version__ = "0.7.19"
+ __version__ = "0.7.21"


  __all__ = [
@@ -17,10 +17,33 @@ class BaseLexer:
          self.current_position = 0
          self.tokens = []

-     def scan_helper(self) -> BaseToken:
-         """Contains the orchestration logic for converting tokens into expressions."""
-         msg = "Subclasses should implement match_tokens method."
-         raise NotImplementedError(msg)
+     def advance(self, chunk: int = 1) -> None:
+         """Advances the current position by the specified chunk size.
+
+         Generally used alongside peek to consume characters.
+
+         Args:
+             chunk (int): The number of characters to advance the current position.
+
+         Raises:
+             ValueError: If the current position exceeds a predefined MAX_POSITION (1,000,000 characters).
+                 This is to avoid errors with the lexer causing the process to hang
+
+         """
+         if self.current_position > MAX_POSITION:
+             msg = f"Current position exceeds {MAX_POSITION:,} characters."
+             raise ValueError(msg)
+         self.current_position += chunk
+
+     def at_end(self) -> bool:
+         """Checks if the current position is at (or beyond) the end of the source.
+
+         Returns:
+             bool: True if the current position is at or beyond the end of the source, False
+                 otherwise.
+
+         """
+         return self.current_position >= len(self.source)

      def match(
          self,
@@ -32,7 +55,6 @@ class BaseLexer:
          """Match a string or a callable matcher against the current position in the source.

          Args:
-         ----
              matcher (Callable[[str], bool] | str): A string to match or a callable that
                  takes a string and returns a boolean.
              chunk (int): The number of characters to check from the current position.
@@ -61,7 +83,7 @@ class BaseLexer:
          return False

      def peek(self, chunk: int = 1) -> str:
-         """Returns the next chunk of text from the current position. Defaults to a single character.
+         """Returns the next section of text from the current position of length `chunk`. Defaults to a single character.

          Args:
              chunk (int): The number of characters to return from the current position.
@@ -87,24 +109,6 @@ class BaseLexer:
          """
          return self.source[self.current_position :]

-     def advance(self, chunk: int = 1) -> None:
-         """Advances the current position by the specified chunk size.
-
-         Generally used alongside peek to consume characters.
-
-         Args:
-             chunk (int): The number of characters to advance the current position.
-
-         Raises:
-             ValueError: If the current position exceeds a predefined MAX_POSITION (1,000,000 characters).
-                 This is to avoid errors with the lexer causing the process to hang
-
-         """
-         if self.current_position > MAX_POSITION:
-             msg = f"Current position exceeds {MAX_POSITION:,} characters."
-             raise ValueError(msg)
-         self.current_position += chunk
-
      def scan(self) -> tuple[BaseToken, ...]:
          """Repeatedly calls scan_helper until the end of the source is reached.

@@ -116,12 +120,7 @@ class BaseLexer:
              self.tokens.append(self.scan_helper())
          return tuple(self.tokens)

-     def at_end(self) -> bool:
-         """Checks if the current position is at (or beyond) the end of the source.
-
-         Returns:
-             bool: True if the current position is at or beyond the end of the source, False
-                 otherwise.
-
-         """
-         return self.current_position >= len(self.source)
+     def scan_helper(self) -> BaseToken:
+         """Contains the orchestration logic for converting tokens into expressions."""
+         msg = "Subclasses should implement match_tokens method."
+         raise NotImplementedError(msg)
@@ -32,15 +32,20 @@ class BaseToken:
      tok_type: Any
      text_slice: TextSlice = field(default_factory=TextSlice)

+     def __eq__(self, other: object) -> bool:
+         """Checks equality based on token type and text slice."""
+         if not isinstance(other, BaseToken):
+             return NotImplemented
+         return self.tok_type == other.tok_type and self.text_slice == other.text_slice
+
+     def __hash__(self) -> int:
+         """Returns a hash based on token type and text slice."""
+         return hash((self.tok_type, self.text_slice))
+
      def __repr__(self) -> str:
          pretty_text = self.text_slice.get_text().replace("\n", "\\n").replace("\r", "\\r")
          return f"Token(type={self.tok_type.name}, text='{pretty_text}')"

-     @property
-     def text(self) -> str:
-         """Returns the text underlying the token."""
-         return self.text_slice.get_text()
-
      def position(self) -> tuple[int, int]:
          """Returns the start and end positions of the token.

@@ -50,12 +55,12 @@ class BaseToken:
          """
          return self.text_slice.start, self.text_slice.end

-     def __eq__(self, other: object) -> bool:
-         """Checks equality based on token type and text slice."""
-         if not isinstance(other, BaseToken):
-             return NotImplemented
-         return self.tok_type == other.tok_type and self.text_slice == other.text_slice
+     @property
+     def text(self) -> str:
+         """Returns the text underlying the token.

-     def __hash__(self) -> int:
-         """Returns a hash based on token type and text slice."""
-         return hash((self.tok_type, self.text_slice))
+         Returns:
+             str: The text of the token as a string.
+
+         """
+         return self.text_slice.get_text()
@@ -10,8 +10,17 @@ class Expression:
      pre_comments: list[Any] = []
      post_comments: list[Any] = []

-     def pprint(self) -> str:
-         msg = "Subclasses should implement this method."
+     def __repr__(self) -> str:
+         return self.pprint()
+
+     def children(self) -> list["Expression"]:
+         """Returns a list of child expressions."""
+         msg = "This method should be implemented by subclasses."
+         raise NotImplementedError(msg)
+
+     def full_text(self) -> str:
+         """Returns the full text of the expression."""
+         msg = "This method should be implemented by subclasses."
          raise NotImplementedError(msg)

      @classmethod
@@ -27,20 +36,11 @@ class Expression:
      def match_tokens(parser: "Parser", match_tokens: list[TokenType]) -> bool:
          return all(parser.peek(i).tok_type == token_type for i, token_type in enumerate(match_tokens))

-     def __repr__(self) -> str:
-         return self.pprint()
-
-     def children(self) -> list["Expression"]:
-         """Returns a list of child expressions."""
-         msg = "This method should be implemented by subclasses."
-         raise NotImplementedError(msg)
-
      def position(self) -> tuple[int, int]:
          """Returns the start and end positions of the expression in the source code."""
          msg = "This method should be implemented by subclasses."
          raise NotImplementedError(msg)

-     def full_text(self) -> str:
-         """Returns the full text of the expression."""
-         msg = "This method should be implemented by subclasses."
+     def pprint(self) -> str:
+         msg = "Subclasses should implement this method."
          raise NotImplementedError(msg)
@@ -10,6 +10,17 @@ R = TypeVar("R") # Represents the return type of the decorated function
10
10
 
11
11
 
12
12
  def lexer_reset(func: Callable[P, R]) -> Callable[P, R]:
13
+ """Decorator to reset the lexer state before and after parsing an expression.
14
+
15
+ This decorator performs the following actions:
16
+ 1. Collects pre-comments before parsing.
17
+ 2. Caches the result of the parsing function to avoid redundant parsing.
18
+ 3. Collects post-comments after parsing.
19
+
20
+ The caching is required since the operator precedence otherwise leads to all other expressions being
21
+ called multiple times.
22
+ """
23
+
13
24
  def lexer_reset_inner(*args: P.args, **kwargs: P.kwargs) -> R:
14
25
  parser = args[1]
15
26
  if not isinstance(parser, Parser):
@@ -28,6 +28,13 @@ class AddSubExpression(Expression):
          self.left = left
          self.right = right

+     def children(self) -> list[Expression]:
+         """Returns a list of child expressions."""
+         return [self.left, self.right]
+
+     def full_text(self) -> str:
+         return self.operator.text_slice.full_text
+
      @classmethod
      @lexer_reset
      def match(cls, parser: "Parser") -> "AddSubExpression | None":
@@ -52,6 +59,9 @@ class AddSubExpression(Expression):
              raise ValueError(msg)
          return AddSubExpression(operator=operator, left=left_term, right=right_term)

+     def position(self) -> tuple[int, int]:
+         return self.left.position()[0], self.right.position()[1]
+
      def pprint(self) -> str:
          op_str = "Add" if self.operator.text == "+" else "Sub"
          left_str = textwrap.indent(self.left.pprint(), " " * 10).lstrip()
@@ -61,13 +71,3 @@ class AddSubExpression(Expression):
  left: {left_str},
  right: {right_str}
  )""".strip()
-
-     def children(self) -> list[Expression]:
-         """Returns a list of child expressions."""
-         return [self.left, self.right]
-
-     def position(self) -> tuple[int, int]:
-         return self.left.position()[0], self.right.position()[1]
-
-     def full_text(self) -> str:
-         return self.operator.text_slice.full_text
@@ -25,12 +25,12 @@ class ColumnExpression(Expression):
          self.table = table
          self.column = column

-     def pprint(self) -> str:
-         return f"""
- Column (
- {self.table.text},
- {self.column.text}
- )""".strip()
+     def children(self) -> list[Expression]:  # noqa: PLR6301
+         """Returns a list of child expressions."""
+         return []
+
+     def full_text(self) -> str:
+         return self.table.text_slice.full_text

      @classmethod
      @lexer_reset
@@ -45,12 +45,12 @@ Column (
              return None
          return ColumnExpression(table=table, column=column)

-     def children(self) -> list[Expression]:  # noqa: PLR6301
-         """Returns a list of child expressions."""
-         return []
-
      def position(self) -> tuple[int, int]:
          return self.table.text_slice.start, self.column.text_slice.end

-     def full_text(self) -> str:
-         return self.table.text_slice.full_text
+     def pprint(self) -> str:
+         return f"""
+ Column (
+ {self.table.text},
+ {self.column.text}
+ )""".strip()
@@ -30,6 +30,13 @@ class DivMulExpression(Expression):
          self.left = left
          self.right = right

+     def children(self) -> list[Expression]:
+         """Returns a list of child expressions."""
+         return [self.left, self.right]
+
+     def full_text(self) -> str:
+         return self.operator.text_slice.full_text
+
      @classmethod
      @lexer_reset
      def match(cls, parser: "Parser") -> "DivMulExpression | None":
@@ -51,6 +58,9 @@ class DivMulExpression(Expression):
              raise ValueError(msg)
          return DivMulExpression(operator=operator, left=left_term, right=right_term)

+     def position(self) -> tuple[int, int]:
+         return self.left.position()[0], self.right.position()[1]
+
      def pprint(self) -> str:
          op_str = {
              TokenType.MULTIPLY_SIGN: "Mul",
@@ -63,13 +73,3 @@ class DivMulExpression(Expression):
  left: {left_str},
  right: {right_str}
  )""".strip()
-
-     def children(self) -> list[Expression]:
-         """Returns a list of child expressions."""
-         return [self.left, self.right]
-
-     def position(self) -> tuple[int, int]:
-         return self.left.position()[0], self.right.position()[1]
-
-     def full_text(self) -> str:
-         return self.operator.text_slice.full_text
@@ -29,30 +29,15 @@ class FunctionExpression(Expression):
          self.args = args
          self.parens = parens

-     def pprint(self) -> str:
-         args = ",\n".join(arg.pprint() for arg in self.args)
-         args = textwrap.indent(args, " " * 10)[10:]
-         return f"""
- Function (
- name: {"".join(x.text for x in self.name_parts)},
- args: {args}
- ) """.strip()
-
-     @classmethod
-     def _match_function_name(cls, parser: "Parser") -> list[Token] | None:
-         name_parts = [parser.consume()]
-         if name_parts[0].tok_type != TokenType.UNQUOTED_IDENTIFIER:
-             return None
+     def children(self) -> list[Expression]:
+         """Returns a list of child expressions."""
+         return self.args

-         while parser.peek().tok_type != TokenType.LEFT_PAREN:
-             period, name = parser.consume(), parser.consume()
-             if name.tok_type != TokenType.UNQUOTED_IDENTIFIER:
-                 return None
-             if period.tok_type != TokenType.PERIOD:
-                 return None
-             name_parts.extend((period, name))
+     def full_text(self) -> str:
+         return self.parens[0].text_slice.full_text

-         return name_parts
+     def function_name(self) -> str:
+         return "".join(x.text for x in self.name_parts)

      @classmethod
      @lexer_reset
@@ -88,15 +73,30 @@ Function (

          return FunctionExpression(name_parts=name_parts, args=args, parens=(left_paren, right_paren))

-     def function_name(self) -> str:
-         return "".join(x.text for x in self.name_parts)
-
-     def children(self) -> list[Expression]:
-         """Returns a list of child expressions."""
-         return self.args
-
      def position(self) -> tuple[int, int]:
          return self.parens[0].text_slice.start, self.parens[1].text_slice.end

-     def full_text(self) -> str:
-         return self.parens[0].text_slice.full_text
+     def pprint(self) -> str:
+         args = ",\n".join(arg.pprint() for arg in self.args)
+         args = textwrap.indent(args, " " * 10)[10:]
+         return f"""
+ Function (
+ name: {"".join(x.text for x in self.name_parts)},
+ args: {args}
+ ) """.strip()
+
+     @classmethod
+     def _match_function_name(cls, parser: "Parser") -> list[Token] | None:
+         name_parts = [parser.consume()]
+         if name_parts[0].tok_type != TokenType.UNQUOTED_IDENTIFIER:
+             return None
+
+         while parser.peek().tok_type != TokenType.LEFT_PAREN:
+             period, name = parser.consume(), parser.consume()
+             if name.tok_type != TokenType.UNQUOTED_IDENTIFIER:
+                 return None
+             if period.tok_type != TokenType.PERIOD:
+                 return None
+             name_parts.extend((period, name))
+
+         return name_parts
@@ -16,6 +16,7 @@ class LiteralNumberExpression(Expression):
          42
          3.14
          -1000
+         1.1e2

      """

@@ -24,8 +25,12 @@ class LiteralNumberExpression(Expression):
      def __init__(self, value: Token) -> None:
          self.value = value

-     def pprint(self) -> str:
-         return f"Number ({self.value.text})"
+     def children(self) -> list[Expression]:  # noqa: PLR6301
+         """Returns a list of child expressions."""
+         return []
+
+     def full_text(self) -> str:
+         return self.value.text_slice.full_text

      @classmethod
      @lexer_reset
@@ -35,12 +40,8 @@ class LiteralNumberExpression(Expression):
              return LiteralNumberExpression(value=value)
          return None

-     def children(self) -> list[Expression]:  # noqa: PLR6301
-         """Returns a list of child expressions."""
-         return []
-
      def position(self) -> tuple[int, int]:
          return self.value.text_slice.start, self.value.text_slice.end

-     def full_text(self) -> str:
-         return self.value.text_slice.full_text
+     def pprint(self) -> str:
+         return f"Number ({self.value.text})"
@@ -23,8 +23,12 @@ class LiteralStringExpression(Expression):
      def __init__(self, value: Token) -> None:
          self.value = value

-     def pprint(self) -> str:
-         return f"String ({self.value.text})"
+     def children(self) -> list[Expression]:  # noqa: PLR6301
+         """Returns a list of child expressions."""
+         return []
+
+     def full_text(self) -> str:
+         return self.value.text_slice.full_text

      @classmethod
      @lexer_reset
@@ -34,12 +38,8 @@ class LiteralStringExpression(Expression):
              return LiteralStringExpression(value=value)
          return None

-     def children(self) -> list[Expression]:  # noqa: PLR6301
-         """Returns a list of child expressions."""
-         return []
-
      def position(self) -> tuple[int, int]:
          return self.value.text_slice.start, self.value.text_slice.end

-     def full_text(self) -> str:
-         return self.value.text_slice.full_text
+     def pprint(self) -> str:
+         return f"String ({self.value.text})"