pbi-parsers 0.7.12__tar.gz → 0.7.20__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (122)
  1. pbi_parsers-0.7.20/.github/workflows/deploy-docs.yml +35 -0
  2. pbi_parsers-0.7.20/.github/workflows/deploy.yml +146 -0
  3. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/.gitignore +2 -1
  4. pbi_parsers-0.7.20/LICENSE +21 -0
  5. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/PKG-INFO +13 -1
  6. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/README.md +7 -0
  7. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/docs/docs/index.md +4 -2
  8. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/__init__.py +1 -1
  9. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/base/lexer.py +32 -33
  10. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/base/tokens.py +18 -13
  11. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/dax/exprs/_base.py +13 -13
  12. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/dax/exprs/_utils.py +11 -0
  13. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/dax/exprs/add_sub.py +10 -10
  14. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/dax/exprs/column.py +12 -12
  15. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/dax/exprs/div_mul.py +10 -10
  16. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/dax/exprs/function.py +31 -31
  17. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/dax/exprs/literal_number.py +9 -8
  18. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/dax/exprs/literal_string.py +8 -8
  19. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/dax/exprs/measure.py +8 -8
  20. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/dax/exprs/parens.py +12 -12
  21. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/dax/main.py +16 -16
  22. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/dax/utils.py +19 -19
  23. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pyproject.toml +7 -0
  24. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/tests/test_dax/test_exprs/test_literal_number.py +6 -0
  25. pbi_parsers-0.7.12/.github/workflows/publish-to-pypi.yml +0 -57
  26. pbi_parsers-0.7.12/test.py +0 -17
  27. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/docs/docs/api/dax/formatter.md +0 -0
  28. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/docs/docs/api/dax/lexer.md +0 -0
  29. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/docs/docs/api/dax/parser.md +0 -0
  30. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/docs/docs/api/pq/formatter.md +0 -0
  31. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/docs/docs/api/pq/lexer.md +0 -0
  32. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/docs/docs/api/pq/parser.md +0 -0
  33. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/docs/docs/api/shared/lexer.md +0 -0
  34. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/docs/docs/api/shared/text_slice.md +0 -0
  35. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/docs/docs/api/shared/token.md +0 -0
  36. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/docs/mkdocs.yml +0 -0
  37. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/base/__init__.py +0 -0
  38. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/dax/__init__.py +0 -0
  39. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/dax/exprs/__init__.py +0 -0
  40. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/dax/exprs/add_sub_unary.py +10 -10
  41. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/dax/exprs/array.py +13 -13
  42. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/dax/exprs/comparison.py +10 -10
  43. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/dax/exprs/concatenation.py +10 -10
  44. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/dax/exprs/exponent.py +10 -10
  45. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/dax/exprs/hierarchy.py +13 -13
  46. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/dax/exprs/identifier.py +9 -9
  47. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/dax/exprs/ins.py +10 -10
  48. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/dax/exprs/keyword.py +9 -9
  49. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/dax/exprs/logical.py +10 -10
  50. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/dax/exprs/none.py +9 -9
  51. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/dax/exprs/returns.py +17 -17
  52. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/dax/exprs/table.py +11 -11
  53. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/dax/exprs/variable.py +14 -14
  54. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/dax/formatter.py +8 -8
  55. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/dax/lexer.py +97 -97
  56. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/dax/parser.py +8 -8
  57. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/dax/tokens.py +0 -0
  58. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/__init__.py +0 -0
  59. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/exprs/__init__.py +0 -0
  60. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/exprs/_base.py +0 -0
  61. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/exprs/_utils.py +0 -0
  62. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/exprs/add_sub.py +0 -0
  63. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/exprs/add_sub_unary.py +0 -0
  64. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/exprs/and_or_expr.py +0 -0
  65. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/exprs/array.py +0 -0
  66. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/exprs/arrow.py +0 -0
  67. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/exprs/column.py +0 -0
  68. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/exprs/comparison.py +0 -0
  69. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/exprs/concatenation.py +0 -0
  70. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/exprs/div_mul.py +0 -0
  71. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/exprs/each.py +0 -0
  72. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/exprs/ellipsis_expr.py +0 -0
  73. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/exprs/function.py +0 -0
  74. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/exprs/identifier.py +0 -0
  75. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/exprs/if_expr.py +0 -0
  76. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/exprs/is_expr.py +0 -0
  77. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/exprs/keyword.py +0 -0
  78. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/exprs/literal_number.py +0 -0
  79. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/exprs/literal_string.py +0 -0
  80. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/exprs/meta.py +0 -0
  81. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/exprs/negation.py +0 -0
  82. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/exprs/none.py +0 -0
  83. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/exprs/not_expr.py +0 -0
  84. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/exprs/parens.py +0 -0
  85. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/exprs/record.py +0 -0
  86. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/exprs/row.py +0 -0
  87. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/exprs/row_index.py +0 -0
  88. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/exprs/statement.py +0 -0
  89. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/exprs/try_expr.py +0 -0
  90. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/exprs/type_expr.py +0 -0
  91. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/exprs/variable.py +0 -0
  92. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/formatter.py +0 -0
  93. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/lexer.py +0 -0
  94. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/main.py +0 -0
  95. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/parser.py +0 -0
  96. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/pbi_parsers/pq/tokens.py +0 -0
  97. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/tests/__init__.py +0 -0
  98. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/tests/test_dax/__init__.py +0 -0
  99. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/tests/test_dax/test_exprs/__init__.py +0 -0
  100. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/tests/test_dax/test_exprs/test_add_sub.py +0 -0
  101. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/tests/test_dax/test_exprs/test_add_sub_unary.py +0 -0
  102. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/tests/test_dax/test_exprs/test_array.py +0 -0
  103. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/tests/test_dax/test_exprs/test_column.py +0 -0
  104. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/tests/test_dax/test_exprs/test_comparison.py +0 -0
  105. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/tests/test_dax/test_exprs/test_concatenation.py +0 -0
  106. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/tests/test_dax/test_exprs/test_div_mul.py +0 -0
  107. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/tests/test_dax/test_exprs/test_exponent.py +0 -0
  108. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/tests/test_dax/test_exprs/test_function.py +0 -0
  109. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/tests/test_dax/test_exprs/test_hierarchy.py +0 -0
  110. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/tests/test_dax/test_exprs/test_identifier.py +0 -0
  111. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/tests/test_dax/test_exprs/test_ins.py +0 -0
  112. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/tests/test_dax/test_exprs/test_keyword.py +0 -0
  113. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/tests/test_dax/test_exprs/test_literal_string.py +0 -0
  114. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/tests/test_dax/test_exprs/test_logical.py +0 -0
  115. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/tests/test_dax/test_exprs/test_measure.py +0 -0
  116. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/tests/test_dax/test_exprs/test_parens.py +0 -0
  117. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/tests/test_dax/test_exprs/test_returns.py +0 -0
  118. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/tests/test_dax/test_exprs/test_table.py +0 -0
  119. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/tests/test_dax/test_exprs/test_variable.py +0 -0
  120. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/tests/test_dax/test_formatter/__init__.py +0 -0
  121. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/tests/test_dax/test_formatter/test_basic.py +0 -0
  122. {pbi_parsers-0.7.12 → pbi_parsers-0.7.20}/tests/test_dax/test_lexer.py +0 -0
@@ -0,0 +1,35 @@
+ # Setup: go to https://github.com/<org/username>/<repo>/settings/actions and set Workflow permissions to "Read and write permissions"
+ name: Deploy MkDocs Site
+
+ on:
+   push:
+     branches:
+       - main
+     paths:
+       - 'docs/**'
+       - '.github/workflows/deploy-docs.yml'
+
+ jobs:
+   deploy-docs:
+     runs-on: ubuntu-latest
+
+     steps:
+       - name: Checkout repository
+         uses: actions/checkout@v4
+         with:
+           fetch-depth: 0 # Fetch all history
+
+       - name: Set up Python
+         uses: actions/setup-python@v5
+         with:
+           python-version: '3.x'
+
+       - name: Install MkDocs and theme
+         run: |
+           python -m pip install .[docs]
+
+       - name: Deploy documentation to GitHub Pages
+         env:
+           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+         run: |
+           mkdocs gh-deploy --clean -f docs/mkdocs.yml
@@ -0,0 +1,146 @@
+ # Setup: go to https://github.com/<org/username>/<repo>/settings/actions and set Workflow permissions to "Read and write permissions"
+ name: Tag and Release on Version Change
+
+ on:
+   push:
+     branches:
+       - main
+ env:
+   PACKAGE_NAME: pbi_parsers # <- replace with your actual package name
+
+ jobs:
+   tag:
+     runs-on: ubuntu-latest
+     outputs:
+       version: ${{ steps.get_version.outputs.VERSION }}
+       tagged: ${{ steps.check_tag.outputs.TAG_EXISTS == 'false' }}
+
+     steps:
+       - name: Checkout repository
+         uses: actions/checkout@v4
+         with:
+           fetch-depth: 0 # Fetch all history for tags
+
+       - name: Set up Python
+         uses: actions/setup-python@v5
+         with:
+           python-version: '3.x'
+
+       - name: Get current version from file
+         id: get_version
+         run: |
+           VERSION=$(grep -oP '__version__ = \"\K[^\"]*' $PACKAGE_NAME/__init__.py)
+           echo "VERSION=$VERSION" >> $GITHUB_OUTPUT
+
+       - name: Check if tag already exists
+         id: check_tag
+         run: |
+           if git rev-parse "refs/tags/v${{ steps.get_version.outputs.VERSION }}" >/dev/null 2>&1; then
+             echo "TAG_EXISTS=true" >> $GITHUB_OUTPUT
+           else
+             echo "TAG_EXISTS=false" >> $GITHUB_OUTPUT
+           fi
+
+       - name: Create tag and GitHub release
+         if: steps.check_tag.outputs.TAG_EXISTS == 'false'
+         env:
+           VERSION: ${{ steps.get_version.outputs.VERSION }}
+           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+         run: |
+           git config user.name "github-actions"
+           git config user.email "github-actions@github.com"
+           git tag -a "v$VERSION" -m "Release v$VERSION"
+           git push origin "v$VERSION"
+
+           gh release create "v$VERSION" \
+             --title "v$VERSION" \
+             --notes "Automated release for version $VERSION"
+   release:
+     needs: tag
+     if: needs.tag.outputs.tagged == 'true'
+     runs-on: ubuntu-latest
+     steps:
+       - name: Checkout code at tag
+         uses: actions/checkout@v4
+         with:
+           fetch-depth: 0 # Required to access tags
+           ref: "refs/tags/v${{ needs.tag.outputs.version }}"
+
+       - name: Set up Python
+         uses: actions/setup-python@v5
+         with:
+           python-version: '3.x'
+
+       - name: Build package (sdist and wheel)
+         run: |
+           python -m pip install --upgrade build
+           python -m build
+
+       - name: Get commit message for release notes
+         id: commit_message
+         run: |
+           MSG=$(git log -1 --pretty=%B)
+           echo "MSG<<EOF" >> $GITHUB_OUTPUT
+           echo "$MSG" >> $GITHUB_OUTPUT
+           echo "EOF" >> $GITHUB_OUTPUT
+
+       - name: Upload GitHub release
+         uses: softprops/action-gh-release@v1
+         with:
+           tag_name: "v${{ needs.tag.outputs.version }}"
+           name: "v${{ needs.tag.outputs.version }}"
+           body: "${{ steps.commit_message.outputs.MSG }}"
+           files: |
+             dist/*.whl
+             dist/*.tar.gz
+         env:
+           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+       - name: Upload dist artifacts
+         uses: actions/upload-artifact@v4
+         with:
+           name: dist-files
+           path: dist/*
+   publish:
+     needs:
+       - release
+       - tag
+     if: needs.tag.outputs.tagged == 'true'
+     runs-on: ubuntu-latest
+     environment:
+       name: pypi
+       url: https://pypi.org/p/${{ env.PACKAGE_NAME }} # Replace <package-name> with your PyPI project name
+     permissions:
+       id-token: write # IMPORTANT: mandatory for trusted publishing
+
+
+     steps:
+       - name: Download dist artifacts
+         uses: actions/download-artifact@v4
+         with:
+           name: dist-files
+           path: dist
+
+       - name: Publish Package to PyPI
+         uses: pypa/gh-action-pypi-publish@release/v1
+   coverage:
+     runs-on: ubuntu-latest
+     steps:
+       - uses: actions/checkout@v4
+       - name: Set up Python
+         uses: actions/setup-python@v5
+         with:
+           python-version: '3.x' # Specify your desired Python version
+       - name: Install dependencies
+         run: |
+           pip install .[dev]
+       - name: Run tests with coverage
+         run: |
+           coverage run --source=pbi_parsers -m pytest tests/
+           coverage xml
+       - name: Upload coverage to Coveralls
+         uses: coverallsapp/github-action@v2
+         with:
+           github-token: ${{ secrets.GITHUB_TOKEN }}
+           format: cobertura
+           file: coverage.xml
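
The tag job above pins the release version to whatever `__version__` string lives in `pbi_parsers/__init__.py`. For reference, the `grep -oP '__version__ = \"\K[^\"]*'` step is equivalent to this small Python snippet (illustrative only, not part of the workflow):

```python
import re
from pathlib import Path

# Pull the version string out of pbi_parsers/__init__.py, mirroring the
# "Get current version from file" step in the workflow above.
text = Path("pbi_parsers/__init__.py").read_text()
match = re.search(r'__version__ = "([^"]+)"', text)
print(match.group(1) if match else "version not found")  # e.g. 0.7.20
```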
@@ -2,4 +2,5 @@ __pycache__
  venv
  dist
  docs/site
- .env
+ .env
+ .coverage
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2025 douglassimonsen
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
@@ -1,12 +1,17 @@
  Metadata-Version: 2.4
  Name: pbi_parsers
- Version: 0.7.12
+ Version: 0.7.20
  Summary: Power BI lexer, parsers, and formatters for DAX and M (Power Query) languages
+ License-File: LICENSE
  Requires-Python: >=3.11.0
+ Requires-Dist: colorama>=0.4.6
  Requires-Dist: jinja2>=3.1.6
  Provides-Extra: dev
  Requires-Dist: build>=1.2.2; extra == 'dev'
+ Requires-Dist: coverage; extra == 'dev'
+ Requires-Dist: coveralls; extra == 'dev'
  Requires-Dist: pre-commit>=3.8.0; extra == 'dev'
+ Requires-Dist: pytest; extra == 'dev'
  Requires-Dist: ruff>=0.12.7; extra == 'dev'
  Provides-Extra: docs
  Requires-Dist: mkdocs-material>=9.6.16; extra == 'docs'
@@ -16,6 +21,13 @@ Description-Content-Type: text/markdown

  # Overview

+ [![PyPI Downloads](https://static.pepy.tech/badge/pbi-parsers)](https://pepy.tech/projects/pbi-parsers)
+ ![Python](https://img.shields.io/badge/python-3.11-blue.svg)
+ [![Coverage Status](https://coveralls.io/repos/github/douglassimonsen/pbi_parsers/badge.svg?branch=main)](https://coveralls.io/github/douglassimonsen/pbi_parsers?branch=main)
+ ![Repo Size](https://img.shields.io/github/repo-size/douglassimonsen/pbi_parsers)
+ [![FOSSA Status](https://app.fossa.com/api/projects/git%2Bgithub.com%2Fdouglassimonsen%2Fpbi_parsers.svg?type=shield&issueType=license)](https://app.fossa.com/projects/git%2Bgithub.com%2Fdouglassimonsen%2Fpbi_parsers?ref=badge_shield&issueType=license)
+ [![FOSSA Status](https://app.fossa.com/api/projects/git%2Bgithub.com%2Fdouglassimonsen%2Fpbi_parsers.svg?type=shield&issueType=security)](https://app.fossa.com/projects/git%2Bgithub.com%2Fdouglassimonsen%2Fpbi_parsers?ref=badge_shield&issueType=security)
+
  Based on [Crafting Interpreters](https://timothya.com/pdfs/crafting-interpreters.pdf). Library provides lexers, parsers, and formatters for DAX and Power Query (M) languages. Designed to support code introspection and analysis, not execution. This enables development of [ruff](https://github.com/astral-sh/ruff)-equivalent tools for DAX and Power Query. It also enables extracting metadata from DAX and Power Query code, such as PQ source types (Excel, SQL, etc.) and DAX lineage dependencies.

  For more information, see the [docs](https://douglassimonsen.github.io/pbi_parsers/)
@@ -1,5 +1,12 @@
  # Overview

+ [![PyPI Downloads](https://static.pepy.tech/badge/pbi-parsers)](https://pepy.tech/projects/pbi-parsers)
+ ![Python](https://img.shields.io/badge/python-3.11-blue.svg)
+ [![Coverage Status](https://coveralls.io/repos/github/douglassimonsen/pbi_parsers/badge.svg?branch=main)](https://coveralls.io/github/douglassimonsen/pbi_parsers?branch=main)
+ ![Repo Size](https://img.shields.io/github/repo-size/douglassimonsen/pbi_parsers)
+ [![FOSSA Status](https://app.fossa.com/api/projects/git%2Bgithub.com%2Fdouglassimonsen%2Fpbi_parsers.svg?type=shield&issueType=license)](https://app.fossa.com/projects/git%2Bgithub.com%2Fdouglassimonsen%2Fpbi_parsers?ref=badge_shield&issueType=license)
+ [![FOSSA Status](https://app.fossa.com/api/projects/git%2Bgithub.com%2Fdouglassimonsen%2Fpbi_parsers.svg?type=shield&issueType=security)](https://app.fossa.com/projects/git%2Bgithub.com%2Fdouglassimonsen%2Fpbi_parsers?ref=badge_shield&issueType=security)
+
  Based on [Crafting Interpreters](https://timothya.com/pdfs/crafting-interpreters.pdf). Library provides lexers, parsers, and formatters for DAX and Power Query (M) languages. Designed to support code introspection and analysis, not execution. This enables development of [ruff](https://github.com/astral-sh/ruff)-equivalent tools for DAX and Power Query. It also enables extracting metadata from DAX and Power Query code, such as PQ source types (Excel, SQL, etc.) and DAX lineage dependencies.

  For more information, see the [docs](https://douglassimonsen.github.io/pbi_parsers/)
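
The README describes lexing and formatting support without a usage snippet; a minimal sketch of what tokenizing DAX looks like, assuming `pbi_parsers.dax` exposes a `Lexer` with the `scan()` interface of the `BaseLexer` shown later in this diff (the class name and constructor signature are assumptions, see the linked docs for the real entry points):

```python
from pbi_parsers import dax

# Hypothetical usage: tokenize a DAX expression and print its tokens.
# Lexer and its constructor are assumed here, not confirmed by this diff.
tokens = dax.Lexer("SUM(Sales[Amount]) + 1").scan()
for token in tokens:
    print(token)  # repr looks like: Token(type=..., text='SUM')
```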
@@ -1,6 +1,8 @@
  # Overview

- Based on [Crafting Interpreters](https://timothya.com/pdfs/crafting-interpreters.pdf). Library provides lexers, parsers, and formatters for DAX and Power Query (M) languages. Designed to support code introspection and analysis, not execution. This enables development of [ruff](https://github.com/astral-sh/ruff)-equivalent tools for DAX and Power Query. It also enables extracting metadata from DAX and Power Query code, such as PQ source types (Excel, SQL, etc.) and DAX lineage dependencies.
+ Based on [Crafting Interpreters](https://timothya.com/pdfs/crafting-interpreters.pdf). Library provides lexers, parsers, and formatters for DAX and Power Query (M) languages. Designed to support code introspection and analysis, not execution. This enables development of [ruff](https://github.com/astral-sh/ruff)-equivalent tools for DAX and Power Query. It also enables extracting metadata from DAX and Power Query code, such as PQ source types (Excel, SQL, etc.) and DAX lineage dependencies.
+
+ This library is used in [pbi_ruff](https://github.com/douglassimonsen/pbi_ruff) to provide DAX and Power Query (M) linting.

  # Installation

@@ -11,7 +13,7 @@ python -m pip install pbi_parsers
  ## Functionality

  !!! info "Rust Implementation"
-     Although the library is primarily implemented in Python, there are plans to implement a Rust version for performance and efficiency.
+     Although the library is primarily implemented in Python, there are plans to implement a Rust version for performance and additional type-safety.

  - DAX
    * [x] Lexer
@@ -1,6 +1,6 @@
  from . import dax, pq

- __version__ = "0.7.12"
+ __version__ = "0.7.20"


  __all__ = [
@@ -17,10 +17,33 @@ class BaseLexer:
          self.current_position = 0
          self.tokens = []

-     def scan_helper(self) -> BaseToken:
-         """Contains the orchestration logic for converting tokens into expressions."""
-         msg = "Subclasses should implement match_tokens method."
-         raise NotImplementedError(msg)
+     def advance(self, chunk: int = 1) -> None:
+         """Advances the current position by the specified chunk size.
+
+         Generally used alongside peek to consume characters.
+
+         Args:
+             chunk (int): The number of characters to advance the current position.
+
+         Raises:
+             ValueError: If the current position exceeds a predefined MAX_POSITION (1,000,000 characters).
+                 This is to avoid errors with the lexer causing the process to hang
+
+         """
+         if self.current_position > MAX_POSITION:
+             msg = f"Current position exceeds {MAX_POSITION:,} characters."
+             raise ValueError(msg)
+         self.current_position += chunk
+
+     def at_end(self) -> bool:
+         """Checks if the current position is at (or beyond) the end of the source.
+
+         Returns:
+             bool: True if the current position is at or beyond the end of the source, False
+                 otherwise.
+
+         """
+         return self.current_position >= len(self.source)

      def match(
          self,
@@ -32,7 +55,6 @@ class BaseLexer:
          """Match a string or a callable matcher against the current position in the source.

          Args:
-         ----
              matcher (Callable[[str], bool] | str): A string to match or a callable that
                  takes a string and returns a boolean.
              chunk (int): The number of characters to check from the current position.
@@ -61,7 +83,7 @@ class BaseLexer:
          return False

      def peek(self, chunk: int = 1) -> str:
-         """Returns the next chunk of text from the current position. Defaults to a single character.
+         """Returns the next section of text from the current position of length `chunk`. Defaults to a single character.

          Args:
              chunk (int): The number of characters to return from the current position.
@@ -87,24 +109,6 @@ class BaseLexer:
          """
          return self.source[self.current_position :]

-     def advance(self, chunk: int = 1) -> None:
-         """Advances the current position by the specified chunk size.
-
-         Generally used alongside peek to consume characters.
-
-         Args:
-             chunk (int): The number of characters to advance the current position.
-
-         Raises:
-             ValueError: If the current position exceeds a predefined MAX_POSITION (1,000,000 characters).
-                 This is to avoid errors with the lexer causing the process to hang
-
-         """
-         if self.current_position > MAX_POSITION:
-             msg = f"Current position exceeds {MAX_POSITION:,} characters."
-             raise ValueError(msg)
-         self.current_position += chunk
-
      def scan(self) -> tuple[BaseToken, ...]:
          """Repeatedly calls scan_helper until the end of the source is reached.
@@ -116,12 +120,7 @@ class BaseLexer:
              self.tokens.append(self.scan_helper())
          return tuple(self.tokens)

-     def at_end(self) -> bool:
-         """Checks if the current position is at (or beyond) the end of the source.
-
-         Returns:
-             bool: True if the current position is at or beyond the end of the source, False
-                 otherwise.
-
-         """
-         return self.current_position >= len(self.source)
+     def scan_helper(self) -> BaseToken:
+         """Contains the orchestration logic for converting tokens into expressions."""
+         msg = "Subclasses should implement match_tokens method."
+         raise NotImplementedError(msg)
@@ -32,15 +32,20 @@ class BaseToken:
      tok_type: Any
      text_slice: TextSlice = field(default_factory=TextSlice)

+     def __eq__(self, other: object) -> bool:
+         """Checks equality based on token type and text slice."""
+         if not isinstance(other, BaseToken):
+             return NotImplemented
+         return self.tok_type == other.tok_type and self.text_slice == other.text_slice
+
+     def __hash__(self) -> int:
+         """Returns a hash based on token type and text slice."""
+         return hash((self.tok_type, self.text_slice))
+
      def __repr__(self) -> str:
          pretty_text = self.text_slice.get_text().replace("\n", "\\n").replace("\r", "\\r")
          return f"Token(type={self.tok_type.name}, text='{pretty_text}')"

-     @property
-     def text(self) -> str:
-         """Returns the text underlying the token."""
-         return self.text_slice.get_text()
-
      def position(self) -> tuple[int, int]:
          """Returns the start and end positions of the token.

@@ -50,12 +55,12 @@
          """
          return self.text_slice.start, self.text_slice.end

-     def __eq__(self, other: object) -> bool:
-         """Checks equality based on token type and text slice."""
-         if not isinstance(other, BaseToken):
-             return NotImplemented
-         return self.tok_type == other.tok_type and self.text_slice == other.text_slice
+     @property
+     def text(self) -> str:
+         """Returns the text underlying the token.

-     def __hash__(self) -> int:
-         """Returns a hash based on token type and text slice."""
-         return hash((self.tok_type, self.text_slice))
+         Returns:
+             str: The text of the token as a string.
+
+         """
+         return self.text_slice.get_text()
@@ -10,8 +10,17 @@ class Expression:
      pre_comments: list[Any] = []
      post_comments: list[Any] = []

-     def pprint(self) -> str:
-         msg = "Subclasses should implement this method."
+     def __repr__(self) -> str:
+         return self.pprint()
+
+     def children(self) -> list["Expression"]:
+         """Returns a list of child expressions."""
+         msg = "This method should be implemented by subclasses."
+         raise NotImplementedError(msg)
+
+     def full_text(self) -> str:
+         """Returns the full text of the expression."""
+         msg = "This method should be implemented by subclasses."
          raise NotImplementedError(msg)

      @classmethod
@@ -27,20 +36,11 @@
      def match_tokens(parser: "Parser", match_tokens: list[TokenType]) -> bool:
          return all(parser.peek(i).tok_type == token_type for i, token_type in enumerate(match_tokens))

-     def __repr__(self) -> str:
-         return self.pprint()
-
-     def children(self) -> list["Expression"]:
-         """Returns a list of child expressions."""
-         msg = "This method should be implemented by subclasses."
-         raise NotImplementedError(msg)
-
      def position(self) -> tuple[int, int]:
          """Returns the start and end positions of the expression in the source code."""
          msg = "This method should be implemented by subclasses."
          raise NotImplementedError(msg)

-     def full_text(self) -> str:
-         """Returns the full text of the expression."""
-         msg = "This method should be implemented by subclasses."
+     def pprint(self) -> str:
+         msg = "Subclasses should implement this method."
          raise NotImplementedError(msg)
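
Because every concrete expression implements `children()`, generic tree utilities can be written purely against the `Expression` interface shown above. A sketch of a pre-order walk (the `walk` helper is illustrative, not part of the package):

```python
from collections.abc import Iterator

def walk(expr) -> Iterator:
    """Pre-order traversal of an Expression tree using the children() API."""
    yield expr
    for child in expr.children():
        yield from walk(child)

# Example: collect the source span of every sub-expression of a parsed tree.
# spans = [node.position() for node in walk(root_expression)]
```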
@@ -10,6 +10,17 @@ R = TypeVar("R") # Represents the return type of the decorated function


  def lexer_reset(func: Callable[P, R]) -> Callable[P, R]:
+     """Decorator to reset the lexer state before and after parsing an expression.
+
+     This decorator performs the following actions:
+     1. Collects pre-comments before parsing.
+     2. Caches the result of the parsing function to avoid redundant parsing.
+     3. Collects post-comments after parsing.
+
+     The caching is required since the operator precedence otherwise leads to all other expressions being
+     called multiple times.
+     """
+
      def lexer_reset_inner(*args: P.args, **kwargs: P.kwargs) -> R:
          parser = args[1]
          if not isinstance(parser, Parser):
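
The caching this docstring refers to amounts to memoising each `match()` call on the position the parser is currently at, so lower-precedence rules are not re-run every time a higher-precedence rule backtracks. A stripped-down illustration of that idea (not the library's actual code; the cache key and the `parser.index` attribute are assumptions):

```python
from functools import wraps

def cached_match(func):
    """Memoise a classmethod-style match(cls, parser) on (class, token position)."""
    cache: dict = {}

    @wraps(func)
    def inner(cls, parser):
        key = (cls, parser.index)  # parser.index stands in for the token cursor
        if key not in cache:
            cache[key] = func(cls, parser)
        return cache[key]

    return inner
```

A real parser would also clear the cache between inputs and restore the token cursor alongside the stored result; the point here is only that repeated calls at the same position reuse the earlier answer instead of re-parsing.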
@@ -28,6 +28,13 @@ class AddSubExpression(Expression):
          self.left = left
          self.right = right

+     def children(self) -> list[Expression]:
+         """Returns a list of child expressions."""
+         return [self.left, self.right]
+
+     def full_text(self) -> str:
+         return self.operator.text_slice.full_text
+
      @classmethod
      @lexer_reset
      def match(cls, parser: "Parser") -> "AddSubExpression | None":
@@ -52,6 +59,9 @@ class AddSubExpression(Expression):
              raise ValueError(msg)
          return AddSubExpression(operator=operator, left=left_term, right=right_term)

+     def position(self) -> tuple[int, int]:
+         return self.left.position()[0], self.right.position()[1]
+
      def pprint(self) -> str:
          op_str = "Add" if self.operator.text == "+" else "Sub"
          left_str = textwrap.indent(self.left.pprint(), " " * 10).lstrip()
@@ -61,13 +71,3 @@ class AddSubExpression(Expression):
          left: {left_str},
          right: {right_str}
          )""".strip()
-
-     def children(self) -> list[Expression]:
-         """Returns a list of child expressions."""
-         return [self.left, self.right]
-
-     def position(self) -> tuple[int, int]:
-         return self.left.position()[0], self.right.position()[1]
-
-     def full_text(self) -> str:
-         return self.operator.text_slice.full_text
@@ -25,12 +25,12 @@ class ColumnExpression(Expression):
          self.table = table
          self.column = column

-     def pprint(self) -> str:
-         return f"""
- Column (
-     {self.table.text},
-     {self.column.text}
- )""".strip()
+     def children(self) -> list[Expression]:  # noqa: PLR6301
+         """Returns a list of child expressions."""
+         return []
+
+     def full_text(self) -> str:
+         return self.table.text_slice.full_text

      @classmethod
      @lexer_reset
@@ -45,12 +45,12 @@ Column (
              return None
          return ColumnExpression(table=table, column=column)

-     def children(self) -> list[Expression]:  # noqa: PLR6301
-         """Returns a list of child expressions."""
-         return []
-
      def position(self) -> tuple[int, int]:
          return self.table.text_slice.start, self.column.text_slice.end

-     def full_text(self) -> str:
-         return self.table.text_slice.full_text
+     def pprint(self) -> str:
+         return f"""
+ Column (
+     {self.table.text},
+     {self.column.text}
+ )""".strip()
@@ -30,6 +30,13 @@ class DivMulExpression(Expression):
          self.left = left
          self.right = right

+     def children(self) -> list[Expression]:
+         """Returns a list of child expressions."""
+         return [self.left, self.right]
+
+     def full_text(self) -> str:
+         return self.operator.text_slice.full_text
+
      @classmethod
      @lexer_reset
      def match(cls, parser: "Parser") -> "DivMulExpression | None":
@@ -51,6 +58,9 @@ class DivMulExpression(Expression):
              raise ValueError(msg)
          return DivMulExpression(operator=operator, left=left_term, right=right_term)

+     def position(self) -> tuple[int, int]:
+         return self.left.position()[0], self.right.position()[1]
+
      def pprint(self) -> str:
          op_str = {
              TokenType.MULTIPLY_SIGN: "Mul",
@@ -63,13 +73,3 @@ class DivMulExpression(Expression):
          left: {left_str},
          right: {right_str}
          )""".strip()
-
-     def children(self) -> list[Expression]:
-         """Returns a list of child expressions."""
-         return [self.left, self.right]
-
-     def position(self) -> tuple[int, int]:
-         return self.left.position()[0], self.right.position()[1]
-
-     def full_text(self) -> str:
-         return self.operator.text_slice.full_text