semantic-link-labs 0.4.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic. Click here for more details.

Files changed (78) hide show
  1. semantic_link_labs-0.4.1/.github/workflows/build.yaml +72 -0
  2. semantic_link_labs-0.4.1/.gitignore +160 -0
  3. semantic_link_labs-0.4.1/.readthedocs.yaml +33 -0
  4. semantic_link_labs-0.4.1/.vscode/settings.json +7 -0
  5. semantic_link_labs-0.4.1/CODE_OF_CONDUCT.md +9 -0
  6. semantic_link_labs-0.4.1/LICENSE +21 -0
  7. semantic_link_labs-0.4.1/PKG-INFO +21 -0
  8. semantic_link_labs-0.4.1/README.md +289 -0
  9. semantic_link_labs-0.4.1/SECURITY.md +41 -0
  10. semantic_link_labs-0.4.1/SUPPORT.md +25 -0
  11. semantic_link_labs-0.4.1/docs/Makefile +20 -0
  12. semantic_link_labs-0.4.1/docs/make.bat +35 -0
  13. semantic_link_labs-0.4.1/docs/requirements.txt +10 -0
  14. semantic_link_labs-0.4.1/docs/source/conf.py +46 -0
  15. semantic_link_labs-0.4.1/docs/source/index.rst +20 -0
  16. semantic_link_labs-0.4.1/docs/source/modules.rst +7 -0
  17. semantic_link_labs-0.4.1/environment.yml +12 -0
  18. semantic_link_labs-0.4.1/notebooks/Migration to Direct Lake.ipynb +1 -0
  19. semantic_link_labs-0.4.1/notebooks/Model Optimization.ipynb +1 -0
  20. semantic_link_labs-0.4.1/notebooks/Query Scale Out.ipynb +1 -0
  21. semantic_link_labs-0.4.1/notebooks/Tabular Object Model.ipynb +1 -0
  22. semantic_link_labs-0.4.1/pyproject.toml +40 -0
  23. semantic_link_labs-0.4.1/setup.cfg +4 -0
  24. semantic_link_labs-0.4.1/src/semantic_link_labs.egg-info/PKG-INFO +21 -0
  25. semantic_link_labs-0.4.1/src/semantic_link_labs.egg-info/SOURCES.txt +76 -0
  26. semantic_link_labs-0.4.1/src/semantic_link_labs.egg-info/dependency_links.txt +1 -0
  27. semantic_link_labs-0.4.1/src/semantic_link_labs.egg-info/requires.txt +6 -0
  28. semantic_link_labs-0.4.1/src/semantic_link_labs.egg-info/top_level.txt +1 -0
  29. semantic_link_labs-0.4.1/src/sempy_labs/__init__.py +154 -0
  30. semantic_link_labs-0.4.1/src/sempy_labs/_ai.py +496 -0
  31. semantic_link_labs-0.4.1/src/sempy_labs/_clear_cache.py +39 -0
  32. semantic_link_labs-0.4.1/src/sempy_labs/_connections.py +234 -0
  33. semantic_link_labs-0.4.1/src/sempy_labs/_dax.py +70 -0
  34. semantic_link_labs-0.4.1/src/sempy_labs/_generate_semantic_model.py +280 -0
  35. semantic_link_labs-0.4.1/src/sempy_labs/_helper_functions.py +506 -0
  36. semantic_link_labs-0.4.1/src/sempy_labs/_icons.py +4 -0
  37. semantic_link_labs-0.4.1/src/sempy_labs/_list_functions.py +1372 -0
  38. semantic_link_labs-0.4.1/src/sempy_labs/_model_auto_build.py +143 -0
  39. semantic_link_labs-0.4.1/src/sempy_labs/_model_bpa.py +1354 -0
  40. semantic_link_labs-0.4.1/src/sempy_labs/_model_dependencies.py +341 -0
  41. semantic_link_labs-0.4.1/src/sempy_labs/_one_lake_integration.py +155 -0
  42. semantic_link_labs-0.4.1/src/sempy_labs/_query_scale_out.py +447 -0
  43. semantic_link_labs-0.4.1/src/sempy_labs/_refresh_semantic_model.py +184 -0
  44. semantic_link_labs-0.4.1/src/sempy_labs/_tom.py +3766 -0
  45. semantic_link_labs-0.4.1/src/sempy_labs/_translations.py +378 -0
  46. semantic_link_labs-0.4.1/src/sempy_labs/_vertipaq.py +893 -0
  47. semantic_link_labs-0.4.1/src/sempy_labs/directlake/__init__.py +45 -0
  48. semantic_link_labs-0.4.1/src/sempy_labs/directlake/_directlake_schema_compare.py +110 -0
  49. semantic_link_labs-0.4.1/src/sempy_labs/directlake/_directlake_schema_sync.py +128 -0
  50. semantic_link_labs-0.4.1/src/sempy_labs/directlake/_fallback.py +62 -0
  51. semantic_link_labs-0.4.1/src/sempy_labs/directlake/_get_directlake_lakehouse.py +69 -0
  52. semantic_link_labs-0.4.1/src/sempy_labs/directlake/_get_shared_expression.py +59 -0
  53. semantic_link_labs-0.4.1/src/sempy_labs/directlake/_guardrails.py +84 -0
  54. semantic_link_labs-0.4.1/src/sempy_labs/directlake/_list_directlake_model_calc_tables.py +54 -0
  55. semantic_link_labs-0.4.1/src/sempy_labs/directlake/_show_unsupported_directlake_objects.py +89 -0
  56. semantic_link_labs-0.4.1/src/sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +81 -0
  57. semantic_link_labs-0.4.1/src/sempy_labs/directlake/_update_directlake_partition_entity.py +64 -0
  58. semantic_link_labs-0.4.1/src/sempy_labs/directlake/_warm_cache.py +210 -0
  59. semantic_link_labs-0.4.1/src/sempy_labs/lakehouse/__init__.py +24 -0
  60. semantic_link_labs-0.4.1/src/sempy_labs/lakehouse/_get_lakehouse_columns.py +81 -0
  61. semantic_link_labs-0.4.1/src/sempy_labs/lakehouse/_get_lakehouse_tables.py +250 -0
  62. semantic_link_labs-0.4.1/src/sempy_labs/lakehouse/_lakehouse.py +85 -0
  63. semantic_link_labs-0.4.1/src/sempy_labs/lakehouse/_shortcuts.py +296 -0
  64. semantic_link_labs-0.4.1/src/sempy_labs/migration/__init__.py +29 -0
  65. semantic_link_labs-0.4.1/src/sempy_labs/migration/_create_pqt_file.py +239 -0
  66. semantic_link_labs-0.4.1/src/sempy_labs/migration/_migrate_calctables_to_lakehouse.py +429 -0
  67. semantic_link_labs-0.4.1/src/sempy_labs/migration/_migrate_calctables_to_semantic_model.py +150 -0
  68. semantic_link_labs-0.4.1/src/sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +524 -0
  69. semantic_link_labs-0.4.1/src/sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +165 -0
  70. semantic_link_labs-0.4.1/src/sempy_labs/migration/_migration_validation.py +227 -0
  71. semantic_link_labs-0.4.1/src/sempy_labs/migration/_refresh_calc_tables.py +129 -0
  72. semantic_link_labs-0.4.1/src/sempy_labs/report/__init__.py +35 -0
  73. semantic_link_labs-0.4.1/src/sempy_labs/report/_generate_report.py +253 -0
  74. semantic_link_labs-0.4.1/src/sempy_labs/report/_report_functions.py +855 -0
  75. semantic_link_labs-0.4.1/src/sempy_labs/report/_report_rebind.py +131 -0
  76. semantic_link_labs-0.4.1/tests/__init__.py +0 -0
  77. semantic_link_labs-0.4.1/tests/test_shortcuts.py +56 -0
  78. semantic_link_labs-0.4.1/tests/test_tom.py +31 -0
@@ -0,0 +1,72 @@
1
+ name: Semantic Link Labs
2
+
3
+ on: [push]
4
+
5
+ permissions:
6
+ contents: write # This is required for actions/checkout@v3
7
+ security-events: write # To upload sarif files
8
+
9
+ jobs:
10
+ build:
11
+ runs-on: ubuntu-latest
12
+
13
+ steps:
14
+ - uses: actions/checkout@v3
15
+ - name: Set up Python
16
+ uses: actions/setup-python@v4
17
+ with:
18
+ python-version: "3.10"
19
+
20
+ - name: Initialize CodeQL
21
+ uses: github/codeql-action/init@v3
22
+ with:
23
+ languages: python
24
+
25
+ - name: Perform CodeQL Analysis
26
+ uses: github/codeql-action/analyze@v3
27
+
28
+ - name: Get Date
29
+ id: get-date
30
+ run: echo "today=$(/bin/date -u '+%Y%m%d')" >> $GITHUB_OUTPUT
31
+ shell: bash
32
+
33
+ - name: Cache conda
34
+ uses: actions/cache@v2
35
+ env:
36
+ # Increase this value to reset cache if environment.yml has not changed
37
+ CACHE_NUMBER: 0
38
+ with:
39
+ path: ~/conda_pkgs_dir
40
+ key:
41
+ ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-${{ steps.get-date.outputs.today }}-${{ hashFiles('environment.yml') }}
42
+
43
+ - uses: conda-incubator/setup-miniconda@v2
44
+ with:
45
+ auto-update-conda: true
46
+ activate-environment: fabric
47
+ python-version: "3.10"
48
+ environment-file: environment.yml
49
+ channel-priority: strict
50
+
51
+ - name: Install package
52
+ shell: bash -el {0}
53
+ run: |
54
+ conda info
55
+ pip install -e .
56
+
57
+ # - name: Lint with flake8
58
+ # shell: bash -el {0}
59
+ # run: |
60
+ # flake8 sempy_labs tests --count --show-source --statistics
61
+ # continue-on-error: false
62
+
63
+ # - name: Lint with mypy
64
+ # shell: bash -el {0}
65
+ # run: |
66
+ # mypy sempy_labs tests
67
+ # continue-on-error: false
68
+
69
+ - name: Test with pytest
70
+ shell: bash -el {0}
71
+ run: |
72
+ pytest -s tests/
@@ -0,0 +1,160 @@
1
+ # Byte-compiled / optimized / DLL files
2
+ __pycache__/
3
+ *.py[cod]
4
+ *$py.class
5
+
6
+ # C extensions
7
+ *.so
8
+
9
+ # Distribution / packaging
10
+ .Python
11
+ build/
12
+ develop-eggs/
13
+ dist/
14
+ downloads/
15
+ eggs/
16
+ .eggs/
17
+ lib/
18
+ lib64/
19
+ parts/
20
+ sdist/
21
+ var/
22
+ wheels/
23
+ share/python-wheels/
24
+ *.egg-info/
25
+ .installed.cfg
26
+ *.egg
27
+ MANIFEST
28
+
29
+ # PyInstaller
30
+ # Usually these files are written by a python script from a template
31
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
32
+ *.manifest
33
+ *.spec
34
+
35
+ # Installer logs
36
+ pip-log.txt
37
+ pip-delete-this-directory.txt
38
+
39
+ # Unit test / coverage reports
40
+ htmlcov/
41
+ .tox/
42
+ .nox/
43
+ .coverage
44
+ .coverage.*
45
+ .cache
46
+ nosetests.xml
47
+ coverage.xml
48
+ *.cover
49
+ *.py,cover
50
+ .hypothesis/
51
+ .pytest_cache/
52
+ cover/
53
+
54
+ # Translations
55
+ *.mo
56
+ *.pot
57
+
58
+ # Django stuff:
59
+ *.log
60
+ local_settings.py
61
+ db.sqlite3
62
+ db.sqlite3-journal
63
+
64
+ # Flask stuff:
65
+ instance/
66
+ .webassets-cache
67
+
68
+ # Scrapy stuff:
69
+ .scrapy
70
+
71
+ # Sphinx documentation
72
+ docs/_build/
73
+
74
+ # PyBuilder
75
+ .pybuilder/
76
+ target/
77
+
78
+ # Jupyter Notebook
79
+ .ipynb_checkpoints
80
+
81
+ # IPython
82
+ profile_default/
83
+ ipython_config.py
84
+
85
+ # pyenv
86
+ # For a library or package, you might want to ignore these files since the code is
87
+ # intended to run in multiple environments; otherwise, check them in:
88
+ # .python-version
89
+
90
+ # pipenv
91
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
93
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
94
+ # install all needed dependencies.
95
+ #Pipfile.lock
96
+
97
+ # poetry
98
+ # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
99
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
100
+ # commonly ignored for libraries.
101
+ # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
102
+ #poetry.lock
103
+
104
+ # pdm
105
+ # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
106
+ #pdm.lock
107
+ # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
108
+ # in version control.
109
+ # https://pdm.fming.dev/#use-with-ide
110
+ .pdm.toml
111
+
112
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
113
+ __pypackages__/
114
+
115
+ # Celery stuff
116
+ celerybeat-schedule
117
+ celerybeat.pid
118
+
119
+ # SageMath parsed files
120
+ *.sage.py
121
+
122
+ # Environments
123
+ .env
124
+ .venv
125
+ env/
126
+ venv/
127
+ ENV/
128
+ env.bak/
129
+ venv.bak/
130
+
131
+ # Spyder project settings
132
+ .spyderproject
133
+ .spyproject
134
+
135
+ # Rope project settings
136
+ .ropeproject
137
+
138
+ # mkdocs documentation
139
+ /site
140
+
141
+ # mypy
142
+ .mypy_cache/
143
+ .dmypy.json
144
+ dmypy.json
145
+
146
+ # Pyre type checker
147
+ .pyre/
148
+
149
+ # pytype static type analyzer
150
+ .pytype/
151
+
152
+ # Cython debug symbols
153
+ cython_debug/
154
+
155
+ # PyCharm
156
+ # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
157
+ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
158
+ # and can be added to the global gitignore or merged into this file. For a more nuclear
159
+ # option (not recommended) you can uncomment the following to ignore the entire idea folder.
160
+ #.idea/
@@ -0,0 +1,33 @@
1
+ # .readthedocs.yaml
2
+ # Read the Docs configuration file
3
+ # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
4
+
5
+ # Required
6
+ version: 2
7
+
8
+ # Set the OS, Python version and other tools you might need
9
+ build:
10
+ os: ubuntu-22.04
11
+ tools:
12
+ python: "3.12"
13
+ # You can also specify other tool versions:
14
+ # nodejs: "19"
15
+ # rust: "1.64"
16
+ # golang: "1.19"
17
+ jobs:
18
+ pre_build:
19
+ - sphinx-apidoc -f -o docs/source src/sempy_labs/
20
+
21
+ # Build documentation in the "docs/" directory with Sphinx
22
+ sphinx:
23
+ configuration: docs/source/conf.py
24
+
25
+ # Optionally build your docs in additional formats such as PDF and ePub
26
+ # formats:
27
+ # - pdf
28
+ # - epub
29
+
30
+ # See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
31
+ python:
32
+ install:
33
+ - requirements: docs/requirements.txt
@@ -0,0 +1,7 @@
1
+ {
2
+ "python.testing.pytestArgs": [
3
+ "tests"
4
+ ],
5
+ "python.testing.unittestEnabled": false,
6
+ "python.testing.pytestEnabled": true
7
+ }
@@ -0,0 +1,9 @@
1
+ # Microsoft Open Source Code of Conduct
2
+
3
+ This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
4
+
5
+ Resources:
6
+
7
+ - [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/)
8
+ - [Microsoft Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/)
9
+ - Contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with questions or concerns
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) Microsoft Corporation.
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
@@ -0,0 +1,21 @@
1
+ Metadata-Version: 2.1
2
+ Name: semantic-link-labs
3
+ Version: 0.4.1
4
+ Summary: Semantic Link Labs project
5
+ Author: Microsoft Corporation
6
+ License: MIT License
7
+ Project-URL: Repository, https://github.com/microsoft/semantic-link-labs.git
8
+ Classifier: Development Status :: 3 - Alpha
9
+ Classifier: Intended Audience :: Developers
10
+ Classifier: Intended Audience :: Education
11
+ Classifier: Intended Audience :: Science/Research
12
+ Classifier: Programming Language :: Python :: 3.10
13
+ Classifier: Programming Language :: Python :: 3 :: Only
14
+ Classifier: Framework :: Jupyter
15
+ Requires-Python: <3.12,>=3.10
16
+ License-File: LICENSE
17
+ Requires-Dist: semantic-link-sempy>=0.7.5
18
+ Requires-Dist: anytree
19
+ Requires-Dist: powerbiclient
20
+ Provides-Extra: test
21
+ Requires-Dist: pytest>=8.2.1; extra == "test"
@@ -0,0 +1,289 @@
1
+ # semantic-link-labs
2
+
3
+ [![PyPI version](https://badge.fury.io/py/semantic-link-labs.svg)](https://badge.fury.io/py/semantic-link-labs)
4
+ [![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.4.1&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
5
+ [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
6
+ [![Downloads](https://static.pepy.tech/badge/semantic-link-labs)](https://pepy.tech/project/semantic-link-labs)
7
+
8
+
9
+ This is a python library intended to be used in [Microsoft Fabric notebooks](https://learn.microsoft.com/fabric/data-engineering/how-to-use-notebook). This library was originally intended to contain functions used for [migrating semantic models to Direct Lake mode](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration-1). However, it quickly became apparent that functions within such a library could support many other useful activities in the realm of semantic models, reports, lakehouses and really anything Fabric-related. As such, this library contains a variety of functions ranging from running [Vertipaq Analyzer](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#vertipaq_analyzer) or the [Best Practice Analyzer](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#run_model_bpa) against a semantic model to seeing if any [lakehouse tables hit Direct Lake guardrails](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#get_lakehouse_tables) or accessing the [Tabular Object Model](https://github.com/microsoft/semantic-link-labs/#tabular-object-model-tom) and more!
10
+
11
+ Instructions for migrating import/DirectQuery semantic models to Direct Lake mode can be found [here](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration-1).
12
+
13
+ If you encounter any issues, please [raise a bug](https://github.com/microsoft/semantic-link-labs/issues/new?assignees=&labels=&projects=&template=bug_report.md&title=).
14
+
15
+ If you have ideas for new features/functions, please [request a feature](https://github.com/microsoft/semantic-link-labs/issues/new?assignees=&labels=&projects=&template=feature_request.md&title=).
16
+
17
+ ## Install the .whl file in a Fabric notebook
18
+ ```python
19
+ %pip install "https://raw.githubusercontent.com/microsoft/semantic-link-labs/main/semantic-link-labs-0.4.1-py3-none-any.whl"
20
+ ```
21
+
22
+ ## Once installed, run this code to import the library into your notebook
23
+ ```python
24
+ import sempy_labs as labs
25
+ from sempy_labs.tom import connect_semantic_model
26
+ ```
27
+
28
+ ## Load semantic-link-labs into a custom [Fabric environment](https://learn.microsoft.com/fabric/data-engineering/create-and-use-environment)
29
+ An even better way to ensure the semantic-link-labs library is available in your workspace/notebooks is to load it as a library in a custom Fabric environment. If you do this, you will not have to run the above '%pip install' code every time in your notebook. Please follow the steps below.
30
+
31
+ #### Create a custom environment
32
+ 1. Navigate to your Fabric workspace
33
+ 2. Click 'New' -> More options
34
+ 3. Within 'Data Science', click 'Environment'
35
+ 4. Name your environment, click 'Create'
36
+
37
+ #### Add semantic-link-labs as a library to the environment
38
+ 1. Download the [latest](https://github.com/microsoft/semantic-link-labs/raw/main/semantic-link-labs-0.4.1-py3-none-any.whl) semantic-link-labs library
39
+ 2. Within 'Custom Libraries', click 'upload'
40
+ 3. Upload the .whl file which was downloaded in step 1
41
+ 4. Click 'Save' at the top right of the screen
42
+ 5. Click 'Publish' at the top right of the screen
43
+ 6. Click 'Publish All'
44
+
45
+ #### Update your notebook to use the new environment (*must wait for the environment to finish publishing*)
46
+ 1. Navigate to your Notebook
47
+ 2. Select your newly created environment within the 'Environment' drop down in the navigation bar at the top of the notebook
48
+
49
+ # Function Categories
50
+
51
+ ### Semantic Model
52
+ * [clear_cache](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#clear_cache)
53
+ * [create_semantic_model_from_bim](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#create_semantic_model_from_bim)
54
+ * [get_semantic_model_bim](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#get_semantic_model_bim)
55
+ * [get_measure_dependencies](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#get_measure_dependencies)
56
+ * [get_model_calc_dependencies](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#get_model_calc_dependencies)
57
+ * [measure_dependency_tree](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#measure_dependency_tree)
58
+ * [refresh_semantic_model](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#refresh_semantic_model)
59
+ * [cancel_dataset_refresh](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#cancel_dataset_refresh)
60
+ * [run_dax](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#run_dax)
61
+ * [get_object_level_security](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#get_object_level_security)
62
+ * [translate_semantic_model](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#translate_semantic_model)
63
+ * [list_semantic_model_objects](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#list_semantic_model_objects)
64
+
65
+ ### Report
66
+ * [report_rebind](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#report_rebind)
67
+ * [report_rebind_all](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#report_rebind_all)
68
+ * [create_report_from_reportjson](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#create_report_from_reportjson)
69
+ * [get_report_json](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#get_report_json)
70
+ * [export_report](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#export_report)
71
+ * [clone_report](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#clone_report)
72
+ * [list_dashboards](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#list_dashboards)
73
+ * [launch_report](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#launch_report)
74
+ * [generate_embedded_filter](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#generate_embedded_filter)
75
+
76
+ ### Model Optimization
77
+ * [vertipaq_analyzer](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#vertipaq_analyzer)
78
+ * [import_vertipaq_analyzer](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#import_vertipaq_analyzer)
79
+ * [run_model_bpa](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#run_model_bpa)
80
+ * [model_bpa_rules](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#model_bpa_rules)
81
+
82
+ ### Direct Lake Migration
83
+ * [create_pqt_file](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#create_pqt_file)
84
+ * [create_blank_semantic_model](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#create_blank_semantic_model)
85
+ * [migrate_field_parameters](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#migrate_field_parameters)
86
+ * [migrate_tables_columns_to_semantic_model](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#migrate_tables_columns_to_semantic_model)
87
+ * [migrate_calc_tables_to_semantic_model](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#migrate_calc_tables_to_semantic_model)
88
+ * [migrate_model_objects_to_semantic_model](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#migrate_model_objects_to_semantic_model)
89
+ * [migrate_calc_tables_to_lakehouse](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#migrate_calc_tables_to_lakehouse)
90
+ * [refresh_calc_tables](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#refresh_calc_tables)
91
+ * [show_unsupported_direct_lake_objects](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#show_unsupported_direct_lake_objects)
92
+ * [update_direct_lake_partition_entity](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#update_direct_lake_partition_entity)
93
+ * [update_direct_lake_model_lakehouse_connection](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#update_direct_lake_model_lakehouse_connection)
94
+ * [migration_validation](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#migration_validation)
95
+
96
+ ### Direct Lake
97
+ * [check_fallback_reason](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#check_fallback_reason)
98
+ * [control_fallback](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#control_fallback)
99
+ * [direct_lake_schema_compare](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct_lake_schema_compare)
100
+ * [direct_lake_schema_sync](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct_lake_schema_sync)
101
+ * [get_direct_lake_lakehouse](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#get_direct_lake_lakehouse)
102
+ * [get_directlake_guardrails_for_sku](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#get_directlake_guardrails_for_sku)
103
+ * [get_direct_lake_guardrails](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#get_direct_lake_guardrails)
104
+ * [get_shared_expression](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#get_shared_expression)
105
+ * [get_direct_lake_sql_endpoint](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#get_direct_lake_sql_endpoint)
106
+ * [get_sku_size](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#get_sku_size)
107
+ * [list_direct_lake_model_calc_tables](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#list_direct_lake_model_calc_tables)
108
+ * [warm_direct_lake_cache_perspective](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#warm_direct_lake_cache_perspective)
109
+ * [warm_direct_lake_cache_isresident](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#warm_direct_lake_cache_isresident)
110
+
111
+ ### Lakehouse
112
+ * [get_lakehouse_tables](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#get_lakehouse_tables)
113
+ * [get_lakehouse_columns](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#get_lakehouse_columns)
114
+ * [list_lakehouses](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#list_lakehouses)
115
+ * [export_model_to_onelake](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#export_model_to_onelake)
116
+ * [create_shortcut_onelake](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#create_shortcut_onelake)
117
+ * [delete_shortcut](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#delete_shortcut)
118
+ * [list_shortcuts](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#list_shortcuts)
119
+ * [optimize_lakehouse_tables](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#optimize_lakehouse_tables)
120
+ * [create_warehouse](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#create_warehouse)
121
+ * [update_item](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#update_item)
122
+ * [list_dataflow_storage_accounts](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#list_dataflow_storage_accounts)
123
+ * [list_warehouses](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#list_warehouses)
124
+ * [save_as_delta_table](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#save_as_delta_table)
125
+
126
+ ### Helper Functions
127
+ * [resolve_dataset_id](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#resolve_dataset_id)
128
+ * [resolve_dataset_name](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#resolve_dataset_name)
129
+ * [resolve_lakehouse_id](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#resolve_lakehouse_id)
130
+ * [resolve_lakehouse_name](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#resolve_lakehouse_name)
131
+ * [resolve_report_id](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#resolve_report_id)
132
+ * [resolve_report_name](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#resolve_report_name)
133
+
134
+ ### [Tabular Object Model](https://learn.microsoft.com/analysis-services/tom/introduction-to-the-tabular-object-model-tom-in-analysis-services-amo?view=asallproducts-allversions) ([TOM](https://learn.microsoft.com/dotnet/api/microsoft.analysisservices.tabular.model?view=analysisservices-dotnet))
135
+ #### 'All' functions for non-parent objects within TOM
136
+ * [all_columns](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#all_columns)
137
+ * [all_measures](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#all_measures)
138
+ * [all_partitions](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#all_partitions)
139
+ * [all_hierarchies](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#all_hierarchies)
140
+ * [all_levels](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#all_levels)
141
+ * [all_calculation_items](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#all_calculation_items)
142
+ * [all_rls](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#all_rls)
143
+
144
+ #### 'Add' functions
145
+ * [add_calculated_column](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#add_calculated_column)
146
+ * [add_calculated_table](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#add_calculated_table)
147
+ * [add_calculated_table_column](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#add_calculated_table_column)
148
+ * [add_calculation_group](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#add_calculation_group)
149
+ * [add_calculation_item](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#add_calculation_item)
150
+ * [add_data_column](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#add_data_column)
151
+ * [add_entity_partition](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#add_entity_partition)
152
+ * [add_expression](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#add_expression)
153
+ * [add_field_parameter](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#add_field_parameter)
154
+ * [add_hierarchy](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#add_hierarchy)
155
+ * [add_m_partition](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#add_m_partition)
156
+ * [add_measure](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#add_measure)
157
+ * [add_perspective](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#add_perspective)
158
+ * [add_relationship](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#add_relationship)
159
+ * [add_role](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#add_role)
160
+ * [add_table](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#add_table)
161
+ * [add_translation](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#add_translation)
162
+
163
+ #### 'Set' functions
164
+ * [set_direct_lake_behavior](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#set_direct_lake_behavior)
165
+ * [set_is_available_in_mdx](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#set_is_available_in_mdx)
166
+ * [set_ols](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#set_ols)
167
+ * [set_rls](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#set_rls)
168
+ * [set_summarize_by](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#set_summarize_by)
169
+ * [set_translation](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#set_translation)
170
+
171
+ #### 'Remove' functions
172
+ * [remove_object](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#remove_object)
173
+ * [remove_translation](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#remove_translation)
174
+
175
+ #### 'Used-in' and dependency functions
176
+ * [used_in_relationships](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#used_in_relationships)
177
+ * [used_in_hierarchies](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#used_in_hierarchies)
178
+ * [used_in_levels](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#used_in_levels)
179
+ * [used_in_sort_by](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#used_in_sort_by)
180
+ * [used_in_rls](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#used_in_rls)
181
+ * [used_in_calc_item](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#used_in_calc_item)
182
+ * [depends_on](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#depends_on)
183
+ * [referenced_by](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#referenced_by)
184
+ * [fully_qualified_measures](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#fully_qualified_measures)
185
+ * [unqualified_columns](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#unqualified_columns)
186
+
187
+ #### Vertipaq Analyzer data functions
188
+ * [remove_vertipaq_annotations](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#remove_vertipaq_annotations)
189
+ * [set_vertipaq_annotations](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#set_vertipaq_annotations)
190
+ * [row_count](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#row_count)
191
+ * [used_size](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#used_size)
192
+ * [data_size](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#data_size)
193
+ * [dictionary_size](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#dictionary_size)
194
+ * [total_size](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#total_size)
195
+ * [cardinality](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#cardinality)
196
+
197
+ #### Perspectives
198
+ * [in_perspective](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#in_perspective)
199
+ * [add_to_perspective](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#add_to_perspective)
200
+ * [remove_from_perspective](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#remove_from_perspective)
201
+
202
+ #### Annotations
203
+ * [get_annotations](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#get_annotations)
204
+ * [set_annotation](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#set_annotation)
205
+ * [get_annotation_value](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#get_annotation_value)
206
+ * [remove_annotation](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#remove_annotation)
207
+ * [clear_annotations](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#clear_annotations)
208
+
209
+ #### Extended Properties
210
+ * [get_extended_properties](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#get_extended_properties)
211
+ * [set_extended_property](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#set_extended_property)
212
+ * [get_extended_property_value](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#get_extended_property_value)
213
+ * [remove_extended_property](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#remove_extended_property)
214
+ * [clear_extended_properties](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#clear_extended_properties)
215
+
216
+ #### Misc
217
+ * [is_direct_lake](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#is_direct_lake)
218
+ * [is_field_parameter](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#is_field_parameter)
219
+
220
+
221
+ ---
222
+ ## Direct Lake migration
223
+
224
+ The following process automates the migration of an import/DirectQuery model to a new [Direct Lake](https://learn.microsoft.com/power-bi/enterprise/directlake-overview) model. The first step is specifically applicable to models which use Power Query to perform data transformations. If your model does not use Power Query, you must migrate the base tables used in your semantic model to a Fabric lakehouse.
225
+
226
+ Check out [Nikola Ilic](https://twitter.com/DataMozart)'s terrific [blog post](https://data-mozart.com/migrate-existing-power-bi-semantic-models-to-direct-lake-a-step-by-step-guide/) on this topic!
227
+
228
+ Check out my [blog post](https://www.elegantbi.com/post/direct-lake-migration) on this topic!
229
+
230
+ ### Prerequisites
231
+
232
+ * Make sure you [enable XMLA Read/Write](https://learn.microsoft.com/power-bi/enterprise/service-premium-connect-tools#enable-xmla-read-write) for your capacity
233
+ * Make sure you have a [lakehouse](https://learn.microsoft.com/fabric/onelake/create-lakehouse-onelake#create-a-lakehouse) in a Fabric workspace
234
+ * Enable the following [setting](https://learn.microsoft.com/power-bi/transform-model/service-edit-data-models#enable-the-preview-feature): Workspace -> Workspace Settings -> General -> Data model settings -> Users can edit data models in the Power BI service
235
+
236
+ ### Instructions
237
+
238
+ 1. Download this [notebook](https://github.com/microsoft/semantic-link-labs/blob/main/Migration%20to%20Direct%20Lake.ipynb). **Use version 0.2.1 or higher only.**
239
+ 2. Make sure you are in the ['Data Engineering' persona](https://learn.microsoft.com/fabric/get-started/microsoft-fabric-overview#components-of-microsoft-fabric). Click the icon at the bottom left corner of your Workspace screen and select 'Data Engineering'
240
+ 3. In your workspace, select 'New -> Import notebook' and import the notebook from step 1.
241
+ 4. [Add your lakehouse](https://learn.microsoft.com/fabric/data-engineering/lakehouse-notebook-explore#add-or-remove-a-lakehouse) to your Fabric notebook
242
+ 5. Follow the instructions within the notebook.
243
+
244
+ ### The migration process
245
+
246
+ > [!NOTE]
247
+ > The first 4 steps are only necessary if you have logic in Power Query. Otherwise, you will need to migrate your semantic model source tables to lakehouse tables.
248
+
249
+ 1. The first step of the notebook creates a Power Query Template (.pqt) file which eases the migration of Power Query logic to Dataflows Gen2.
250
+ 2. After the .pqt file is created, sync files from your [OneLake file explorer](https://www.microsoft.com/download/details.aspx?id=105222)
251
+ 3. Navigate to your lakehouse (this is critical!). From your lakehouse, create a new Dataflows Gen2, and import the Power Query Template file. Doing this step from your lakehouse will automatically set the destination for all tables to this lakehouse (instead of having to manually map each one).
252
+ 4. Publish the Dataflow Gen2 and wait for it to finish creating the delta lake tables in your lakehouse.
253
+ 5. Back in the notebook, the next step will create your new Direct Lake semantic model with the name of your choice, taking all the relevant properties from the original semantic model and refreshing/framing your new semantic model.
254
+
255
+ > [!NOTE]
256
+ > As of version 0.2.1, calculated tables are also migrated to Direct Lake (as data tables with their DAX expression stored as model annotations in the new semantic model). Additionally, Field Parameters are migrated as they were in the original semantic model (as a calculated table).
257
+
258
+ 6. Finally, you can easily rebind all of your reports that use the import/DQ semantic model to the new Direct Lake semantic model in one click.
259
+
260
+ ### Completing these steps will do the following:
261
+ * Offload your Power Query logic to Dataflows Gen2 inside of Fabric (where it can be maintained and development can continue).
262
+ * Dataflows Gen2 will create delta tables in your Fabric lakehouse. These tables can then be used for your Direct Lake model.
263
+ * Create a new semantic model in Direct Lake mode containing all the standard tables and columns, calculation groups, measures, relationships, hierarchies, roles, row level security, perspectives, and translations from your original semantic model.
264
+ * Viable calculated tables are migrated to the new semantic model as data tables. Delta tables are dynamically generated in the lakehouse to support the Direct Lake model. The calculated table DAX logic is stored as model annotations in the new semantic model.
265
+ * Field parameters are migrated to the new semantic model as they were in the original semantic model (as calculated tables). Any calculated columns used in field parameters are automatically removed in the new semantic model's field parameter(s).
266
+ * Non-supported objects are not transferred (e.g. calculated columns, relationships using columns with unsupported data types, etc.).
267
+ * Reports used by your original semantic model will be rebound to your new semantic model.
268
+
269
+ ## Contributing
270
+
271
+ This project welcomes contributions and suggestions. Most contributions require you to agree to a
272
+ Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us
273
+ the rights to use your contribution. For details, visit https://cla.opensource.microsoft.com.
274
+
275
+ When you submit a pull request, a CLA bot will automatically determine whether you need to provide
276
+ a CLA and decorate the PR appropriately (e.g., status check, comment). Simply follow the instructions
277
+ provided by the bot. You will only need to do this once across all repos using our CLA.
278
+
279
+ This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
280
+ For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or
281
+ contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
282
+
283
+ ## Trademarks
284
+
285
+ This project may contain trademarks or logos for projects, products, or services. Authorized use of Microsoft
286
+ trademarks or logos is subject to and must follow
287
+ [Microsoft's Trademark & Brand Guidelines](https://www.microsoft.com/en-us/legal/intellectualproperty/trademarks/usage/general).
288
+ Use of Microsoft trademarks or logos in modified versions of this project must not cause confusion or imply Microsoft sponsorship.
289
+ Any use of third-party trademarks or logos are subject to those third-party's policies.