semantic-link-labs: 0.5.0-py3-none-any.whl → 0.7.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- semantic_link_labs-0.7.0.dist-info/METADATA +148 -0
- semantic_link_labs-0.7.0.dist-info/RECORD +111 -0
- {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.7.0.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +45 -15
- sempy_labs/_ai.py +42 -85
- sempy_labs/_bpa_translation/_translations_am-ET.po +828 -0
- sempy_labs/_bpa_translation/_translations_ar-AE.po +860 -0
- sempy_labs/_bpa_translation/_translations_cs-CZ.po +894 -0
- sempy_labs/_bpa_translation/_translations_da-DK.po +894 -0
- sempy_labs/_bpa_translation/_translations_de-DE.po +933 -0
- sempy_labs/_bpa_translation/_translations_el-GR.po +936 -0
- sempy_labs/_bpa_translation/_translations_es-ES.po +915 -0
- sempy_labs/_bpa_translation/_translations_fa-IR.po +883 -0
- sempy_labs/_bpa_translation/_translations_fr-FR.po +938 -0
- sempy_labs/_bpa_translation/_translations_ga-IE.po +912 -0
- sempy_labs/_bpa_translation/_translations_he-IL.po +855 -0
- sempy_labs/_bpa_translation/_translations_hi-IN.po +892 -0
- sempy_labs/_bpa_translation/_translations_hu-HU.po +910 -0
- sempy_labs/_bpa_translation/_translations_is-IS.po +887 -0
- sempy_labs/_bpa_translation/_translations_it-IT.po +931 -0
- sempy_labs/_bpa_translation/_translations_ja-JP.po +805 -0
- sempy_labs/_bpa_translation/_translations_nl-NL.po +924 -0
- sempy_labs/_bpa_translation/_translations_pl-PL.po +913 -0
- sempy_labs/_bpa_translation/_translations_pt-BR.po +909 -0
- sempy_labs/_bpa_translation/_translations_pt-PT.po +904 -0
- sempy_labs/_bpa_translation/_translations_ru-RU.po +909 -0
- sempy_labs/_bpa_translation/_translations_ta-IN.po +922 -0
- sempy_labs/_bpa_translation/_translations_te-IN.po +896 -0
- sempy_labs/_bpa_translation/_translations_th-TH.po +873 -0
- sempy_labs/_bpa_translation/_translations_zh-CN.po +767 -0
- sempy_labs/_bpa_translation/_translations_zu-ZA.po +916 -0
- sempy_labs/_clear_cache.py +12 -8
- sempy_labs/_connections.py +77 -70
- sempy_labs/_dax.py +7 -9
- sempy_labs/_generate_semantic_model.py +75 -90
- sempy_labs/_helper_functions.py +371 -20
- sempy_labs/_icons.py +23 -0
- sempy_labs/_list_functions.py +855 -427
- sempy_labs/_model_auto_build.py +4 -3
- sempy_labs/_model_bpa.py +307 -1118
- sempy_labs/_model_bpa_bulk.py +363 -0
- sempy_labs/_model_bpa_rules.py +831 -0
- sempy_labs/_model_dependencies.py +20 -16
- sempy_labs/_one_lake_integration.py +18 -12
- sempy_labs/_query_scale_out.py +116 -129
- sempy_labs/_refresh_semantic_model.py +23 -10
- sempy_labs/_translations.py +367 -288
- sempy_labs/_vertipaq.py +152 -123
- sempy_labs/directlake/__init__.py +7 -1
- sempy_labs/directlake/_directlake_schema_compare.py +33 -30
- sempy_labs/directlake/_directlake_schema_sync.py +60 -77
- sempy_labs/directlake/_dl_helper.py +233 -0
- sempy_labs/directlake/_get_directlake_lakehouse.py +7 -8
- sempy_labs/directlake/_get_shared_expression.py +5 -3
- sempy_labs/directlake/_guardrails.py +20 -16
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +17 -10
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +3 -2
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +10 -5
- sempy_labs/directlake/_update_directlake_partition_entity.py +169 -22
- sempy_labs/directlake/_warm_cache.py +7 -4
- sempy_labs/lakehouse/_get_lakehouse_columns.py +1 -1
- sempy_labs/lakehouse/_get_lakehouse_tables.py +65 -71
- sempy_labs/lakehouse/_lakehouse.py +5 -3
- sempy_labs/lakehouse/_shortcuts.py +20 -13
- sempy_labs/migration/__init__.py +1 -1
- sempy_labs/migration/_create_pqt_file.py +184 -186
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +240 -269
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +78 -77
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +444 -425
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +96 -102
- sempy_labs/migration/_migration_validation.py +2 -2
- sempy_labs/migration/_refresh_calc_tables.py +94 -100
- sempy_labs/report/_BPAReportTemplate.json +232 -0
- sempy_labs/report/__init__.py +6 -2
- sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
- sempy_labs/report/_bpareporttemplate/.platform +11 -0
- sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
- sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
- sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
- sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
- sempy_labs/report/_generate_report.py +260 -139
- sempy_labs/report/_report_functions.py +90 -59
- sempy_labs/report/_report_rebind.py +40 -34
- sempy_labs/tom/__init__.py +1 -4
- sempy_labs/tom/_model.py +601 -181
- semantic_link_labs-0.5.0.dist-info/METADATA +0 -22
- semantic_link_labs-0.5.0.dist-info/RECORD +0 -53
- sempy_labs/directlake/_fallback.py +0 -58
- {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.7.0.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.7.0.dist-info}/top_level.txt +0 -0
semantic_link_labs-0.7.0.dist-info/METADATA
ADDED
@@ -0,0 +1,148 @@
+Metadata-Version: 2.1
+Name: semantic-link-labs
+Version: 0.7.0
+Summary: Semantic Link Labs for Microsoft Fabric
+Author: Microsoft Corporation
+License: MIT License
+Project-URL: Repository, https://github.com/microsoft/semantic-link-labs.git
+Classifier: Development Status :: 3 - Alpha
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Education
+Classifier: Intended Audience :: Science/Research
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Framework :: Jupyter
+Requires-Python: <3.12,>=3.10
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Requires-Dist: semantic-link-sempy >=0.7.7
+Requires-Dist: anytree
+Requires-Dist: powerbiclient
+Requires-Dist: polib
+Provides-Extra: test
+Requires-Dist: pytest >=8.2.1 ; extra == 'test'
+
+# Semantic Link Labs
+
+[](https://badge.fury.io/py/semantic-link-labs)
+[](https://readthedocs.org/projects/semantic-link-labs/)
+[](https://github.com/psf/black)
+[](https://pepy.tech/project/semantic-link-labs)
+
+---
+[Read the documentation on ReadTheDocs!](https://semantic-link-labs.readthedocs.io/en/stable/)
+---
+
+This is a Python library intended to be used in [Microsoft Fabric notebooks](https://learn.microsoft.com/fabric/data-engineering/how-to-use-notebook). This library was originally intended to solely contain functions used for [migrating semantic models to Direct Lake mode](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration). However, it quickly became apparent that functions within such a library could support many other useful activities in the realm of semantic models, reports, lakehouses and really anything Fabric-related. As such, this library contains a variety of functions ranging from running [Vertipaq Analyzer](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.import_vertipaq_analyzer) or the [Best Practice Analyzer](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.run_model_bpa) against a semantic model to seeing if any [lakehouse tables hit Direct Lake guardrails](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.get_lakehouse_tables) or accessing the [Tabular Object Model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html) and more!
+
+Instructions for migrating import/DirectQuery semantic models to Direct Lake mode can be found [here](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration).
+
+If you encounter any issues, please [raise a bug](https://github.com/microsoft/semantic-link-labs/issues/new?assignees=&labels=&projects=&template=bug_report.md&title=).
+
+If you have ideas for new features/functions, please [request a feature](https://github.com/microsoft/semantic-link-labs/issues/new?assignees=&labels=&projects=&template=feature_request.md&title=).
+
+## Install the library in a Fabric notebook
+```python
+%pip install semantic-link-labs
+```
+
+## Once installed, run this code to import the library into your notebook
+```python
+import sempy_labs as labs
+from sempy_labs import migration, directlake
+from sempy_labs import lakehouse as lake
+from sempy_labs import report as rep
+from sempy_labs.tom import connect_semantic_model
+```
+
+## Load semantic-link-labs into a custom [Fabric environment](https://learn.microsoft.com/fabric/data-engineering/create-and-use-environment)
+An even better way to ensure the semantic-link-labs library is available in your workspace/notebooks is to load it as a library in a custom Fabric environment. If you do this, you will not have to run the above '%pip install' code every time in your notebook. Please follow the steps below.
+
+#### Create a custom environment
+1. Navigate to your Fabric workspace
+2. Click 'New' -> More options
+3. Within 'Data Science', click 'Environment'
+4. Name your environment, click 'Create'
+
+#### Add semantic-link-labs as a library to the environment
+1. Within 'Public libraries', click 'Add from PyPI'
+2. Enter 'semantic-link-labs'.
+3. Click 'Save' at the top right of the screen
+4. Click 'Publish' at the top right of the screen
+5. Click 'Publish All'
+
+#### Update your notebook to use the new environment (*must wait for the environment to finish publishing*)
+1. Navigate to your Notebook
+2. Select your newly created environment within the 'Environment' drop down in the navigation bar at the top of the notebook
+
+---
+## Direct Lake migration
+
+The following process automates the migration of an import/DirectQuery model to a new [Direct Lake](https://learn.microsoft.com/power-bi/enterprise/directlake-overview) model. The first step is specifically applicable to models which use Power Query to perform data transformations. If your model does not use Power Query, you must migrate the base tables used in your semantic model to a Fabric lakehouse.
+
+Check out [Nikola Ilic](https://twitter.com/DataMozart)'s terrific [blog post](https://data-mozart.com/migrate-existing-power-bi-semantic-models-to-direct-lake-a-step-by-step-guide/) on this topic!
+
+Check out my [blog post](https://www.elegantbi.com/post/direct-lake-migration) on this topic!
+
+[](https://www.youtube.com/watch?v=gGIxMrTVyyI?t=495)
+
+### Prerequisites
+
+* Make sure you [enable XMLA Read/Write](https://learn.microsoft.com/power-bi/enterprise/service-premium-connect-tools#enable-xmla-read-write) for your capacity
+* Make sure you have a [lakehouse](https://learn.microsoft.com/fabric/onelake/create-lakehouse-onelake#create-a-lakehouse) in a Fabric workspace
+* Enable the following [setting](https://learn.microsoft.com/power-bi/transform-model/service-edit-data-models#enable-the-preview-feature): Workspace -> Workspace Settings -> General -> Data model settings -> Users can edit data models in the Power BI service
+
+### Instructions
+
+1. Download this [notebook](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Migration%20to%20Direct%20Lake.ipynb).
+2. Make sure you are in the ['Data Engineering' persona](https://learn.microsoft.com/fabric/get-started/microsoft-fabric-overview#components-of-microsoft-fabric). Click the icon at the bottom left corner of your Workspace screen and select 'Data Engineering'
+3. In your workspace, select 'New -> Import notebook' and import the notebook from step 1.
+4. [Add your lakehouse](https://learn.microsoft.com/fabric/data-engineering/lakehouse-notebook-explore#add-or-remove-a-lakehouse) to your Fabric notebook
+5. Follow the instructions within the notebook.
+
+### The migration process
+
+> [!NOTE]
+> The first 4 steps are only necessary if you have logic in Power Query. Otherwise, you will need to migrate your semantic model source tables to lakehouse tables.
+
+1. The first step of the notebook creates a Power Query Template (.pqt) file which eases the migration of Power Query logic to Dataflows Gen2.
+2. After the .pqt file is created, sync files from your [OneLake file explorer](https://www.microsoft.com/download/details.aspx?id=105222)
+3. Navigate to your lakehouse (this is critical!). From your lakehouse, create a new Dataflows Gen2, and import the Power Query Template file. Doing this step from your lakehouse will automatically set the destination for all tables to this lakehouse (instead of having to manually map each one).
+4. Publish the Dataflow Gen2 and wait for it to finish creating the delta lake tables in your lakehouse.
+5. Back in the notebook, the next step will create your new Direct Lake semantic model with the name of your choice, taking all the relevant properties from the original semantic model and refreshing/framing your new semantic model.
+
+> [!NOTE]
+> As of version 0.2.1, calculated tables are also migrated to Direct Lake (as data tables with their DAX expression stored as model annotations in the new semantic model). Additionally, Field Parameters are migrated as they were in the original semantic model (as a calculated table).
+
+6. Finally, you can easily rebind all of your reports which use the import/DQ semantic model to the new Direct Lake semantic model in one click.
+
+### Completing these steps will do the following:
+* Offload your Power Query logic to Dataflows Gen2 inside of Fabric (where it can be maintained and development can continue).
+* Dataflows Gen2 will create delta tables in your Fabric lakehouse. These tables can then be used for your Direct Lake model.
+* Create a new semantic model in Direct Lake mode containing all the standard tables and columns, calculation groups, measures, relationships, hierarchies, roles, row level security, perspectives, and translations from your original semantic model.
+* Viable calculated tables are migrated to the new semantic model as data tables. Delta tables are dynamically generated in the lakehouse to support the Direct Lake model. The calculated table DAX logic is stored as model annotations in the new semantic model.
+* Field parameters are migrated to the new semantic model as they were in the original semantic model (as calculated tables). Any calculated columns used in field parameters are automatically removed in the new semantic model's field parameter(s).
+* Non-supported objects are not transferred (e.g. calculated columns, relationships using columns with unsupported data types, etc.).
+* Reports used by your original semantic model will be rebound to your new semantic model.
+
+## Contributing
+
+This project welcomes contributions and suggestions. Most contributions require you to agree to a
+Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us
+the rights to use your contribution. For details, visit https://cla.opensource.microsoft.com.
+
+When you submit a pull request, a CLA bot will automatically determine whether you need to provide
+a CLA and decorate the PR appropriately (e.g., status check, comment). Simply follow the instructions
+provided by the bot. You will only need to do this once across all repos using our CLA.
+
+This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
+For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or
+contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
+
+## Trademarks
+
+This project may contain trademarks or logos for projects, products, or services. Authorized use of Microsoft
+trademarks or logos is subject to and must follow
+[Microsoft's Trademark & Brand Guidelines](https://www.microsoft.com/en-us/legal/intellectualproperty/trademarks/usage/general).
+Use of Microsoft trademarks or logos in modified versions of this project must not cause confusion or imply Microsoft sponsorship.
+Any use of third-party trademarks or logos is subject to those third parties' policies.
semantic_link_labs-0.7.0.dist-info/RECORD
ADDED
@@ -0,0 +1,111 @@
+sempy_labs/__init__.py,sha256=RHVPI2-N-m4tiNOEqMHwWmHoXtTec9-hr3ls-8MGeQ0,5675
+sempy_labs/_ai.py,sha256=CjlFebT35Rzbw90BmsDy7PjLiAZMZ-B7wZ_EoI444bw,16271
+sempy_labs/_clear_cache.py,sha256=NckXmtDCgRqlNL5FvLTut2XWLI0Hft3O4sAaXS1tPfo,1743
+sempy_labs/_connections.py,sha256=w1dFC4WeTNFmLGD2EL_Syk0Wb1Eij18we2FVn_VaCD8,7641
+sempy_labs/_dax.py,sha256=dt1GgHceyM7f6phRBPxRKnmQy_KYKpcgFQHuOjGbpLo,2029
+sempy_labs/_generate_semantic_model.py,sha256=igKsVX-5Nqpipjg0taLFro8OsD3ogwSwKsyVAmuRwG4,8647
+sempy_labs/_helper_functions.py,sha256=rTetza9TRLtKOjEd0ZHz-xDZc1nbsN58ldHkLK8CiMo,24769
+sempy_labs/_icons.py,sha256=UK7chr_tEkZd4Y7Es_KyTc4dFgtYS4f31ggWxyqC9uY,853
+sempy_labs/_list_functions.py,sha256=CwNI7tEvn8upIpCaLDII4QLQVrJhJECPJdo6vZsg0sw,90578
+sempy_labs/_model_auto_build.py,sha256=fX3bCLFCOMQHuheKIoB48fUABG7XAT7qqsMbUiWSrY0,5071
+sempy_labs/_model_bpa.py,sha256=U9rHoGzuAmV1dtJvgSVk3BiUwDp6WTFt1l0CbkmKcdE,20439
+sempy_labs/_model_bpa_bulk.py,sha256=nvQKQ5h7Zs7rPJbybkrx1_cz3xdA-dLcZcWizIX5_oo,14702
+sempy_labs/_model_bpa_rules.py,sha256=uC2nKnT3b6lRMaGB7VokSORXVZvRSTQs2DzFSx4nIYY,47294
+sempy_labs/_model_dependencies.py,sha256=nZdqq2iMhZejnS_LCd2rpK6r1B7jWpa3URkxobRPifY,12986
+sempy_labs/_one_lake_integration.py,sha256=eIuLxlw8eXfUH2avKhsyLmXZbTllSwGsz2j_HMAikpQ,6234
+sempy_labs/_query_scale_out.py,sha256=fliTIx_POeuzjV0bhYM4-2QD74c4r3soxs0_bSaoD28,14441
+sempy_labs/_refresh_semantic_model.py,sha256=2qzP9KqmwA20RuL1o6Lt9bIjC-KtdX8ZgcTvJParg-w,7157
+sempy_labs/_translations.py,sha256=BcrVIrBNSKtbFz4Y9t1Dh1SZCu0K4NHu7n01Z6O76IY,19665
+sempy_labs/_vertipaq.py,sha256=zMKtcCQ2gpgoDLisTbTjFNe60Cg2PlAQ6HvkSlbpKPo,33660
+sempy_labs/_bpa_translation/_translations_am-ET.po,sha256=XW0Djm-jlLZRXEnhZvk8r1aWd2I36mr97XxFt9yZ-N0,36831
+sempy_labs/_bpa_translation/_translations_ar-AE.po,sha256=mPQR83IulMmT2qSXTSOXWahjwqy7dg3AjtFmAxbraOI,34753
+sempy_labs/_bpa_translation/_translations_cs-CZ.po,sha256=D3WiV3LVbAQzy9hjszOEA2GtOsrtUjBYJfF_7a0N0jU,29695
+sempy_labs/_bpa_translation/_translations_da-DK.po,sha256=f-6mncit2OkEJR73_kZTu8CfEXvpEZMrKcPSVGGZMVU,28680
+sempy_labs/_bpa_translation/_translations_de-DE.po,sha256=SEoAyp72awJlboDMRKaZEu-BhOGlRTLCne1P3H7IT6M,30917
+sempy_labs/_bpa_translation/_translations_el-GR.po,sha256=BDlrTp3MvI_NzHG67QVxdwK3LG1NUdrLJiqvn8Yj0Dk,43355
+sempy_labs/_bpa_translation/_translations_es-ES.po,sha256=JlxqiFF7HYEBhmgqddy5HmvJb6o9OjQLpaZqehgh69U,30023
+sempy_labs/_bpa_translation/_translations_fa-IR.po,sha256=kWXO5Oe1_YJ_HK231Co14EaSJS3zuiHUp97_Zt9FDOw,36620
+sempy_labs/_bpa_translation/_translations_fr-FR.po,sha256=lwAPshENCPwZzbwgQk_WNc1UDkXvTOk9jkbEK6wL2bs,31551
+sempy_labs/_bpa_translation/_translations_ga-IE.po,sha256=W_ed6zTDa7BpnOI9QtDS3NmmGaRgKwUiKow89JRrxGY,30786
+sempy_labs/_bpa_translation/_translations_he-IL.po,sha256=amaKGtkie9qDWIxT-Jz_EnDP5VveMRt9oVBNENBMLwU,33492
+sempy_labs/_bpa_translation/_translations_hi-IN.po,sha256=skaR59KMw__cgO7e77ejIW7_ZG2ztuyeb-J-Q3v6pzs,49292
+sempy_labs/_bpa_translation/_translations_hu-HU.po,sha256=vTEkRCJ0Dqy1kJzzKkvGU0y4Sf0HP9hulluK1NE-f9U,30973
+sempy_labs/_bpa_translation/_translations_is-IS.po,sha256=2565DYm_VfM6sXju4YwA7oOb9_JIXYkskSZVTyFj1VI,28992
+sempy_labs/_bpa_translation/_translations_it-IT.po,sha256=7SBempvRLlLWvd2WVb1927ph9RdqqrpbJLi4wsVafl4,30700
+sempy_labs/_bpa_translation/_translations_ja-JP.po,sha256=XJRbXywLBrlUubDACCWTDr37bcfluUdDVzOmI6DVZA8,33218
+sempy_labs/_bpa_translation/_translations_nl-NL.po,sha256=9tqJh81FHndDSa8ZR3kkQnWVvyEM4-s_WsCB3EOYN2M,30242
+sempy_labs/_bpa_translation/_translations_pl-PL.po,sha256=YQxixTn1AVT-ds20CJMpQweoO4qiHh6JxVuLIVarNh4,30393
+sempy_labs/_bpa_translation/_translations_pt-BR.po,sha256=o_fKzTd1AWAvZIM_Na0BC_DTkVQwoG0pcIwclwIEUBc,29644
+sempy_labs/_bpa_translation/_translations_pt-PT.po,sha256=CtAHtWmxCZ_nK2GS_9Y5y-DQVPqq0BfRKX8hj52m-VY,29583
+sempy_labs/_bpa_translation/_translations_ru-RU.po,sha256=pesyfVvCut22mCRxOSbpziy2T-4KCoAJXPoIjdXrhTc,41374
+sempy_labs/_bpa_translation/_translations_ta-IN.po,sha256=8xcyOLO3SVWXTxuwouLquINnA_QtgOH1kwhPLyraxPQ,56195
+sempy_labs/_bpa_translation/_translations_te-IN.po,sha256=j7Zk29lTWZmJoN8MPz1iEzv7rU9X7zcbIp-Ui_X4Q5Y,51377
+sempy_labs/_bpa_translation/_translations_th-TH.po,sha256=oleGJikyzARW36mq8hgwHqJKVZ5zkPPUgJVc1G_8BCc,47689
+sempy_labs/_bpa_translation/_translations_zh-CN.po,sha256=OVrLt1-mCGjeha-o3d7Nt5hq0H9nDq4fUHtZayp1oMw,25548
+sempy_labs/_bpa_translation/_translations_zu-ZA.po,sha256=ZpZTmhkRg7U7esS6ZxVxEgGldB0JhpfdhLE7daJyGSM,29609
+sempy_labs/directlake/__init__.py,sha256=R2AGiGFSQzHNMdeWHcr_bJT6tNPYWvKgCgi6s_vY_nc,1924
+sempy_labs/directlake/_directlake_schema_compare.py,sha256=lsuDsPG4xVNNrT2yhJmSuILyeMiuBxRKqdnmudcLA-c,4448
+sempy_labs/directlake/_directlake_schema_sync.py,sha256=NqciV_O0K7aJ8EbWxQVZIv2dbRuRDRONH5ttgmHr1cw,4380
+sempy_labs/directlake/_dl_helper.py,sha256=2Y6SVVCaE9uh1qv7lawYOdnOJ4gomAm0EEDXaXKIALU,8760
+sempy_labs/directlake/_get_directlake_lakehouse.py,sha256=sovI4ds2SEgkp4Fi465jtJ4seRvQxdYgcixRDvsUwNM,2321
+sempy_labs/directlake/_get_shared_expression.py,sha256=Xl2_GYqRll95cN7JjwLlULbcRXM71Ij9JkrYAp7cNJM,1943
+sempy_labs/directlake/_guardrails.py,sha256=0zqqkEDk02_jb4MzWJCKRNcDtfPGBcWUcxuQcDbgWns,2390
+sempy_labs/directlake/_list_directlake_model_calc_tables.py,sha256=_rpnbgsFAz2W16PpgIOB0Rj_Fs1ZKrDbz3DUaaR_bfU,2143
+sempy_labs/directlake/_show_unsupported_directlake_objects.py,sha256=QNj2wHzFGtjnsAICmlc7BuhCYkw0An0XnditDTCG2JM,3358
+sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py,sha256=b_Y5_GSfWC25wH6R7L37-AHO9fvKkmxRGaP6dVDC7-w,3233
+sempy_labs/directlake/_update_directlake_partition_entity.py,sha256=Pbx7LCdKyqEfX1npLvhw0WzFnOEbluwB3_xW0ELvHL4,8580
+sempy_labs/directlake/_warm_cache.py,sha256=ZgPricISRszx-yDERXihBDGVhEFB9yX-nBtLX0ZJTXI,8258
+sempy_labs/lakehouse/__init__.py,sha256=i6VRx4dR1SIN-1GxioiNwhC4FxbozRCIz5TfXjb9rKc,587
+sempy_labs/lakehouse/_get_lakehouse_columns.py,sha256=Bb_iCTlNwl0wdN4dW_E7tVnfbHhHwQT_l0SUqvcbYpo,2582
+sempy_labs/lakehouse/_get_lakehouse_tables.py,sha256=1IXa_u1c4CJSlmP1rxBCMcOrQw-vmRXjqd5U5xsx_5c,8800
+sempy_labs/lakehouse/_lakehouse.py,sha256=5A4SwVak8AlRVBUeHg9_Zfq1Id8yInRtnimvjo8oUxY,2782
+sempy_labs/lakehouse/_shortcuts.py,sha256=MT_Cqog5cTMz9fN3M_ZjAaQSjXXiyCyPWGY8LbaXZsI,6977
+sempy_labs/migration/__init__.py,sha256=w4vvGk6wTWXVfofJDmio2yIFvSSJsxOpjv6mvNGmrOI,1043
+sempy_labs/migration/_create_pqt_file.py,sha256=oYoKD78K9Ox1fqtkh-BfU_G5nUIoK_-5ChvCKDsYsWU,9257
+sempy_labs/migration/_migrate_calctables_to_lakehouse.py,sha256=p24PBg26gQHoj6VNcoK61o2ILJrVbVrJQ_n3PH4o0p0,17530
+sempy_labs/migration/_migrate_calctables_to_semantic_model.py,sha256=cm3ny8i4b6D-Ew22-WZKyEFPLDr0wovkrlqTazYSbR8,5982
+sempy_labs/migration/_migrate_model_objects_to_semantic_model.py,sha256=-JkxmM8PbEpLBeCssUgkIcnGHYnxHruqrMWp1CdiT6s,23123
+sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py,sha256=G5cfmaK5ThLJc49SV9JMr2mX0--an3fFDr44n8gI1WQ,6349
+sempy_labs/migration/_migration_validation.py,sha256=phbUUIYA5z4dZvEKrVbByMTPPwMscY7Dy9HVxE8z_HM,2483
+sempy_labs/migration/_refresh_calc_tables.py,sha256=eDj0OJQ07Tum4umZH0NsUW5Rx_YXEpGnAu8OVVoQ4yk,5190
+sempy_labs/report/_BPAReportTemplate.json,sha256=9Uh-7E6d2ooxQ7j5JRayv_ayEULc7Gzg42kZGKdOqH8,63920
+sempy_labs/report/__init__.py,sha256=GQcTHbB3SjLEeCH0id_jlmqQ7S1iPCpoISUQfAHI2T8,960
+sempy_labs/report/_generate_report.py,sha256=7H2xQ5nHDK1_2RjvNNHX3IwWyNSRbTGMpGWxMmmjdOk,12189
+sempy_labs/report/_report_functions.py,sha256=YK9UdpVDro_XC7ZAqNLHB4ZbAPwCwEm2YLn6RzJWkA8,29868
+sempy_labs/report/_report_rebind.py,sha256=t33liDvBitOhwxGPPLWJYzcccu9tBTjRFTAZkX6UYv8,4809
+sempy_labs/report/_bpareporttemplate/.platform,sha256=kWRa6B_KwSYLsvVFDx372mQriQO8v7dJ_YzQV_cfD-Q,303
+sempy_labs/report/_bpareporttemplate/definition.pbir,sha256=bttyHZYKqjA8OBb_cezGlX4H82cDvGZVCl1QB3fij4E,343
+sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json,sha256=kzjBlNdjbsSBBSHBwbQc298AJCr9Vp6Ex0D5PemUuT0,1578
+sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json,sha256=4N6sT5nLlYBobGmZ1Xb68uOMVVCBEyheR535js_et28,13467
+sempy_labs/report/_bpareporttemplate/definition/report.json,sha256=-8BK5blTE-nc0Y4-M0pTHD8Znt3pHZ-u2veRppxPDBQ,3975
+sempy_labs/report/_bpareporttemplate/definition/version.json,sha256=yL3ZZqhfHqq0MS0glrbXtQgkPk17xaTSWvPPyxBWpOc,152
+sempy_labs/report/_bpareporttemplate/definition/pages/pages.json,sha256=jBLscHaA4wAQwusHXA3oYFaTsk3LL6S--k6wvvehJhk,311
+sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json,sha256=5qfUCQXMdeDAKtfdLxQZt06-e111OTSP07gsmG1dfpY,313
+sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json,sha256=wyB8w15elLuJorCx3lnQYD7OOXqNsoVgqNaDdukO4G8,4455
+sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json,sha256=v-DCEtln1r6EVG7-KlfWBCbxnWzk9LyjycHjAmowObs,10977
+sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json,sha256=3cg4bTS_7OAMuWYKtPFqZeBbZoNczLg59zQClNqgAw8,2858
+sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json,sha256=ynOggnusTsTBxaXh9Q1n3zmsixHNhIohwku2y40Z-Js,14453
+sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json,sha256=DU_t1cr5eczWFy9EI0R1v7S-cbNIdSyll0az5jKJRf4,5090
+sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json,sha256=0HnplDEjDGa3htdufUksOxzC7iZGERNxgf3k81_kJ7E,12829
+sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json,sha256=kl45g9-GF8wNrQRM2PrsS-Rf0cYhP5b3-lqAeXJfmN8,2866
+sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json,sha256=id6U55GALhCx5BwtM_aCFjMkiVkhSvR79o2pbrMwNyA,12981
+sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json,sha256=7pk03j28AwlE2HIwxei08Pz5VseRPO8eziOC6dgEJfs,249
+sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json,sha256=7ZqozO6t9Ossms8Y20xGea3tdSAESSkxkejqTDRW15E,2982
+sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json,sha256=dNExLBlxFRHASVCz8DUZ2Voq_ZCCuGu1YZmw2HdwCww,314
+sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json,sha256=nZaj33KCp6bqxG0_nplUyi8-AGavN1iOp2lVkI0gLvw,12928
+sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json,sha256=xaykDc6T5qwe8qENlAaAd-Ivw8oF1dderfrhSbUKGW4,10102
+sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json,sha256=Fk7MWX8LhbftzHe_6KCFkSp2jYzRMYnZSWeElnFWLbw,915
+sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json,sha256=O4wgVQuf-tAGDjVuzlnoOGi8GLPG2Vxz6y-JubTRQfY,14305
+sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json,sha256=twzhLrEcCzUikeiKJ5sSEmQZ1otKXxgTtdz0uX4AKes,5445
+sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json,sha256=3dS2XcsYWMEity0zFs0hxrv_w0Tnwe50iZFYvotfsWY,2856
+sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json,sha256=Y59BS3bx16gzmZn-3-JqJZ_BRxeqVuoaVlysZvVAZAQ,14451
+sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json,sha256=zOVrg0CaoOSxedwwyD8Msm94sqFVM0l-6IXX51EMRZY,2866
+sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json,sha256=wBVuNc8S2NaUA0FC708w6stmR2djNZp8nAsHMqesgsc,293
+sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json,sha256=mlY6t9OlSe-Y6_QmXJpS1vggU6Y3FjISUKECL8FVSg8,931
+sempy_labs/tom/__init__.py,sha256=Qbs8leW0fjzvWwOjyWK3Hjeehu7IvpB1beASGsi28bk,121
+sempy_labs/tom/_model.py,sha256=M-es2bES3Usj5uVmt5vwNmtm9vWzeqtVtKREpxjnjiI,151050
+semantic_link_labs-0.7.0.dist-info/LICENSE,sha256=ws_MuBL-SCEBqPBFl9_FqZkaaydIJmxHrJG2parhU4M,1141
+semantic_link_labs-0.7.0.dist-info/METADATA,sha256=DM8hGBclkGwWLsMT2CeZWdz6OM2NA6oL8n0zWbWtHxs,11241
+semantic_link_labs-0.7.0.dist-info/WHEEL,sha256=Mdi9PDNwEZptOjTlUcAth7XJDFtKrHYaQMPulZeBCiQ,91
+semantic_link_labs-0.7.0.dist-info/top_level.txt,sha256=kiQX1y42Dbein1l3Q8jMUYyRulDjdlc2tMepvtrvixQ,11
+semantic_link_labs-0.7.0.dist-info/RECORD,,
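Each RECORD row above pairs a file path with its SHA-256 digest and its size in bytes. As a brief aside (not part of the package itself), the digest format is the standard wheel RECORD encoding: urlsafe base64 of the raw SHA-256 with the trailing '=' padding removed. A minimal sketch of how to reproduce a digest for comparison, assuming a hypothetical local file path:

```python
# Sketch: compute a wheel-RECORD-style sha256 digest for a local file.
# The path below is hypothetical; compare the result to the value after
# "sha256=" in the corresponding RECORD row.
import base64
import hashlib

def record_digest(path: str) -> str:
    with open(path, "rb") as f:
        digest = hashlib.sha256(f.read()).digest()
    # urlsafe base64, with '=' padding stripped, per the wheel RECORD format
    return base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")

print(record_digest("sempy_labs/_icons.py"))
```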
sempy_labs/__init__.py
CHANGED
@@ -9,13 +9,16 @@ from sempy_labs._dax import evaluate_dax_impersonation
 from sempy_labs._generate_semantic_model import (
     create_blank_semantic_model,
     create_semantic_model_from_bim,
-
+    deploy_semantic_model,
     get_semantic_model_bim,
 )
 from sempy_labs._list_functions import (
+    list_reports_using_semantic_model,
+    delete_custom_pool,
     list_semantic_model_objects,
     list_shortcuts,
     get_object_level_security,
+    list_capacities,
     # list_annotations,
     # list_columns,
     list_dashboards,
@@ -49,9 +52,12 @@ from sempy_labs._list_functions import (
     update_workspace_user,
     list_workspace_users,
     assign_workspace_to_dataflow_storage,
+    get_notebook_definition,
+    import_notebook_from_web,
 )

 from sempy_labs._helper_functions import (
+    resolve_workspace_capacity,
     create_abfss_path,
     format_dax_object_name,
     create_relationship_name,
@@ -64,10 +70,21 @@ from sempy_labs._helper_functions import (
     resolve_dataset_name,
     resolve_report_id,
     resolve_report_name,
+    is_default_semantic_model,
+    resolve_item_type,
+    get_capacity_id,
+    get_capacity_name,
+    resolve_capacity_name,
     # language_validate
 )
+
 # from sempy_labs._model_auto_build import model_auto_build
-from sempy_labs.
+from sempy_labs._model_bpa_bulk import (
+    run_model_bpa_bulk,
+    create_model_bpa_semantic_model,
+)
+from sempy_labs._model_bpa import run_model_bpa
+from sempy_labs._model_bpa_rules import model_bpa_rules
 from sempy_labs._model_dependencies import (
     measure_dependency_tree,
     get_measure_dependencies,
@@ -97,6 +114,7 @@ from sempy_labs._vertipaq import (
 )

 __all__ = [
+    "delete_custom_pool",
     "clear_cache",
     # create_connection_cloud,
     # create_connection_vnet,
@@ -104,7 +122,7 @@ __all__ = [
     "evaluate_dax_impersonation",
     "create_blank_semantic_model",
     "create_semantic_model_from_bim",
-
+    "deploy_semantic_model",
     "get_semantic_model_bim",
     "get_object_level_security",
     #'list_annotations',
@@ -125,7 +143,7 @@ __all__ = [
     #'list_sqlendpoints',
     #'list_tables',
     "list_warehouses",
-
+    "list_workspace_role_assignments",
     "create_warehouse",
     "update_item",
     "create_abfss_path",
@@ -140,26 +158,26 @@ __all__ = [
     "resolve_dataset_name",
     "resolve_report_id",
     "resolve_report_name",
-    #'language_validate',
-    #"model_auto_build",
+    # 'language_validate',
+    # "model_auto_build",
     "model_bpa_rules",
     "run_model_bpa",
     "measure_dependency_tree",
     "get_measure_dependencies",
     "get_model_calc_dependencies",
     "export_model_to_onelake",
-
-
-
-
-
-
-
+    "qso_sync",
+    "qso_sync_status",
+    "set_qso",
+    "list_qso_settings",
+    "disable_qso",
+    "set_semantic_model_storage_format",
+    "set_workspace_default_storage_format",
     "refresh_semantic_model",
     "cancel_dataset_refresh",
     "translate_semantic_model",
     "vertipaq_analyzer",
-    #'visualize_vertipaq',
+    # 'visualize_vertipaq',
     "import_vertipaq_analyzer",
     "list_semantic_model_objects",
     "list_shortcuts",
@@ -174,5 +192,17 @@ __all__ = [
     "delete_user_from_workspace",
     "update_workspace_user",
     "list_workspace_users",
-    "assign_workspace_to_dataflow_storage"
+    "assign_workspace_to_dataflow_storage",
+    "list_capacities",
+    "is_default_semantic_model",
+    "resolve_item_type",
+    "get_notebook_definition",
+    "import_notebook_from_web",
+    "list_reports_using_semantic_model",
+    "resolve_workspace_capacity",
+    "get_capacity_id",
+    "get_capacity_name",
+    "resolve_capacity_name",
+    "run_model_bpa_bulk",
+    "create_model_bpa_semantic_model",
 ]
sempy_labs/_ai.py
CHANGED
@@ -12,9 +12,8 @@ import sempy_labs._icons as icons
 def optimize_semantic_model(dataset: str, workspace: Optional[str] = None):

 from ._model_bpa import run_model_bpa
-from .directlake.
+from .directlake._dl_helper import check_fallback_reason
 from ._helper_functions import format_dax_object_name
-from sempy_labs.tom import connect_semantic_model

 modelBPA = run_model_bpa(
 dataset=dataset, workspace=workspace, return_dataframe=True
@@ -41,7 +40,8 @@ def optimize_semantic_model(dataset: str, workspace: Optional[str] = None):

 if len(fallback_filt) > 0:
 print(
-f"{icons.yellow_dot} The '{dataset}' semantic model is a Direct Lake semantic model which contains views.
+f"{icons.yellow_dot} The '{dataset}' semantic model is a Direct Lake semantic model which contains views. "
+"Since views always fall back to DirectQuery, it is recommended to only use lakehouse tables and not views."
 )

 # Potential model reduction estimate
@@ -65,67 +65,6 @@
 print(f"{icons.green_dot} The '{rule}' rule has been followed.")


-def generate_measure_descriptions(
-dataset: str,
-measures: Union[str, List[str]],
-gpt_model: Optional[str] = "gpt-35-turbo",
-workspace: Optional[str] = None,
-):
-
-service_name = "synapseml-openai"
-
-if isinstance(measures, str):
-measures = [measures]
-
-validModels = ["gpt-35-turbo", "gpt-35-turbo-16k", "gpt-4"]
-if gpt_model not in validModels:
-raise ValueError(f"{icons.red_dot} The '{gpt_model}' model is not a valid model. Enter a gpt_model from this list: {validModels}.")
-
-dfM = fabric.list_measures(dataset=dataset, workspace=workspace)
-
-if measures is not None:
-dfM_filt = dfM[dfM["Measure Name"].isin(measures)]
-else:
-dfM_filt = dfM
-
-df = dfM_filt[["Table Name", "Measure Name", "Measure Expression"]]
-
-df["prompt"] = (
-"The following is DAX code used by Microsoft Power BI. Please explain this code in simple terms:"
-+ df["Measure Expression"]
-)
-
-# Generate new column in df dataframe which has the AI-generated descriptions
-completion = {
-OpenAICompletion()
-.setDeploymentName(gpt_model)
-.setMaxTokens(200)
-.setCustomServiceName(service_name)
-.setPromptCol("prompt")
-.setErrorCol("error")
-.setOutputCol("completions")
-}
-
-completed_df = completion.transform(df).cache()
-completed_df.select(
-col("prompt"),
-col("error"),
-col("completions.choices.text").getItem(0).alias("text"),
-)
-
-# Update the model to use the new descriptions
-#with connect_semantic_model(dataset=dataset, workspace=workspace, readonly=False) as tom:
-
-
-# for t in m.Tables:
-# tName = t.Name
-# for ms in t.Measures:
-# mName = ms.Name
-# mDesc = promptValue
-
-# m.SaveChanges()
-
-
 def generate_aggs(
 dataset: str,
 table_name: str,
@@ -146,10 +85,10 @@ def generate_aggs(
 import System

 # columns = {
-#'SalesAmount': 'Sum',
-#'ProductKey': 'GroupBy',
-#'OrderDateKey': 'GroupBy'
-#
+# 'SalesAmount': 'Sum',
+# 'ProductKey': 'GroupBy',
+# 'OrderDateKey': 'GroupBy'
+# }

 if workspace is None:
 workspace_id = fabric.get_workspace_id()
@@ -171,33 +110,44 @@
 numericTypes = ["Int64", "Double", "Decimal"]

 if any(value not in aggTypes for value in columns.values()):
-raise ValueError(
+raise ValueError(
+f"{icons.red_dot} Invalid aggregation type(s) have been specified in the 'columns' parameter. Valid aggregation types: {aggTypes}."
+)

 dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
 dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
 dfM = fabric.list_measures(dataset=dataset, workspace=workspace)
 dfR = fabric.list_relationships(dataset=dataset, workspace=workspace)
 if not any(r["Mode"] == "DirectLake" for i, r in dfP.iterrows()):
-raise ValueError(
-
+raise ValueError(
+f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode. This function is only relevant for Direct Lake semantic models."
+)
+
 dfC_filtT = dfC[dfC["Table Name"] == table_name]

 if len(dfC_filtT) == 0:
-raise ValueError(
+raise ValueError(
+f"{icons.red_dot} The '{table_name}' table does not exist in the '{dataset}' semantic model within the '{workspace}' workspace."
+)

 dfC_filt = dfC[
 (dfC["Table Name"] == table_name) & (dfC["Column Name"].isin(columnValues))
 ]

 if len(columns) != len(dfC_filt):
-raise ValueError(
+raise ValueError(
+f"{icons.red_dot} Columns listed in '{columnValues}' do not exist in the '{table_name}' table in the '{dataset}' semantic model within the '{workspace}' workspace."
+)

 # Check if doing sum/count/min/max etc. on a non-number column
-for
-dfC_col = dfC_filt[dfC_filt["Column Name"] ==
+for cm, agg in columns.items():
+dfC_col = dfC_filt[dfC_filt["Column Name"] == cm]
 dataType = dfC_col["Data Type"].iloc[0]
 if agg in aggTypesAggregate and dataType not in numericTypes:
-raise ValueError(
+raise ValueError(
+f"{icons.red_dot} The '{cm}' column in the '{table_name}' table is of '{dataType}' data type. Only columns of '{numericTypes}' data types"
+f" can be aggregated as '{aggTypesAggregate}' aggregation types."
+)

 # Create/update lakehouse delta agg table
 aggSuffix = "_agg"
@@ -213,7 +163,10 @@ def generate_aggs(
 dfI_filt = dfI[(dfI["Id"] == sqlEndpointId)]

 if len(dfI_filt) == 0:
-raise ValueError(
+raise ValueError(
+f"{icons.red_dot} The lakehouse (SQL Endpoint) used by the '{dataset}' semantic model does not reside in"
+f" the '{lakehouse_workspace}' workspace. Please update the lakehouse_workspace parameter."
+)

 lakehouseName = dfI_filt["Display Name"].iloc[0]
 lakehouse_id = resolve_lakehouse_id(
@@ -223,8 +176,8 @@
 # Generate SQL query
 query = "SELECT"
 groupBy = "\nGROUP BY"
-for
-colFilt = dfC_filt[dfC_filt["Column Name"] ==
+for cm, agg in columns.items():
+colFilt = dfC_filt[dfC_filt["Column Name"] == cm]
 sourceCol = colFilt["Source"].iloc[0]

 if agg == "GroupBy":
@@ -242,7 +195,7 @@
 delta_table_name=lakeTName,
 )
 df = spark.read.format("delta").load(fromTablePath)
-tempTableName = "delta_table_"
+tempTableName = f"delta_table_{lakeTName}"
 df.createOrReplaceTempView(tempTableName)
 sqlQuery = f"{query} \n FROM {tempTableName} {groupBy}"

@@ -328,7 +281,9 @@
 col.DataType = System.Enum.Parse(TOM.DataType, dType)

 m.Tables[aggTableName].Columns.Add(col)
-print(
+print(
+f"{icons.green_dot} The '{aggTableName}'[{cName}] column has been added."
+)

 # Create relationships
 relMap = {"m": "Many", "1": "One", "0": "None"}
@@ -367,10 +322,11 @@
 print(
 f"{icons.green_dot} '{aggTableName}'[{fromColumn}] -> '{toTable}'[{toColumn}] relationship has been added."
 )
-except:
+except Exception as e:
 print(
 f"{icons.red_dot} '{aggTableName}'[{fromColumn}] -> '{toTable}'[{toColumn}] relationship has not been created."
 )
+print(f"Exception occured: {e}")
 elif toTable == table_name:
 try:
 rel.ToColumn = m.Tables[aggTableName].Columns[toColumn]
@@ -378,11 +334,12 @@
 print(
 f"{icons.green_dot} '{fromTable}'[{fromColumn}] -> '{aggTableName}'[{toColumn}] relationship has been added."
 )
-except:
+except Exception as e:
 print(
 f"{icons.red_dot} '{fromTable}'[{fromColumn}] -> '{aggTableName}'[{toColumn}] relationship has not been created."
 )
-
+print(f"Exception occured: {e}")
+"Relationship creation is complete."

 # Create IF measure
 f"\n{icons.in_progress} Creating measure to check if the agg table can be used..."
@@ -441,7 +398,7 @@
 print(expr)
 print(newExpr)

-aggMName = mName
+aggMName = f"{mName}{aggSuffix}"
 measure = TOM.Measure()
 measure.Name = aggMName
 measure.IsHidden = True