semantic-link-labs 0.9.10__py3-none-any.whl → 0.10.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (40)
  1. {semantic_link_labs-0.9.10.dist-info → semantic_link_labs-0.10.0.dist-info}/METADATA +28 -21
  2. {semantic_link_labs-0.9.10.dist-info → semantic_link_labs-0.10.0.dist-info}/RECORD +38 -31
  3. {semantic_link_labs-0.9.10.dist-info → semantic_link_labs-0.10.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +26 -1
  5. sempy_labs/_delta_analyzer.py +9 -8
  6. sempy_labs/_dictionary_diffs.py +221 -0
  7. sempy_labs/_environments.py +19 -1
  8. sempy_labs/_generate_semantic_model.py +1 -1
  9. sempy_labs/_helper_functions.py +358 -134
  10. sempy_labs/_kusto.py +25 -23
  11. sempy_labs/_list_functions.py +13 -35
  12. sempy_labs/_model_bpa_rules.py +13 -3
  13. sempy_labs/_notebooks.py +44 -11
  14. sempy_labs/_semantic_models.py +93 -1
  15. sempy_labs/_sql.py +4 -3
  16. sempy_labs/_tags.py +194 -0
  17. sempy_labs/_user_delegation_key.py +42 -0
  18. sempy_labs/_variable_libraries.py +89 -0
  19. sempy_labs/_vpax.py +388 -0
  20. sempy_labs/admin/__init__.py +8 -0
  21. sempy_labs/admin/_tags.py +126 -0
  22. sempy_labs/directlake/_generate_shared_expression.py +5 -1
  23. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +55 -5
  24. sempy_labs/dotnet_lib/dotnet.runtime.config.json +10 -0
  25. sempy_labs/lakehouse/__init__.py +14 -0
  26. sempy_labs/lakehouse/_blobs.py +100 -85
  27. sempy_labs/lakehouse/_get_lakehouse_tables.py +1 -13
  28. sempy_labs/lakehouse/_helper.py +211 -0
  29. sempy_labs/lakehouse/_lakehouse.py +1 -1
  30. sempy_labs/lakehouse/_livy_sessions.py +137 -0
  31. sempy_labs/report/__init__.py +2 -0
  32. sempy_labs/report/_download_report.py +1 -1
  33. sempy_labs/report/_generate_report.py +5 -1
  34. sempy_labs/report/_report_helper.py +27 -128
  35. sempy_labs/report/_reportwrapper.py +1903 -1165
  36. sempy_labs/tom/_model.py +83 -21
  37. sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +0 -9
  38. sempy_labs/report/_bpareporttemplate/.platform +0 -11
  39. {semantic_link_labs-0.9.10.dist-info → semantic_link_labs-0.10.0.dist-info}/licenses/LICENSE +0 -0
  40. {semantic_link_labs-0.9.10.dist-info → semantic_link_labs-0.10.0.dist-info}/top_level.txt +0 -0
{semantic_link_labs-0.9.10.dist-info → semantic_link_labs-0.10.0.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: semantic-link-labs
- Version: 0.9.10
+ Version: 0.10.0
  Summary: Semantic Link Labs for Microsoft Fabric
  Author: Microsoft Corporation
  License: MIT License
@@ -27,7 +27,7 @@ Dynamic: license-file
  # Semantic Link Labs

  [![PyPI version](https://badge.fury.io/py/semantic-link-labs.svg)](https://badge.fury.io/py/semantic-link-labs)
- [![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.9.10&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
+ [![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.10.0&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
  [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
  [![Downloads](https://static.pepy.tech/badge/semantic-link-labs)](https://pepy.tech/project/semantic-link-labs)

@@ -38,6 +38,9 @@ Dynamic: license-file
  [Read the Wiki!](https://github.com/microsoft/semantic-link-labs/wiki)
  ---

+ [See code examples!](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples)
+ ---
+
  Semantic Link Labs is a Python library designed for use in [Microsoft Fabric notebooks](https://learn.microsoft.com/fabric/data-engineering/how-to-use-notebook). This library extends the capabilities of [Semantic Link](https://learn.microsoft.com/fabric/data-science/semantic-link-overview) offering additional functionalities to seamlessly integrate and work alongside it. The goal of Semantic Link Labs is to simplify technical processes, empowering people to focus on higher level activities and allowing tasks that are better suited for machines to be efficiently handled without human intervention.

  If you encounter any issues, please [raise a bug](https://github.com/microsoft/semantic-link-labs/issues/new?assignees=&labels=&projects=&template=bug_report.md&title=).
@@ -51,43 +54,45 @@ Check out the video below for an introduction to Semantic Link, Semantic Link La
  ## Featured Scenarios
  * Semantic Models
  * [Migrating an import/DirectQuery semantic model to Direct Lake](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration)
- * [Model Best Practice Analyzer (BPA)](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.run_model_bpa)
- * [Vertipaq Analyzer](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.vertipaq_analyzer)
+ * [Model Best Practice Analyzer (BPA)](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#model-best-practice-analyzer)
+ * [Vertipaq Analyzer](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#vertipaq-analyzer)
  * [Tabular Object Model](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Tabular%20Object%20Model.ipynb) [(TOM)](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html)
- * [Translate a semantic model's metadata](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.translate_semantic_model)
+ * [Translate a semantic model's metadata](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#translate-a-semantic-model)
  * [Check Direct Lake Guardrails](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.get_lakehouse_tables)
- * [Refresh](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Semantic%20Model%20Refresh.ipynb), [clear cache](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.clear_cache), [backup](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.backup_semantic_model), [restore](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.restore_semantic_model), [copy backup files](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.copy_semantic_model_backup_file), [move/deploy across workspaces](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.deploy_semantic_model)
+ * [Refresh](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Semantic%20Model%20Refresh.ipynb), [clear cache](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.clear_cache), [backup](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#backup-a-semantic-model), [restore](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#restore-a-semantic-model), [copy backup files](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.copy_semantic_model_backup_file), [move/deploy across workspaces](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.deploy_semantic_model)
  * [Run DAX queries which impersonate a user](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.evaluate_dax_impersonation)
  * [Manage Query Scale Out](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Query%20Scale%20Out.ipynb)
- * [Auto-generate descriptions for any/all measures in bulk](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.generate_measure_descriptions)
- * [Warm the cache of a Direct Lake semantic model after a refresh (using columns currently in memory)](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.warm_direct_lake_cache_isresident)
- * [Warm the cache of a Direct Lake semantic model (via perspective)](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.warm_direct_lake_cache_perspective)
- * [Visualize a refresh](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Semantic%20Model%20Refresh.ipynb)
+ * [Auto-generate descriptions for any/all measures in bulk](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#auto-generate-measure-descriptions)
+ * [Warm the cache of a Direct Lake semantic model after a refresh (using columns currently in memory)](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#warm-cache-the-cache-of-a-direct-lake-semantic-model)
+ * [Warm the cache of a Direct Lake semantic model (via perspective)](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#warm-cache-the-cache-of-a-direct-lake-semantic-model)
+ * [Visualize a refresh](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#refresh-a-semantic-model)
  * [Update the connection of a Direct Lake semantic model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.update_direct_lake_model_connection)
  * [Dynamically generate a Direct Lake semantic model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.generate_direct_lake_semantic_model)
  * [Check why a Direct Lake semantic model would fallback to DirectQuery](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.check_fallback_reason)
  * [View a measure dependency tree](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.measure_dependency_tree)
  * [View unique columns touched in a single (or multiple) DAX query(ies)](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.get_dax_query_dependencies)
  * [Analyze delta tables for Direct Lake semantic models using Delta Analyzer](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Delta%20Analyzer.ipynb)
- * [View synonyms from the linguistic schema](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_synonyms)
+ * [View synonyms from the linguistic schema](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#list-the-synonyms-in-the-linguistic-metadata)
  * [Add](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_incremental_refresh_policy), [update](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.update_incremental_refresh_policy) and [view](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.show_incremental_refresh_policy) an incremental refresh policy.
  * Reports
- * [Report Best Practice Analyzer (BPA)](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.run_report_bpa)
- * [View report metadata](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Report%20Analysis.ipynb)
- * [View semantic model objects most frequently used in Power BI reports](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_semantic_model_object_report_usage)
- * [View broken reports](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_report_semantic_model_objects)
- * [Set a report theme](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.ReportWrapper.set_theme)
- * [Migrate report-level measures to the semantic model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.ReportWrapper.migrate_report_level_measures)
- * [Rebind reports](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.report_rebind)
+ * [Report Best Practice Analyzer (BPA)](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#report-best-practice-analyzer)
+ * [View report metadata](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#view-report-metadata)
+ * [View semantic model objects most frequently used in Power BI reports](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#show-the-frequency-of-semantic-model-object-used-within-reports)
+ * [View broken reports](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#find-broken-visuals-in-a-power-bi-report)
+ * [Set a report theme](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#set-the-theme-of-a-report)
+ * [Migrate report-level measures to the semantic model](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#migrate-report-level-measures-to-the-semantic-model)
+ * [Rebind reports](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#rebind-a-report-to-a-different-semantic-model)
+ * [Save a report as a .pbip](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#save-a-report-as-a-pbip-file)
  * Capacities
  * [Migrating a Power BI Premium capacity (P sku) to a Fabric capacity (F sku)](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Capacity%20Migration.ipynb)
  * [Migrating a Fabric Trial capacity (FT sku) to a Fabric capacity (F sku)](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Capacity%20Migration.ipynb)
  * [Create](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.create_fabric_capacity)/[update](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.update_fabric_capacity)/[suspend](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.suspend_fabric_capacity)/[resume](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.resume_fabric_capacity) Fabric capacities
  * Lakehouses
- * [Optimize lakehouse tables](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.optimize_lakehouse_tables)
- * [Vacuum lakehouse tables](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.vacuum_lakehouse_tables)
- * [Create](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.create_shortcut_onelake), [delete](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.delete_shortcut), and [view shortcuts](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_shortcuts)
+ * [Optimize lakehouse tables](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#optimize-lakehouse-tables)
+ * [Vacuum lakehouse tables](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#vacuum-lakehouse-tables)
+ * [Create](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#create-a-onelake-shortcut), [delete](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.delete_shortcut), and [view shortcuts](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_shortcuts)
  * [Analyze delta tables for Direct Lake semantic models using Delta Analyzer](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Delta%20Analyzer.ipynb)
+ * [Recover a soft-deleted lakehouse table/file/folder](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#recover-a-lakehouse-object)
  * Notebooks
  * [Import a notebook from the web](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.import_notebook_from_web)
  * APIs
@@ -149,6 +154,8 @@ An even better way to ensure the semantic-link-labs library is available in your
  2. Select your newly created environment within the 'Environment' drop down in the navigation bar at the top of the notebook

  ## Version History
+ * [0.10.0](https://github.com/microsoft/semantic-link-labs/releases/tag/0.10.0) (May 30, 2025)
+ * [0.9.11](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.11) (May 22, 2025)
  * [0.9.10](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.10) (April 24, 2025)
  * [0.9.9](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.9) (April 7, 2025)
  * [0.9.8](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.8) (April 3, 2025)
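
The installation guidance excerpted above ends with attaching a custom environment; the more direct route is a plain pip install inside a Fabric notebook. A minimal cell, with the version pin added here purely to match the release this diff covers (the `labs` alias is a convention, not a requirement):

%pip install semantic-link-labs==0.10.0

import sempy_labs as labs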
{semantic_link_labs-0.9.10.dist-info → semantic_link_labs-0.10.0.dist-info}/RECORD CHANGED
@@ -1,5 +1,5 @@
- semantic_link_labs-0.9.10.dist-info/licenses/LICENSE,sha256=ws_MuBL-SCEBqPBFl9_FqZkaaydIJmxHrJG2parhU4M,1141
- sempy_labs/__init__.py,sha256=npRl1YYZaQziVAKhYkxcjFiCR2J--D2azy-SoDe_QtM,15745
+ semantic_link_labs-0.10.0.dist-info/licenses/LICENSE,sha256=ws_MuBL-SCEBqPBFl9_FqZkaaydIJmxHrJG2parhU4M,1141
+ sempy_labs/__init__.py,sha256=RzY_bcDROFRZHTNuEUXpmYOUDZzTSZT7DpT4k_cJbHw,16363
  sempy_labs/_ai.py,sha256=BD1TdGOJ7T4m3x426OP-FLb7bevn-9gKY8BTEDAJDQU,16205
  sempy_labs/_authentication.py,sha256=GjtN5XqIyWXbR5Ni4hfYiUNwgFa-ySX8e-BrqE1vgGc,6903
  sempy_labs/_capacities.py,sha256=n48NYTY03zygRzcfyK1UOkSwTqKSyQefQ10IKQh-dfA,40426
@@ -11,25 +11,26 @@ sempy_labs/_data_pipelines.py,sha256=cW_WGmuWD4V9IgLprKL4TqFXgid4eTBXvEL3-IArS0w
  sempy_labs/_dataflows.py,sha256=xv-wRDUq4Bzz-BOs1Jdb4bgS9HbPLpa1GqexfA6H0mg,8053
  sempy_labs/_dax.py,sha256=Q_GylKeuHFnRB_sztZS1ON5v5tr6ua6lc9elyJYKbV8,17219
  sempy_labs/_dax_query_view.py,sha256=_zSvgystZzBj5euNTLKTg7-G77XVk0vqyqrDT72VvoM,1892
- sempy_labs/_delta_analyzer.py,sha256=eT74ed0vaTbuSS0BkdCGRymdyjb3UHV3M8ToPhK8K00,17390
+ sempy_labs/_delta_analyzer.py,sha256=d6qxZrEhn3Hfg5qMQODt7dDG5mYSY18xeXUkW_NyMgw,17281
  sempy_labs/_delta_analyzer_history.py,sha256=A50dlBd2d3ILKV7Fwj4pfIRtXKmCFslhk1gpeEw4inc,10765
  sempy_labs/_deployment_pipelines.py,sha256=SDQYkCAhOAlxBr58jYxtLFOVySiRXO0_WhfOKGDeYZQ,6254
+ sempy_labs/_dictionary_diffs.py,sha256=DCXuASmt45gshsBO1FgSZDqxm68DnojuDcO-H35EH7Q,9003
  sempy_labs/_documentation.py,sha256=yVA8VPEzx_fmljtcvSxtB7-BeupYsfdMXXjp6Fpnyo8,5007
- sempy_labs/_environments.py,sha256=5I5gHU68Crs603R00NgGy3RKdr925-X05V5EPkrHFkY,4995
+ sempy_labs/_environments.py,sha256=c_9uU6zhVmZVTLZWuD-OdcicBJvmRQQVmqHW7EqUn_Q,5839
  sempy_labs/_eventhouses.py,sha256=WEf33difBOTGTZGh1QFmY4gv-e43uwO1V54nrsjGGyY,5376
  sempy_labs/_eventstreams.py,sha256=c8nNdRM8eeDOYXd_AtRp7zYHYaqV9p1bI2V0boUrCuA,3492
  sempy_labs/_external_data_shares.py,sha256=s2okFvtCG5FDMbMJ_q6YSlCkVVFiE9sh2imVxZq1woU,6450
  sempy_labs/_gateways.py,sha256=6JE6VeGFPKF617sf2mMkxXVOz57YHI5jAQLAF-BzRLc,17527
- sempy_labs/_generate_semantic_model.py,sha256=9-ziVrbG_IqPAKLnqRYAo8UrUMFw5Hufn02w_QvaKe4,18423
+ sempy_labs/_generate_semantic_model.py,sha256=F2NVW6kT1HnrZTqWnDZ4BRApbsUfLFDSsDbRzCJnc0o,18429
  sempy_labs/_git.py,sha256=RyaT4XzrSi-4NLJZWiWZnnNpMgrKzRNxhyY8b1O2I6c,17819
  sempy_labs/_graphQL.py,sha256=truXeIUPRKLwc4skhs3FZYNcKP9FCGKly9en0YkR4NE,2690
- sempy_labs/_helper_functions.py,sha256=0eNEz8JItYjBe1fwZ0ZkRKJED5ull0I6T24RzgGx-2E,67340
+ sempy_labs/_helper_functions.py,sha256=9HhCH3xdEMjS50K7UBAjwnLiQPS4whVZco3v__k8bQU,74249
  sempy_labs/_icons.py,sha256=SB9EQeoFCfD4bO6fcYuJOoPRSYenSrW0rI9G5RFsH28,3579
  sempy_labs/_job_scheduler.py,sha256=_-Pifkttk1oPNxewxwWcQ4QC_Hr24GSi6nmrEXwc0pc,15814
  sempy_labs/_kql_databases.py,sha256=UtpYVBsxwWQDnqwdjq186bZzw5IlkD2S9KHA6Kw75U0,4738
  sempy_labs/_kql_querysets.py,sha256=Jjcs4SkjeirnDkG6zfsl0KRUXVzMyWii0Yn0JMWwln8,3502
- sempy_labs/_kusto.py,sha256=ZEwvErkLUpichnQfrIVXg2XiYvbuxcL8u-9RnsAp98M,4502
- sempy_labs/_list_functions.py,sha256=Yn1RZ0kMhA24vVpOr1hoet4WAQT93NZOWpMFdyV3HNw,62036
+ sempy_labs/_kusto.py,sha256=g3Up4j1KNdIGC2DDbvoduCdX1Pp8fAPGAlBAqOtaBeg,4544
+ sempy_labs/_list_functions.py,sha256=wqaeCpsYPG3SW52ipB-uTCrjWQ1xcVkcazakWtPAsHo,61035
  sempy_labs/_managed_private_endpoints.py,sha256=Vqicp_EiGg_m8aA2F__gaJiB9cwjbxQOSOi7hkS6FvQ,6907
  sempy_labs/_mirrored_databases.py,sha256=-9ZV2PdPeIc4lvFNkpPMm_9wkGIY1QLZXspYdSev5oQ,13147
  sempy_labs/_mirrored_warehouses.py,sha256=Q3WlRjUwCLz8KW1eN8MiTPeY0P52Vkuz5kgnv4GvQ3k,1739
@@ -38,20 +39,24 @@ sempy_labs/_ml_models.py,sha256=69i67MHn-_Fsq-5slLxxhCF8N2s0JBYn_CDTa1Hhhs0,3261
  sempy_labs/_model_auto_build.py,sha256=PTQo3dufzLSFcQ5shFkmBWAVSdP7cTJgpUclrcXyNbg,5105
  sempy_labs/_model_bpa.py,sha256=AoHshKqn3z2lNPwu1hKntJuCELYe1bLa_0LUzFXRjgs,22032
  sempy_labs/_model_bpa_bulk.py,sha256=hRY3dRBUtecrbscCZsEGv6TpCVqg_zAi8NmRq6dVMiE,15845
- sempy_labs/_model_bpa_rules.py,sha256=3rpDcsl99ji2KbozqdrAeC_1YrTvF8A-l8VhiUHK0bo,45968
+ sempy_labs/_model_bpa_rules.py,sha256=ZK16VqWcITiTKdd9T5Xnu-AMgodLVx0ZpanZjsC88-U,46260
  sempy_labs/_model_dependencies.py,sha256=0xGgubrq76zIvBdEqmEX_Pd6WdizXFVECBW6BPl2DZo,13162
  sempy_labs/_mounted_data_factories.py,sha256=-IBxE5XurYyeeQg7BvpXSSR1MW3rRGmue6UGpqlo96U,3906
- sempy_labs/_notebooks.py,sha256=GbyBDay_c4dnPmS32e8qgRrKVb3evi_omSMzq-Xk9z0,8082
+ sempy_labs/_notebooks.py,sha256=QbDmvxvYZEFE90lQ3Rqi70yjc5Xxg7D3ySemPPVGflY,9102
  sempy_labs/_one_lake_integration.py,sha256=9ub75-ueEFqn1iRgRd5y97SYujalsWW6ufs1du4PbDs,6303
  sempy_labs/_query_scale_out.py,sha256=nra1q8s-PKpZTlI_L0lMGO1GmdBk6sqETsBQShF1yPY,15352
  sempy_labs/_refresh_semantic_model.py,sha256=4w_uaYLbaZptmEFY7QHWzOgXcgc2ctGx8HQvt2aguxk,17360
- sempy_labs/_semantic_models.py,sha256=Yh9SfGZSbA9mRROo3wpy9A8sFvvGZZ1VUHjm_Ux4pqk,4455
+ sempy_labs/_semantic_models.py,sha256=F9v964IiXqx2qNPtNBzYrWPtXIoQH5-FI5csWJGofoQ,7934
  sempy_labs/_spark.py,sha256=SuSTjjmtzj7suDgN8Njk_pNBaStDLgIJB_1yk_e2H1Y,19340
- sempy_labs/_sql.py,sha256=s4VMcs1lIn39sYKRnSp6QsdRR3J-84kT_SPCKdwzAyo,8158
+ sempy_labs/_sql.py,sha256=BnL7Syd9vJZFysSiILYhqwTFS4y30nvkhDLQXGjtveE,8281
  sempy_labs/_sqldatabase.py,sha256=8HV3UtsLiwexmPSjYnhnYnD6xEvgFpTG13jcOuGheuI,6470
+ sempy_labs/_tags.py,sha256=7DvSc3wah26DxHwUhr-yr_JhZiplrePkFaDaVIAQfV4,5666
  sempy_labs/_translations.py,sha256=i4K2PFk6-TcmAnUpqz-z_GuDv9XEp1cBs0KY-x6ja1w,16168
+ sempy_labs/_user_delegation_key.py,sha256=5Qre0ZB_atajtwWfFQqD12q413Fz313GK9nrA9IIwjI,1414
  sempy_labs/_utils.py,sha256=aKGnUiXSLLRQRXXv8TH_XhGcOtDsnrs0RHmQ6YZMn3o,1786
+ sempy_labs/_variable_libraries.py,sha256=t97gj8Mo-YjahKx6XObqh5HkhUMHUke5GdWpSzkC5ZM,3008
  sempy_labs/_vertipaq.py,sha256=1UvB79xOxeGdRFINsUsreXxtZtiatHlACAfbQhv45as,38536
+ sempy_labs/_vpax.py,sha256=4rtXXGVoadvdu7uiU9PVsgKszST3XH-K56zmWdMmZEg,15471
  sempy_labs/_warehouses.py,sha256=wF38YP4-39KPsXPyexJahZPrYAyLc5xfrerJvS7My5Q,7286
  sempy_labs/_workloads.py,sha256=ifQ6Jv0_MDzjfu993bU8V7thOdW5kFyp3MjA082rsrE,4687
  sempy_labs/_workspace_identity.py,sha256=plxgcqt2aBXgLpyn1dpHhzh_5Z-gFkLK8RtId2OIX5s,2561
@@ -94,7 +99,7 @@ sempy_labs/_bpa_translation/_model/_translations_tr-TR.po,sha256=NdW-X4E0QmeLKM0
  sempy_labs/_bpa_translation/_model/_translations_uk-UA.po,sha256=3NsFN8hoor_5L6738FjpJ8o4spwp8FNFGbVItHD-_ec,43500
  sempy_labs/_bpa_translation/_model/_translations_zh-CN.po,sha256=ipMbnet7ZI5mZoC8KonYKVwGmFLHFB_9KIDOoBgSNfo,26815
  sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po,sha256=5v6tVKGruqneAeMoa6F3tyg_JBL8qOpqOJofWpq2W3U,31518
- sempy_labs/admin/__init__.py,sha256=McI1-wyaoxcCIzffi-aWvU3ElfUgZLIEYR2tfcYI-hc,3875
+ sempy_labs/admin/__init__.py,sha256=XaUXm5uuTVEKtQVDfvobU8qM-QlL5hFd_kVH_o0el4w,4016
  sempy_labs/admin/_activities.py,sha256=YfISDzhXro9glEa_yJmoYv-2q2M1DIkoyNzgLl7eWuI,6695
  sempy_labs/admin/_apps.py,sha256=PUEQlXbzVR9u3ZUQUhpfU3J-hfa8A2nTGFBpCqzMdW0,4085
  sempy_labs/admin/_artifacts.py,sha256=hGTMY0t4qxK1zs89PIZQaNP5QHRCIWAOvp2FiWHtQpE,2249
@@ -109,6 +114,7 @@ sempy_labs/admin/_items.py,sha256=BO4X_hpp94u4FUSn6Rb25gsRz1TgQAvLKLSwtzOLlVw,88
  sempy_labs/admin/_reports.py,sha256=nPDoC90Yzc67CtiuL4WYBYkGYuUQOnZAy0PCU0aYKj8,7857
  sempy_labs/admin/_scanner.py,sha256=0mKi0ihJETdsSaeHFBEq3drcCS8J_enWWkIMBMECz64,4370
  sempy_labs/admin/_shared.py,sha256=srgkqttbMbK5XXjOt4zeAV8rMCvK7zEus55HsGtNUFI,3007
+ sempy_labs/admin/_tags.py,sha256=92CoaRwpiVtpbkT9jC6eNAp5vdxzR4YAKo2VfmDPn7k,3752
  sempy_labs/admin/_tenant.py,sha256=D8x45G4U8aiDlYcYTWUIg--Rrl1T0HwRf0qtk-jUBbo,19347
  sempy_labs/admin/_users.py,sha256=eEOkgvny3FwMuUrSIBQ0n3JwrzWV_6_nwGc8_c-eXSM,4571
  sempy_labs/admin/_workspaces.py,sha256=XiiO3vyuJxKkVf9ZrW7261wHSBrnd8r7rbia8HGDFkI,4911
@@ -116,24 +122,27 @@ sempy_labs/directlake/__init__.py,sha256=etaj-3wqe5t93mu74tGYjEOQ6gtHWUogidOygiV
  sempy_labs/directlake/_directlake_schema_compare.py,sha256=tVc6hIgDxxA7a8V51e5tlzlp3bzVVTqQ_OKsTNxiWG4,5074
  sempy_labs/directlake/_directlake_schema_sync.py,sha256=ipONLkBaXm4WgcMMChAyD4rVushdqdjAQdexT-fJxcY,6573
  sempy_labs/directlake/_dl_helper.py,sha256=HHFy6tW-tSVZ4YHxSHvt6pXrloh0O6Lx7yNmZE7IAI4,10348
- sempy_labs/directlake/_generate_shared_expression.py,sha256=gAVBK0l_CRxJCZzMJgxOxEHhXx1w0ERL2K3REnEseGw,3391
+ sempy_labs/directlake/_generate_shared_expression.py,sha256=fAaFlR5-prqOH3vJ_ktLyEYfKR_uBMvAxOaR-BRCm-w,3561
  sempy_labs/directlake/_get_directlake_lakehouse.py,sha256=e0WFQm4-daJR4K1aHuVaubu7T26yTeBgfNEMOXk-EzM,2392
  sempy_labs/directlake/_get_shared_expression.py,sha256=qc85kXggkx_7Sz_rAAli_yPnLzrGZpgD8IfVbTfZhQM,1133
  sempy_labs/directlake/_guardrails.py,sha256=wNVXpeiZckgLTly4cS5DU5DoV9x1S4DMxN5S08qAavE,2749
  sempy_labs/directlake/_list_directlake_model_calc_tables.py,sha256=EYT4ELmOZ3Uklzy6uMQMidc4WtBXm21NQqZu1Q5HTsg,2509
  sempy_labs/directlake/_show_unsupported_directlake_objects.py,sha256=nmrZrtDez7U8Ji76i9fxnnTx1zxMu2LCOZTMz4sFUEc,3504
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py,sha256=oKD4rKwD_s6Q3jKPw7H05isZWT8hs9WdtFyy23MDozc,7080
+ sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py,sha256=W3LDXDD2pLH5B43NI9ixSIW2MJIORAWu5ANHQRFKMBY,9215
  sempy_labs/directlake/_update_directlake_partition_entity.py,sha256=8YxrReJObtc7_Huq0qQrLKTVMhPO84guv8bQKtp__4c,9032
  sempy_labs/directlake/_warm_cache.py,sha256=xc7gG_OJY1rJYg79ztgcLATpnXHNqFaw-6CU1HgdlXk,9258
+ sempy_labs/dotnet_lib/dotnet.runtime.config.json,sha256=syhDFQv6cEmZnE1WtFjNe3NwhsIsnd-CFULv-vEWOFI,167
  sempy_labs/graph/__init__.py,sha256=AZ_IpOL06VvXrYmgbcrvQlxCxdDksvwXKf7JAGohCNI,620
  sempy_labs/graph/_groups.py,sha256=j3YDeV6MzhRjGJRoD60SAaGyU8yb23x8QhXBzU2RWlE,12590
  sempy_labs/graph/_teams.py,sha256=SRFaFuxtB7ylC5WeXIdrW0aLCxc_JTJHeEmxOPG99r8,3089
  sempy_labs/graph/_users.py,sha256=dFOZ-jel6Aj4Um66f1jzQrgV0fOoI0cQnZfmR4OJSXo,5947
- sempy_labs/lakehouse/__init__.py,sha256=5dRO6WfcHANed720iGhrgW4QajzV1emT47bPpCSKJNg,956
- sempy_labs/lakehouse/_blobs.py,sha256=GgS2Zx6_0xzwUzuSBUll2bkNRFE-ThbK8jdYh-lJ2LY,8095
+ sempy_labs/lakehouse/__init__.py,sha256=zKF6-rjy3UEClAlyW-koqrTK3_bAjU6WbDxKamsWCjs,1267
+ sempy_labs/lakehouse/_blobs.py,sha256=K2uwzUUkYN1rGfpyQrubxWNjtM6AIWM9VVHh2_LYCTY,8483
  sempy_labs/lakehouse/_get_lakehouse_columns.py,sha256=dF5rLkdD3PB8EiXQewRdnr7MzbDGkZWHrFfI01_a7K4,3710
- sempy_labs/lakehouse/_get_lakehouse_tables.py,sha256=YZkb8AsQmOK801andbCivhTH5DVFXwzTRJp-q4o9QJI,9803
- sempy_labs/lakehouse/_lakehouse.py,sha256=Fq_RuY7kn8BZLFiYpnVNTYmbLMMt0mCw83P1YxXyfmg,8861
+ sempy_labs/lakehouse/_get_lakehouse_tables.py,sha256=AMQXk40YMN4daS0zILgZm-sc2llnvCaL7kS1v8dfYMA,9369
+ sempy_labs/lakehouse/_helper.py,sha256=W9adTkZw9_f9voB3bA2JWkI4LqAcnvpY929vMQJw1xE,7401
+ sempy_labs/lakehouse/_lakehouse.py,sha256=JQBQl_E9svzu_ozjYh1eSV2gY1NhHfomW0MiD3JzXPc,8860
+ sempy_labs/lakehouse/_livy_sessions.py,sha256=REfBpuDdH7O1CQ3JpMMZpX7-wnnVXmZEqAXsZw1MTjk,5778
  sempy_labs/lakehouse/_shortcuts.py,sha256=24sPtX98ho84fNV_JCAHZrSkvk0Ui7p-0b-jTdGOGM8,16580
  sempy_labs/migration/__init__.py,sha256=142n01VAqlcx4E0mGGRtUfVOEwAXVdiHI_XprmUm7As,1175
  sempy_labs/migration/_create_pqt_file.py,sha256=eRK0Jz9ZeV_7jV3kNRze0bTAIqxsAZXLKMGE_loKOaY,9677
@@ -145,22 +154,20 @@ sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py,sha256=HYi2vn7
  sempy_labs/migration/_migration_validation.py,sha256=AHURrWofb-U-L2Bdu36mcisVXOuZXi6Smgrrs2kjYBM,2650
  sempy_labs/migration/_refresh_calc_tables.py,sha256=qUBPZ5HAHyE5ev6STKDcmtEpRuLDX5RzYTKre4ZElj4,5443
  sempy_labs/report/_BPAReportTemplate.json,sha256=9Uh-7E6d2ooxQ7j5JRayv_ayEULc7Gzg42kZGKdOqH8,63920
- sempy_labs/report/__init__.py,sha256=bPZ_MMqKGokskjJwM3T89LxIVNa2AXJg8Lr-mvJhP0E,1392
- sempy_labs/report/_download_report.py,sha256=hCQ2_fSXSCqSUeaNM2Tf9T3MpRofREnDwp_zrfp7iTA,2703
+ sempy_labs/report/__init__.py,sha256=yuMGbP7rd_50M-CRfIYR7BK8mPzpXXYzOPh9sBV-aqw,1434
+ sempy_labs/report/_download_report.py,sha256=01hI26UV_jb5RLPheXRQsIDNNf4i72xICm14slKqEFA,2704
  sempy_labs/report/_export_report.py,sha256=XCMsZzTBMgvQOe3Ltdod7my7_izpmP-3AVH6W5CExPE,10976
- sempy_labs/report/_generate_report.py,sha256=ncFo8brgwPkSNF3urROMkIElqO6pcSy9tM6ymHE_UeQ,13868
+ sempy_labs/report/_generate_report.py,sha256=S830woeisjKCYNyacfvSx0fVHzLC7-aw2oPIU2sYiP8,13910
  sempy_labs/report/_paginated.py,sha256=rsElE0IQ9qxRDuEp6qNF1EcD5XEgfTc7WsWEQsalsuI,2156
  sempy_labs/report/_report_bpa.py,sha256=ClETB8Q41sY1scCuknhpvalvuBaQ9ZwA4QX7F3sPcjc,13596
  sempy_labs/report/_report_bpa_rules.py,sha256=tPVGA0hmE6QMLlWtig7Va7Ksr2yXWl_Lndq--tWWd6w,4959
  sempy_labs/report/_report_functions.py,sha256=pSrsUfMJqmsn9CYb5AM0iYdPR-EmuUSprVnc0dGhO1s,19709
- sempy_labs/report/_report_helper.py,sha256=m23osIZMjvHhKbfhmTHyqHibXoWA9eP84TPanbH8kuE,10863
+ sempy_labs/report/_report_helper.py,sha256=L9wU0N0rvTUMglZHTxcowywrBDuZvZTv3DA4JrX84Os,7207
  sempy_labs/report/_report_list_functions.py,sha256=K9tMDQKhIZhelHvfMMW0lsxbVHekJ-5dAQveoD7PUDA,3980
  sempy_labs/report/_report_rebind.py,sha256=svyxUSdqgXJW1UDNcb-urJxU9erO3JM72uzmuJUWIT0,5090
- sempy_labs/report/_reportwrapper.py,sha256=u3MrszXTCQ8JtzdukXcnakdRW225jMXR2QvHgn1Wl_0,83226
+ sempy_labs/report/_reportwrapper.py,sha256=YxGfhAXDkDyXuyyGYu5YpQf9S31L64Rpr3hHRE44fsk,108736
  sempy_labs/report/_save_report.py,sha256=FAzScMQIXl89TgVSRvaJofzKT0TfZh_hhPNNvDiktaI,6033
- sempy_labs/report/_bpareporttemplate/.platform,sha256=kWRa6B_KwSYLsvVFDx372mQriQO8v7dJ_YzQV_cfD-Q,303
  sempy_labs/report/_bpareporttemplate/definition.pbir,sha256=bttyHZYKqjA8OBb_cezGlX4H82cDvGZVCl1QB3fij4E,343
- sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json,sha256=kzjBlNdjbsSBBSHBwbQc298AJCr9Vp6Ex0D5PemUuT0,1578
  sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json,sha256=4N6sT5nLlYBobGmZ1Xb68uOMVVCBEyheR535js_et28,13467
  sempy_labs/report/_bpareporttemplate/definition/report.json,sha256=-8BK5blTE-nc0Y4-M0pTHD8Znt3pHZ-u2veRppxPDBQ,3975
  sempy_labs/report/_bpareporttemplate/definition/version.json,sha256=yL3ZZqhfHqq0MS0glrbXtQgkPk17xaTSWvPPyxBWpOc,152
@@ -188,8 +195,8 @@ sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visua
  sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json,sha256=wBVuNc8S2NaUA0FC708w6stmR2djNZp8nAsHMqesgsc,293
  sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json,sha256=mlY6t9OlSe-Y6_QmXJpS1vggU6Y3FjISUKECL8FVSg8,931
  sempy_labs/tom/__init__.py,sha256=Qbs8leW0fjzvWwOjyWK3Hjeehu7IvpB1beASGsi28bk,121
- sempy_labs/tom/_model.py,sha256=sqs8u69RZyQnGzt-ZBJGE6PF-JwhFCm5YosaKeR-hbo,194594
- semantic_link_labs-0.9.10.dist-info/METADATA,sha256=CDC3S525ttk6M0qTW9m5mzYSlHXAIVxpZKPO1WAE3vs,26488
- semantic_link_labs-0.9.10.dist-info/WHEEL,sha256=SmOxYU7pzNKBqASvQJ7DjX3XGUF92lrGhMb3R6_iiqI,91
- semantic_link_labs-0.9.10.dist-info/top_level.txt,sha256=kiQX1y42Dbein1l3Q8jMUYyRulDjdlc2tMepvtrvixQ,11
- semantic_link_labs-0.9.10.dist-info/RECORD,,
+ sempy_labs/tom/_model.py,sha256=64IJf2Pdag5ECWxJcf4Cg2paoMD0Pr6BHvdjgvW6pwo,197537
+ semantic_link_labs-0.10.0.dist-info/METADATA,sha256=vw-G81gYnj_kJXcwm752EAY2d6TpwQ3QzDnjGYMkC3I,26831
+ semantic_link_labs-0.10.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ semantic_link_labs-0.10.0.dist-info/top_level.txt,sha256=kiQX1y42Dbein1l3Q8jMUYyRulDjdlc2tMepvtrvixQ,11
+ semantic_link_labs-0.10.0.dist-info/RECORD,,
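
Every RECORD line above follows the standard wheel convention of path, urlsafe-base64 SHA-256 digest with the trailing padding stripped, and size in bytes, which is what makes the per-file size changes in this diff easy to read. A minimal sketch for recomputing one entry from a local file (the path in the trailing comment is illustrative):

import base64
import hashlib
from pathlib import Path


def record_entry(path: str) -> str:
    # Recompute a "path,sha256=<digest>,<size>" line in the same format as RECORD.
    data = Path(path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=")
    return f"{path},sha256={digest.decode()},{len(data)}"


# e.g. record_entry("sempy_labs/_tags.py") should reproduce the new RECORD line for that file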
{semantic_link_labs-0.9.10.dist-info → semantic_link_labs-0.10.0.dist-info}/WHEEL CHANGED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (79.0.1)
+ Generator: setuptools (80.9.0)
  Root-Is-Purelib: true
  Tag: py3-none-any

sempy_labs/__init__.py CHANGED
@@ -1,7 +1,14 @@
+ from sempy_labs._variable_libraries import (
+     list_variable_libraries,
+     delete_variable_library,
+ )
  from sempy_labs._kusto import (
      query_kusto,
      query_workspace_monitoring,
  )
+ from sempy_labs._vpax import (
+     create_vpax,
+ )
  from sempy_labs._delta_analyzer_history import (
      delta_analyzer_history,
  )
@@ -13,11 +20,16 @@ from sempy_labs._mounted_data_factories import (
      get_mounted_data_factory_definition,
      delete_mounted_data_factory,
  )
-
+ from sempy_labs._tags import (
+     list_tags,
+     apply_tags,
+     unapply_tags,
+ )
  from sempy_labs._semantic_models import (
      get_semantic_model_refresh_schedule,
      enable_semantic_model_scheduled_refresh,
      delete_semantic_model,
+     update_semantic_model_refresh_schedule,
  )
  from sempy_labs._graphQL import (
      list_graphql_apis,
@@ -124,6 +136,7 @@ from sempy_labs._environments import (
      create_environment,
      delete_environment,
      publish_environment,
+     list_environments,
  )
  from sempy_labs._clear_cache import (
      clear_cache,
@@ -327,6 +340,9 @@ from sempy_labs._vertipaq import (
      vertipaq_analyzer,
      import_vertipaq_analyzer,
  )
+ from sempy_labs._user_delegation_key import (
+     get_user_delegation_key,
+ )

  __all__ = [
      "resolve_warehouse_id",
@@ -564,4 +580,13 @@ __all__ = [
      "delta_analyzer_history",
      "query_kusto",
      "query_workspace_monitoring",
+     "list_environments",
+     "list_tags",
+     "list_variable_libraries",
+     "delete_variable_library",
+     "create_vpax",
+     "update_semantic_model_refresh_schedule",
+     "apply_tags",
+     "unapply_tags",
+     "get_user_delegation_key",
  ]
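
The `__all__` additions above are the new public surface of this release. A quick way to confirm they are present in the environment you are running, using only names copied from the diff (assumes sempy_labs 0.10.0 is installed; exact function signatures are not shown here, so none are called):

import sempy_labs

new_exports = [
    "list_environments",
    "list_tags",
    "apply_tags",
    "unapply_tags",
    "list_variable_libraries",
    "delete_variable_library",
    "create_vpax",
    "update_semantic_model_refresh_schedule",
    "get_user_delegation_key",
]
missing = [name for name in new_exports if name not in sempy_labs.__all__]
print("all 0.10.0 exports available" if not missing else f"missing: {missing}")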
sempy_labs/_delta_analyzer.py CHANGED
@@ -4,7 +4,6 @@ from datetime import datetime
  import os
  from uuid import UUID
  from typing import Dict, Optional
- import pyarrow.dataset as ds
  import pyarrow.parquet as pq
  from sempy_labs._helper_functions import (
      create_abfss_path,
@@ -23,7 +22,12 @@ from sempy_labs._helper_functions import (
  )
  from sempy._utils._log import log
  from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables
- from sempy_labs.lakehouse._lakehouse import lakehouse_attached
+ from sempy_labs.lakehouse._lakehouse import (
+     lakehouse_attached,
+ )
+ from sempy_labs.lakehouse._helper import (
+     is_v_ordered,
+ )
  import sempy_labs._icons as icons
  from tqdm.auto import tqdm

@@ -113,10 +117,6 @@ def delta_analyzer(
          lakehouse_id, workspace_id, table_name, schema=schema
      )
      local_path = _mount(lakehouse=lakehouse, workspace=workspace)
-     if schema is not None:
-         table_path = f"{local_path}/Tables/{schema}/{table_name}"
-     else:
-         table_path = f"{local_path}/Tables/{table_name}"

      parquet_file_df_columns = {
          # "Dataset": "string",
@@ -163,8 +163,9 @@ def delta_analyzer(
      max_rows_per_row_group = 0
      min_rows_per_row_group = float("inf")

-     schema = ds.dataset(table_path).schema.metadata
-     is_vorder = any(b"vorder" in key for key in schema.keys())
+     is_vorder = is_v_ordered(
+         table_name=table_name, lakehouse=lakehouse, workspace=workspace, schema=schema
+     )

      # Get the common details of the Delta table
      delta_table = _get_delta_table(delta_table_path)
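
The removed lines show how delta_analyzer previously detected V-Order inline: it read the Parquet dataset's schema metadata and looked for any key containing b"vorder". The new is_v_ordered helper in sempy_labs/lakehouse/_helper.py presumably wraps equivalent metadata inspection, but its implementation is not shown in this diff. A standalone sketch of the old check, kept only as a reference (the example path is illustrative):

import pyarrow.dataset as ds


def looks_v_ordered(table_path: str) -> bool:
    # Mirror of the pre-0.10.0 inline check: any schema-metadata key mentioning "vorder".
    metadata = ds.dataset(table_path).schema.metadata or {}
    return any(b"vorder" in key for key in metadata.keys())


# looks_v_ordered("/lakehouse/default/Tables/sales")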
sempy_labs/_dictionary_diffs.py ADDED
@@ -0,0 +1,221 @@
+ import re
+ import json
+ import difflib
+ from collections import defaultdict
+
+
+ def color_text(text, color_code):
+     return f"\033[{color_code}m{text}\033[0m"
+
+
+ def stringify(payload):
+     try:
+         if isinstance(payload, list):
+             return (
+                 "[\n" + ",\n".join(f" {json.dumps(item)}" for item in payload) + "\n]"
+             )
+         return json.dumps(payload, indent=2, sort_keys=True)
+     except Exception:
+         return str(payload)
+
+
+ def extract_top_level_group(path):
+     # For something like: resourcePackages[1].items[1].name → resourcePackages[1].items[1]
+     segments = re.split(r"\.(?![^[]*\])", path)  # split on dots not in brackets
+     return ".".join(segments[:-1]) if len(segments) > 1 else segments[0]
+
+
+ def get_by_path(obj, path):
+     """Navigate into nested dict/list based on a dot/bracket path like: a.b[1].c"""
+     tokens = re.findall(r"\w+|\[\d+\]", path)
+     for token in tokens:
+         if token.startswith("["):
+             index = int(token[1:-1])
+             obj = obj[index]
+         else:
+             obj = obj.get(token)
+     return obj
+
+
+ def deep_diff(d1, d2, path=""):
+     diffs = []
+     if isinstance(d1, dict) and isinstance(d2, dict):
+         keys = set(d1) | set(d2)
+         for key in sorted(keys):
+             new_path = f"{path}.{key}" if path else key
+             if key not in d1:
+                 diffs.append(("+", new_path, None, d2[key]))
+             elif key not in d2:
+                 diffs.append(("-", new_path, d1[key], None))
+             else:
+                 diffs.extend(deep_diff(d1[key], d2[key], new_path))
+     elif isinstance(d1, list) and isinstance(d2, list):
+         min_len = min(len(d1), len(d2))
+         list_changed = False
+         for i in range(min_len):
+             if d1[i] != d2[i]:
+                 list_changed = True
+                 break
+         if list_changed or len(d1) != len(d2):
+             diffs.append(("~", path, d1, d2))
+     elif d1 != d2:
+         diffs.append(("~", path, d1, d2))
+     return diffs
+
+
+ def diff_parts(d1, d2):
+
+     def build_path_map(parts):
+         return {part["path"]: part["payload"] for part in parts}
+
+     try:
+         paths1 = build_path_map(d1)
+     except Exception:
+         paths1 = d1
+     try:
+         paths2 = build_path_map(d2)
+     except Exception:
+         paths2 = d2
+     all_paths = set(paths1) | set(paths2)
+
+     for part_path in sorted(all_paths):
+         p1 = paths1.get(part_path)
+         p2 = paths2.get(part_path)
+
+         if p1 is None:
+             print(color_text(f"+ {part_path}", "32"))  # Green
+             continue
+         elif p2 is None:
+             print(color_text(f"- {part_path}", "31"))  # Red
+             continue
+         elif p1 == p2:
+             continue
+
+         if p1 is None or p2 is None:
+             print(
+                 color_text(f"+ {part_path}", "32")
+                 if p2 and not p1
+                 else color_text(f"- {part_path}", "31")
+             )
+             continue
+
+         # Header for the changed part
+         print(color_text(f"~ {part_path}", "33"))
+
+         # Collect diffs
+         diffs = deep_diff(p1, p2)
+         # If the diff is only a change of a whole list (like appending to a list), group it under its key
+         merged_list_diffs = []
+         for change_type, full_path, old_val, new_val in diffs:
+             if (
+                 change_type == "~"
+                 and isinstance(old_val, list)
+                 and isinstance(new_val, list)
+             ):
+                 merged_list_diffs.append((change_type, full_path, old_val, new_val))
+
+         # Replace individual item diffs with unified list diff
+         if merged_list_diffs:
+             diffs = merged_list_diffs
+
+         # Group diffs by common parent path (e.g. items[1])
+         grouped = defaultdict(list)
+         for change_type, full_path, old_val, new_val in diffs:
+             group_path = extract_top_level_group(full_path)
+             grouped[group_path].append((change_type, full_path, old_val, new_val))
+
+         # Print each group once with unified diff for the full substructure
+         for group_path in sorted(grouped):
+             print(" " + color_text(f"~ {group_path}", "33"))
+
+             try:
+                 old_group = get_by_path(p1, group_path)
+                 new_group = get_by_path(p2, group_path)
+             except Exception:
+                 old_group = new_group = None
+
+             # Skip showing diffs for empty/null groups
+             if isinstance(old_group, dict) and isinstance(new_group, dict):
+                 old_keys = set(old_group.keys())
+                 new_keys = set(new_group.keys())
+
+                 for key in sorted(old_keys - new_keys):
+                     print(
+                         " "
+                         + color_text(f"- {key}: {json.dumps(old_group[key])}", "31")
+                     )
+                 for key in sorted(new_keys - old_keys):
+                     print(
+                         " "
+                         + color_text(f"+ {key}: {json.dumps(new_group[key])}", "32")
+                     )
+                 for key in sorted(old_keys & new_keys):
+                     if old_group[key] != new_group[key]:
+                         print(" " + color_text(f"~ {key}:", "33"))
+                         old_val_str = stringify(old_group[key]).splitlines()
+                         new_val_str = stringify(new_group[key]).splitlines()
+                         for line in difflib.unified_diff(
+                             old_val_str,
+                             new_val_str,
+                             fromfile="old",
+                             tofile="new",
+                             lineterm="",
+                         ):
+                             if line.startswith("@@"):
+                                 print(" " + color_text(line, "36"))
+                             elif line.startswith("-") and not line.startswith("---"):
+                                 print(" " + color_text(line, "31"))
+                             elif line.startswith("+") and not line.startswith("+++"):
+                                 print(" " + color_text(line, "32"))
+             elif old_group is None and new_group is not None:
+                 if isinstance(new_group, dict):
+                     # print all added keys
+                     for key, val in new_group.items():
+                         print(" " + color_text(f"+ {key}: {json.dumps(val)}", "32"))
+                 elif isinstance(new_group, list):
+                     old_str = []
+                     new_str = stringify(new_group).splitlines()
+                     for line in difflib.unified_diff(
+                         old_str, new_str, fromfile="old", tofile="new", lineterm=""
+                     ):
+                         if line.startswith("@@"):
+                             print(" " + color_text(line, "36"))
+                         elif line.startswith("-") and not line.startswith("---"):
+                             print(" " + color_text(line, "31"))
+                         elif line.startswith("+") and not line.startswith("+++"):
+                             print(" " + color_text(line, "32"))
+                 else:
+                     print(" " + color_text(f"+ {json.dumps(new_group)}", "32"))
+
+             elif new_group is None and old_group is not None:
+                 if isinstance(old_group, dict):
+                     # print all removed keys
+                     for key, val in old_group.items():
+                         print(" " + color_text(f"- {key}: {json.dumps(val)}", "31"))
+                 elif isinstance(old_group, list):
+                     old_str = stringify(old_group).splitlines()
+                     new_str = []
+                     for line in difflib.unified_diff(
+                         old_str, new_str, fromfile="old", tofile="new", lineterm=""
+                     ):
+                         if line.startswith("@@"):
+                             print(" " + color_text(line, "36"))
+                         elif line.startswith("-") and not line.startswith("---"):
+                             print(" " + color_text(line, "31"))
+                         elif line.startswith("+") and not line.startswith("+++"):
+                             print(" " + color_text(line, "32"))
+                 else:
+                     print(" " + color_text(f"- {json.dumps(old_group)}", "31"))
+             else:
+                 old_str = stringify(old_group).splitlines()
+                 new_str = stringify(new_group).splitlines()
+
+                 for line in difflib.unified_diff(
+                     old_str, new_str, fromfile="old", tofile="new", lineterm=""
+                 ):
+                     if line.startswith("@@"):
+                         print(" " + color_text(line, "36"))
+                     elif line.startswith("-") and not line.startswith("---"):
+                         print(" " + color_text(line, "31"))
+                     elif line.startswith("+") and not line.startswith("+++"):
+                         print(" " + color_text(line, "32"))
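
The new _dictionary_diffs module is geared toward comparing item-definition parts: build_path_map expects each part as a {"path": ..., "payload": ...} dict, and diff_parts prints a colorized, grouped diff per part path. A hypothetical call with made-up parts, shown only to illustrate the expected input shape:

from sempy_labs._dictionary_diffs import diff_parts

old_parts = [
    {"path": "definition/report.json", "payload": {"theme": "CY24SU06", "pages": ["p1"]}},
]
new_parts = [
    {"path": "definition/report.json", "payload": {"theme": "CY24SU06", "pages": ["p1", "p2"]}},
    {"path": "definition/pages/p2/page.json", "payload": {"displayName": "New page"}},
]

# Prints "+ definition/pages/p2/page.json" for the added part, and a unified diff of the
# changed "pages" list under "~ definition/report.json" for the modified part.
diff_parts(old_parts, new_parts)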