semantic-link-labs 0.9.9__py3-none-any.whl → 0.9.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of semantic-link-labs might be problematic.
- {semantic_link_labs-0.9.9.dist-info → semantic_link_labs-0.9.11.dist-info}/METADATA +30 -22
- {semantic_link_labs-0.9.9.dist-info → semantic_link_labs-0.9.11.dist-info}/RECORD +47 -40
- {semantic_link_labs-0.9.9.dist-info → semantic_link_labs-0.9.11.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +28 -1
- sempy_labs/_clear_cache.py +12 -0
- sempy_labs/_dax.py +8 -2
- sempy_labs/_delta_analyzer.py +17 -26
- sempy_labs/_environments.py +19 -1
- sempy_labs/_generate_semantic_model.py +7 -8
- sempy_labs/_helper_functions.py +351 -151
- sempy_labs/_kql_databases.py +18 -0
- sempy_labs/_kusto.py +137 -0
- sempy_labs/_list_functions.py +18 -36
- sempy_labs/_model_bpa_rules.py +13 -3
- sempy_labs/_notebooks.py +44 -11
- sempy_labs/_semantic_models.py +93 -1
- sempy_labs/_sql.py +3 -2
- sempy_labs/_tags.py +194 -0
- sempy_labs/_variable_libraries.py +89 -0
- sempy_labs/_vertipaq.py +6 -6
- sempy_labs/_vpax.py +386 -0
- sempy_labs/_warehouses.py +3 -3
- sempy_labs/admin/__init__.py +14 -0
- sempy_labs/admin/_artifacts.py +3 -3
- sempy_labs/admin/_capacities.py +161 -1
- sempy_labs/admin/_dataflows.py +45 -0
- sempy_labs/admin/_items.py +16 -11
- sempy_labs/admin/_tags.py +126 -0
- sempy_labs/admin/_tenant.py +5 -5
- sempy_labs/directlake/_generate_shared_expression.py +29 -26
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +55 -5
- sempy_labs/dotnet_lib/dotnet.runtime.config.json +10 -0
- sempy_labs/lakehouse/__init__.py +16 -0
- sempy_labs/lakehouse/_blobs.py +115 -63
- sempy_labs/lakehouse/_get_lakehouse_columns.py +41 -18
- sempy_labs/lakehouse/_get_lakehouse_tables.py +62 -47
- sempy_labs/lakehouse/_helper.py +211 -0
- sempy_labs/lakehouse/_lakehouse.py +45 -36
- sempy_labs/lakehouse/_livy_sessions.py +137 -0
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +7 -12
- sempy_labs/migration/_refresh_calc_tables.py +7 -6
- sempy_labs/report/_download_report.py +1 -1
- sempy_labs/report/_generate_report.py +5 -1
- sempy_labs/report/_reportwrapper.py +31 -18
- sempy_labs/tom/_model.py +104 -35
- sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +0 -9
- sempy_labs/report/_bpareporttemplate/.platform +0 -11
- {semantic_link_labs-0.9.9.dist-info → semantic_link_labs-0.9.11.dist-info}/licenses/LICENSE +0 -0
- {semantic_link_labs-0.9.9.dist-info → semantic_link_labs-0.9.11.dist-info}/top_level.txt +0 -0
{semantic_link_labs-0.9.9.dist-info → semantic_link_labs-0.9.11.dist-info}/METADATA CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: semantic-link-labs
-Version: 0.9.9
+Version: 0.9.11
 Summary: Semantic Link Labs for Microsoft Fabric
 Author: Microsoft Corporation
 License: MIT License
@@ -15,7 +15,7 @@ Classifier: Framework :: Jupyter
 Requires-Python: <3.12,>=3.10
 Description-Content-Type: text/markdown
 License-File: LICENSE
-Requires-Dist: semantic-link-sempy>=0.
+Requires-Dist: semantic-link-sempy>=0.10.2
 Requires-Dist: anytree
 Requires-Dist: powerbiclient
 Requires-Dist: polib
@@ -27,7 +27,7 @@ Dynamic: license-file
 # Semantic Link Labs

 [](https://badge.fury.io/py/semantic-link-labs)
-[](https://readthedocs.org/projects/semantic-link-labs/)
 [](https://github.com/psf/black)
 [](https://pepy.tech/project/semantic-link-labs)

@@ -38,6 +38,9 @@ Dynamic: license-file
 [Read the Wiki!](https://github.com/microsoft/semantic-link-labs/wiki)
 ---

+[See code examples!](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples)
+---
+
 Semantic Link Labs is a Python library designed for use in [Microsoft Fabric notebooks](https://learn.microsoft.com/fabric/data-engineering/how-to-use-notebook). This library extends the capabilities of [Semantic Link](https://learn.microsoft.com/fabric/data-science/semantic-link-overview) offering additional functionalities to seamlessly integrate and work alongside it. The goal of Semantic Link Labs is to simplify technical processes, empowering people to focus on higher level activities and allowing tasks that are better suited for machines to be efficiently handled without human intervention.

 If you encounter any issues, please [raise a bug](https://github.com/microsoft/semantic-link-labs/issues/new?assignees=&labels=&projects=&template=bug_report.md&title=).
@@ -51,43 +54,45 @@ Check out the video below for an introduction to Semantic Link, Semantic Link La
 ## Featured Scenarios
 * Semantic Models
   * [Migrating an import/DirectQuery semantic model to Direct Lake](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration)
-  * [Model Best Practice Analyzer (BPA)](https://semantic-link-labs
-  * [Vertipaq Analyzer](https://semantic-link-labs
+  * [Model Best Practice Analyzer (BPA)](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#model-best-practice-analyzer)
+  * [Vertipaq Analyzer](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#vertipaq-analyzer)
   * [Tabular Object Model](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Tabular%20Object%20Model.ipynb) [(TOM)](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html)
-  * [Translate a semantic model's metadata](https://semantic-link-labs
+  * [Translate a semantic model's metadata](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#translate-a-semantic-model)
   * [Check Direct Lake Guardrails](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.get_lakehouse_tables)
-  * [Refresh](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Semantic%20Model%20Refresh.ipynb), [clear cache](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.clear_cache), [backup](https://semantic-link-labs
+  * [Refresh](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Semantic%20Model%20Refresh.ipynb), [clear cache](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.clear_cache), [backup](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#backup-a-semantic-model), [restore](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#restore-a-semantic-model), [copy backup files](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.copy_semantic_model_backup_file), [move/deploy across workspaces](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.deploy_semantic_model)
   * [Run DAX queries which impersonate a user](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.evaluate_dax_impersonation)
   * [Manage Query Scale Out](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Query%20Scale%20Out.ipynb)
-  * [Auto-generate descriptions for any/all measures in bulk](https://semantic-link-labs
-  * [Warm the cache of a Direct Lake semantic model after a refresh (using columns currently in memory)](https://semantic-link-labs
-  * [Warm the cache of a Direct Lake semantic model (via perspective)](https://semantic-link-labs
-  * [Visualize a refresh](https://github.com/microsoft/semantic-link-labs/
+  * [Auto-generate descriptions for any/all measures in bulk](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#auto-generate-measure-descriptions)
+  * [Warm the cache of a Direct Lake semantic model after a refresh (using columns currently in memory)](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#warm-cache-the-cache-of-a-direct-lake-semantic-model)
+  * [Warm the cache of a Direct Lake semantic model (via perspective)](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#warm-cache-the-cache-of-a-direct-lake-semantic-model)
+  * [Visualize a refresh](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#refresh-a-semantic-model)
   * [Update the connection of a Direct Lake semantic model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.update_direct_lake_model_connection)
   * [Dynamically generate a Direct Lake semantic model](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.generate_direct_lake_semantic_model)
   * [Check why a Direct Lake semantic model would fallback to DirectQuery](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.directlake.html#sempy_labs.directlake.check_fallback_reason)
   * [View a measure dependency tree](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.measure_dependency_tree)
   * [View unique columns touched in a single (or multiple) DAX query(ies)](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.get_dax_query_dependencies)
   * [Analyze delta tables for Direct Lake semantic models using Delta Analyzer](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Delta%20Analyzer.ipynb)
-  * [View synonyms from the linguistic schema](https://semantic-link-labs
+  * [View synonyms from the linguistic schema](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#list-the-synonyms-in-the-linguistic-metadata)
   * [Add](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.add_incremental_refresh_policy), [update](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.update_incremental_refresh_policy) and [view](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html#sempy_labs.tom.TOMWrapper.show_incremental_refresh_policy) an incremental refresh policy.
 * Reports
-  * [Report Best Practice Analyzer (BPA)](https://semantic-link-labs
-  * [View report metadata](https://github.com/microsoft/semantic-link-labs/
-  * [View semantic model objects most frequently used in Power BI reports](https://semantic-link-labs
-  * [View broken reports](https://semantic-link-labs
-  * [Set a report theme](https://semantic-link-labs
-  * [Migrate report-level measures to the semantic model](https://semantic-link-labs
-  * [Rebind reports](https://semantic-link-labs
+  * [Report Best Practice Analyzer (BPA)](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#report-best-practice-analyzer)
+  * [View report metadata](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#view-report-metadata)
+  * [View semantic model objects most frequently used in Power BI reports](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#show-the-frequency-of-semantic-model-object-used-within-reports)
+  * [View broken reports](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#find-broken-visuals-in-a-power-bi-report)
+  * [Set a report theme](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#set-the-theme-of-a-report)
+  * [Migrate report-level measures to the semantic model](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#migrate-report-level-measures-to-the-semantic-model)
+  * [Rebind reports](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#rebind-a-report-to-a-different-semantic-model)
+  * [Save a report as a .pbip](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#save-a-report-as-a-pbip-file)
 * Capacities
   * [Migrating a Power BI Premium capacity (P sku) to a Fabric capacity (F sku)](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Capacity%20Migration.ipynb)
   * [Migrating a Fabric Trial capacity (FT sku) to a Fabric capacity (F sku)](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Capacity%20Migration.ipynb)
   * [Create](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.create_fabric_capacity)/[update](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.update_fabric_capacity)/[suspend](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.suspend_fabric_capacity)/[resume](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.resume_fabric_capacity) Fabric capacities
 * Lakehouses
-  * [Optimize lakehouse tables](https://semantic-link-labs
-  * [Vacuum lakehouse tables](https://semantic-link-labs
-  * [Create](https://semantic-link-labs
+  * [Optimize lakehouse tables](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#optimize-lakehouse-tables)
+  * [Vacuum lakehouse tables](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#vacuum-lakehouse-tables)
+  * [Create](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#create-a-onelake-shortcut), [delete](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.delete_shortcut), and [view shortcuts](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_shortcuts)
   * [Analyze delta tables for Direct Lake semantic models using Delta Analyzer](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Delta%20Analyzer.ipynb)
+  * [Recover a soft-deleted lakehouse table/file/folder](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#recover-a-lakehouse-object)
 * Notebooks
   * [Import a notebook from the web](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.import_notebook_from_web)
 * APIs
@@ -149,6 +154,9 @@ An even better way to ensure the semantic-link-labs library is available in your
 2. Select your newly created environment within the 'Environment' drop down in the navigation bar at the top of the notebook

 ## Version History
+* [0.9.11](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.11) (May 22, 2025)
+* [0.9.10](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.10) (April 24, 2025)
+* [0.9.9](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.9) (April 7, 2025)
 * [0.9.8](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.8) (April 3, 2025)
 * [0.9.7](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.7) (April 1, 2025)
 * [0.9.6](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.6) (March 12, 2025)

{semantic_link_labs-0.9.9.dist-info → semantic_link_labs-0.9.11.dist-info}/RECORD CHANGED

@@ -1,34 +1,35 @@
-semantic_link_labs-0.9.
-sempy_labs/__init__.py,sha256=
+semantic_link_labs-0.9.11.dist-info/licenses/LICENSE,sha256=ws_MuBL-SCEBqPBFl9_FqZkaaydIJmxHrJG2parhU4M,1141
+sempy_labs/__init__.py,sha256=X39q1v8jygmKdGc0kfuRcn7V1MAHxzG5mDivJxON7Go,16255
 sempy_labs/_ai.py,sha256=BD1TdGOJ7T4m3x426OP-FLb7bevn-9gKY8BTEDAJDQU,16205
 sempy_labs/_authentication.py,sha256=GjtN5XqIyWXbR5Ni4hfYiUNwgFa-ySX8e-BrqE1vgGc,6903
 sempy_labs/_capacities.py,sha256=n48NYTY03zygRzcfyK1UOkSwTqKSyQefQ10IKQh-dfA,40426
 sempy_labs/_capacity_migration.py,sha256=GGIMrHwc7IEVJ9pDwmikXiF2QHu2nYqNyG235QYbmEw,24837
-sempy_labs/_clear_cache.py,sha256=
+sempy_labs/_clear_cache.py,sha256=UY_pljGeqb168Qe5sP66ecLeREPN4NMpjIEocCmxg0M,13201
 sempy_labs/_connections.py,sha256=Cc3VpQtXUDVpEyn5CVd9lGeZ13Nrdk_E_XrLu4pGRi8,18658
 sempy_labs/_dashboards.py,sha256=cyFD-pUUFu4scGkbitilrI22GW5dTmTkUZ15ou7Bl-A,1880
 sempy_labs/_data_pipelines.py,sha256=cW_WGmuWD4V9IgLprKL4TqFXgid4eTBXvEL3-IArS0w,4817
 sempy_labs/_dataflows.py,sha256=xv-wRDUq4Bzz-BOs1Jdb4bgS9HbPLpa1GqexfA6H0mg,8053
-sempy_labs/_dax.py,sha256=
+sempy_labs/_dax.py,sha256=Q_GylKeuHFnRB_sztZS1ON5v5tr6ua6lc9elyJYKbV8,17219
 sempy_labs/_dax_query_view.py,sha256=_zSvgystZzBj5euNTLKTg7-G77XVk0vqyqrDT72VvoM,1892
-sempy_labs/_delta_analyzer.py,sha256=
+sempy_labs/_delta_analyzer.py,sha256=d6qxZrEhn3Hfg5qMQODt7dDG5mYSY18xeXUkW_NyMgw,17281
 sempy_labs/_delta_analyzer_history.py,sha256=A50dlBd2d3ILKV7Fwj4pfIRtXKmCFslhk1gpeEw4inc,10765
 sempy_labs/_deployment_pipelines.py,sha256=SDQYkCAhOAlxBr58jYxtLFOVySiRXO0_WhfOKGDeYZQ,6254
 sempy_labs/_documentation.py,sha256=yVA8VPEzx_fmljtcvSxtB7-BeupYsfdMXXjp6Fpnyo8,5007
-sempy_labs/_environments.py,sha256=
+sempy_labs/_environments.py,sha256=c_9uU6zhVmZVTLZWuD-OdcicBJvmRQQVmqHW7EqUn_Q,5839
 sempy_labs/_eventhouses.py,sha256=WEf33difBOTGTZGh1QFmY4gv-e43uwO1V54nrsjGGyY,5376
 sempy_labs/_eventstreams.py,sha256=c8nNdRM8eeDOYXd_AtRp7zYHYaqV9p1bI2V0boUrCuA,3492
 sempy_labs/_external_data_shares.py,sha256=s2okFvtCG5FDMbMJ_q6YSlCkVVFiE9sh2imVxZq1woU,6450
 sempy_labs/_gateways.py,sha256=6JE6VeGFPKF617sf2mMkxXVOz57YHI5jAQLAF-BzRLc,17527
-sempy_labs/_generate_semantic_model.py,sha256=
+sempy_labs/_generate_semantic_model.py,sha256=F2NVW6kT1HnrZTqWnDZ4BRApbsUfLFDSsDbRzCJnc0o,18429
 sempy_labs/_git.py,sha256=RyaT4XzrSi-4NLJZWiWZnnNpMgrKzRNxhyY8b1O2I6c,17819
 sempy_labs/_graphQL.py,sha256=truXeIUPRKLwc4skhs3FZYNcKP9FCGKly9en0YkR4NE,2690
-sempy_labs/_helper_functions.py,sha256=
+sempy_labs/_helper_functions.py,sha256=SbFhIT_6j8xSkFflzuWWoo_NIH_uqPuINUGvcblPy7s,68762
 sempy_labs/_icons.py,sha256=SB9EQeoFCfD4bO6fcYuJOoPRSYenSrW0rI9G5RFsH28,3579
 sempy_labs/_job_scheduler.py,sha256=_-Pifkttk1oPNxewxwWcQ4QC_Hr24GSi6nmrEXwc0pc,15814
-sempy_labs/_kql_databases.py,sha256=
+sempy_labs/_kql_databases.py,sha256=UtpYVBsxwWQDnqwdjq186bZzw5IlkD2S9KHA6Kw75U0,4738
 sempy_labs/_kql_querysets.py,sha256=Jjcs4SkjeirnDkG6zfsl0KRUXVzMyWii0Yn0JMWwln8,3502
-sempy_labs/
+sempy_labs/_kusto.py,sha256=g3Up4j1KNdIGC2DDbvoduCdX1Pp8fAPGAlBAqOtaBeg,4544
+sempy_labs/_list_functions.py,sha256=wqaeCpsYPG3SW52ipB-uTCrjWQ1xcVkcazakWtPAsHo,61035
 sempy_labs/_managed_private_endpoints.py,sha256=Vqicp_EiGg_m8aA2F__gaJiB9cwjbxQOSOi7hkS6FvQ,6907
 sempy_labs/_mirrored_databases.py,sha256=-9ZV2PdPeIc4lvFNkpPMm_9wkGIY1QLZXspYdSev5oQ,13147
 sempy_labs/_mirrored_warehouses.py,sha256=Q3WlRjUwCLz8KW1eN8MiTPeY0P52Vkuz5kgnv4GvQ3k,1739
@@ -37,21 +38,24 @@ sempy_labs/_ml_models.py,sha256=69i67MHn-_Fsq-5slLxxhCF8N2s0JBYn_CDTa1Hhhs0,3261
 sempy_labs/_model_auto_build.py,sha256=PTQo3dufzLSFcQ5shFkmBWAVSdP7cTJgpUclrcXyNbg,5105
 sempy_labs/_model_bpa.py,sha256=AoHshKqn3z2lNPwu1hKntJuCELYe1bLa_0LUzFXRjgs,22032
 sempy_labs/_model_bpa_bulk.py,sha256=hRY3dRBUtecrbscCZsEGv6TpCVqg_zAi8NmRq6dVMiE,15845
-sempy_labs/_model_bpa_rules.py,sha256=
+sempy_labs/_model_bpa_rules.py,sha256=ZK16VqWcITiTKdd9T5Xnu-AMgodLVx0ZpanZjsC88-U,46260
 sempy_labs/_model_dependencies.py,sha256=0xGgubrq76zIvBdEqmEX_Pd6WdizXFVECBW6BPl2DZo,13162
 sempy_labs/_mounted_data_factories.py,sha256=-IBxE5XurYyeeQg7BvpXSSR1MW3rRGmue6UGpqlo96U,3906
-sempy_labs/_notebooks.py,sha256=
+sempy_labs/_notebooks.py,sha256=QbDmvxvYZEFE90lQ3Rqi70yjc5Xxg7D3ySemPPVGflY,9102
 sempy_labs/_one_lake_integration.py,sha256=9ub75-ueEFqn1iRgRd5y97SYujalsWW6ufs1du4PbDs,6303
 sempy_labs/_query_scale_out.py,sha256=nra1q8s-PKpZTlI_L0lMGO1GmdBk6sqETsBQShF1yPY,15352
 sempy_labs/_refresh_semantic_model.py,sha256=4w_uaYLbaZptmEFY7QHWzOgXcgc2ctGx8HQvt2aguxk,17360
-sempy_labs/_semantic_models.py,sha256=
+sempy_labs/_semantic_models.py,sha256=F9v964IiXqx2qNPtNBzYrWPtXIoQH5-FI5csWJGofoQ,7934
 sempy_labs/_spark.py,sha256=SuSTjjmtzj7suDgN8Njk_pNBaStDLgIJB_1yk_e2H1Y,19340
-sempy_labs/_sql.py,sha256=
+sempy_labs/_sql.py,sha256=6mtX0I2VTpmpMbAiqdQGPyLiLN3q3pVDTP9IW7Z3JfA,8276
 sempy_labs/_sqldatabase.py,sha256=8HV3UtsLiwexmPSjYnhnYnD6xEvgFpTG13jcOuGheuI,6470
+sempy_labs/_tags.py,sha256=7DvSc3wah26DxHwUhr-yr_JhZiplrePkFaDaVIAQfV4,5666
 sempy_labs/_translations.py,sha256=i4K2PFk6-TcmAnUpqz-z_GuDv9XEp1cBs0KY-x6ja1w,16168
 sempy_labs/_utils.py,sha256=aKGnUiXSLLRQRXXv8TH_XhGcOtDsnrs0RHmQ6YZMn3o,1786
-sempy_labs/
-sempy_labs/
+sempy_labs/_variable_libraries.py,sha256=t97gj8Mo-YjahKx6XObqh5HkhUMHUke5GdWpSzkC5ZM,3008
+sempy_labs/_vertipaq.py,sha256=1UvB79xOxeGdRFINsUsreXxtZtiatHlACAfbQhv45as,38536
+sempy_labs/_vpax.py,sha256=k1UalPGdwmhL8eqH_WeOx1IkPu0Zz2xGWSBuAp4Sq0M,15432
+sempy_labs/_warehouses.py,sha256=wF38YP4-39KPsXPyexJahZPrYAyLc5xfrerJvS7My5Q,7286
 sempy_labs/_workloads.py,sha256=ifQ6Jv0_MDzjfu993bU8V7thOdW5kFyp3MjA082rsrE,4687
 sempy_labs/_workspace_identity.py,sha256=plxgcqt2aBXgLpyn1dpHhzh_5Z-gFkLK8RtId2OIX5s,2561
 sempy_labs/_workspaces.py,sha256=wHBR2e5wOhhWN6PiFefSFzYrwvdgMkG0dg2gEpPcZ4o,13090
@@ -93,60 +97,65 @@ sempy_labs/_bpa_translation/_model/_translations_tr-TR.po,sha256=NdW-X4E0QmeLKM0
 sempy_labs/_bpa_translation/_model/_translations_uk-UA.po,sha256=3NsFN8hoor_5L6738FjpJ8o4spwp8FNFGbVItHD-_ec,43500
 sempy_labs/_bpa_translation/_model/_translations_zh-CN.po,sha256=ipMbnet7ZI5mZoC8KonYKVwGmFLHFB_9KIDOoBgSNfo,26815
 sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po,sha256=5v6tVKGruqneAeMoa6F3tyg_JBL8qOpqOJofWpq2W3U,31518
-sempy_labs/admin/__init__.py,sha256=
+sempy_labs/admin/__init__.py,sha256=XaUXm5uuTVEKtQVDfvobU8qM-QlL5hFd_kVH_o0el4w,4016
 sempy_labs/admin/_activities.py,sha256=YfISDzhXro9glEa_yJmoYv-2q2M1DIkoyNzgLl7eWuI,6695
 sempy_labs/admin/_apps.py,sha256=PUEQlXbzVR9u3ZUQUhpfU3J-hfa8A2nTGFBpCqzMdW0,4085
-sempy_labs/admin/_artifacts.py,sha256=
+sempy_labs/admin/_artifacts.py,sha256=hGTMY0t4qxK1zs89PIZQaNP5QHRCIWAOvp2FiWHtQpE,2249
 sempy_labs/admin/_basic_functions.py,sha256=NAiju3N6xGOHFbK6sRz8NyVOCsgDIwl50U2CRA2SV3g,16320
-sempy_labs/admin/_capacities.py,sha256=
+sempy_labs/admin/_capacities.py,sha256=aYPanX7esKoq2EtlE1iyBYmBYsaifDduUkJU8qHt9DU,15586
+sempy_labs/admin/_dataflows.py,sha256=u7XrYSJg4_nagle4zv46BN70goL3OHbgO_QXpwRZqjk,1457
 sempy_labs/admin/_datasets.py,sha256=kMerpBNro--kKdp2rhEKnVe0JDGjMDsxqgfbbw17K-U,6235
 sempy_labs/admin/_domains.py,sha256=Z0EhIJCcujx1NumeqM4eKAvai18p-9TAw1WJaU-cbbE,15076
 sempy_labs/admin/_external_data_share.py,sha256=q4gw5iYZJDH-9xIM6L0b2CU9ebUIdE-ZVrFsulRHyUU,3364
 sempy_labs/admin/_git.py,sha256=gsbDQKd66knCI_Zh8vHSfHK-uQVJjVmhKKvfMMYKZyA,2264
-sempy_labs/admin/_items.py,sha256=
+sempy_labs/admin/_items.py,sha256=BO4X_hpp94u4FUSn6Rb25gsRz1TgQAvLKLSwtzOLlVw,8824
 sempy_labs/admin/_reports.py,sha256=nPDoC90Yzc67CtiuL4WYBYkGYuUQOnZAy0PCU0aYKj8,7857
 sempy_labs/admin/_scanner.py,sha256=0mKi0ihJETdsSaeHFBEq3drcCS8J_enWWkIMBMECz64,4370
 sempy_labs/admin/_shared.py,sha256=srgkqttbMbK5XXjOt4zeAV8rMCvK7zEus55HsGtNUFI,3007
-sempy_labs/admin/
+sempy_labs/admin/_tags.py,sha256=92CoaRwpiVtpbkT9jC6eNAp5vdxzR4YAKo2VfmDPn7k,3752
+sempy_labs/admin/_tenant.py,sha256=D8x45G4U8aiDlYcYTWUIg--Rrl1T0HwRf0qtk-jUBbo,19347
 sempy_labs/admin/_users.py,sha256=eEOkgvny3FwMuUrSIBQ0n3JwrzWV_6_nwGc8_c-eXSM,4571
 sempy_labs/admin/_workspaces.py,sha256=XiiO3vyuJxKkVf9ZrW7261wHSBrnd8r7rbia8HGDFkI,4911
 sempy_labs/directlake/__init__.py,sha256=etaj-3wqe5t93mu74tGYjEOQ6gtHWUogidOygiVvlq8,2131
 sempy_labs/directlake/_directlake_schema_compare.py,sha256=tVc6hIgDxxA7a8V51e5tlzlp3bzVVTqQ_OKsTNxiWG4,5074
 sempy_labs/directlake/_directlake_schema_sync.py,sha256=ipONLkBaXm4WgcMMChAyD4rVushdqdjAQdexT-fJxcY,6573
 sempy_labs/directlake/_dl_helper.py,sha256=HHFy6tW-tSVZ4YHxSHvt6pXrloh0O6Lx7yNmZE7IAI4,10348
-sempy_labs/directlake/_generate_shared_expression.py,sha256=
+sempy_labs/directlake/_generate_shared_expression.py,sha256=fAaFlR5-prqOH3vJ_ktLyEYfKR_uBMvAxOaR-BRCm-w,3561
 sempy_labs/directlake/_get_directlake_lakehouse.py,sha256=e0WFQm4-daJR4K1aHuVaubu7T26yTeBgfNEMOXk-EzM,2392
 sempy_labs/directlake/_get_shared_expression.py,sha256=qc85kXggkx_7Sz_rAAli_yPnLzrGZpgD8IfVbTfZhQM,1133
 sempy_labs/directlake/_guardrails.py,sha256=wNVXpeiZckgLTly4cS5DU5DoV9x1S4DMxN5S08qAavE,2749
 sempy_labs/directlake/_list_directlake_model_calc_tables.py,sha256=EYT4ELmOZ3Uklzy6uMQMidc4WtBXm21NQqZu1Q5HTsg,2509
 sempy_labs/directlake/_show_unsupported_directlake_objects.py,sha256=nmrZrtDez7U8Ji76i9fxnnTx1zxMu2LCOZTMz4sFUEc,3504
-sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py,sha256=
+sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py,sha256=W3LDXDD2pLH5B43NI9ixSIW2MJIORAWu5ANHQRFKMBY,9215
 sempy_labs/directlake/_update_directlake_partition_entity.py,sha256=8YxrReJObtc7_Huq0qQrLKTVMhPO84guv8bQKtp__4c,9032
 sempy_labs/directlake/_warm_cache.py,sha256=xc7gG_OJY1rJYg79ztgcLATpnXHNqFaw-6CU1HgdlXk,9258
+sempy_labs/dotnet_lib/dotnet.runtime.config.json,sha256=syhDFQv6cEmZnE1WtFjNe3NwhsIsnd-CFULv-vEWOFI,167
 sempy_labs/graph/__init__.py,sha256=AZ_IpOL06VvXrYmgbcrvQlxCxdDksvwXKf7JAGohCNI,620
 sempy_labs/graph/_groups.py,sha256=j3YDeV6MzhRjGJRoD60SAaGyU8yb23x8QhXBzU2RWlE,12590
 sempy_labs/graph/_teams.py,sha256=SRFaFuxtB7ylC5WeXIdrW0aLCxc_JTJHeEmxOPG99r8,3089
 sempy_labs/graph/_users.py,sha256=dFOZ-jel6Aj4Um66f1jzQrgV0fOoI0cQnZfmR4OJSXo,5947
-sempy_labs/lakehouse/__init__.py,sha256=
-sempy_labs/lakehouse/_blobs.py,sha256=
-sempy_labs/lakehouse/_get_lakehouse_columns.py,sha256=
-sempy_labs/lakehouse/_get_lakehouse_tables.py,sha256=
-sempy_labs/lakehouse/
+sempy_labs/lakehouse/__init__.py,sha256=xuYQAxBEEahNA_twvYxeP6cUd8dCreNb0eKyuKZbtPU,1327
+sempy_labs/lakehouse/_blobs.py,sha256=N8s3hYa9dAOLpH9iTavR_FPKrb3j_RqXHJnC6UVeeW0,9745
+sempy_labs/lakehouse/_get_lakehouse_columns.py,sha256=dF5rLkdD3PB8EiXQewRdnr7MzbDGkZWHrFfI01_a7K4,3710
+sempy_labs/lakehouse/_get_lakehouse_tables.py,sha256=AMQXk40YMN4daS0zILgZm-sc2llnvCaL7kS1v8dfYMA,9369
+sempy_labs/lakehouse/_helper.py,sha256=W9adTkZw9_f9voB3bA2JWkI4LqAcnvpY929vMQJw1xE,7401
+sempy_labs/lakehouse/_lakehouse.py,sha256=JQBQl_E9svzu_ozjYh1eSV2gY1NhHfomW0MiD3JzXPc,8860
+sempy_labs/lakehouse/_livy_sessions.py,sha256=REfBpuDdH7O1CQ3JpMMZpX7-wnnVXmZEqAXsZw1MTjk,5778
 sempy_labs/lakehouse/_shortcuts.py,sha256=24sPtX98ho84fNV_JCAHZrSkvk0Ui7p-0b-jTdGOGM8,16580
 sempy_labs/migration/__init__.py,sha256=142n01VAqlcx4E0mGGRtUfVOEwAXVdiHI_XprmUm7As,1175
 sempy_labs/migration/_create_pqt_file.py,sha256=eRK0Jz9ZeV_7jV3kNRze0bTAIqxsAZXLKMGE_loKOaY,9677
 sempy_labs/migration/_direct_lake_to_import.py,sha256=GTSERKSwj4M4wOsENgDbb-ZO7NFqwD1VUcyOS73AbaM,3948
-sempy_labs/migration/_migrate_calctables_to_lakehouse.py,sha256=
+sempy_labs/migration/_migrate_calctables_to_lakehouse.py,sha256=2e3q__JBgm0umQX-Pc_c5pY2rmj-0fvjTqdkLeoMD-U,17774
 sempy_labs/migration/_migrate_calctables_to_semantic_model.py,sha256=Qt4WfmllCtSl-xkWzWWL5sTzi3lQDaJp43lVEXQisVY,6303
 sempy_labs/migration/_migrate_model_objects_to_semantic_model.py,sha256=RD0ttWcBratAzpPKjFF6jpEnZEd6M7m8OfEUFbkInbA,22950
 sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py,sha256=HYi2vn7yYDsBCTAXFTi6UiB86kdSlhQKPdwAt1nTKEE,7169
 sempy_labs/migration/_migration_validation.py,sha256=AHURrWofb-U-L2Bdu36mcisVXOuZXi6Smgrrs2kjYBM,2650
-sempy_labs/migration/_refresh_calc_tables.py,sha256=
+sempy_labs/migration/_refresh_calc_tables.py,sha256=qUBPZ5HAHyE5ev6STKDcmtEpRuLDX5RzYTKre4ZElj4,5443
 sempy_labs/report/_BPAReportTemplate.json,sha256=9Uh-7E6d2ooxQ7j5JRayv_ayEULc7Gzg42kZGKdOqH8,63920
 sempy_labs/report/__init__.py,sha256=bPZ_MMqKGokskjJwM3T89LxIVNa2AXJg8Lr-mvJhP0E,1392
-sempy_labs/report/_download_report.py,sha256=
+sempy_labs/report/_download_report.py,sha256=01hI26UV_jb5RLPheXRQsIDNNf4i72xICm14slKqEFA,2704
 sempy_labs/report/_export_report.py,sha256=XCMsZzTBMgvQOe3Ltdod7my7_izpmP-3AVH6W5CExPE,10976
-sempy_labs/report/_generate_report.py,sha256=
+sempy_labs/report/_generate_report.py,sha256=S830woeisjKCYNyacfvSx0fVHzLC7-aw2oPIU2sYiP8,13910
 sempy_labs/report/_paginated.py,sha256=rsElE0IQ9qxRDuEp6qNF1EcD5XEgfTc7WsWEQsalsuI,2156
 sempy_labs/report/_report_bpa.py,sha256=ClETB8Q41sY1scCuknhpvalvuBaQ9ZwA4QX7F3sPcjc,13596
 sempy_labs/report/_report_bpa_rules.py,sha256=tPVGA0hmE6QMLlWtig7Va7Ksr2yXWl_Lndq--tWWd6w,4959
@@ -154,11 +163,9 @@ sempy_labs/report/_report_functions.py,sha256=pSrsUfMJqmsn9CYb5AM0iYdPR-EmuUSprV
 sempy_labs/report/_report_helper.py,sha256=m23osIZMjvHhKbfhmTHyqHibXoWA9eP84TPanbH8kuE,10863
 sempy_labs/report/_report_list_functions.py,sha256=K9tMDQKhIZhelHvfMMW0lsxbVHekJ-5dAQveoD7PUDA,3980
 sempy_labs/report/_report_rebind.py,sha256=svyxUSdqgXJW1UDNcb-urJxU9erO3JM72uzmuJUWIT0,5090
-sempy_labs/report/_reportwrapper.py,sha256=
+sempy_labs/report/_reportwrapper.py,sha256=itzDImW0XUJuQBL1gQpXjIRBk0Knic0VCf7MHw8Kt68,83421
 sempy_labs/report/_save_report.py,sha256=FAzScMQIXl89TgVSRvaJofzKT0TfZh_hhPNNvDiktaI,6033
-sempy_labs/report/_bpareporttemplate/.platform,sha256=kWRa6B_KwSYLsvVFDx372mQriQO8v7dJ_YzQV_cfD-Q,303
 sempy_labs/report/_bpareporttemplate/definition.pbir,sha256=bttyHZYKqjA8OBb_cezGlX4H82cDvGZVCl1QB3fij4E,343
-sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json,sha256=kzjBlNdjbsSBBSHBwbQc298AJCr9Vp6Ex0D5PemUuT0,1578
 sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json,sha256=4N6sT5nLlYBobGmZ1Xb68uOMVVCBEyheR535js_et28,13467
 sempy_labs/report/_bpareporttemplate/definition/report.json,sha256=-8BK5blTE-nc0Y4-M0pTHD8Znt3pHZ-u2veRppxPDBQ,3975
 sempy_labs/report/_bpareporttemplate/definition/version.json,sha256=yL3ZZqhfHqq0MS0glrbXtQgkPk17xaTSWvPPyxBWpOc,152
@@ -186,8 +193,8 @@ sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visua
 sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json,sha256=wBVuNc8S2NaUA0FC708w6stmR2djNZp8nAsHMqesgsc,293
 sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json,sha256=mlY6t9OlSe-Y6_QmXJpS1vggU6Y3FjISUKECL8FVSg8,931
 sempy_labs/tom/__init__.py,sha256=Qbs8leW0fjzvWwOjyWK3Hjeehu7IvpB1beASGsi28bk,121
-sempy_labs/tom/_model.py,sha256=
-semantic_link_labs-0.9.
-semantic_link_labs-0.9.
-semantic_link_labs-0.9.
-semantic_link_labs-0.9.
+sempy_labs/tom/_model.py,sha256=64IJf2Pdag5ECWxJcf4Cg2paoMD0Pr6BHvdjgvW6pwo,197537
+semantic_link_labs-0.9.11.dist-info/METADATA,sha256=93cuq3I17_DH0fWpnYNqB8YF8rckYYICQdRBtzN_SZs,26736
+semantic_link_labs-0.9.11.dist-info/WHEEL,sha256=zaaOINJESkSfm_4HQVc5ssNzHCPXhJm0kEUakpsEHaU,91
+semantic_link_labs-0.9.11.dist-info/top_level.txt,sha256=kiQX1y42Dbein1l3Q8jMUYyRulDjdlc2tMepvtrvixQ,11
+semantic_link_labs-0.9.11.dist-info/RECORD,,
sempy_labs/__init__.py CHANGED

@@ -1,3 +1,14 @@
+from sempy_labs._variable_libraries import (
+    list_variable_libraries,
+    delete_variable_library,
+)
+from sempy_labs._kusto import (
+    query_kusto,
+    query_workspace_monitoring,
+)
+from sempy_labs._vpax import (
+    create_vpax,
+)
 from sempy_labs._delta_analyzer_history import (
     delta_analyzer_history,
 )
@@ -9,11 +20,16 @@ from sempy_labs._mounted_data_factories import (
     get_mounted_data_factory_definition,
     delete_mounted_data_factory,
 )
-
+from sempy_labs._tags import (
+    list_tags,
+    apply_tags,
+    unapply_tags,
+)
 from sempy_labs._semantic_models import (
     get_semantic_model_refresh_schedule,
     enable_semantic_model_scheduled_refresh,
     delete_semantic_model,
+    update_semantic_model_refresh_schedule,
 )
 from sempy_labs._graphQL import (
     list_graphql_apis,
@@ -120,6 +136,7 @@ from sempy_labs._environments import (
     create_environment,
     delete_environment,
     publish_environment,
+    list_environments,
 )
 from sempy_labs._clear_cache import (
     clear_cache,
@@ -558,4 +575,14 @@ __all__ = [
     "delete_sql_database",
     "list_sql_databases",
     "delta_analyzer_history",
+    "query_kusto",
+    "query_workspace_monitoring",
+    "list_environments",
+    "list_tags",
+    "list_variable_libraries",
+    "delete_variable_library",
+    "create_vpax",
+    "update_semantic_model_refresh_schedule",
+    "apply_tags",
+    "unapply_tags",
 ]
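The additions to `__init__.py` surface the new 0.9.11 modules (variable libraries, Kusto queries, .vpax export, tags) at the package top level. A minimal, hedged sketch of how these exports could be called from a Fabric notebook follows; all workspace/item names are placeholders, and except for `list_environments` (whose `workspace` parameter appears in the `_environments.py` diff further down) the argument names are illustrative guesses rather than confirmed signatures.

```python
import sempy_labs as labs

# New top-level exports in 0.9.11 (names taken from the __init__.py diff above).
# All item/workspace names are placeholders; check the 0.9.11 docs for exact signatures.
envs = labs.list_environments(workspace="My Workspace")        # workspace param shown in the diff
tags = labs.list_tags()                                         # assumed to take no required arguments
libs = labs.list_variable_libraries(workspace="My Workspace")   # argument name assumed
labs.create_vpax(dataset="My Model", workspace="My Workspace")  # argument names assumed

# query_kusto and query_workspace_monitoring are also newly exported;
# their signatures are not shown in this diff, so they are omitted here.
```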
sempy_labs/_clear_cache.py CHANGED

@@ -59,6 +59,7 @@ def backup_semantic_model(
     allow_overwrite: bool = True,
     apply_compression: bool = True,
     workspace: Optional[str | UUID] = None,
+    password: Optional[str] = None,
 ):
     """
     `Backs up <https://learn.microsoft.com/azure/analysis-services/analysis-services-backup>`_ a semantic model to the ADLS Gen2 storage account connected to the workspace.
@@ -72,6 +73,8 @@
         Must end in '.abf'.
         Example 1: file_path = 'MyModel.abf'
         Example 2: file_path = 'MyFolder/MyModel.abf'
+    password : Optional[str], default=None
+        Password to encrypt the backup file. If None, no password is used.
     allow_overwrite : bool, default=True
         If True, overwrites backup files of the same name. If False, the file you are saving cannot have the same name as a file that already exists in the same location.
     apply_compression : bool, default=True
@@ -99,6 +102,9 @@
         }
     }

+    if password:
+        tmsl["backup"]["password"] = password  # Add password only if provided
+
     fabric.execute_tmsl(script=tmsl, workspace=workspace_id)
     print(
         f"{icons.green_dot} The '{dataset_name}' semantic model within the '{workspace_name}' workspace has been backed up to the '{file_path}' location."
@@ -113,6 +119,7 @@ def restore_semantic_model(
     ignore_incompatibilities: bool = True,
     force_restore: bool = False,
     workspace: Optional[str | UUID] = None,
+    password: Optional[str] = None,
 ):
     """
     `Restores <https://learn.microsoft.com/power-bi/enterprise/service-premium-backup-restore-dataset>`_ a semantic model based on a backup (.abf) file
@@ -126,6 +133,8 @@
         The location in which to backup the semantic model. Must end in '.abf'.
         Example 1: file_path = 'MyModel.abf'
         Example 2: file_path = 'MyFolder/MyModel.abf'
+    password : Optional[str], default=None
+        Password to decrypt the backup file. If None, no password is used.
     allow_overwrite : bool, default=True
         If True, overwrites backup files of the same name. If False, the file you are saving cannot have the same name as a file that already exists in the same location.
     ignore_incompatibilities : bool, default=True
@@ -155,6 +164,9 @@
         }
     }

+    if password:
+        tmsl["restore"]["password"] = password
+
     if force_restore:
         tmsl["restore"]["forceRestore"] = force_restore
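Both `backup_semantic_model` and `restore_semantic_model` now accept an optional `password`, which is injected into the TMSL backup/restore command only when provided. A hedged usage sketch follows; the model, file and workspace names are placeholders, and the `dataset` argument name is assumed from the surrounding docstrings rather than shown in these hunks.

```python
import sempy_labs as labs

# Back up a model to the ADLS Gen2 account attached to the workspace, encrypting
# the .abf file with a password, then restore it into another workspace.
labs.backup_semantic_model(
    dataset="Sales Model",             # placeholder model name (argument name assumed)
    file_path="Backups/SalesModel.abf",
    workspace="Source Workspace",
    password="MyStrongPassword!",      # new in 0.9.11; omit to back up without encryption
)
labs.restore_semantic_model(
    dataset="Sales Model",
    file_path="Backups/SalesModel.abf",
    workspace="Target Workspace",
    password="MyStrongPassword!",      # must match the password used for the backup
)
```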
sempy_labs/_dax.py CHANGED

@@ -206,9 +206,15 @@ def get_dax_query_dependencies(
     ].reset_index(drop=True)

     if put_in_memory:
-
+        # Only put columns in memory if they are in a Direct Lake table (and are not already in memory)
+        dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
+        dl_tables = dfP[dfP["Mode"] == "DirectLake"]["Table Name"].unique().tolist()
+        not_in_memory = dfC_filtered[
+            (dfC_filtered["Table Name"].isin(dl_tables))
+            & (dfC_filtered["Is Resident"] == False)
+        ]

-        if
+        if not not_in_memory.empty:
             _put_columns_into_memory(
                 dataset=dataset,
                 workspace=workspace,
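With this change, `put_in_memory=True` pages in only the Direct Lake columns that are not already resident, instead of unconditionally calling `_put_columns_into_memory`. A hedged sketch of the public function follows; the dataset/workspace names and the query are placeholders, and the `dax_string` argument name is assumed rather than shown in this hunk.

```python
import sempy_labs as labs

# List the columns touched by a DAX query and, for a Direct Lake model, warm
# any of those columns that are not yet resident in memory.
deps = labs.get_dax_query_dependencies(
    dataset="Sales Model",         # placeholder
    workspace="My Workspace",      # placeholder
    dax_string="EVALUATE SUMMARIZECOLUMNS('Date'[Year])",  # placeholder query
    put_in_memory=True,            # only warms non-resident Direct Lake columns as of this change
)
```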
sempy_labs/_delta_analyzer.py CHANGED

@@ -4,7 +4,6 @@ from datetime import datetime
 import os
 from uuid import UUID
 from typing import Dict, Optional
-import pyarrow.dataset as ds
 import pyarrow.parquet as pq
 from sempy_labs._helper_functions import (
     create_abfss_path,
@@ -16,11 +15,19 @@ from sempy_labs._helper_functions import (
     resolve_lakehouse_name_and_id,
     _read_delta_table,
     _mount,
-
+    _read_delta_table_history,
+    resolve_workspace_id,
+    resolve_lakehouse_id,
+    _get_delta_table,
 )
 from sempy._utils._log import log
 from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables
-from sempy_labs.lakehouse._lakehouse import
+from sempy_labs.lakehouse._lakehouse import (
+    lakehouse_attached,
+)
+from sempy_labs.lakehouse._helper import (
+    is_v_ordered,
+)
 import sempy_labs._icons as icons
 from tqdm.auto import tqdm

@@ -110,10 +117,6 @@ def delta_analyzer(
         lakehouse_id, workspace_id, table_name, schema=schema
     )
     local_path = _mount(lakehouse=lakehouse, workspace=workspace)
-    if schema is not None:
-        table_path = f"{local_path}/Tables/{schema}/{table_name}"
-    else:
-        table_path = f"{local_path}/Tables/{table_name}"

     parquet_file_df_columns = {
         # "Dataset": "string",
@@ -160,15 +163,12 @@
     max_rows_per_row_group = 0
     min_rows_per_row_group = float("inf")

-
-
+    is_vorder = is_v_ordered(
+        table_name=table_name, lakehouse=lakehouse, workspace=workspace, schema=schema
+    )

     # Get the common details of the Delta table
-
-
-    from delta import DeltaTable
-
-    delta_table = DeltaTable.forPath(spark, delta_table_path)
+    delta_table = _get_delta_table(delta_table_path)
     table_df = delta_table.toDF()
     # total_partition_count = table_df.rdd.getNumPartitions()
     row_count = table_df.count()
@@ -457,19 +457,10 @@ def get_delta_table_history(
     def camel_to_title(text):
         return re.sub(r"([a-z])([A-Z])", r"\1 \2", text).title()

-
-
-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace=workspace)
-    (lakehouse_name, lakehouse_id) = resolve_lakehouse_name_and_id(
-        lakehouse=lakehouse, workspace=workspace
-    )
+    workspace_id = resolve_workspace_id(workspace=workspace)
+    lakehouse_id = resolve_lakehouse_id(lakehouse=lakehouse, workspace=workspace_id)
     path = create_abfss_path(lakehouse_id, workspace_id, table_name, schema)
-
-    from delta import DeltaTable
-
-    delta_table = DeltaTable.forPath(spark, path)
-    df = delta_table.history().toPandas()
-
+    df = _read_delta_table_history(path=path)
     df.rename(columns=lambda col: camel_to_title(col), inplace=True)

     return df
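The Delta Analyzer refactor replaces the ad-hoc `DeltaTable.forPath` calls with shared helpers (`_get_delta_table`, `_read_delta_table_history`) and uses the new `is_v_ordered` check from `sempy_labs.lakehouse._helper`. A hedged sketch of the public entry points touched here follows, assuming both are exposed at the package top level; table, lakehouse and workspace names are placeholders.

```python
import sempy_labs as labs

# Analyze a delta table backing a Direct Lake model and inspect its history.
analysis_df = labs.delta_analyzer(
    table_name="FactSales",       # placeholder table
    lakehouse="My Lakehouse",     # placeholder lakehouse
    workspace="My Workspace",     # placeholder workspace
)
history_df = labs.get_delta_table_history(
    table_name="FactSales",
    lakehouse="My Lakehouse",
    workspace="My Workspace",
)
```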
sempy_labs/_environments.py CHANGED

@@ -3,6 +3,7 @@ import sempy_labs._icons as icons
 from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
+    resolve_workspace_id,
     _base_api,
     _create_dataframe,
     resolve_item_id,
@@ -67,10 +68,16 @@ def list_environments(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
         "Environment Name": "string",
         "Environment Id": "string",
         "Description": "string",
+        "Publish State": "string",
+        "Publish Target Version": "string",
+        "Publish Start Time": "string",
+        "Publish End Time": "string",
+        "Spark Libraries State": "string",
+        "Spark Settings State": "string",
     }
     df = _create_dataframe(columns=columns)

-
+    workspace_id = resolve_workspace_id(workspace)

     responses = _base_api(
         request=f"/v1/workspaces/{workspace_id}/environments",
@@ -80,10 +87,21 @@

     for r in responses:
         for v in r.get("value", []):
+            pub = v.get("properties", {}).get("publishDetails", {})
             new_data = {
                 "Environment Name": v.get("displayName"),
                 "Environment Id": v.get("id"),
                 "Description": v.get("description"),
+                "Publish State": pub.get("state"),
+                "Publish Target Version": pub.get("targetVersion"),
+                "Publish Start Time": pub.get("startTime"),
+                "Publish End Time": pub.get("endTime"),
+                "Spark Libraries State": pub.get("componentPublishInfo", {})
+                .get("sparkLibraries", {})
+                .get("state"),
+                "Spark Settings State": pub.get("componentPublishInfo", {})
+                .get("sparkSettings", {})
+                .get("state"),
             }
             df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

@@ -493,23 +493,20 @@ def get_semantic_model_size(
     Returns
     -------
     int
-        The size of the semantic model in
+        The size of the semantic model in bytes
     """

-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
-
     dict = fabric.evaluate_dax(
-        dataset=
-        workspace=
+        dataset=dataset,
+        workspace=workspace,
         dax_string="""
         EVALUATE SELECTCOLUMNS(FILTER(INFO.STORAGETABLECOLUMNS(), [COLUMN_TYPE] = "BASIC_DATA"),[DICTIONARY_SIZE])
         """,
     )

     used_size = fabric.evaluate_dax(
-        dataset=
-        workspace=
+        dataset=dataset,
+        workspace=workspace,
         dax_string="""
         EVALUATE SELECTCOLUMNS(INFO.STORAGETABLECOLUMNSEGMENTS(),[USED_SIZE])
         """,
@@ -524,5 +521,7 @@
         result = model_size / (1024**2) * 10**6
     elif model_size >= 10**3:
         result = model_size / (1024) * 10**3
+    else:
+        result = model_size

     return result
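Two behavioural notes from the hunks above: `list_environments` now returns publish-state columns pulled from each environment's `publishDetails`, and `get_semantic_model_size` gains an `else` branch so small sizes are returned unchanged instead of falling through without a result. A hedged usage sketch follows, assuming both functions are exposed at the package top level (otherwise import them from their defining modules); the workspace and model names are placeholders.

```python
import sempy_labs as labs

# Environments now include publish metadata alongside name/id/description.
envs = labs.list_environments(workspace="My Workspace")  # placeholder workspace
print(envs[["Environment Name", "Publish State", "Spark Libraries State"]])

# Approximate semantic model size; values below 10**3 now take the new else branch.
size_bytes = labs.get_semantic_model_size(
    dataset="Sales Model",     # placeholder model (argument shown in the hunk)
    workspace="My Workspace",
)
print(f"Model size: {size_bytes} bytes")
```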