semantic-link-labs 0.8.0-py3-none-any.whl → 0.8.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {semantic_link_labs-0.8.0.dist-info → semantic_link_labs-0.8.2.dist-info}/METADATA +39 -7
- {semantic_link_labs-0.8.0.dist-info → semantic_link_labs-0.8.2.dist-info}/RECORD +47 -37
- sempy_labs/__init__.py +70 -51
- sempy_labs/_ai.py +0 -2
- sempy_labs/_capacity_migration.py +1 -2
- sempy_labs/_data_pipelines.py +118 -0
- sempy_labs/_documentation.py +144 -0
- sempy_labs/_eventhouses.py +118 -0
- sempy_labs/_eventstreams.py +118 -0
- sempy_labs/_generate_semantic_model.py +3 -3
- sempy_labs/_git.py +3 -3
- sempy_labs/_helper_functions.py +117 -26
- sempy_labs/_icons.py +21 -0
- sempy_labs/_kql_databases.py +134 -0
- sempy_labs/_kql_querysets.py +124 -0
- sempy_labs/_list_functions.py +12 -425
- sempy_labs/_mirrored_warehouses.py +50 -0
- sempy_labs/_ml_experiments.py +122 -0
- sempy_labs/_ml_models.py +120 -0
- sempy_labs/_model_auto_build.py +0 -4
- sempy_labs/_model_bpa.py +11 -11
- sempy_labs/_model_bpa_bulk.py +8 -7
- sempy_labs/_model_dependencies.py +26 -18
- sempy_labs/_notebooks.py +5 -16
- sempy_labs/_query_scale_out.py +2 -2
- sempy_labs/_refresh_semantic_model.py +7 -19
- sempy_labs/_spark.py +10 -10
- sempy_labs/_vertipaq.py +16 -18
- sempy_labs/_warehouses.py +132 -0
- sempy_labs/_workspaces.py +0 -3
- sempy_labs/admin/_basic_functions.py +92 -10
- sempy_labs/admin/_domains.py +1 -1
- sempy_labs/directlake/_directlake_schema_sync.py +1 -1
- sempy_labs/directlake/_dl_helper.py +32 -16
- sempy_labs/directlake/_guardrails.py +7 -7
- sempy_labs/directlake/_update_directlake_partition_entity.py +1 -1
- sempy_labs/directlake/_warm_cache.py +1 -1
- sempy_labs/lakehouse/_get_lakehouse_tables.py +3 -3
- sempy_labs/lakehouse/_lakehouse.py +3 -2
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +5 -0
- sempy_labs/report/_generate_report.py +1 -1
- sempy_labs/report/_report_bpa.py +13 -3
- sempy_labs/report/_reportwrapper.py +14 -16
- sempy_labs/tom/_model.py +261 -24
- {semantic_link_labs-0.8.0.dist-info → semantic_link_labs-0.8.2.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.8.0.dist-info → semantic_link_labs-0.8.2.dist-info}/WHEEL +0 -0
- {semantic_link_labs-0.8.0.dist-info → semantic_link_labs-0.8.2.dist-info}/top_level.txt +0 -0
{semantic_link_labs-0.8.0.dist-info → semantic_link_labs-0.8.2.dist-info}/METADATA CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: semantic-link-labs
-Version: 0.8.0
+Version: 0.8.2
 Summary: Semantic Link Labs for Microsoft Fabric
 Author: Microsoft Corporation
 License: MIT License
@@ -27,7 +27,7 @@ Requires-Dist: pytest >=8.2.1 ; extra == 'test'
 # Semantic Link Labs
 
 [](https://badge.fury.io/py/semantic-link-labs)
-[](https://readthedocs.org/projects/semantic-link-labs/)
 [](https://github.com/psf/black)
 [](https://pepy.tech/project/semantic-link-labs)
 
@@ -35,9 +35,31 @@ Requires-Dist: pytest >=8.2.1 ; extra == 'test'
 [Read the documentation on ReadTheDocs!](https://semantic-link-labs.readthedocs.io/en/stable/)
 ---
 
-
-
-
+Semantic Link Labs is a Python library designed for use in [Microsoft Fabric notebooks](https://learn.microsoft.com/fabric/data-engineering/how-to-use-notebook). This library extends the capabilities of [Semantic Link](https://learn.microsoft.com/fabric/data-science/semantic-link-overview) offering additional functionalities to seamlessly integrate and work alongside it. The goal of Semantic Link Labs is to simplify technical processes, empowering people to focus on higher level activities and allowing tasks that are better suited for machines to be efficiently handled without human intervention.
+
+## Featured Scenarios
+* Semantic Models
+  * [Migrating an import/DirectQuery semantic model to Direct Lake](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration)
+  * [Model Best Practice Analyzer (BPA)](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.run_model_bpa)
+  * [Vertipaq Analyzer](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.vertipaq_analyzer)
+  * [Tabular Object Model](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Tabular%20Object%20Model.ipynb) [(TOM)](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html)
+  * [Translate a semantic model's metadata](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.translate_semantic_model)
+  * [Check Direct Lake Guardrails](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.get_lakehouse_tables)
+  * [Refresh](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Semantic%20Model%20Refresh.ipynb), [clear cache](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.clear_cache), [backup](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.backup_semantic_model), [restore](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.restore_semantic_model), [copy backup files](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.copy_semantic_model_backup_file), [move/deploy across workspaces](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.deploy_semantic_model)
+  * [Run DAX queries which impersonate a user](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.evaluate_dax_impersonation)
+* Reports
+  * [Report Best Practice Analyzer (BPA)](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.run_report_bpa)
+  * [View report metadata](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Report%20Analysis.ipynb)
+  * [View semantic model objects most frequently used in Power BI reports](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_semantic_model_object_report_usage)
+  * [View broken reports](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.html#sempy_labs.list_report_semantic_model_objects)
+  * [Rebind reports](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.report.html#sempy_labs.report.report_rebind)
+* Capacities
+  * [Migrating a Power BI Premium capacity (P sku) to a Fabric capacity (F sku)](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Capacity%20Migration.ipynb)
+* APIs
+  * Wrapper functions for [Power BI](https://learn.microsoft.com/rest/api/power-bi/), [Fabric](https://learn.microsoft.com/rest/api/fabric/articles/using-fabric-apis), and [Azure](https://learn.microsoft.com/rest/api/azure/?view=rest-power-bi-embedded-2021-01-01) APIs
+
+
+### Check out the [helper notebooks](https://github.com/microsoft/semantic-link-labs/tree/main/notebooks) for getting started!
 
 If you encounter any issues, please [raise a bug](https://github.com/microsoft/semantic-link-labs/issues/new?assignees=&labels=&projects=&template=bug_report.md&title=).
 
@@ -81,7 +103,9 @@ An even better way to ensure the semantic-link-labs library is available in your
 2. Select your newly created environment within the 'Environment' drop down in the navigation bar at the top of the notebook
 
 ## Version History
-* [0.8.0](https://github.com/microsoft/semantic-link-labs/releases/tag/0.8.0) (September 25, 2024)
+* [0.8.2](https://github.com/microsoft/semantic-link-labs/releases/tag/0.8.2) (October 2, 2024)
+* [0.8.1](https://github.com/microsoft/semantic-link-labs/releases/tag/0.8.1) (October 2, 2024)
+* [0.8.0](https://github.com/microsoft/semantic-link-labs/releases/tag/0.8.0) (September 25, 2024)
 * [0.7.4](https://github.com/microsoft/semantic-link-labs/releases/tag/0.7.4) (September 16, 2024)
 * [0.7.3](https://github.com/microsoft/semantic-link-labs/releases/tag/0.7.3) (September 11, 2024)
 * [0.7.2](https://github.com/microsoft/semantic-link-labs/releases/tag/0.7.2) (August 30, 2024)
@@ -128,7 +152,7 @@ Check out my [blog post](https://www.elegantbi.com/post/direct-lake-migration) o
 5. Back in the notebook, the next step will create your new Direct Lake semantic model with the name of your choice, taking all the relevant properties from the orignal semantic model and refreshing/framing your new semantic model.
 
 > [!NOTE]
->
+> Calculated tables are also migrated to Direct Lake (as data tables with their DAX expression stored as model annotations in the new semantic model). Additionally, Field Parameters are migrated as they were in the original semantic model (as a calculated table). [Auto date/time tables](https://learn.microsoft.com/power-bi/guidance/auto-date-time) are not migrated. Auto date/time must be disabled in Power BI Desktop and proper date table(s) must be created prior to migration.
 
 6. Finally, you can easily rebind your all reports which use the import/DQ semantic model to the new Direct Lake semantic model in one click.
 
@@ -141,6 +165,14 @@ Check out my [blog post](https://www.elegantbi.com/post/direct-lake-migration) o
 * Non-supported objects are not transferred (i.e. calculated columns, relationships using columns with unsupported data types etc.).
 * Reports used by your original semantic model will be rebinded to your new semantic model.
 
+### Limitations
+* Calculated columns are not migrated.
+* Auto date/time tables are not migrated.
+* References to calculated columns in Field Parameters are removed.
+* References to calculated columns in measure expressions or other DAX expressions will break.
+* Calculated tables are migrated as possible. The success of this migration depends on the interdependencies and complexity of the calculated table. This part of the migration is a workaround as technically calculated tables are not supported in Direct Lake.
+* See [here](https://learn.microsoft.com/fabric/get-started/direct-lake-overview#considerations-and-limitations) for the rest of the limitations of Direct Lake.
+
 ## Contributing
 
 This project welcomes contributions and suggestions. Most contributions require you to agree to a
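The Featured Scenarios added to the METADATA above centre on functions such as `run_model_bpa` and `vertipaq_analyzer`, which remain exported in 0.8.2 (see the `__init__.py` diff further down). A rough notebook sketch follows; the `dataset`/`workspace` keyword names and the placeholder values are assumptions for illustration, not taken from this diff.

```python
# Rough sketch only. run_model_bpa and vertipaq_analyzer are exported from sempy_labs
# (see the __init__.py diff below); the keyword names `dataset` and `workspace` and the
# "Sales Model" / "Sales" values are assumed placeholders, not confirmed by this diff.
import sempy_labs as labs

labs.run_model_bpa(dataset="Sales Model", workspace="Sales")      # semantic model BPA scan
labs.vertipaq_analyzer(dataset="Sales Model", workspace="Sales")  # Vertipaq statistics
```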
{semantic_link_labs-0.8.0.dist-info → semantic_link_labs-0.8.2.dist-info}/RECORD CHANGED

@@ -1,33 +1,43 @@
-sempy_labs/__init__.py,sha256=
-sempy_labs/_ai.py,sha256=
+sempy_labs/__init__.py,sha256=rX6aU2I1Y30fOXJs2xNTvep8Hni5s3J67TT9MCDLpcw,9957
+sempy_labs/_ai.py,sha256=CzsNw6Wpd2B5Rd0RcY250-_p0L-0gFoMNLEc_KmrobU,16177
 sempy_labs/_capacities.py,sha256=X39LHYde3rgwW8vTbNUMt2S9dekTfy6sqQGXg-MCybY,20132
-sempy_labs/_capacity_migration.py,sha256=
+sempy_labs/_capacity_migration.py,sha256=tVrIT4sSWKQg-6AqjrvGQPIBGHTcMtPqdymUnEJKVbs,23612
 sempy_labs/_clear_cache.py,sha256=JgY4Nio7Zy9YHOPxQseiZ5lJLWEi5s3Qn5SLSvuyBwo,12490
 sempy_labs/_connections.py,sha256=w1NeC9auc07ko2pY3m5vM_9-sgW_chw1OxPzDviVC5k,12179
+sempy_labs/_data_pipelines.py,sha256=gSmlkBpse0bodNfcfmS45_LKLtiv98nIqxbH70nSo-I,3590
 sempy_labs/_dataflows.py,sha256=ApGMklXMQZQssYa7egxlBtsYz5mcR3JS69ZgeORXVBM,4051
 sempy_labs/_dax.py,sha256=dt1GgHceyM7f6phRBPxRKnmQy_KYKpcgFQHuOjGbpLo,2029
 sempy_labs/_deployment_pipelines.py,sha256=-wjqQieR25BR-0r2QfTzg4QuFzOgcayZLnS-fNYIPPU,5347
+sempy_labs/_documentation.py,sha256=yVA8VPEzx_fmljtcvSxtB7-BeupYsfdMXXjp6Fpnyo8,5007
 sempy_labs/_environments.py,sha256=oyfEeOLX5XPevumj4Il5t1z2gmsR8xm6pg1wfGoutMc,4821
-sempy_labs/
-sempy_labs/
-sempy_labs/
-sempy_labs/
-sempy_labs/
-sempy_labs/
-sempy_labs/
-sempy_labs/
+sempy_labs/_eventhouses.py,sha256=tVUYJkAcwoBZooXawJpCKzx2YypawKECrfKouRTkMJU,3537
+sempy_labs/_eventstreams.py,sha256=NV0YAC6V3nBxJIVKWH7ceAtooZoH9rA4zv45xTcb-t0,3556
+sempy_labs/_generate_semantic_model.py,sha256=mvM1DDWEkyfU8Ee4NT9Zu52qZyCnmq0971esIjp_LpQ,13253
+sempy_labs/_git.py,sha256=vXWv6e45H0OGIyzBvB-anSJYrBFjA2MVUYIjaUPd54I,12731
+sempy_labs/_helper_functions.py,sha256=7mTpsTZpW0M3lGIReSuGIgntnb_wcQr2Jm6FolxRClI,32310
+sempy_labs/_icons.py,sha256=pwei9IrwbNAxmxCNgX_8cACwjhvmM6cYYZcS3sLR9jc,2696
+sempy_labs/_kql_databases.py,sha256=DE_5un0QO5ayYiqKiMDinis0ZysmIWWaEoYQ9ach_8o,4110
+sempy_labs/_kql_querysets.py,sha256=kH8diN3DxuCINlKhAvzyXKXtj0uJ0uhcxrWI9j097vU,3646
+sempy_labs/_list_functions.py,sha256=jBdLo_8gnNk1nQdTlGvRbC_PMdyctxGnLQXwkDFNMcE,55255
+sempy_labs/_mirrored_warehouses.py,sha256=xcVnrNwi46snQiS5iFKcZe-GGoFLqva7qW4iOPUD7Y8,1577
+sempy_labs/_ml_experiments.py,sha256=fTqFB0qn0T1k4vGmaghvKUolb5Kvg_qfTOYl3ABknt8,3676
+sempy_labs/_ml_models.py,sha256=nAISwah2fQI0OOJfBAEuI7d4uKu4DUIHi4BqUye6FQE,3572
+sempy_labs/_model_auto_build.py,sha256=-qDwmFx3KMuIaaUU8CzmCX7CF7ZUVWWu-_GH2UhAU_8,5045
+sempy_labs/_model_bpa.py,sha256=ow2UiiTGF_9nAldQxLdR4wmVrExXMYurhOVxu9pOFBQ,20377
+sempy_labs/_model_bpa_bulk.py,sha256=35M6FnHP6T-3rkwUHOeL6BRj-aKL7lRxpPvJKnj8wwU,14944
 sempy_labs/_model_bpa_rules.py,sha256=B8hg3_ktS5ql859nL3e9kdV8fNDBgMNO5j2hGrXp6po,44855
-sempy_labs/_model_dependencies.py,sha256=
-sempy_labs/_notebooks.py,sha256=
+sempy_labs/_model_dependencies.py,sha256=rFPVDA6gLKxy2rDPtHHIvVJF9SmJz4xRe4n922bzWtA,13452
+sempy_labs/_notebooks.py,sha256=K1ZZ9y-lOzDLeJ2UlCmJlN7kml5FRPmE0Bj5SvNDwq8,4339
 sempy_labs/_one_lake_integration.py,sha256=eIuLxlw8eXfUH2avKhsyLmXZbTllSwGsz2j_HMAikpQ,6234
-sempy_labs/_query_scale_out.py,sha256=
-sempy_labs/_refresh_semantic_model.py,sha256=
-sempy_labs/_spark.py,sha256=
+sempy_labs/_query_scale_out.py,sha256=Wk6vxyI6x1xFV30UpEwvzlD0TnentpKOO-8mEVLc7zE,15253
+sempy_labs/_refresh_semantic_model.py,sha256=Kb3sZf9ihBcbh2la6c7wuAhUvGnqrlNR2obAdNJNz98,6920
+sempy_labs/_spark.py,sha256=_gkmitnGIf0Q_wKDHcnkmGSyE6GtPgsi2QnudLcBWKU,19477
 sempy_labs/_sql.py,sha256=p0CvDD3fF0IQdoFI6308-DDUFQR9V_qWpXfQvyX3znw,5356
 sempy_labs/_translations.py,sha256=ocRtdgymcPYOT3jiYcVv9ze5i2gDjgLcTJBEl66pKtg,19771
-sempy_labs/_vertipaq.py,sha256=
+sempy_labs/_vertipaq.py,sha256=txae-LoCE8K8Z7lVmlC7FgIS6vVxwLtKhGcnU28ZuBk,35738
+sempy_labs/_warehouses.py,sha256=qIDteSE5dFm0rkzF3Ouryb467VKxrHywNSSvtf_TcXM,3920
 sempy_labs/_workspace_identity.py,sha256=2uum6Ojv1x3Zzp8JEAQ2ER9J5yafb_v_TgZwY5vdBwA,2120
-sempy_labs/_workspaces.py,sha256=
+sempy_labs/_workspaces.py,sha256=KZfQ1hAKhfIsq52ZF-RBptueWUh6-P-OA8YDfmSQbpM,10080
 sempy_labs/_bpa_translation/_model/_translations_am-ET.po,sha256=ve23guQ48HXXn2_yGicUtr_Orhi9nth_lna8-x93GjA,37928
 sempy_labs/_bpa_translation/_model/_translations_ar-AE.po,sha256=YtJzrUXQesme9D-m9JUGw4q7RSYfyXF25FH7PwDWcFI,35787
 sempy_labs/_bpa_translation/_model/_translations_bg-BG.po,sha256=bvX36ZCeHGYiiZjDTzGz1kSxR89u05ZwuGPoIl9olX8,42546
@@ -67,29 +77,29 @@ sempy_labs/_bpa_translation/_model/_translations_uk-UA.po,sha256=t7kpMB65cOKNIg8
 sempy_labs/_bpa_translation/_model/_translations_zh-CN.po,sha256=QhmS3HSnILLN-OgMUGmNFvGr4TEhoH1FKL0F1SgU2nk,26035
 sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po,sha256=0ZivJJa-ggc7PJqLAgDmi8OGGUGzEcleGPmlLL9YYb4,30495
 sempy_labs/admin/__init__.py,sha256=whrBFHvjWav0ni_LOYJ4Q63WtiJEPOQ1etudl2wiWKA,1445
-sempy_labs/admin/_basic_functions.py,sha256=
-sempy_labs/admin/_domains.py,sha256=
+sempy_labs/admin/_basic_functions.py,sha256=2gFJ9XSU3Bm-IpBYk9vUDqnCCkWicRcNGjF6qP4ds6w,29229
+sempy_labs/admin/_domains.py,sha256=ByDqLsm1eDPnTr93yuVsPnhEQLGerWWwWk4_rAlRD1M,11691
 sempy_labs/directlake/__init__.py,sha256=ZlpQQDdA5-C10zSf9YICH0waMIFM-55aacsEzvpuHEM,2047
 sempy_labs/directlake/_directlake_schema_compare.py,sha256=ocHFU6E6HSKgcNLywGM0dx0ie9AXYwk-E7o7EYcqiN4,4422
-sempy_labs/directlake/_directlake_schema_sync.py,sha256=
-sempy_labs/directlake/_dl_helper.py,sha256=
+sempy_labs/directlake/_directlake_schema_sync.py,sha256=I2SohqatMPDY8WXinAYP5QrVZ2wIaE4xsP-fVbTyAHE,4319
+sempy_labs/directlake/_dl_helper.py,sha256=K4IN37gA_aRaFjpUT6hN1ExBMOR04_be2RSOtHh7k0Y,9260
 sempy_labs/directlake/_generate_shared_expression.py,sha256=ayMkgmlmFzDTYrqbXGm5C3w05YDkI4SYnyi1eaIo9Vs,3087
 sempy_labs/directlake/_get_directlake_lakehouse.py,sha256=sovI4ds2SEgkp4Fi465jtJ4seRvQxdYgcixRDvsUwNM,2321
 sempy_labs/directlake/_get_shared_expression.py,sha256=Xl2_GYqRll95cN7JjwLlULbcRXM71Ij9JkrYAp7cNJM,1943
-sempy_labs/directlake/_guardrails.py,sha256=
+sempy_labs/directlake/_guardrails.py,sha256=20l2f2jeIJ2-z2H64Vi2n1e8cjMuideXSbBT9OYQ3kE,2500
 sempy_labs/directlake/_list_directlake_model_calc_tables.py,sha256=_rpnbgsFAz2W16PpgIOB0Rj_Fs1ZKrDbz3DUaaR_bfU,2143
 sempy_labs/directlake/_show_unsupported_directlake_objects.py,sha256=-8werbzIi_xknMkymIsq-JwNTAvt0W18xA4HagXhT8U,3332
 sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py,sha256=LpxYPxF_rkLD2okyKejLIV-qA8UeJ9xRWyv32kNdR_c,2822
-sempy_labs/directlake/_update_directlake_partition_entity.py,sha256=
-sempy_labs/directlake/_warm_cache.py,sha256=
+sempy_labs/directlake/_update_directlake_partition_entity.py,sha256=DCPNZfhqVG_yXnQbrMuattGGo1tuXt54RdAaiN84MSQ,8570
+sempy_labs/directlake/_warm_cache.py,sha256=X4R2_i4jFnFXYmwhDIxPb9h1CdPVLzFKHItsz0QpeRg,8248
 sempy_labs/lakehouse/__init__.py,sha256=6LVQltQ3cjyiuxvjXTuNdJ163zSqi4h_tEZY4zsxuSw,647
 sempy_labs/lakehouse/_get_lakehouse_columns.py,sha256=Bb_iCTlNwl0wdN4dW_E7tVnfbHhHwQT_l0SUqvcbYpo,2582
-sempy_labs/lakehouse/_get_lakehouse_tables.py,sha256=
-sempy_labs/lakehouse/_lakehouse.py,sha256=
+sempy_labs/lakehouse/_get_lakehouse_tables.py,sha256=W5ZNC52v79FBXBhHwmhnyQHBuy-PnDJyC5OX4zf-h_4,8802
+sempy_labs/lakehouse/_lakehouse.py,sha256=_yn0ySUrJQD9nySa3gFpEGr6AvF-vOKIMNJruotfxHQ,5224
 sempy_labs/lakehouse/_shortcuts.py,sha256=MT_Cqog5cTMz9fN3M_ZjAaQSjXXiyCyPWGY8LbaXZsI,6977
 sempy_labs/migration/__init__.py,sha256=w4vvGk6wTWXVfofJDmio2yIFvSSJsxOpjv6mvNGmrOI,1043
 sempy_labs/migration/_create_pqt_file.py,sha256=oYoKD78K9Ox1fqtkh-BfU_G5nUIoK_-5ChvCKDsYsWU,9257
-sempy_labs/migration/_migrate_calctables_to_lakehouse.py,sha256=
+sempy_labs/migration/_migrate_calctables_to_lakehouse.py,sha256=y5fDjqgl-rpXgZWMGagLQS_aUqJ2ksWLR-cnriWJO7Q,17986
 sempy_labs/migration/_migrate_calctables_to_semantic_model.py,sha256=GC3zthtCsESLl_Mao-3C01_CzbfGBDTaEdq6k0DT2yQ,6203
 sempy_labs/migration/_migrate_model_objects_to_semantic_model.py,sha256=DAXz5TwPlrDxxciBp3NrTOlnkjeKaOYfntWMgyCXI10,23318
 sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py,sha256=TX0NIffDocbK1B4cfssUHXjOG1htcaFOFWzDsBCGsi0,6515
@@ -97,14 +107,14 @@ sempy_labs/migration/_migration_validation.py,sha256=ZsUdVlBmo03ycaZE6fFRRGU6qpE
 sempy_labs/migration/_refresh_calc_tables.py,sha256=eDj0OJQ07Tum4umZH0NsUW5Rx_YXEpGnAu8OVVoQ4yk,5190
 sempy_labs/report/_BPAReportTemplate.json,sha256=9Uh-7E6d2ooxQ7j5JRayv_ayEULc7Gzg42kZGKdOqH8,63920
 sempy_labs/report/__init__.py,sha256=LqbIi1SnB532l79ldsFbeIkk-6dEnWkJYgNCaF2IR08,1056
-sempy_labs/report/_generate_report.py,sha256=
-sempy_labs/report/_report_bpa.py,sha256=
+sempy_labs/report/_generate_report.py,sha256=d7GVgLP77TApY4EKgvsvMwEYFo1Yl2_al8i8FCB6FPM,12179
+sempy_labs/report/_report_bpa.py,sha256=vPUCgGXgMbKBlFj3wFVOko2hWiuxrdTf9G-eSOfq3tQ,13667
 sempy_labs/report/_report_bpa_rules.py,sha256=tPVGA0hmE6QMLlWtig7Va7Ksr2yXWl_Lndq--tWWd6w,4959
 sempy_labs/report/_report_functions.py,sha256=qY2lE9tyf-xQo8Fdu2yl2nneiMMM6LFkwlqnyAOJjgg,29318
 sempy_labs/report/_report_helper.py,sha256=fkSo5m3_KlAlo-fu8FTnxINigWbZI66ex-r44WalKsw,8711
 sempy_labs/report/_report_list_functions.py,sha256=4k-bPEi4uW_ozHTZNc_67idP1LQZPT-gO9ToRCv06fk,3127
 sempy_labs/report/_report_rebind.py,sha256=F1gXE-VM_812EFOfqbMDnAdynVfVKKx9Cvs31Hnuras,5004
-sempy_labs/report/_reportwrapper.py,sha256=
+sempy_labs/report/_reportwrapper.py,sha256=NczgthYYI7oAgrm2yXp5Z1aocxqoXAdhnXI06wRd3s0,75719
 sempy_labs/report/_bpareporttemplate/.platform,sha256=kWRa6B_KwSYLsvVFDx372mQriQO8v7dJ_YzQV_cfD-Q,303
 sempy_labs/report/_bpareporttemplate/definition.pbir,sha256=bttyHZYKqjA8OBb_cezGlX4H82cDvGZVCl1QB3fij4E,343
 sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json,sha256=kzjBlNdjbsSBBSHBwbQc298AJCr9Vp6Ex0D5PemUuT0,1578
@@ -135,9 +145,9 @@ sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visua
 sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json,sha256=wBVuNc8S2NaUA0FC708w6stmR2djNZp8nAsHMqesgsc,293
 sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json,sha256=mlY6t9OlSe-Y6_QmXJpS1vggU6Y3FjISUKECL8FVSg8,931
 sempy_labs/tom/__init__.py,sha256=Qbs8leW0fjzvWwOjyWK3Hjeehu7IvpB1beASGsi28bk,121
-sempy_labs/tom/_model.py,sha256=
-semantic_link_labs-0.8.
-semantic_link_labs-0.8.
-semantic_link_labs-0.8.
-semantic_link_labs-0.8.
-semantic_link_labs-0.8.
+sempy_labs/tom/_model.py,sha256=tfC241UprOx8ZLGqnSnaAzi20qlghESc0uK95i2HaAs,165380
+semantic_link_labs-0.8.2.dist-info/LICENSE,sha256=ws_MuBL-SCEBqPBFl9_FqZkaaydIJmxHrJG2parhU4M,1141
+semantic_link_labs-0.8.2.dist-info/METADATA,sha256=1hsvBRmEpNCyw0quXQ3UDVsVS98cv0RYb8R3z0AMU4c,15783
+semantic_link_labs-0.8.2.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
+semantic_link_labs-0.8.2.dist-info/top_level.txt,sha256=kiQX1y42Dbein1l3Q8jMUYyRulDjdlc2tMepvtrvixQ,11
+semantic_link_labs-0.8.2.dist-info/RECORD,,
sempy_labs/__init__.py CHANGED

@@ -1,3 +1,44 @@
+from sempy_labs._ml_models import (
+    list_ml_models,
+    create_ml_model,
+    delete_ml_model,
+)
+from sempy_labs._ml_experiments import (
+    list_ml_experiments,
+    create_ml_experiment,
+    delete_ml_experiment,
+)
+from sempy_labs._warehouses import (
+    create_warehouse,
+    list_warehouses,
+    delete_warehouse,
+)
+from sempy_labs._data_pipelines import (
+    list_data_pipelines,
+    create_data_pipeline,
+    delete_data_pipeline,
+)
+from sempy_labs._eventhouses import (
+    create_eventhouse,
+    list_eventhouses,
+    delete_eventhouse,
+)
+from sempy_labs._eventstreams import (
+    list_eventstreams,
+    create_eventstream,
+    delete_eventstream,
+)
+from sempy_labs._kql_querysets import (
+    list_kql_querysets,
+    create_kql_queryset,
+    delete_kql_queryset,
+)
+from sempy_labs._kql_databases import (
+    list_kql_databases,
+    create_kql_database,
+    delete_kql_database,
+)
+from sempy_labs._mirrored_warehouses import list_mirrored_warehouses
 from sempy_labs._environments import (
     create_environment,
     delete_environment,

@@ -23,8 +64,6 @@ from sempy_labs._capacity_migration import (
 )
 from sempy_labs._capacities import (
     create_fabric_capacity,
-    # get_capacity_resource_governance,
-    # list_vcores,
     resume_fabric_capacity,
     suspend_fabric_capacity,
     update_fabric_capacity,

@@ -33,7 +72,6 @@ from sempy_labs._capacities import (
     delete_embedded_capacity,
     delete_premium_capacity,
 )
-
 from sempy_labs._spark import (
     get_spark_settings,
     update_spark_settings,

@@ -42,7 +80,6 @@ from sempy_labs._spark import (
     delete_custom_pool,
     update_custom_pool,
 )
-
 from sempy_labs._workspaces import (
     list_workspace_users,
     update_workspace_user,

@@ -107,27 +144,14 @@ from sempy_labs._list_functions import (
     list_shortcuts,
     get_object_level_security,
     list_capacities,
-    # list_annotations,
-    # list_columns,
     list_dashboards,
-
-    # list_datapipelines,
-    # list_eventstreams,
-    # list_kpis,
-    # list_kqldatabases,
-    # list_kqlquerysets,
+    list_datamarts,
     list_lakehouses,
-
-    # list_mlexperiments,
-    # list_mlmodels,
-    # list_relationships,
-    # list_sqlendpoints,
-    # list_tables,
-    list_warehouses,
-    create_warehouse,
+    list_sql_endpoints,
     update_item,
 )
 from sempy_labs._helper_functions import (
+    convert_to_friendly_case,
     resolve_environment_id,
     resolve_capacity_id,
     resolve_warehouse_id,

@@ -150,10 +174,7 @@ from sempy_labs._helper_functions import (
     get_capacity_id,
     get_capacity_name,
     resolve_capacity_name,
-    # language_validate
 )
-
-# from sempy_labs._model_auto_build import model_auto_build
 from sempy_labs._model_bpa_bulk import (
     run_model_bpa_bulk,
     create_model_bpa_semantic_model,

@@ -184,7 +205,6 @@ from sempy_labs._refresh_semantic_model import (
 from sempy_labs._translations import translate_semantic_model
 from sempy_labs._vertipaq import (
     vertipaq_analyzer,
-    # visualize_vertipaq,
     import_vertipaq_analyzer,
 )
 

@@ -218,26 +238,13 @@ __all__ = [
     "deploy_semantic_model",
     "get_semantic_model_bim",
     "get_object_level_security",
-    #'list_annotations',
-    #'list_columns',
     "list_dashboards",
     "list_dataflow_storage_accounts",
-    #'list_datamarts',
-    #'list_datapipelines',
-    #'list_eventstreams',
-    #'list_kpis',
-    #'list_kqldatabases',
-    #'list_kqlquerysets',
     "list_lakehouses",
-    #'list_mirroredwarehouses',
-    #'list_mlexperiments',
-    #'list_mlmodels',
-    #'list_relationships',
-    #'list_sqlendpoints',
-    #'list_tables',
     "list_warehouses",
     "list_workspace_role_assignments",
     "create_warehouse",
+    "delete_warehouse",
     "update_item",
     "create_abfss_path",
     "format_dax_object_name",

@@ -251,8 +258,6 @@ __all__ = [
     "resolve_dataset_name",
     "resolve_report_id",
     "resolve_report_name",
-    # 'language_validate',
-    # "model_auto_build",
     "model_bpa_rules",
     "run_model_bpa",
     "measure_dependency_tree",

@@ -270,7 +275,6 @@ __all__ = [
     "cancel_dataset_refresh",
     "translate_semantic_model",
     "vertipaq_analyzer",
-    # 'visualize_vertipaq',
     "import_vertipaq_analyzer",
     "list_semantic_model_objects",
     "list_shortcuts",

@@ -318,7 +322,6 @@ __all__ = [
     "delete_fabric_capacity",
     "resume_fabric_capacity",
     "update_fabric_capacity",
-    "create_fabric_capacity",
     "delete_premium_capacity",
     "suspend_fabric_capacity",
     "delete_embedded_capacity",

@@ -327,17 +330,33 @@ __all__ = [
     "migrate_capacities",
     "create_fabric_capacity",
     "migrate_capacity_settings",
-    # "get_capacity_resource_governance",
-    # "list_vcores",
     "migrate_disaster_recovery_settings",
     "migrate_notification_settings",
     "migrate_access_settings",
     "migrate_delegated_tenant_settings",
-    "
-    "
-    "
-    "
-    "
-    "
-    "
+    "convert_to_friendly_case",
+    "list_mirrored_warehouses",
+    "list_kql_databases",
+    "create_kql_database",
+    "delete_kql_database",
+    "create_eventhouse",
+    "list_eventhouses",
+    "delete_eventhouse",
+    "list_data_pipelines",
+    "create_data_pipeline",
+    "delete_data_pipeline",
+    "list_eventstreams",
+    "create_eventstream",
+    "delete_eventstream",
+    "list_kql_querysets",
+    "create_kql_queryset",
+    "delete_kql_queryset",
+    "list_ml_models",
+    "create_ml_model",
+    "delete_ml_model",
+    "list_ml_experiments",
+    "create_ml_experiment",
+    "delete_ml_experiment",
+    "list_sql_endpoints",
+    "list_datamarts",
 ]
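The reworked `__init__.py` above re-exports the new item helpers at the package root. A minimal sketch of calling them from a Fabric notebook follows; only `list_data_pipelines` has its full signature shown in this diff, so the same `workspace` keyword is assumed for the other listing helpers, and "Sales" is a placeholder workspace name.

```python
# Minimal sketch of the helpers newly exported at the package root in 0.8.1/0.8.2.
# Assumes a Microsoft Fabric notebook with semantic-link-labs installed; "Sales" is a
# placeholder workspace, and the `workspace` keyword is confirmed here only for
# list_data_pipelines (the other list_* helpers are assumed to follow the same pattern).
import sempy_labs as labs

pipelines = labs.list_data_pipelines(workspace="Sales")   # pandas DataFrame of pipelines
eventhouses = labs.list_eventhouses(workspace="Sales")
kql_databases = labs.list_kql_databases(workspace="Sales")
print(pipelines[["Data Pipeline Name", "Data Pipeline ID"]])
```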
sempy_labs/_ai.py CHANGED

@@ -1,8 +1,6 @@
 import sempy
 import sempy.fabric as fabric
 import pandas as pd
-from synapse.ml.services.openai import OpenAICompletion
-from pyspark.sql.functions import col
 from pyspark.sql import SparkSession
 from typing import List, Optional, Union
 from IPython.display import display
sempy_labs/_capacity_migration.py CHANGED

@@ -136,8 +136,7 @@ def migrate_capacities(
     resource_group : str | dict
         The name of the Azure resource group.
         For A skus, this parameter will be ignored and the resource group used for the F sku will be the same as the A sku's resource group.
-        For P skus, if this parameter is a string, it will use that resource group for all of the newly created F skus.
-        if this parameter is a dictionary, it will use that mapping (capacity name -> resource group) for creating capacities with the mapped resource groups.
+        For P skus, if this parameter is a string, it will use that resource group for all of the newly created F skus. If this parameter is a dictionary, it will use that mapping (capacity name -> resource group) for creating capacities with the mapped resource groups.
     capacities : str | List[str], default=None
         The capacity(ies) to migrate from A/P -> F sku.
         Defaults to None which migrates all accessible A/P sku capacities to F skus.
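The reflowed docstring above distinguishes the two accepted shapes of `resource_group` when migrating P skus. The sketch below only illustrates those shapes; the remaining arguments of `migrate_capacities` are not part of this diff and are elided, so the calls are left commented out.

```python
# Shapes of `resource_group` described in the docstring above (illustration only).
# The other arguments of migrate_capacities are not shown in this diff; "..." stands
# in for them, so the calls stay commented out.
from sempy_labs import migrate_capacities  # exported at the package root (see __init__.py diff)

# One resource group reused for every newly created F sku:
# migrate_capacities(resource_group="rg-fabric", capacities=["CapacityA", "CapacityB"], ...)

# Per-capacity mapping (capacity name -> resource group):
# migrate_capacities(
#     resource_group={"CapacityA": "rg-east", "CapacityB": "rg-west"},
#     capacities=["CapacityA", "CapacityB"],
#     ...
# )
```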
sempy_labs/_data_pipelines.py ADDED

@@ -0,0 +1,118 @@
+import sempy.fabric as fabric
+import pandas as pd
+import sempy_labs._icons as icons
+from typing import Optional
+from sempy_labs._helper_functions import (
+    resolve_workspace_name_and_id,
+    lro,
+    pagination,
+)
+from sempy.fabric.exceptions import FabricHTTPException
+
+
+def list_data_pipelines(workspace: Optional[str] = None) -> pd.DataFrame:
+    """
+    Shows the data pipelines within a workspace.
+
+    Parameters
+    ----------
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the data pipelines within a workspace.
+    """
+
+    df = pd.DataFrame(columns=["Data Pipeline Name", "Data Pipeline ID", "Description"])
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    client = fabric.FabricRestClient()
+    response = client.get(f"/v1/workspaces/{workspace_id}/dataPipelines")
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    responses = pagination(client, response)
+
+    for r in responses:
+        for v in r.get("value", []):
+            new_data = {
+                "Data Pipeline Name": v.get("displayName"),
+                "Data Pipeline ID": v.get("id"),
+                "Description": v.get("description"),
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    return df
+
+
+def create_data_pipeline(
+    name: str, description: Optional[str] = None, workspace: Optional[str] = None
+):
+    """
+    Creates a Fabric data pipeline.
+
+    Parameters
+    ----------
+    name: str
+        Name of the data pipeline.
+    description : str, default=None
+        A description of the environment.
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    request_body = {"displayName": name}
+
+    if description:
+        request_body["description"] = description
+
+    client = fabric.FabricRestClient()
+    response = client.post(
+        f"/v1/workspaces/{workspace_id}/dataPipelines", json=request_body
+    )
+
+    lro(client, response, status_codes=[201, 202])
+
+    print(
+        f"{icons.green_dot} The '{name}' data pipeline has been created within the '{workspace}' workspace."
+    )
+
+
+def delete_data_pipeline(name: str, workspace: Optional[str] = None):
+    """
+    Deletes a Fabric data pipeline.
+
+    Parameters
+    ----------
+    name: str
+        Name of the data pipeline.
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    item_id = fabric.resolve_item_id(
+        item_name=name, type="DataPipeline", workspace=workspace
+    )
+
+    client = fabric.FabricRestClient()
+    response = client.delete(f"/v1/workspaces/{workspace_id}/dataPipelines/{item_id}")
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    print(
+        f"{icons.green_dot} The '{name}' data pipeline within the '{workspace}' workspace has been deleted."
+    )
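The three functions added above give the new data-pipeline item type its basic list/create/delete surface. A short usage sketch based on the signatures in this file; "Sales" and "SalesIngestion" are placeholder names, and it assumes a Fabric notebook where the package is installed.

```python
# Usage sketch for the new sempy_labs._data_pipelines module, using the signatures added
# above. "Sales" and "SalesIngestion" are placeholder names; run inside a Fabric notebook.
from sempy_labs import (
    create_data_pipeline,
    list_data_pipelines,
    delete_data_pipeline,
)

create_data_pipeline(
    name="SalesIngestion",
    description="Loads daily sales extracts",
    workspace="Sales",  # None falls back to the attached lakehouse/notebook workspace
)

df = list_data_pipelines(workspace="Sales")
print(df[["Data Pipeline Name", "Data Pipeline ID"]])

delete_data_pipeline(name="SalesIngestion", workspace="Sales")
```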