semantic-link-labs 0.9.5__py3-none-any.whl → 0.9.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic; see the registry's advisory page for more details.

Files changed (48)
  1. {semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.6.dist-info}/METADATA +3 -2
  2. {semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.6.dist-info}/RECORD +48 -47
  3. {semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.6.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +15 -1
  5. sempy_labs/_capacities.py +37 -1
  6. sempy_labs/_capacity_migration.py +11 -14
  7. sempy_labs/_connections.py +2 -4
  8. sempy_labs/_dataflows.py +2 -2
  9. sempy_labs/_dax_query_view.py +55 -0
  10. sempy_labs/_delta_analyzer.py +16 -14
  11. sempy_labs/_environments.py +8 -1
  12. sempy_labs/_eventhouses.py +5 -1
  13. sempy_labs/_external_data_shares.py +4 -10
  14. sempy_labs/_generate_semantic_model.py +2 -1
  15. sempy_labs/_graphQL.py +5 -1
  16. sempy_labs/_helper_functions.py +272 -51
  17. sempy_labs/_kql_databases.py +5 -1
  18. sempy_labs/_list_functions.py +5 -37
  19. sempy_labs/_managed_private_endpoints.py +9 -2
  20. sempy_labs/_mirrored_databases.py +3 -1
  21. sempy_labs/_ml_experiments.py +1 -1
  22. sempy_labs/_model_bpa.py +2 -11
  23. sempy_labs/_model_bpa_bulk.py +23 -27
  24. sempy_labs/_one_lake_integration.py +2 -1
  25. sempy_labs/_semantic_models.py +20 -0
  26. sempy_labs/_sql.py +6 -2
  27. sempy_labs/_sqldatabase.py +61 -100
  28. sempy_labs/_vertipaq.py +8 -11
  29. sempy_labs/_warehouses.py +14 -3
  30. sempy_labs/_workspace_identity.py +6 -0
  31. sempy_labs/_workspaces.py +42 -2
  32. sempy_labs/admin/_basic_functions.py +3 -2
  33. sempy_labs/admin/_scanner.py +2 -2
  34. sempy_labs/directlake/_directlake_schema_compare.py +2 -1
  35. sempy_labs/directlake/_directlake_schema_sync.py +65 -19
  36. sempy_labs/directlake/_dl_helper.py +0 -6
  37. sempy_labs/directlake/_generate_shared_expression.py +10 -11
  38. sempy_labs/directlake/_guardrails.py +2 -1
  39. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +12 -25
  40. sempy_labs/directlake/_update_directlake_partition_entity.py +2 -2
  41. sempy_labs/lakehouse/_shortcuts.py +7 -5
  42. sempy_labs/migration/_migration_validation.py +0 -4
  43. sempy_labs/report/_download_report.py +4 -6
  44. sempy_labs/report/_generate_report.py +6 -6
  45. sempy_labs/report/_report_functions.py +2 -1
  46. sempy_labs/report/_report_rebind.py +8 -6
  47. {semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.6.dist-info}/LICENSE +0 -0
  48. {semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.6.dist-info}/top_level.txt +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: semantic-link-labs
3
- Version: 0.9.5
3
+ Version: 0.9.6
4
4
  Summary: Semantic Link Labs for Microsoft Fabric
5
5
  Author: Microsoft Corporation
6
6
  License: MIT License
@@ -26,7 +26,7 @@ Requires-Dist: pytest>=8.2.1; extra == "test"
26
26
  # Semantic Link Labs
27
27
 
28
28
  [![PyPI version](https://badge.fury.io/py/semantic-link-labs.svg)](https://badge.fury.io/py/semantic-link-labs)
29
- [![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.9.5&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
29
+ [![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.9.6&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
30
30
  [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
31
31
  [![Downloads](https://static.pepy.tech/badge/semantic-link-labs)](https://pepy.tech/project/semantic-link-labs)
32
32
 
@@ -148,6 +148,7 @@ An even better way to ensure the semantic-link-labs library is available in your
148
148
  2. Select your newly created environment within the 'Environment' drop down in the navigation bar at the top of the notebook
149
149
 
150
150
  ## Version History
151
+ * [0.9.6](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.6) (March 12, 2025)
151
152
  * [0.9.5](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.5) (March 7, 2025)
152
153
  * [0.9.4](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.4) (February 27, 2025)
153
154
  * [0.9.3](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.3) (February 13, 2025)
@@ -1,57 +1,58 @@
1
- sempy_labs/__init__.py,sha256=YJVsQPcDSMiXCXSSeWYvqptLTgw7woGOAUWO_dTTOj4,15133
1
+ sempy_labs/__init__.py,sha256=FVczkUAO4jdRM325pPRwLZ_GZuhPaaEZfxT902JZ7w8,15500
2
2
  sempy_labs/_ai.py,sha256=rhVohfwrU1mvWH0EN_vCTnldb8xJNfGHZGba34k1JVw,16174
3
3
  sempy_labs/_authentication.py,sha256=GjtN5XqIyWXbR5Ni4hfYiUNwgFa-ySX8e-BrqE1vgGc,6903
4
- sempy_labs/_capacities.py,sha256=0JStFjrkDnpDQJyttMCtNNZG5rJvLRZ_WjCheFjVFT8,39509
5
- sempy_labs/_capacity_migration.py,sha256=bNjVHFMYS1izIs_yiBAB8c9mOum_oAjqzmuhelYem7U,24796
4
+ sempy_labs/_capacities.py,sha256=hoHRWkZ_Gls0bMc8QGO-ZZj9zTOfH9iGvkEgFVCIm2g,40456
5
+ sempy_labs/_capacity_migration.py,sha256=GGIMrHwc7IEVJ9pDwmikXiF2QHu2nYqNyG235QYbmEw,24837
6
6
  sempy_labs/_clear_cache.py,sha256=5z73I4Zdr3C0Bd4zyxrQdcGG2VOzsXWukzB_evm4bRs,12731
7
- sempy_labs/_connections.py,sha256=aLMneYcw9on-GXr6KIGmDIvo8cuMevbkmtlmB3uWXhU,18693
7
+ sempy_labs/_connections.py,sha256=Cc3VpQtXUDVpEyn5CVd9lGeZ13Nrdk_E_XrLu4pGRi8,18658
8
8
  sempy_labs/_dashboards.py,sha256=cyFD-pUUFu4scGkbitilrI22GW5dTmTkUZ15ou7Bl-A,1880
9
9
  sempy_labs/_data_pipelines.py,sha256=cW_WGmuWD4V9IgLprKL4TqFXgid4eTBXvEL3-IArS0w,4817
10
- sempy_labs/_dataflows.py,sha256=SZThUDRQcWujK30nNw1YI06y1L6-piNHLiPBb72s9Bk,8049
10
+ sempy_labs/_dataflows.py,sha256=xv-wRDUq4Bzz-BOs1Jdb4bgS9HbPLpa1GqexfA6H0mg,8053
11
11
  sempy_labs/_dax.py,sha256=cFaXJUHuG93lYmjq_4CLG6gStvSTtgvJ8NA43TqeW_g,16871
12
- sempy_labs/_delta_analyzer.py,sha256=TFZFlrC4_AYdPv0St3E9Spxtm7qILRPfOLvxxxq6OcY,17403
12
+ sempy_labs/_dax_query_view.py,sha256=owOHoU9_yV8deBnSVtEJk-xi-relyTV3wyLGelYiD_E,1853
13
+ sempy_labs/_delta_analyzer.py,sha256=1H2bfB8j8VovdtIOPLztfTIQYCdbfR54wrMW4qO6R-4,17576
13
14
  sempy_labs/_deployment_pipelines.py,sha256=SDQYkCAhOAlxBr58jYxtLFOVySiRXO0_WhfOKGDeYZQ,6254
14
15
  sempy_labs/_documentation.py,sha256=yVA8VPEzx_fmljtcvSxtB7-BeupYsfdMXXjp6Fpnyo8,5007
15
- sempy_labs/_environments.py,sha256=mVZn24mPtRvFaOSXWhMqD5RddU8N_P2_85hNVcrrt7U,4580
16
- sempy_labs/_eventhouses.py,sha256=HgvuhRVBol2Y0T4JYRg6xsg4xHXShFwEB_uxFIDVHVU,5164
16
+ sempy_labs/_environments.py,sha256=5I5gHU68Crs603R00NgGy3RKdr925-X05V5EPkrHFkY,4995
17
+ sempy_labs/_eventhouses.py,sha256=WEf33difBOTGTZGh1QFmY4gv-e43uwO1V54nrsjGGyY,5376
17
18
  sempy_labs/_eventstreams.py,sha256=c8nNdRM8eeDOYXd_AtRp7zYHYaqV9p1bI2V0boUrCuA,3492
18
- sempy_labs/_external_data_shares.py,sha256=MP0nx9uFzjiV04xpq09UAM8n_1ckPBae2uhI6Vk0ZoY,6537
19
+ sempy_labs/_external_data_shares.py,sha256=s2okFvtCG5FDMbMJ_q6YSlCkVVFiE9sh2imVxZq1woU,6450
19
20
  sempy_labs/_gateways.py,sha256=6JE6VeGFPKF617sf2mMkxXVOz57YHI5jAQLAF-BzRLc,17527
20
- sempy_labs/_generate_semantic_model.py,sha256=DOdtIdN4JJeemZ7jpBGFaHFEZkkk5u_JHWGPlWExsUM,18531
21
+ sempy_labs/_generate_semantic_model.py,sha256=5BRdobiNJ035HShCEpEkMPN-KfqVdqzGFuR0HM346mA,18560
21
22
  sempy_labs/_git.py,sha256=RyaT4XzrSi-4NLJZWiWZnnNpMgrKzRNxhyY8b1O2I6c,17819
22
- sempy_labs/_graphQL.py,sha256=nBvuZefJaK7fGdJelQhFFoIvUJQcnwK2rODq0IX9JfM,2478
23
- sempy_labs/_helper_functions.py,sha256=z73XNSmJRU1u545H6WoaziUuOnWasEuuH3qSKOYDn5A,52723
23
+ sempy_labs/_graphQL.py,sha256=truXeIUPRKLwc4skhs3FZYNcKP9FCGKly9en0YkR4NE,2690
24
+ sempy_labs/_helper_functions.py,sha256=CgRxc1_EH-C2SZtJX5xYR69m3nIuiltRZBBXZFz9tgE,60805
24
25
  sempy_labs/_icons.py,sha256=ez2dx_LCti71S_-eB6WYQ-kOMyiBL8ZJN12-ev5dcmA,3579
25
26
  sempy_labs/_job_scheduler.py,sha256=_-Pifkttk1oPNxewxwWcQ4QC_Hr24GSi6nmrEXwc0pc,15814
26
- sempy_labs/_kql_databases.py,sha256=V7-rqOVOvdBrDRid17DkPFB2DBFeHz1BMbomQqOuFR8,4000
27
+ sempy_labs/_kql_databases.py,sha256=0WVG9oiWgbjgV_oQc4f96QogqitVkbIeCmF_dVeePQE,4212
27
28
  sempy_labs/_kql_querysets.py,sha256=Jjcs4SkjeirnDkG6zfsl0KRUXVzMyWii0Yn0JMWwln8,3502
28
- sempy_labs/_list_functions.py,sha256=qHsArkVsBS0OW6gFkyZvRrIyT0Dh7eP2n2Ch0q1Aj9Y,62564
29
- sempy_labs/_managed_private_endpoints.py,sha256=Po5ki9jQ5Wg3uxvHkAWuhtPHAkgOYspv19ZoAYzg9JM,6350
30
- sempy_labs/_mirrored_databases.py,sha256=kRVcMU616yrX4Wp6np67fWCk2nelNd4T5WLkSqOv4JQ,12970
29
+ sempy_labs/_list_functions.py,sha256=HCCJEwi3Q4VXfBXXAUNnYpvuP-CwGSOkN4uCn92TZZU,61842
30
+ sempy_labs/_managed_private_endpoints.py,sha256=Vqicp_EiGg_m8aA2F__gaJiB9cwjbxQOSOi7hkS6FvQ,6907
31
+ sempy_labs/_mirrored_databases.py,sha256=-9ZV2PdPeIc4lvFNkpPMm_9wkGIY1QLZXspYdSev5oQ,13147
31
32
  sempy_labs/_mirrored_warehouses.py,sha256=Q3WlRjUwCLz8KW1eN8MiTPeY0P52Vkuz5kgnv4GvQ3k,1739
32
- sempy_labs/_ml_experiments.py,sha256=k1-7wBub4kVkOzxazEtEBZVcQ7SAJ_S_Ze0XGHR3okI,3373
33
+ sempy_labs/_ml_experiments.py,sha256=-DA71k91cvrIhAlT5z5CDAL1ygVRsRUDxyJ-5e6fVJs,3375
33
34
  sempy_labs/_ml_models.py,sha256=69i67MHn-_Fsq-5slLxxhCF8N2s0JBYn_CDTa1Hhhs0,3261
34
35
  sempy_labs/_model_auto_build.py,sha256=PTQo3dufzLSFcQ5shFkmBWAVSdP7cTJgpUclrcXyNbg,5105
35
- sempy_labs/_model_bpa.py,sha256=RXFNChiBX_ulzsIXkkC5FOdNogsGZKUmzu8v5cvVp6E,22375
36
- sempy_labs/_model_bpa_bulk.py,sha256=F_AzebdpMm7QvYuSuSj52NTg6jYE9H1FJJDpdnyNF-g,16039
36
+ sempy_labs/_model_bpa.py,sha256=AoHshKqn3z2lNPwu1hKntJuCELYe1bLa_0LUzFXRjgs,22032
37
+ sempy_labs/_model_bpa_bulk.py,sha256=SyiIOunbhKiuV8fNnpoFA_D09OtaAdm7tJHUL_Zi7Bo,15758
37
38
  sempy_labs/_model_bpa_rules.py,sha256=L3XaK1SThgz3h0uTZZH92IVkWooG5rhqVus-ddnBIHw,45975
38
39
  sempy_labs/_model_dependencies.py,sha256=0xGgubrq76zIvBdEqmEX_Pd6WdizXFVECBW6BPl2DZo,13162
39
40
  sempy_labs/_mounted_data_factories.py,sha256=-IBxE5XurYyeeQg7BvpXSSR1MW3rRGmue6UGpqlo96U,3906
40
41
  sempy_labs/_notebooks.py,sha256=GbyBDay_c4dnPmS32e8qgRrKVb3evi_omSMzq-Xk9z0,8082
41
- sempy_labs/_one_lake_integration.py,sha256=3RYJ6det_oM3NcAqwrONATTggkNAXRZFvVbrOA1Pg94,6274
42
+ sempy_labs/_one_lake_integration.py,sha256=9ub75-ueEFqn1iRgRd5y97SYujalsWW6ufs1du4PbDs,6303
42
43
  sempy_labs/_query_scale_out.py,sha256=nra1q8s-PKpZTlI_L0lMGO1GmdBk6sqETsBQShF1yPY,15352
43
44
  sempy_labs/_refresh_semantic_model.py,sha256=4w_uaYLbaZptmEFY7QHWzOgXcgc2ctGx8HQvt2aguxk,17360
44
- sempy_labs/_semantic_models.py,sha256=PeH80kas-a0bQY6fmsjVWVQXZHYq34qZHjAVP847mCo,3699
45
+ sempy_labs/_semantic_models.py,sha256=Yh9SfGZSbA9mRROo3wpy9A8sFvvGZZ1VUHjm_Ux4pqk,4455
45
46
  sempy_labs/_spark.py,sha256=SuSTjjmtzj7suDgN8Njk_pNBaStDLgIJB_1yk_e2H1Y,19340
46
- sempy_labs/_sql.py,sha256=vgAI9YRpDwMTZuNn4ptd8fwyGN5QQTBYR0SnB2CpfAg,8028
47
- sempy_labs/_sqldatabase.py,sha256=Jyp4bkinFfn7UaHjUtaT3FiT5IO7_uYcvOemk7bc5Ic,7829
47
+ sempy_labs/_sql.py,sha256=s4VMcs1lIn39sYKRnSp6QsdRR3J-84kT_SPCKdwzAyo,8158
48
+ sempy_labs/_sqldatabase.py,sha256=8HV3UtsLiwexmPSjYnhnYnD6xEvgFpTG13jcOuGheuI,6470
48
49
  sempy_labs/_translations.py,sha256=i4K2PFk6-TcmAnUpqz-z_GuDv9XEp1cBs0KY-x6ja1w,16168
49
50
  sempy_labs/_utils.py,sha256=aKGnUiXSLLRQRXXv8TH_XhGcOtDsnrs0RHmQ6YZMn3o,1786
50
- sempy_labs/_vertipaq.py,sha256=gXK6l8zZ7AOayVe-trRGqSXPGC6jmOvJVOkt67oogI4,38789
51
- sempy_labs/_warehouses.py,sha256=0wkyC9Jr04VrmS05y8fg7rkmPRZYTE6A-vYzDoCzVgc,6955
51
+ sempy_labs/_vertipaq.py,sha256=7nYpOuya5FpnK_OdtJKTQtLD_9iPCIVxhFnpD9P85-E,38556
52
+ sempy_labs/_warehouses.py,sha256=Lk0U4VCT1sMkO5d4QJ5viarTxnLjlB-DhLhORKdaFmE,7297
52
53
  sempy_labs/_workloads.py,sha256=ifQ6Jv0_MDzjfu993bU8V7thOdW5kFyp3MjA082rsrE,4687
53
- sempy_labs/_workspace_identity.py,sha256=ZtaaYyFUmRabdxCYw9xoUNRwKvXi1YibryI9Ww9MzxU,2155
54
- sempy_labs/_workspaces.py,sha256=paYNM3GybpTCsL7HsX3f12I5_1PT3joeYJXZ_RsHpmY,11233
54
+ sempy_labs/_workspace_identity.py,sha256=plxgcqt2aBXgLpyn1dpHhzh_5Z-gFkLK8RtId2OIX5s,2561
55
+ sempy_labs/_workspaces.py,sha256=wHBR2e5wOhhWN6PiFefSFzYrwvdgMkG0dg2gEpPcZ4o,13090
55
56
  sempy_labs/_bpa_translation/_model/_translations_am-ET.po,sha256=zQVjJ-t0vtgIYan-HaXtUVJLB_PJvB53Nf5BNoOReU4,39199
56
57
  sempy_labs/_bpa_translation/_model/_translations_ar-AE.po,sha256=QP1PjDLFccLDs9zq456crdAST57wrcWVk5rRiqqoCws,36959
57
58
  sempy_labs/_bpa_translation/_model/_translations_bg-BG.po,sha256=sqezjpS3wfk09WD7x27bHoCBtgmqeHtyHNKTwG7-bkI,44132
@@ -94,7 +95,7 @@ sempy_labs/admin/__init__.py,sha256=MIWuLkSdQ4BsHBgRKWMkPi1atDA-bQeUeRhNPW6_IEs,
94
95
  sempy_labs/admin/_activities.py,sha256=YfISDzhXro9glEa_yJmoYv-2q2M1DIkoyNzgLl7eWuI,6695
95
96
  sempy_labs/admin/_apps.py,sha256=PUEQlXbzVR9u3ZUQUhpfU3J-hfa8A2nTGFBpCqzMdW0,4085
96
97
  sempy_labs/admin/_artifacts.py,sha256=eCiNBdgNSUhOsE3i-Y1lp6p6T3is7RZJPy7ctu36oW4,2246
97
- sempy_labs/admin/_basic_functions.py,sha256=if6ZEqKvM7wSzW9E2CxFbGwQDl_JH32eo-qIKz24sWY,15388
98
+ sempy_labs/admin/_basic_functions.py,sha256=OvQ1X2KQoKFmqLSwnfy9uiSJTI6IUXXy5d7D89Ij5cw,15419
98
99
  sempy_labs/admin/_capacities.py,sha256=O-hqtiWWjZC25hOFLnx_PvnH0-m9Ky_hx3_1ubLUkgI,9863
99
100
  sempy_labs/admin/_datasets.py,sha256=kMerpBNro--kKdp2rhEKnVe0JDGjMDsxqgfbbw17K-U,6235
100
101
  sempy_labs/admin/_domains.py,sha256=Z0EhIJCcujx1NumeqM4eKAvai18p-9TAw1WJaU-cbbE,15076
@@ -102,23 +103,23 @@ sempy_labs/admin/_external_data_share.py,sha256=q4gw5iYZJDH-9xIM6L0b2CU9ebUIdE-Z
102
103
  sempy_labs/admin/_git.py,sha256=gsbDQKd66knCI_Zh8vHSfHK-uQVJjVmhKKvfMMYKZyA,2264
103
104
  sempy_labs/admin/_items.py,sha256=zX-eUDyQWiB8mY8Nojj03-_R728JvVIOlp0iEOisnKE,8750
104
105
  sempy_labs/admin/_reports.py,sha256=qkqV53w6XG-eyCrYWSEUG5xm7WARu4y8H5DzibbSHuE,7858
105
- sempy_labs/admin/_scanner.py,sha256=Nz4HPf-ibf-wl2I4_8gR88P6SwaourpjcCrpsjLfUK8,4434
106
+ sempy_labs/admin/_scanner.py,sha256=qBBY_ga9sShOQhkVR6TMobMBfNjwmNA2sGR1GYYSNFA,4425
106
107
  sempy_labs/admin/_shared.py,sha256=srgkqttbMbK5XXjOt4zeAV8rMCvK7zEus55HsGtNUFI,3007
107
108
  sempy_labs/admin/_tenant.py,sha256=6CrJ8LBz5epst5kmqxbhoc0AmaU5KHZCJ36Hj6mlaVY,19141
108
109
  sempy_labs/admin/_users.py,sha256=eEOkgvny3FwMuUrSIBQ0n3JwrzWV_6_nwGc8_c-eXSM,4571
109
110
  sempy_labs/admin/_workspaces.py,sha256=XiiO3vyuJxKkVf9ZrW7261wHSBrnd8r7rbia8HGDFkI,4911
110
111
  sempy_labs/directlake/__init__.py,sha256=etaj-3wqe5t93mu74tGYjEOQ6gtHWUogidOygiVvlq8,2131
111
- sempy_labs/directlake/_directlake_schema_compare.py,sha256=In3Ac07GI6T3eLDvQK7Xt9bXwJLI7MgNAk83rOKsYKc,5040
112
- sempy_labs/directlake/_directlake_schema_sync.py,sha256=5nDyE-8ApeaUJO9aJLasop8G9bG9TjPamDQvgAlCUew,4671
113
- sempy_labs/directlake/_dl_helper.py,sha256=rr510sIDv5FL5Ipz-YqZWjY6XomBNhhoFbzWZcUyJvE,10672
114
- sempy_labs/directlake/_generate_shared_expression.py,sha256=WkE2onOO6XiloMNFdwwavGlANgmDGFMv2NXpqorhDAc,3002
112
+ sempy_labs/directlake/_directlake_schema_compare.py,sha256=tVc6hIgDxxA7a8V51e5tlzlp3bzVVTqQ_OKsTNxiWG4,5074
113
+ sempy_labs/directlake/_directlake_schema_sync.py,sha256=ipONLkBaXm4WgcMMChAyD4rVushdqdjAQdexT-fJxcY,6573
114
+ sempy_labs/directlake/_dl_helper.py,sha256=HHFy6tW-tSVZ4YHxSHvt6pXrloh0O6Lx7yNmZE7IAI4,10348
115
+ sempy_labs/directlake/_generate_shared_expression.py,sha256=9GyGWy_IXSLMs-J2UXDgHGYNjYgS9-1G4z2PHFRokOw,2862
115
116
  sempy_labs/directlake/_get_directlake_lakehouse.py,sha256=e0WFQm4-daJR4K1aHuVaubu7T26yTeBgfNEMOXk-EzM,2392
116
117
  sempy_labs/directlake/_get_shared_expression.py,sha256=qc85kXggkx_7Sz_rAAli_yPnLzrGZpgD8IfVbTfZhQM,1133
117
- sempy_labs/directlake/_guardrails.py,sha256=YO8OycXDxmWrtH8nTz5a4AddfnpskM83rTPEPBVpqbM,2701
118
+ sempy_labs/directlake/_guardrails.py,sha256=wNVXpeiZckgLTly4cS5DU5DoV9x1S4DMxN5S08qAavE,2749
118
119
  sempy_labs/directlake/_list_directlake_model_calc_tables.py,sha256=EYT4ELmOZ3Uklzy6uMQMidc4WtBXm21NQqZu1Q5HTsg,2509
119
120
  sempy_labs/directlake/_show_unsupported_directlake_objects.py,sha256=nmrZrtDez7U8Ji76i9fxnnTx1zxMu2LCOZTMz4sFUEc,3504
120
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py,sha256=DC8Ig8oJtPY5qdWytIy03iiqYiQxLoRmQ5hsu6I69Sw,6348
121
- sempy_labs/directlake/_update_directlake_partition_entity.py,sha256=I2Qe2jrvUtNy6Wi75fHg47L4aRiw0UNQfQbURsyZhGY,8984
121
+ sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py,sha256=IQ5Pfv_ffvzIJd4jeoQZ8cGerWTxAjGOyclKqsb8-c8,5746
122
+ sempy_labs/directlake/_update_directlake_partition_entity.py,sha256=VzNYQ_Kiqz7TnKwNp7Nfbs0YYF0x6Crx7MeKFKc3C-Y,8988
122
123
  sempy_labs/directlake/_warm_cache.py,sha256=xc7gG_OJY1rJYg79ztgcLATpnXHNqFaw-6CU1HgdlXk,9258
123
124
  sempy_labs/graph/__init__.py,sha256=AZ_IpOL06VvXrYmgbcrvQlxCxdDksvwXKf7JAGohCNI,620
124
125
  sempy_labs/graph/_groups.py,sha256=2axQ__eHNgJfb0ITOjexysz2Tq4AQ7xSejH4zG-QCFc,12531
@@ -128,7 +129,7 @@ sempy_labs/lakehouse/__init__.py,sha256=htZjFvJs3hCUUtMzRHQKbG6JlyC808QmXa_sVrPj
128
129
  sempy_labs/lakehouse/_get_lakehouse_columns.py,sha256=FWCyJhqqnZg837eK-S5st0xZpxbhxkcS8aTguazxjjY,2685
129
130
  sempy_labs/lakehouse/_get_lakehouse_tables.py,sha256=OmgYLNiegc8HwJP7hEPo5JMWFPyDIPSoqewBxsDkucc,8544
130
131
  sempy_labs/lakehouse/_lakehouse.py,sha256=dzDhBGN2FizREYFMsDsDzGm9cwLln1__OTAV_JV_GPY,8753
131
- sempy_labs/lakehouse/_shortcuts.py,sha256=SS-FYRfdIHkeS-6lN7_S85SxjxlS5j5EJshEveKvKH0,15283
132
+ sempy_labs/lakehouse/_shortcuts.py,sha256=jeecQA5EYj_D7OcfQJ1sgE3lcVNprTYxyijmUA2Q6yk,15469
132
133
  sempy_labs/migration/__init__.py,sha256=142n01VAqlcx4E0mGGRtUfVOEwAXVdiHI_XprmUm7As,1175
133
134
  sempy_labs/migration/_create_pqt_file.py,sha256=eRK0Jz9ZeV_7jV3kNRze0bTAIqxsAZXLKMGE_loKOaY,9677
134
135
  sempy_labs/migration/_direct_lake_to_import.py,sha256=uMqvElwkCHMyraP9t2nGNgstRobiHPFo4AMuS60dXyU,2732
@@ -136,20 +137,20 @@ sempy_labs/migration/_migrate_calctables_to_lakehouse.py,sha256=XzOPphiUmDIgv1ru
136
137
  sempy_labs/migration/_migrate_calctables_to_semantic_model.py,sha256=Qt4WfmllCtSl-xkWzWWL5sTzi3lQDaJp43lVEXQisVY,6303
137
138
  sempy_labs/migration/_migrate_model_objects_to_semantic_model.py,sha256=RD0ttWcBratAzpPKjFF6jpEnZEd6M7m8OfEUFbkInbA,22950
138
139
  sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py,sha256=HYi2vn7yYDsBCTAXFTi6UiB86kdSlhQKPdwAt1nTKEE,7169
139
- sempy_labs/migration/_migration_validation.py,sha256=pl5Yv4BwRHZfAL0p2soj_Gk8FL3UcwiqKbX23pJe1oQ,2788
140
+ sempy_labs/migration/_migration_validation.py,sha256=AHURrWofb-U-L2Bdu36mcisVXOuZXi6Smgrrs2kjYBM,2650
140
141
  sempy_labs/migration/_refresh_calc_tables.py,sha256=W-lYdUZZcoYyLRIpMdpgaz03PEMM6Zf7E1vzT6MmMAE,5516
141
142
  sempy_labs/report/_BPAReportTemplate.json,sha256=9Uh-7E6d2ooxQ7j5JRayv_ayEULc7Gzg42kZGKdOqH8,63920
142
143
  sempy_labs/report/__init__.py,sha256=wnmjUX3w521OYnQBV3h9EaJs1eFLkIxmjc59VQOC19I,1293
143
- sempy_labs/report/_download_report.py,sha256=Hu2cVzXN_6yjHJKvS0KGy0uwbbJNxd0P-w6M8-XeZao,2767
144
+ sempy_labs/report/_download_report.py,sha256=hCQ2_fSXSCqSUeaNM2Tf9T3MpRofREnDwp_zrfp7iTA,2703
144
145
  sempy_labs/report/_export_report.py,sha256=XCMsZzTBMgvQOe3Ltdod7my7_izpmP-3AVH6W5CExPE,10976
145
- sempy_labs/report/_generate_report.py,sha256=6Glk_q6f68aIHQJy_7EJjCJbNMRUJbnGriOyUJ2Qub8,13946
146
+ sempy_labs/report/_generate_report.py,sha256=ncFo8brgwPkSNF3urROMkIElqO6pcSy9tM6ymHE_UeQ,13868
146
147
  sempy_labs/report/_paginated.py,sha256=rsElE0IQ9qxRDuEp6qNF1EcD5XEgfTc7WsWEQsalsuI,2156
147
148
  sempy_labs/report/_report_bpa.py,sha256=ClETB8Q41sY1scCuknhpvalvuBaQ9ZwA4QX7F3sPcjc,13596
148
149
  sempy_labs/report/_report_bpa_rules.py,sha256=tPVGA0hmE6QMLlWtig7Va7Ksr2yXWl_Lndq--tWWd6w,4959
149
- sempy_labs/report/_report_functions.py,sha256=XojlMOuAYompdDk1bNaJx6KRK7MRsZ6vfLTiBXfjdXA,19680
150
+ sempy_labs/report/_report_functions.py,sha256=Y6MGxi_WVW-k1-JK1UlezlKPYqpfGZHYHeex5Oiit-A,19709
150
151
  sempy_labs/report/_report_helper.py,sha256=NcdWgFuh1GjDwVPzy6QWwg3ecaJKoWzZdhbxT6hbbdA,10599
151
152
  sempy_labs/report/_report_list_functions.py,sha256=K9tMDQKhIZhelHvfMMW0lsxbVHekJ-5dAQveoD7PUDA,3980
152
- sempy_labs/report/_report_rebind.py,sha256=pOzg_XWbip8ledM79APsLxUjLJNS9aTDKK9dEx1F7Ds,4990
153
+ sempy_labs/report/_report_rebind.py,sha256=svyxUSdqgXJW1UDNcb-urJxU9erO3JM72uzmuJUWIT0,5090
153
154
  sempy_labs/report/_reportwrapper.py,sha256=gwilmrE_QqWLQankc1rFlbp1_bexbdR5K5pGyx0N3Go,82945
154
155
  sempy_labs/report/_bpareporttemplate/.platform,sha256=kWRa6B_KwSYLsvVFDx372mQriQO8v7dJ_YzQV_cfD-Q,303
155
156
  sempy_labs/report/_bpareporttemplate/definition.pbir,sha256=bttyHZYKqjA8OBb_cezGlX4H82cDvGZVCl1QB3fij4E,343
@@ -182,8 +183,8 @@ sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.
182
183
  sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json,sha256=mlY6t9OlSe-Y6_QmXJpS1vggU6Y3FjISUKECL8FVSg8,931
183
184
  sempy_labs/tom/__init__.py,sha256=Qbs8leW0fjzvWwOjyWK3Hjeehu7IvpB1beASGsi28bk,121
184
185
  sempy_labs/tom/_model.py,sha256=uQXCiaym-00LNxGLVEUMb5VyhBXL2Hd59bIGnWtle08,189080
185
- semantic_link_labs-0.9.5.dist-info/LICENSE,sha256=ws_MuBL-SCEBqPBFl9_FqZkaaydIJmxHrJG2parhU4M,1141
186
- semantic_link_labs-0.9.5.dist-info/METADATA,sha256=e0OiZ-sSGgLcn2UMrpp-1QzGCtAje51zx8DSXT3bwEc,25981
187
- semantic_link_labs-0.9.5.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
188
- semantic_link_labs-0.9.5.dist-info/top_level.txt,sha256=kiQX1y42Dbein1l3Q8jMUYyRulDjdlc2tMepvtrvixQ,11
189
- semantic_link_labs-0.9.5.dist-info/RECORD,,
186
+ semantic_link_labs-0.9.6.dist-info/LICENSE,sha256=ws_MuBL-SCEBqPBFl9_FqZkaaydIJmxHrJG2parhU4M,1141
187
+ semantic_link_labs-0.9.6.dist-info/METADATA,sha256=iPChIwMy-nN_f_SXFLSC66d0Ig0tPMtdB5E-ClRT5mk,26076
188
+ semantic_link_labs-0.9.6.dist-info/WHEEL,sha256=52BFRY2Up02UkjOa29eZOS2VxUrpPORXg1pkohGGUS8,91
189
+ semantic_link_labs-0.9.6.dist-info/top_level.txt,sha256=kiQX1y42Dbein1l3Q8jMUYyRulDjdlc2tMepvtrvixQ,11
190
+ semantic_link_labs-0.9.6.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (75.8.2)
2
+ Generator: setuptools (76.0.0)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5
 
sempy_labs/__init__.py CHANGED
@@ -1,3 +1,6 @@
1
+ from sempy_labs._dax_query_view import (
2
+ generate_dax_query_view_url,
3
+ )
1
4
  from sempy_labs._mounted_data_factories import (
2
5
  list_mounted_data_factories,
3
6
  get_mounted_data_factory_definition,
@@ -7,6 +10,7 @@ from sempy_labs._mounted_data_factories import (
7
10
  from sempy_labs._semantic_models import (
8
11
  get_semantic_model_refresh_schedule,
9
12
  enable_semantic_model_scheduled_refresh,
13
+ delete_semantic_model,
10
14
  )
11
15
  from sempy_labs._graphQL import (
12
16
  list_graphql_apis,
@@ -148,6 +152,7 @@ from sempy_labs._capacities import (
148
152
  create_or_update_resource_group,
149
153
  list_resource_groups,
150
154
  get_resource_group,
155
+ list_capacities,
151
156
  )
152
157
  from sempy_labs._spark import (
153
158
  get_spark_settings,
@@ -165,6 +170,7 @@ from sempy_labs._workspaces import (
165
170
  assign_workspace_to_capacity,
166
171
  unassign_workspace_from_capacity,
167
172
  list_workspace_role_assignments,
173
+ delete_workspace,
168
174
  )
169
175
  from sempy_labs._notebooks import (
170
176
  get_notebook_definition,
@@ -180,6 +186,9 @@ from sempy_labs._sql import (
180
186
  from sempy_labs._sqldatabase import (
181
187
  get_sql_database_columns,
182
188
  get_sql_database_tables,
189
+ create_sql_database,
190
+ delete_sql_database,
191
+ list_sql_databases,
183
192
  )
184
193
  from sempy_labs._workspace_identity import (
185
194
  provision_workspace_identity,
@@ -244,7 +253,6 @@ from sempy_labs._list_functions import (
244
253
  list_semantic_model_objects,
245
254
  list_shortcuts,
246
255
  get_object_level_security,
247
- list_capacities,
248
256
  list_datamarts,
249
257
  list_lakehouses,
250
258
  list_sql_endpoints,
@@ -540,4 +548,10 @@ __all__ = [
540
548
  "list_mounted_data_factories",
541
549
  "get_mounted_data_factory_definition",
542
550
  "delete_mounted_data_factory",
551
+ "generate_dax_query_view_url",
552
+ "delete_semantic_model",
553
+ "delete_workspace",
554
+ "create_sql_database",
555
+ "delete_sql_database",
556
+ "list_sql_databases",
543
557
  ]
sempy_labs/_capacities.py CHANGED
@@ -242,7 +242,7 @@ def list_vcores() -> pd.DataFrame:
242
242
 
243
243
  def get_capacity_resource_governance(capacity_name: str):
244
244
 
245
- dfC = fabric.list_capacities()
245
+ dfC = list_capacities()
246
246
  dfC_filt = dfC[dfC["Display Name"] == capacity_name]
247
247
  capacity_id = dfC_filt["Id"].iloc[0].upper()
248
248
 
@@ -1131,3 +1131,39 @@ def get_resource_group(azure_subscription_id: str, resource_group: str) -> pd.Da
1131
1131
  }
1132
1132
 
1133
1133
  return pd.DataFrame(new_data, index=[0])
1134
+
1135
+
1136
+ def list_capacities() -> pd.DataFrame:
1137
+ """
1138
+ Shows the capacities and their properties.
1139
+
1140
+ Returns
1141
+ -------
1142
+ pandas.DataFrame
1143
+ A pandas dataframe showing the capacities and their properties
1144
+ """
1145
+
1146
+ columns = {
1147
+ "Id": "string",
1148
+ "Display Name": "string",
1149
+ "Sku": "string",
1150
+ "Region": "string",
1151
+ "State": "string",
1152
+ "Admins": "string",
1153
+ }
1154
+ df = _create_dataframe(columns=columns)
1155
+
1156
+ response = _base_api(request="/v1.0/myorg/capacities", client="fabric_sp")
1157
+
1158
+ for i in response.json().get("value", []):
1159
+ new_data = {
1160
+ "Id": i.get("id").lower(),
1161
+ "Display Name": i.get("displayName"),
1162
+ "Sku": i.get("sku"),
1163
+ "Region": i.get("region"),
1164
+ "State": i.get("state"),
1165
+ "Admins": [i.get("admins", [])],
1166
+ }
1167
+ df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
1168
+
1169
+ return df
@@ -106,17 +106,13 @@ def migrate_workspaces(
106
106
  migrated_workspaces = []
107
107
 
108
108
  for i, r in dfW.iterrows():
109
- workspace = r["Name"]
110
-
111
- if workspaces is None or workspace in workspaces:
112
- pass
113
- else:
114
- continue
115
-
116
- if assign_workspace_to_capacity(
117
- capacity_name=target_capacity, workspace=workspace
118
- ):
119
- migrated_workspaces.append(workspace)
109
+ workspace_id = r["Id"]
110
+ workspace_name = r["Name"]
111
+ if workspaces is None or workspace_name in workspaces:
112
+ assign_workspace_to_capacity(
113
+ capacity=target_capacity, workspace=workspace_id
114
+ )
115
+ migrated_workspaces.append(workspace_name)
120
116
 
121
117
  if len(migrated_workspaces) < workspace_count:
122
118
  print(
@@ -124,10 +120,11 @@ def migrate_workspaces(
124
120
  )
125
121
  print(f"{icons.in_progress} Initiating rollback...")
126
122
  for i, r in dfW.iterrows():
127
- workspace = r["Name"]
128
- if workspace in migrated_workspaces:
123
+ workspace_id = r["Id"]
124
+ workspace_name = r["Name"]
125
+ if workspace_name in migrated_workspaces:
129
126
  assign_workspace_to_capacity(
130
- capacity_name=source_capacity, workspace=workspace
127
+ capacity=source_capacity, workspace=workspace_id
131
128
  )
132
129
  print(
133
130
  f"{icons.green_dot} Rollback of the workspaces to the '{source_capacity}' capacity is complete."
@@ -1,4 +1,3 @@
1
- import sempy.fabric as fabric
2
1
  import pandas as pd
3
2
  from typing import Optional
4
3
  from sempy_labs._helper_functions import (
@@ -7,6 +6,7 @@ from sempy_labs._helper_functions import (
7
6
  _update_dataframe_datatypes,
8
7
  _base_api,
9
8
  _create_dataframe,
9
+ resolve_item_id,
10
10
  )
11
11
  from uuid import UUID
12
12
  import sempy_labs._icons as icons
@@ -230,9 +230,7 @@ def list_item_connections(
230
230
 
231
231
  (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
232
232
  item_type = item_type[0].upper() + item_type[1:]
233
- item_id = fabric.resolve_item_id(
234
- item_name=item_name, type=item_type, workspace=workspace_id
235
- )
233
+ item_id = resolve_item_id(item=item_name, type=item_type, workspace=workspace_id)
236
234
 
237
235
  columns = {
238
236
  "Connection Name": "string",
sempy_labs/_dataflows.py CHANGED
@@ -1,4 +1,3 @@
1
- import sempy.fabric as fabric
2
1
  import pandas as pd
3
2
  from sempy_labs._helper_functions import (
4
3
  resolve_workspace_name_and_id,
@@ -6,6 +5,7 @@ from sempy_labs._helper_functions import (
6
5
  _update_dataframe_datatypes,
7
6
  _base_api,
8
7
  _create_dataframe,
8
+ resolve_workspace_name,
9
9
  )
10
10
  from typing import Optional, Tuple
11
11
  import sempy_labs._icons as icons
@@ -187,7 +187,7 @@ def list_upstream_dataflows(
187
187
  for v in values:
188
188
  tgt_dataflow_id = v.get("targetDataflowId")
189
189
  tgt_workspace_id = v.get("groupId")
190
- tgt_workspace_name = fabric.resolve_workspace_name(tgt_workspace_id)
190
+ tgt_workspace_name = resolve_workspace_name(workspace_id=tgt_workspace_id)
191
191
  (tgt_dataflow_name, _) = _resolve_dataflow_name_and_id(
192
192
  dataflow=tgt_dataflow_id, workspace=tgt_workspace_id
193
193
  )
@@ -0,0 +1,55 @@
1
+ from typing import Optional
2
+ from uuid import UUID
3
+ from sempy_labs._helper_functions import (
4
+ resolve_dataset_id,
5
+ _get_fabric_context_setting,
6
+ resolve_workspace_id,
7
+ )
8
+ import gzip
9
+ import base64
10
+ import urllib.parse
11
+
12
+
13
+ def generate_dax_query_view_url(
14
+ dataset: str | UUID, dax_string: str, workspace: Optional[str | UUID] = None
15
+ ):
16
+ """
17
+ Prints a URL based on query provided. This URL opens `DAX query view <https://learn.microsoft.com/power-bi/transform-model/dax-query-view>`_ in the Power BI service, connected to the semantic model and using the query provided.
18
+
19
+ Parameters
20
+ ----------
21
+ dataset : str | uuid.UUID
22
+ The semantic model name or ID.
23
+ dax_string : str
24
+ The DAX query string.
25
+ workspace : str | uuid.UUID, default=None
26
+ The workspace name or ID.
27
+ Defaults to None which resolves to the workspace of the attached lakehouse
28
+ or if no lakehouse attached, resolves to the workspace of the notebook.
29
+ """
30
+
31
+ workspace_id = resolve_workspace_id(workspace=workspace)
32
+ dataset_id = resolve_dataset_id(dataset=dataset, workspace=workspace_id)
33
+
34
+ prefix = _get_fabric_context_setting(name="spark.trident.pbienv").lower()
35
+
36
+ if prefix == "prod":
37
+ prefix = "app"
38
+
39
+ def gzip_base64_urlsafe(input_string):
40
+ # Compress the string with gzip
41
+ compressed_data = gzip.compress(input_string.encode("utf-8"))
42
+
43
+ # Encode the compressed data in base64
44
+ base64_data = base64.b64encode(compressed_data)
45
+
46
+ # Make the base64 string URL-safe
47
+ urlsafe_data = urllib.parse.quote_plus(base64_data.decode("utf-8"))
48
+
49
+ return urlsafe_data
50
+
51
+ formatted_query = gzip_base64_urlsafe(dax_string)
52
+
53
+ url = f"https://{prefix}.powerbi.com/groups/{workspace_id}/modeling/{dataset_id}/daxQueryView?query={formatted_query}"
54
+
55
+ print(url)
@@ -52,6 +52,7 @@ def delta_analyzer(
52
52
  workspace: Optional[str | UUID] = None,
53
53
  column_stats: bool = True,
54
54
  skip_cardinality: bool = True,
55
+ schema: Optional[str] = None,
55
56
  ) -> Dict[str, pd.DataFrame]:
56
57
  """
57
58
  Analyzes a delta table and shows the results in dictionary containing a set of 5 dataframes. If 'export' is set to True, the results will be saved to delta tables in the lakehouse attached to the notebook.
@@ -85,6 +86,8 @@ def delta_analyzer(
85
86
  If True, collects data about column chunks and columns. If False, skips that step and only returns the other 3 dataframes.
86
87
  skip_cardinality : bool, default=True
87
88
  If True, skips the cardinality calculation for each column. If False, calculates the cardinality for each column.
89
+ schema : str, default=None
90
+ The name of the schema to which the table belongs (for schema-enabled lakehouses). If None, the default schema is used.
88
91
 
89
92
  Returns
90
93
  -------
@@ -96,25 +99,21 @@ def delta_analyzer(
96
99
  if not skip_cardinality:
97
100
  column_stats = True
98
101
 
99
- # display_toggle = notebookutils.common.configs.pandas_display
100
-
101
- # Turn off notebookutils display
102
- # if display_toggle is True:
103
- # notebookutils.common.configs.pandas_display = False
104
-
105
102
  prefix = "SLL_DeltaAnalyzer_"
106
103
  now = datetime.now()
107
104
  (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace=workspace)
108
105
  (lakehouse_name, lakehouse_id) = resolve_lakehouse_name_and_id(
109
106
  lakehouse=lakehouse, workspace=workspace
110
107
  )
111
- path = create_abfss_path(lakehouse_id, workspace_id, table_name)
112
- local_path = _mount(lakehouse=lakehouse, workspace=workspace)
113
- table_path = f"{local_path}/Tables/{table_name}"
114
- delta_table_path = create_abfss_path(lakehouse_id, workspace_id, table_name)
115
108
 
116
- # Set back to original value
117
- # notebookutils.common.configs.pandas_display = display_toggle
109
+ delta_table_path = create_abfss_path(
110
+ lakehouse_id, workspace_id, table_name, schema=schema
111
+ )
112
+ local_path = _mount(lakehouse=lakehouse, workspace=workspace)
113
+ if schema is not None:
114
+ table_path = f"{local_path}/Tables/{schema}/{table_name}"
115
+ else:
116
+ table_path = f"{local_path}/Tables/{table_name}"
118
117
 
119
118
  parquet_file_df_columns = {
120
119
  # "Dataset": "string",
@@ -183,7 +182,7 @@ def delta_analyzer(
183
182
  # min_reader_version = table_details.get("minReaderVersion")
184
183
  # min_writer_version = table_details.get("minWriterVersion")
185
184
 
186
- latest_files = _read_delta_table(path).inputFiles()
185
+ latest_files = _read_delta_table(delta_table_path).inputFiles()
187
186
  # file_paths = [f.split("/")[-1] for f in latest_files]
188
187
  all_parquet_files = get_parquet_file_infos(delta_table_path)
189
188
  common_file_paths = set(
@@ -430,6 +429,7 @@ def get_delta_table_history(
430
429
  table_name: str,
431
430
  lakehouse: Optional[str | UUID] = None,
432
431
  workspace: Optional[str | UUID] = None,
432
+ schema: Optional[str] = None,
433
433
  ) -> pd.DataFrame:
434
434
  """
435
435
  Returns the history of a delta table as a pandas dataframe.
@@ -445,6 +445,8 @@ def get_delta_table_history(
445
445
  The Fabric workspace name or ID used by the lakehouse.
446
446
  Defaults to None which resolves to the workspace of the attached lakehouse
447
447
  or if no lakehouse attached, resolves to the workspace of the notebook.
448
+ schema : str, default=None
449
+ The name of the schema to which the table belongs (for schema-enabled lakehouses). If None, the default schema is used.
448
450
 
449
451
  Returns
450
452
  -------
@@ -461,7 +463,7 @@ def get_delta_table_history(
461
463
  (lakehouse_name, lakehouse_id) = resolve_lakehouse_name_and_id(
462
464
  lakehouse=lakehouse, workspace=workspace
463
465
  )
464
- path = create_abfss_path(lakehouse_id, workspace_id, table_name)
466
+ path = create_abfss_path(lakehouse_id, workspace_id, table_name, schema)
465
467
 
466
468
  from delta import DeltaTable
467
469
 
@@ -48,6 +48,8 @@ def list_environments(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
48
48
 
49
49
  This is a wrapper function for the following API: `Items - List Environments <https://learn.microsoft.com/rest/api/fabric/environment/items/list-environments>`_.
50
50
 
51
+ Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
52
+
51
53
  Parameters
52
54
  ----------
53
55
  workspace : str | uuid.UUID, default=None
@@ -71,7 +73,9 @@ def list_environments(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
71
73
  (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
72
74
 
73
75
  responses = _base_api(
74
- request=f"/v1/workspaces/{workspace_id}/environments", uses_pagination=True
76
+ request=f"/v1/workspaces/{workspace_id}/environments",
77
+ uses_pagination=True,
78
+ client="fabric_sp",
75
79
  )
76
80
 
77
81
  for r in responses:
@@ -113,6 +117,8 @@ def publish_environment(
113
117
 
114
118
  This is a wrapper function for the following API: `Spark Libraries - Publish Environment <https://learn.microsoft.com/rest/api/fabric/environment/spark-libraries/publish-environment>`_.
115
119
 
120
+ Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
121
+
116
122
  Parameters
117
123
  ----------
118
124
  environment: str | uuid.UUID
@@ -133,6 +139,7 @@ def publish_environment(
133
139
  method="post",
134
140
  lro_return_status_code=True,
135
141
  status_codes=None,
142
+ client="fabric_sp",
136
143
  )
137
144
 
138
145
  print(
@@ -72,6 +72,8 @@ def list_eventhouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
72
72
 
73
73
  This is a wrapper function for the following API: `Items - List Eventhouses <https://learn.microsoft.com/rest/api/fabric/environment/items/list-eventhouses>`_.
74
74
 
75
+ Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
76
+
75
77
  Parameters
76
78
  ----------
77
79
  workspace : str | uuid.UUID, default=None
@@ -95,7 +97,9 @@ def list_eventhouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
95
97
  (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
96
98
 
97
99
  responses = _base_api(
98
- request=f"/v1/workspaces/{workspace_id}/eventhouses", uses_pagination=True
100
+ request=f"/v1/workspaces/{workspace_id}/eventhouses",
101
+ uses_pagination=True,
102
+ client="fabric_sp",
99
103
  )
100
104
 
101
105
  for r in responses: