semantic-link-labs 0.9.5__py3-none-any.whl → 0.9.7__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release: this version of semantic-link-labs might be problematic.

Files changed (65)
  1. {semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.7.dist-info}/METADATA +8 -5
  2. {semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.7.dist-info}/RECORD +65 -61
  3. {semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.7.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +19 -1
  5. sempy_labs/_ai.py +3 -1
  6. sempy_labs/_capacities.py +37 -2
  7. sempy_labs/_capacity_migration.py +11 -14
  8. sempy_labs/_connections.py +2 -4
  9. sempy_labs/_dataflows.py +2 -2
  10. sempy_labs/_dax_query_view.py +57 -0
  11. sempy_labs/_delta_analyzer.py +16 -14
  12. sempy_labs/_delta_analyzer_history.py +298 -0
  13. sempy_labs/_environments.py +8 -1
  14. sempy_labs/_eventhouses.py +5 -1
  15. sempy_labs/_external_data_shares.py +4 -10
  16. sempy_labs/_generate_semantic_model.py +2 -1
  17. sempy_labs/_graphQL.py +5 -1
  18. sempy_labs/_helper_functions.py +440 -63
  19. sempy_labs/_icons.py +6 -6
  20. sempy_labs/_kql_databases.py +5 -1
  21. sempy_labs/_list_functions.py +8 -38
  22. sempy_labs/_managed_private_endpoints.py +9 -2
  23. sempy_labs/_mirrored_databases.py +3 -1
  24. sempy_labs/_ml_experiments.py +1 -1
  25. sempy_labs/_model_bpa.py +2 -11
  26. sempy_labs/_model_bpa_bulk.py +33 -38
  27. sempy_labs/_model_bpa_rules.py +1 -1
  28. sempy_labs/_one_lake_integration.py +2 -1
  29. sempy_labs/_semantic_models.py +20 -0
  30. sempy_labs/_sql.py +6 -2
  31. sempy_labs/_sqldatabase.py +61 -100
  32. sempy_labs/_vertipaq.py +8 -11
  33. sempy_labs/_warehouses.py +14 -3
  34. sempy_labs/_workspace_identity.py +6 -0
  35. sempy_labs/_workspaces.py +42 -2
  36. sempy_labs/admin/_basic_functions.py +29 -2
  37. sempy_labs/admin/_reports.py +1 -1
  38. sempy_labs/admin/_scanner.py +2 -4
  39. sempy_labs/admin/_tenant.py +8 -3
  40. sempy_labs/directlake/_directlake_schema_compare.py +2 -1
  41. sempy_labs/directlake/_directlake_schema_sync.py +65 -19
  42. sempy_labs/directlake/_dl_helper.py +0 -6
  43. sempy_labs/directlake/_generate_shared_expression.py +19 -12
  44. sempy_labs/directlake/_guardrails.py +2 -1
  45. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +90 -57
  46. sempy_labs/directlake/_update_directlake_partition_entity.py +5 -2
  47. sempy_labs/graph/_groups.py +6 -0
  48. sempy_labs/graph/_teams.py +2 -0
  49. sempy_labs/graph/_users.py +4 -0
  50. sempy_labs/lakehouse/__init__.py +12 -3
  51. sempy_labs/lakehouse/_blobs.py +231 -0
  52. sempy_labs/lakehouse/_shortcuts.py +29 -8
  53. sempy_labs/migration/_direct_lake_to_import.py +47 -10
  54. sempy_labs/migration/_migration_validation.py +0 -4
  55. sempy_labs/report/__init__.py +4 -0
  56. sempy_labs/report/_download_report.py +4 -6
  57. sempy_labs/report/_generate_report.py +6 -6
  58. sempy_labs/report/_report_functions.py +5 -4
  59. sempy_labs/report/_report_helper.py +17 -5
  60. sempy_labs/report/_report_rebind.py +8 -6
  61. sempy_labs/report/_reportwrapper.py +17 -8
  62. sempy_labs/report/_save_report.py +147 -0
  63. sempy_labs/tom/_model.py +154 -23
  64. {semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.7.dist-info/licenses}/LICENSE +0 -0
  65. {semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.7.dist-info}/top_level.txt +0 -0
{semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.7.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
- Metadata-Version: 2.2
+ Metadata-Version: 2.4
  Name: semantic-link-labs
- Version: 0.9.5
+ Version: 0.9.7
  Summary: Semantic Link Labs for Microsoft Fabric
  Author: Microsoft Corporation
  License: MIT License
@@ -15,18 +15,19 @@ Classifier: Framework :: Jupyter
  Requires-Python: <3.12,>=3.10
  Description-Content-Type: text/markdown
  License-File: LICENSE
- Requires-Dist: semantic-link-sempy>=0.8.5
+ Requires-Dist: semantic-link-sempy>=0.9.3
  Requires-Dist: anytree
  Requires-Dist: powerbiclient
  Requires-Dist: polib
  Requires-Dist: jsonpath_ng
  Provides-Extra: test
  Requires-Dist: pytest>=8.2.1; extra == "test"
+ Dynamic: license-file

  # Semantic Link Labs

  [![PyPI version](https://badge.fury.io/py/semantic-link-labs.svg)](https://badge.fury.io/py/semantic-link-labs)
- [![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.9.5&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
+ [![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.9.7&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
  [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
  [![Downloads](https://static.pepy.tech/badge/semantic-link-labs)](https://pepy.tech/project/semantic-link-labs)

@@ -148,6 +149,8 @@ An even better way to ensure the semantic-link-labs library is available in your
  2. Select your newly created environment within the 'Environment' drop down in the navigation bar at the top of the notebook

  ## Version History
+ * [0.9.7](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.7) (April 1, 2025)
+ * [0.9.6](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.6) (March 12, 2025)
  * [0.9.5](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.5) (March 7, 2025)
  * [0.9.4](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.4) (February 27, 2025)
  * [0.9.3](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.3) (February 13, 2025)
@@ -308,7 +311,7 @@ We use [black](github.com/psf/black) formatting as a code formatting standard. M

  Run this code to install black
  ```cli
- pip install black
+ pip install black==25.1.0
  ```

  Run this code to format your code using black
{semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.7.dist-info}/RECORD CHANGED
@@ -1,57 +1,60 @@
- sempy_labs/__init__.py,sha256=YJVsQPcDSMiXCXSSeWYvqptLTgw7woGOAUWO_dTTOj4,15133
- sempy_labs/_ai.py,sha256=rhVohfwrU1mvWH0EN_vCTnldb8xJNfGHZGba34k1JVw,16174
+ semantic_link_labs-0.9.7.dist-info/licenses/LICENSE,sha256=ws_MuBL-SCEBqPBFl9_FqZkaaydIJmxHrJG2parhU4M,1141
+ sempy_labs/__init__.py,sha256=DCtHeonEHwZmNAmVJz_kX-a97d04NsenB-b8eK38Omo,15609
+ sempy_labs/_ai.py,sha256=BD1TdGOJ7T4m3x426OP-FLb7bevn-9gKY8BTEDAJDQU,16205
  sempy_labs/_authentication.py,sha256=GjtN5XqIyWXbR5Ni4hfYiUNwgFa-ySX8e-BrqE1vgGc,6903
- sempy_labs/_capacities.py,sha256=0JStFjrkDnpDQJyttMCtNNZG5rJvLRZ_WjCheFjVFT8,39509
- sempy_labs/_capacity_migration.py,sha256=bNjVHFMYS1izIs_yiBAB8c9mOum_oAjqzmuhelYem7U,24796
+ sempy_labs/_capacities.py,sha256=n48NYTY03zygRzcfyK1UOkSwTqKSyQefQ10IKQh-dfA,40426
+ sempy_labs/_capacity_migration.py,sha256=GGIMrHwc7IEVJ9pDwmikXiF2QHu2nYqNyG235QYbmEw,24837
  sempy_labs/_clear_cache.py,sha256=5z73I4Zdr3C0Bd4zyxrQdcGG2VOzsXWukzB_evm4bRs,12731
- sempy_labs/_connections.py,sha256=aLMneYcw9on-GXr6KIGmDIvo8cuMevbkmtlmB3uWXhU,18693
+ sempy_labs/_connections.py,sha256=Cc3VpQtXUDVpEyn5CVd9lGeZ13Nrdk_E_XrLu4pGRi8,18658
  sempy_labs/_dashboards.py,sha256=cyFD-pUUFu4scGkbitilrI22GW5dTmTkUZ15ou7Bl-A,1880
  sempy_labs/_data_pipelines.py,sha256=cW_WGmuWD4V9IgLprKL4TqFXgid4eTBXvEL3-IArS0w,4817
- sempy_labs/_dataflows.py,sha256=SZThUDRQcWujK30nNw1YI06y1L6-piNHLiPBb72s9Bk,8049
+ sempy_labs/_dataflows.py,sha256=xv-wRDUq4Bzz-BOs1Jdb4bgS9HbPLpa1GqexfA6H0mg,8053
  sempy_labs/_dax.py,sha256=cFaXJUHuG93lYmjq_4CLG6gStvSTtgvJ8NA43TqeW_g,16871
- sempy_labs/_delta_analyzer.py,sha256=TFZFlrC4_AYdPv0St3E9Spxtm7qILRPfOLvxxxq6OcY,17403
+ sempy_labs/_dax_query_view.py,sha256=_zSvgystZzBj5euNTLKTg7-G77XVk0vqyqrDT72VvoM,1892
+ sempy_labs/_delta_analyzer.py,sha256=1H2bfB8j8VovdtIOPLztfTIQYCdbfR54wrMW4qO6R-4,17576
+ sempy_labs/_delta_analyzer_history.py,sha256=A50dlBd2d3ILKV7Fwj4pfIRtXKmCFslhk1gpeEw4inc,10765
  sempy_labs/_deployment_pipelines.py,sha256=SDQYkCAhOAlxBr58jYxtLFOVySiRXO0_WhfOKGDeYZQ,6254
  sempy_labs/_documentation.py,sha256=yVA8VPEzx_fmljtcvSxtB7-BeupYsfdMXXjp6Fpnyo8,5007
- sempy_labs/_environments.py,sha256=mVZn24mPtRvFaOSXWhMqD5RddU8N_P2_85hNVcrrt7U,4580
- sempy_labs/_eventhouses.py,sha256=HgvuhRVBol2Y0T4JYRg6xsg4xHXShFwEB_uxFIDVHVU,5164
+ sempy_labs/_environments.py,sha256=5I5gHU68Crs603R00NgGy3RKdr925-X05V5EPkrHFkY,4995
+ sempy_labs/_eventhouses.py,sha256=WEf33difBOTGTZGh1QFmY4gv-e43uwO1V54nrsjGGyY,5376
  sempy_labs/_eventstreams.py,sha256=c8nNdRM8eeDOYXd_AtRp7zYHYaqV9p1bI2V0boUrCuA,3492
- sempy_labs/_external_data_shares.py,sha256=MP0nx9uFzjiV04xpq09UAM8n_1ckPBae2uhI6Vk0ZoY,6537
+ sempy_labs/_external_data_shares.py,sha256=s2okFvtCG5FDMbMJ_q6YSlCkVVFiE9sh2imVxZq1woU,6450
  sempy_labs/_gateways.py,sha256=6JE6VeGFPKF617sf2mMkxXVOz57YHI5jAQLAF-BzRLc,17527
- sempy_labs/_generate_semantic_model.py,sha256=DOdtIdN4JJeemZ7jpBGFaHFEZkkk5u_JHWGPlWExsUM,18531
+ sempy_labs/_generate_semantic_model.py,sha256=5BRdobiNJ035HShCEpEkMPN-KfqVdqzGFuR0HM346mA,18560
  sempy_labs/_git.py,sha256=RyaT4XzrSi-4NLJZWiWZnnNpMgrKzRNxhyY8b1O2I6c,17819
- sempy_labs/_graphQL.py,sha256=nBvuZefJaK7fGdJelQhFFoIvUJQcnwK2rODq0IX9JfM,2478
- sempy_labs/_helper_functions.py,sha256=z73XNSmJRU1u545H6WoaziUuOnWasEuuH3qSKOYDn5A,52723
- sempy_labs/_icons.py,sha256=ez2dx_LCti71S_-eB6WYQ-kOMyiBL8ZJN12-ev5dcmA,3579
+ sempy_labs/_graphQL.py,sha256=truXeIUPRKLwc4skhs3FZYNcKP9FCGKly9en0YkR4NE,2690
+ sempy_labs/_helper_functions.py,sha256=Dx1B2VZdCY-X8qwDo3KZ5wE3jm1XL0jDsTqQ8F0tJks,65798
+ sempy_labs/_icons.py,sha256=SB9EQeoFCfD4bO6fcYuJOoPRSYenSrW0rI9G5RFsH28,3579
  sempy_labs/_job_scheduler.py,sha256=_-Pifkttk1oPNxewxwWcQ4QC_Hr24GSi6nmrEXwc0pc,15814
- sempy_labs/_kql_databases.py,sha256=V7-rqOVOvdBrDRid17DkPFB2DBFeHz1BMbomQqOuFR8,4000
+ sempy_labs/_kql_databases.py,sha256=0WVG9oiWgbjgV_oQc4f96QogqitVkbIeCmF_dVeePQE,4212
  sempy_labs/_kql_querysets.py,sha256=Jjcs4SkjeirnDkG6zfsl0KRUXVzMyWii0Yn0JMWwln8,3502
- sempy_labs/_list_functions.py,sha256=qHsArkVsBS0OW6gFkyZvRrIyT0Dh7eP2n2Ch0q1Aj9Y,62564
- sempy_labs/_managed_private_endpoints.py,sha256=Po5ki9jQ5Wg3uxvHkAWuhtPHAkgOYspv19ZoAYzg9JM,6350
- sempy_labs/_mirrored_databases.py,sha256=kRVcMU616yrX4Wp6np67fWCk2nelNd4T5WLkSqOv4JQ,12970
+ sempy_labs/_list_functions.py,sha256=HwKFs7ojnzWiPk-DyhGHoGYlHeBOtmkYwOtuhMV9we0,61870
+ sempy_labs/_managed_private_endpoints.py,sha256=Vqicp_EiGg_m8aA2F__gaJiB9cwjbxQOSOi7hkS6FvQ,6907
+ sempy_labs/_mirrored_databases.py,sha256=-9ZV2PdPeIc4lvFNkpPMm_9wkGIY1QLZXspYdSev5oQ,13147
  sempy_labs/_mirrored_warehouses.py,sha256=Q3WlRjUwCLz8KW1eN8MiTPeY0P52Vkuz5kgnv4GvQ3k,1739
- sempy_labs/_ml_experiments.py,sha256=k1-7wBub4kVkOzxazEtEBZVcQ7SAJ_S_Ze0XGHR3okI,3373
+ sempy_labs/_ml_experiments.py,sha256=-DA71k91cvrIhAlT5z5CDAL1ygVRsRUDxyJ-5e6fVJs,3375
  sempy_labs/_ml_models.py,sha256=69i67MHn-_Fsq-5slLxxhCF8N2s0JBYn_CDTa1Hhhs0,3261
  sempy_labs/_model_auto_build.py,sha256=PTQo3dufzLSFcQ5shFkmBWAVSdP7cTJgpUclrcXyNbg,5105
- sempy_labs/_model_bpa.py,sha256=RXFNChiBX_ulzsIXkkC5FOdNogsGZKUmzu8v5cvVp6E,22375
- sempy_labs/_model_bpa_bulk.py,sha256=F_AzebdpMm7QvYuSuSj52NTg6jYE9H1FJJDpdnyNF-g,16039
- sempy_labs/_model_bpa_rules.py,sha256=L3XaK1SThgz3h0uTZZH92IVkWooG5rhqVus-ddnBIHw,45975
+ sempy_labs/_model_bpa.py,sha256=AoHshKqn3z2lNPwu1hKntJuCELYe1bLa_0LUzFXRjgs,22032
+ sempy_labs/_model_bpa_bulk.py,sha256=hRY3dRBUtecrbscCZsEGv6TpCVqg_zAi8NmRq6dVMiE,15845
+ sempy_labs/_model_bpa_rules.py,sha256=3rpDcsl99ji2KbozqdrAeC_1YrTvF8A-l8VhiUHK0bo,45968
  sempy_labs/_model_dependencies.py,sha256=0xGgubrq76zIvBdEqmEX_Pd6WdizXFVECBW6BPl2DZo,13162
  sempy_labs/_mounted_data_factories.py,sha256=-IBxE5XurYyeeQg7BvpXSSR1MW3rRGmue6UGpqlo96U,3906
  sempy_labs/_notebooks.py,sha256=GbyBDay_c4dnPmS32e8qgRrKVb3evi_omSMzq-Xk9z0,8082
- sempy_labs/_one_lake_integration.py,sha256=3RYJ6det_oM3NcAqwrONATTggkNAXRZFvVbrOA1Pg94,6274
+ sempy_labs/_one_lake_integration.py,sha256=9ub75-ueEFqn1iRgRd5y97SYujalsWW6ufs1du4PbDs,6303
  sempy_labs/_query_scale_out.py,sha256=nra1q8s-PKpZTlI_L0lMGO1GmdBk6sqETsBQShF1yPY,15352
  sempy_labs/_refresh_semantic_model.py,sha256=4w_uaYLbaZptmEFY7QHWzOgXcgc2ctGx8HQvt2aguxk,17360
- sempy_labs/_semantic_models.py,sha256=PeH80kas-a0bQY6fmsjVWVQXZHYq34qZHjAVP847mCo,3699
+ sempy_labs/_semantic_models.py,sha256=Yh9SfGZSbA9mRROo3wpy9A8sFvvGZZ1VUHjm_Ux4pqk,4455
  sempy_labs/_spark.py,sha256=SuSTjjmtzj7suDgN8Njk_pNBaStDLgIJB_1yk_e2H1Y,19340
- sempy_labs/_sql.py,sha256=vgAI9YRpDwMTZuNn4ptd8fwyGN5QQTBYR0SnB2CpfAg,8028
- sempy_labs/_sqldatabase.py,sha256=Jyp4bkinFfn7UaHjUtaT3FiT5IO7_uYcvOemk7bc5Ic,7829
+ sempy_labs/_sql.py,sha256=s4VMcs1lIn39sYKRnSp6QsdRR3J-84kT_SPCKdwzAyo,8158
+ sempy_labs/_sqldatabase.py,sha256=8HV3UtsLiwexmPSjYnhnYnD6xEvgFpTG13jcOuGheuI,6470
  sempy_labs/_translations.py,sha256=i4K2PFk6-TcmAnUpqz-z_GuDv9XEp1cBs0KY-x6ja1w,16168
  sempy_labs/_utils.py,sha256=aKGnUiXSLLRQRXXv8TH_XhGcOtDsnrs0RHmQ6YZMn3o,1786
- sempy_labs/_vertipaq.py,sha256=gXK6l8zZ7AOayVe-trRGqSXPGC6jmOvJVOkt67oogI4,38789
- sempy_labs/_warehouses.py,sha256=0wkyC9Jr04VrmS05y8fg7rkmPRZYTE6A-vYzDoCzVgc,6955
+ sempy_labs/_vertipaq.py,sha256=7nYpOuya5FpnK_OdtJKTQtLD_9iPCIVxhFnpD9P85-E,38556
+ sempy_labs/_warehouses.py,sha256=Lk0U4VCT1sMkO5d4QJ5viarTxnLjlB-DhLhORKdaFmE,7297
  sempy_labs/_workloads.py,sha256=ifQ6Jv0_MDzjfu993bU8V7thOdW5kFyp3MjA082rsrE,4687
- sempy_labs/_workspace_identity.py,sha256=ZtaaYyFUmRabdxCYw9xoUNRwKvXi1YibryI9Ww9MzxU,2155
- sempy_labs/_workspaces.py,sha256=paYNM3GybpTCsL7HsX3f12I5_1PT3joeYJXZ_RsHpmY,11233
+ sempy_labs/_workspace_identity.py,sha256=plxgcqt2aBXgLpyn1dpHhzh_5Z-gFkLK8RtId2OIX5s,2561
+ sempy_labs/_workspaces.py,sha256=wHBR2e5wOhhWN6PiFefSFzYrwvdgMkG0dg2gEpPcZ4o,13090
  sempy_labs/_bpa_translation/_model/_translations_am-ET.po,sha256=zQVjJ-t0vtgIYan-HaXtUVJLB_PJvB53Nf5BNoOReU4,39199
  sempy_labs/_bpa_translation/_model/_translations_ar-AE.po,sha256=QP1PjDLFccLDs9zq456crdAST57wrcWVk5rRiqqoCws,36959
  sempy_labs/_bpa_translation/_model/_translations_bg-BG.po,sha256=sqezjpS3wfk09WD7x27bHoCBtgmqeHtyHNKTwG7-bkI,44132
@@ -94,63 +97,65 @@ sempy_labs/admin/__init__.py,sha256=MIWuLkSdQ4BsHBgRKWMkPi1atDA-bQeUeRhNPW6_IEs,
  sempy_labs/admin/_activities.py,sha256=YfISDzhXro9glEa_yJmoYv-2q2M1DIkoyNzgLl7eWuI,6695
  sempy_labs/admin/_apps.py,sha256=PUEQlXbzVR9u3ZUQUhpfU3J-hfa8A2nTGFBpCqzMdW0,4085
  sempy_labs/admin/_artifacts.py,sha256=eCiNBdgNSUhOsE3i-Y1lp6p6T3is7RZJPy7ctu36oW4,2246
- sempy_labs/admin/_basic_functions.py,sha256=if6ZEqKvM7wSzW9E2CxFbGwQDl_JH32eo-qIKz24sWY,15388
+ sempy_labs/admin/_basic_functions.py,sha256=NAiju3N6xGOHFbK6sRz8NyVOCsgDIwl50U2CRA2SV3g,16320
  sempy_labs/admin/_capacities.py,sha256=O-hqtiWWjZC25hOFLnx_PvnH0-m9Ky_hx3_1ubLUkgI,9863
  sempy_labs/admin/_datasets.py,sha256=kMerpBNro--kKdp2rhEKnVe0JDGjMDsxqgfbbw17K-U,6235
  sempy_labs/admin/_domains.py,sha256=Z0EhIJCcujx1NumeqM4eKAvai18p-9TAw1WJaU-cbbE,15076
  sempy_labs/admin/_external_data_share.py,sha256=q4gw5iYZJDH-9xIM6L0b2CU9ebUIdE-ZVrFsulRHyUU,3364
  sempy_labs/admin/_git.py,sha256=gsbDQKd66knCI_Zh8vHSfHK-uQVJjVmhKKvfMMYKZyA,2264
  sempy_labs/admin/_items.py,sha256=zX-eUDyQWiB8mY8Nojj03-_R728JvVIOlp0iEOisnKE,8750
- sempy_labs/admin/_reports.py,sha256=qkqV53w6XG-eyCrYWSEUG5xm7WARu4y8H5DzibbSHuE,7858
- sempy_labs/admin/_scanner.py,sha256=Nz4HPf-ibf-wl2I4_8gR88P6SwaourpjcCrpsjLfUK8,4434
+ sempy_labs/admin/_reports.py,sha256=nPDoC90Yzc67CtiuL4WYBYkGYuUQOnZAy0PCU0aYKj8,7857
+ sempy_labs/admin/_scanner.py,sha256=0mKi0ihJETdsSaeHFBEq3drcCS8J_enWWkIMBMECz64,4370
  sempy_labs/admin/_shared.py,sha256=srgkqttbMbK5XXjOt4zeAV8rMCvK7zEus55HsGtNUFI,3007
- sempy_labs/admin/_tenant.py,sha256=6CrJ8LBz5epst5kmqxbhoc0AmaU5KHZCJ36Hj6mlaVY,19141
+ sempy_labs/admin/_tenant.py,sha256=4--NxSqVbuS4BpNRcJoEikdJnJ2LV0R21HXuBD6d7vY,19357
  sempy_labs/admin/_users.py,sha256=eEOkgvny3FwMuUrSIBQ0n3JwrzWV_6_nwGc8_c-eXSM,4571
  sempy_labs/admin/_workspaces.py,sha256=XiiO3vyuJxKkVf9ZrW7261wHSBrnd8r7rbia8HGDFkI,4911
  sempy_labs/directlake/__init__.py,sha256=etaj-3wqe5t93mu74tGYjEOQ6gtHWUogidOygiVvlq8,2131
- sempy_labs/directlake/_directlake_schema_compare.py,sha256=In3Ac07GI6T3eLDvQK7Xt9bXwJLI7MgNAk83rOKsYKc,5040
- sempy_labs/directlake/_directlake_schema_sync.py,sha256=5nDyE-8ApeaUJO9aJLasop8G9bG9TjPamDQvgAlCUew,4671
- sempy_labs/directlake/_dl_helper.py,sha256=rr510sIDv5FL5Ipz-YqZWjY6XomBNhhoFbzWZcUyJvE,10672
- sempy_labs/directlake/_generate_shared_expression.py,sha256=WkE2onOO6XiloMNFdwwavGlANgmDGFMv2NXpqorhDAc,3002
+ sempy_labs/directlake/_directlake_schema_compare.py,sha256=tVc6hIgDxxA7a8V51e5tlzlp3bzVVTqQ_OKsTNxiWG4,5074
+ sempy_labs/directlake/_directlake_schema_sync.py,sha256=ipONLkBaXm4WgcMMChAyD4rVushdqdjAQdexT-fJxcY,6573
+ sempy_labs/directlake/_dl_helper.py,sha256=HHFy6tW-tSVZ4YHxSHvt6pXrloh0O6Lx7yNmZE7IAI4,10348
+ sempy_labs/directlake/_generate_shared_expression.py,sha256=SccfwnVnIocDdBj1159PUvWW4aaCRpLYwrePhdUntRw,3314
  sempy_labs/directlake/_get_directlake_lakehouse.py,sha256=e0WFQm4-daJR4K1aHuVaubu7T26yTeBgfNEMOXk-EzM,2392
  sempy_labs/directlake/_get_shared_expression.py,sha256=qc85kXggkx_7Sz_rAAli_yPnLzrGZpgD8IfVbTfZhQM,1133
- sempy_labs/directlake/_guardrails.py,sha256=YO8OycXDxmWrtH8nTz5a4AddfnpskM83rTPEPBVpqbM,2701
+ sempy_labs/directlake/_guardrails.py,sha256=wNVXpeiZckgLTly4cS5DU5DoV9x1S4DMxN5S08qAavE,2749
  sempy_labs/directlake/_list_directlake_model_calc_tables.py,sha256=EYT4ELmOZ3Uklzy6uMQMidc4WtBXm21NQqZu1Q5HTsg,2509
  sempy_labs/directlake/_show_unsupported_directlake_objects.py,sha256=nmrZrtDez7U8Ji76i9fxnnTx1zxMu2LCOZTMz4sFUEc,3504
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py,sha256=DC8Ig8oJtPY5qdWytIy03iiqYiQxLoRmQ5hsu6I69Sw,6348
- sempy_labs/directlake/_update_directlake_partition_entity.py,sha256=I2Qe2jrvUtNy6Wi75fHg47L4aRiw0UNQfQbURsyZhGY,8984
+ sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py,sha256=oKD4rKwD_s6Q3jKPw7H05isZWT8hs9WdtFyy23MDozc,7080
+ sempy_labs/directlake/_update_directlake_partition_entity.py,sha256=8YxrReJObtc7_Huq0qQrLKTVMhPO84guv8bQKtp__4c,9032
  sempy_labs/directlake/_warm_cache.py,sha256=xc7gG_OJY1rJYg79ztgcLATpnXHNqFaw-6CU1HgdlXk,9258
  sempy_labs/graph/__init__.py,sha256=AZ_IpOL06VvXrYmgbcrvQlxCxdDksvwXKf7JAGohCNI,620
- sempy_labs/graph/_groups.py,sha256=2axQ__eHNgJfb0ITOjexysz2Tq4AQ7xSejH4zG-QCFc,12531
- sempy_labs/graph/_teams.py,sha256=KTkIHd9XShsAiv1RsmVQpL2XmUVt4p9QyJFMP5evDj4,3050
- sempy_labs/graph/_users.py,sha256=o3bnpjln-K8JDAJQlcjpfgthAHPTT3CiAYzsx-3n6I4,5898
- sempy_labs/lakehouse/__init__.py,sha256=htZjFvJs3hCUUtMzRHQKbG6JlyC808QmXa_sVrPj0xg,799
+ sempy_labs/graph/_groups.py,sha256=j3YDeV6MzhRjGJRoD60SAaGyU8yb23x8QhXBzU2RWlE,12590
+ sempy_labs/graph/_teams.py,sha256=SRFaFuxtB7ylC5WeXIdrW0aLCxc_JTJHeEmxOPG99r8,3089
+ sempy_labs/graph/_users.py,sha256=dFOZ-jel6Aj4Um66f1jzQrgV0fOoI0cQnZfmR4OJSXo,5947
+ sempy_labs/lakehouse/__init__.py,sha256=5dRO6WfcHANed720iGhrgW4QajzV1emT47bPpCSKJNg,956
+ sempy_labs/lakehouse/_blobs.py,sha256=GgS2Zx6_0xzwUzuSBUll2bkNRFE-ThbK8jdYh-lJ2LY,8095
  sempy_labs/lakehouse/_get_lakehouse_columns.py,sha256=FWCyJhqqnZg837eK-S5st0xZpxbhxkcS8aTguazxjjY,2685
  sempy_labs/lakehouse/_get_lakehouse_tables.py,sha256=OmgYLNiegc8HwJP7hEPo5JMWFPyDIPSoqewBxsDkucc,8544
  sempy_labs/lakehouse/_lakehouse.py,sha256=dzDhBGN2FizREYFMsDsDzGm9cwLln1__OTAV_JV_GPY,8753
- sempy_labs/lakehouse/_shortcuts.py,sha256=SS-FYRfdIHkeS-6lN7_S85SxjxlS5j5EJshEveKvKH0,15283
+ sempy_labs/lakehouse/_shortcuts.py,sha256=24sPtX98ho84fNV_JCAHZrSkvk0Ui7p-0b-jTdGOGM8,16580
  sempy_labs/migration/__init__.py,sha256=142n01VAqlcx4E0mGGRtUfVOEwAXVdiHI_XprmUm7As,1175
  sempy_labs/migration/_create_pqt_file.py,sha256=eRK0Jz9ZeV_7jV3kNRze0bTAIqxsAZXLKMGE_loKOaY,9677
- sempy_labs/migration/_direct_lake_to_import.py,sha256=uMqvElwkCHMyraP9t2nGNgstRobiHPFo4AMuS60dXyU,2732
+ sempy_labs/migration/_direct_lake_to_import.py,sha256=GTSERKSwj4M4wOsENgDbb-ZO7NFqwD1VUcyOS73AbaM,3948
  sempy_labs/migration/_migrate_calctables_to_lakehouse.py,sha256=XzOPphiUmDIgv1ruhMyhAOs80hOfXCTKCrBGRP3PKtE,17998
  sempy_labs/migration/_migrate_calctables_to_semantic_model.py,sha256=Qt4WfmllCtSl-xkWzWWL5sTzi3lQDaJp43lVEXQisVY,6303
  sempy_labs/migration/_migrate_model_objects_to_semantic_model.py,sha256=RD0ttWcBratAzpPKjFF6jpEnZEd6M7m8OfEUFbkInbA,22950
  sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py,sha256=HYi2vn7yYDsBCTAXFTi6UiB86kdSlhQKPdwAt1nTKEE,7169
- sempy_labs/migration/_migration_validation.py,sha256=pl5Yv4BwRHZfAL0p2soj_Gk8FL3UcwiqKbX23pJe1oQ,2788
+ sempy_labs/migration/_migration_validation.py,sha256=AHURrWofb-U-L2Bdu36mcisVXOuZXi6Smgrrs2kjYBM,2650
  sempy_labs/migration/_refresh_calc_tables.py,sha256=W-lYdUZZcoYyLRIpMdpgaz03PEMM6Zf7E1vzT6MmMAE,5516
  sempy_labs/report/_BPAReportTemplate.json,sha256=9Uh-7E6d2ooxQ7j5JRayv_ayEULc7Gzg42kZGKdOqH8,63920
- sempy_labs/report/__init__.py,sha256=wnmjUX3w521OYnQBV3h9EaJs1eFLkIxmjc59VQOC19I,1293
- sempy_labs/report/_download_report.py,sha256=Hu2cVzXN_6yjHJKvS0KGy0uwbbJNxd0P-w6M8-XeZao,2767
+ sempy_labs/report/__init__.py,sha256=bPZ_MMqKGokskjJwM3T89LxIVNa2AXJg8Lr-mvJhP0E,1392
+ sempy_labs/report/_download_report.py,sha256=hCQ2_fSXSCqSUeaNM2Tf9T3MpRofREnDwp_zrfp7iTA,2703
  sempy_labs/report/_export_report.py,sha256=XCMsZzTBMgvQOe3Ltdod7my7_izpmP-3AVH6W5CExPE,10976
- sempy_labs/report/_generate_report.py,sha256=6Glk_q6f68aIHQJy_7EJjCJbNMRUJbnGriOyUJ2Qub8,13946
+ sempy_labs/report/_generate_report.py,sha256=ncFo8brgwPkSNF3urROMkIElqO6pcSy9tM6ymHE_UeQ,13868
  sempy_labs/report/_paginated.py,sha256=rsElE0IQ9qxRDuEp6qNF1EcD5XEgfTc7WsWEQsalsuI,2156
  sempy_labs/report/_report_bpa.py,sha256=ClETB8Q41sY1scCuknhpvalvuBaQ9ZwA4QX7F3sPcjc,13596
  sempy_labs/report/_report_bpa_rules.py,sha256=tPVGA0hmE6QMLlWtig7Va7Ksr2yXWl_Lndq--tWWd6w,4959
- sempy_labs/report/_report_functions.py,sha256=XojlMOuAYompdDk1bNaJx6KRK7MRsZ6vfLTiBXfjdXA,19680
- sempy_labs/report/_report_helper.py,sha256=NcdWgFuh1GjDwVPzy6QWwg3ecaJKoWzZdhbxT6hbbdA,10599
+ sempy_labs/report/_report_functions.py,sha256=pSrsUfMJqmsn9CYb5AM0iYdPR-EmuUSprVnc0dGhO1s,19709
+ sempy_labs/report/_report_helper.py,sha256=m23osIZMjvHhKbfhmTHyqHibXoWA9eP84TPanbH8kuE,10863
  sempy_labs/report/_report_list_functions.py,sha256=K9tMDQKhIZhelHvfMMW0lsxbVHekJ-5dAQveoD7PUDA,3980
- sempy_labs/report/_report_rebind.py,sha256=pOzg_XWbip8ledM79APsLxUjLJNS9aTDKK9dEx1F7Ds,4990
- sempy_labs/report/_reportwrapper.py,sha256=gwilmrE_QqWLQankc1rFlbp1_bexbdR5K5pGyx0N3Go,82945
+ sempy_labs/report/_report_rebind.py,sha256=svyxUSdqgXJW1UDNcb-urJxU9erO3JM72uzmuJUWIT0,5090
+ sempy_labs/report/_reportwrapper.py,sha256=u3MrszXTCQ8JtzdukXcnakdRW225jMXR2QvHgn1Wl_0,83226
+ sempy_labs/report/_save_report.py,sha256=FAzScMQIXl89TgVSRvaJofzKT0TfZh_hhPNNvDiktaI,6033
  sempy_labs/report/_bpareporttemplate/.platform,sha256=kWRa6B_KwSYLsvVFDx372mQriQO8v7dJ_YzQV_cfD-Q,303
  sempy_labs/report/_bpareporttemplate/definition.pbir,sha256=bttyHZYKqjA8OBb_cezGlX4H82cDvGZVCl1QB3fij4E,343
  sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json,sha256=kzjBlNdjbsSBBSHBwbQc298AJCr9Vp6Ex0D5PemUuT0,1578
@@ -181,9 +186,8 @@ sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visua
  sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json,sha256=wBVuNc8S2NaUA0FC708w6stmR2djNZp8nAsHMqesgsc,293
  sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json,sha256=mlY6t9OlSe-Y6_QmXJpS1vggU6Y3FjISUKECL8FVSg8,931
  sempy_labs/tom/__init__.py,sha256=Qbs8leW0fjzvWwOjyWK3Hjeehu7IvpB1beASGsi28bk,121
- sempy_labs/tom/_model.py,sha256=uQXCiaym-00LNxGLVEUMb5VyhBXL2Hd59bIGnWtle08,189080
- semantic_link_labs-0.9.5.dist-info/LICENSE,sha256=ws_MuBL-SCEBqPBFl9_FqZkaaydIJmxHrJG2parhU4M,1141
- semantic_link_labs-0.9.5.dist-info/METADATA,sha256=e0OiZ-sSGgLcn2UMrpp-1QzGCtAje51zx8DSXT3bwEc,25981
- semantic_link_labs-0.9.5.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
- semantic_link_labs-0.9.5.dist-info/top_level.txt,sha256=kiQX1y42Dbein1l3Q8jMUYyRulDjdlc2tMepvtrvixQ,11
- semantic_link_labs-0.9.5.dist-info/RECORD,,
+ sempy_labs/tom/_model.py,sha256=nHKFDh9yjTAvO96yPCsbW7ZMUPn44m8mlSElZp6ebbs,194439
+ semantic_link_labs-0.9.7.dist-info/METADATA,sha256=pbas6wJ715KVGnLNZ4R3vk3vq3tU6tKWp_mGydHWJKs,26200
+ semantic_link_labs-0.9.7.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+ semantic_link_labs-0.9.7.dist-info/top_level.txt,sha256=kiQX1y42Dbein1l3Q8jMUYyRulDjdlc2tMepvtrvixQ,11
+ semantic_link_labs-0.9.7.dist-info/RECORD,,
{semantic_link_labs-0.9.5.dist-info → semantic_link_labs-0.9.7.dist-info}/WHEEL CHANGED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (75.8.2)
+ Generator: setuptools (78.1.0)
  Root-Is-Purelib: true
  Tag: py3-none-any

sempy_labs/__init__.py CHANGED
@@ -1,3 +1,9 @@
+ from sempy_labs._delta_analyzer_history import (
+ delta_analyzer_history,
+ )
+ from sempy_labs._dax_query_view import (
+ generate_dax_query_view_url,
+ )
  from sempy_labs._mounted_data_factories import (
  list_mounted_data_factories,
  get_mounted_data_factory_definition,
@@ -7,6 +13,7 @@ from sempy_labs._mounted_data_factories import (
  from sempy_labs._semantic_models import (
  get_semantic_model_refresh_schedule,
  enable_semantic_model_scheduled_refresh,
+ delete_semantic_model,
  )
  from sempy_labs._graphQL import (
  list_graphql_apis,
@@ -148,6 +155,7 @@ from sempy_labs._capacities import (
  create_or_update_resource_group,
  list_resource_groups,
  get_resource_group,
+ list_capacities,
  )
  from sempy_labs._spark import (
  get_spark_settings,
@@ -165,6 +173,7 @@ from sempy_labs._workspaces import (
  assign_workspace_to_capacity,
  unassign_workspace_from_capacity,
  list_workspace_role_assignments,
+ delete_workspace,
  )
  from sempy_labs._notebooks import (
  get_notebook_definition,
@@ -180,6 +189,9 @@ from sempy_labs._sql import (
  from sempy_labs._sqldatabase import (
  get_sql_database_columns,
  get_sql_database_tables,
+ create_sql_database,
+ delete_sql_database,
+ list_sql_databases,
  )
  from sempy_labs._workspace_identity import (
  provision_workspace_identity,
@@ -244,7 +256,6 @@ from sempy_labs._list_functions import (
  list_semantic_model_objects,
  list_shortcuts,
  get_object_level_security,
- list_capacities,
  list_datamarts,
  list_lakehouses,
  list_sql_endpoints,
@@ -540,4 +551,11 @@ __all__ = [
  "list_mounted_data_factories",
  "get_mounted_data_factory_definition",
  "delete_mounted_data_factory",
+ "generate_dax_query_view_url",
+ "delete_semantic_model",
+ "delete_workspace",
+ "create_sql_database",
+ "delete_sql_database",
+ "list_sql_databases",
+ "delta_analyzer_history",
  ]
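
The new exports above are importable straight from `sempy_labs` once 0.9.7 is installed. A minimal sketch of two of them; the model and workspace names below are placeholders, not part of this diff:

```python
import sempy_labs as labs

# list_capacities now lives in sempy_labs._capacities and is re-exported at the top level.
capacities = labs.list_capacities()
print(capacities[["Display Name", "Sku", "Region", "State"]])

# generate_dax_query_view_url prints a Power BI DAX query view link for a model.
labs.generate_dax_query_view_url(
    dataset="Sales Model",                       # placeholder semantic model name
    dax_string="EVALUATE VALUES('Date'[Year])",  # placeholder DAX query
    workspace="My Workspace",                    # placeholder workspace name
)
```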
sempy_labs/_ai.py CHANGED
@@ -216,7 +216,9 @@ def generate_aggs(
  f"{icons.green_dot} The '{aggLakeTName}' table has been created/updated in the lakehouse."

  # Create/update semantic model agg table
- tom_server = fabric.create_tom_server(readonly=False, workspace=workspace)
+ tom_server = fabric.create_tom_server(
+ dataset=dataset, readonly=False, workspace=workspace
+ )
  m = tom_server.Databases.GetByName(dataset).Model
  print(f"\n{icons.in_progress} Updating the '{dataset}' semantic model...")
  dfC_agg = dfC[dfC["Table Name"] == aggTableName]
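
The change above means `fabric.create_tom_server` is now called with the target semantic model passed explicitly. A hedged sketch of the same call pattern for code outside this module, using placeholder names:

```python
import sempy.fabric as fabric

# Pass the semantic model explicitly when opening a writable TOM connection,
# mirroring the updated call in generate_aggs above.
tom_server = fabric.create_tom_server(
    dataset="Sales Model",     # placeholder semantic model name
    readonly=False,
    workspace="My Workspace",  # placeholder workspace name
)
model = tom_server.Databases.GetByName("Sales Model").Model
```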
sempy_labs/_capacities.py CHANGED
@@ -1,4 +1,3 @@
- import sempy.fabric as fabric
  from typing import Optional, List, Tuple
  from sempy._utils._log import log
  import sempy_labs._icons as icons
@@ -242,7 +241,7 @@ def list_vcores() -> pd.DataFrame:

  def get_capacity_resource_governance(capacity_name: str):

- dfC = fabric.list_capacities()
+ dfC = list_capacities()
  dfC_filt = dfC[dfC["Display Name"] == capacity_name]
  capacity_id = dfC_filt["Id"].iloc[0].upper()

@@ -1131,3 +1130,39 @@ def get_resource_group(azure_subscription_id: str, resource_group: str) -> pd.Da
  }

  return pd.DataFrame(new_data, index=[0])
+
+
+ def list_capacities() -> pd.DataFrame:
+ """
+ Shows the capacities and their properties.
+
+ Returns
+ -------
+ pandas.DataFrame
+ A pandas dataframe showing the capacities and their properties
+ """
+
+ columns = {
+ "Id": "string",
+ "Display Name": "string",
+ "Sku": "string",
+ "Region": "string",
+ "State": "string",
+ "Admins": "string",
+ }
+ df = _create_dataframe(columns=columns)
+
+ response = _base_api(request="/v1.0/myorg/capacities", client="fabric_sp")
+
+ for i in response.json().get("value", []):
+ new_data = {
+ "Id": i.get("id").lower(),
+ "Display Name": i.get("displayName"),
+ "Sku": i.get("sku"),
+ "Region": i.get("region"),
+ "State": i.get("state"),
+ "Admins": [i.get("admins", [])],
+ }
+ df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+ return df
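
The new `list_capacities` helper wraps the `/v1.0/myorg/capacities` endpoint and returns one row per capacity with the columns defined above. A short usage sketch; the "Active" state value used in the filter is an assumption, not something this diff guarantees:

```python
from sempy_labs import list_capacities

df = list_capacities()

# Filter on the "State" column; "Active" is an assumed value for running capacities.
active = df[df["State"] == "Active"]
print(active[["Display Name", "Sku", "Region"]])
```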
sempy_labs/_capacity_migration.py CHANGED
@@ -106,17 +106,13 @@
  migrated_workspaces = []

  for i, r in dfW.iterrows():
- workspace = r["Name"]
-
- if workspaces is None or workspace in workspaces:
- pass
- else:
- continue
-
- if assign_workspace_to_capacity(
- capacity_name=target_capacity, workspace=workspace
- ):
- migrated_workspaces.append(workspace)
+ workspace_id = r["Id"]
+ workspace_name = r["Name"]
+ if workspaces is None or workspace_name in workspaces:
+ assign_workspace_to_capacity(
+ capacity=target_capacity, workspace=workspace_id
+ )
+ migrated_workspaces.append(workspace_name)

  if len(migrated_workspaces) < workspace_count:
  print(
@@ -124,10 +120,11 @@
  )
  print(f"{icons.in_progress} Initiating rollback...")
  for i, r in dfW.iterrows():
- workspace = r["Name"]
- if workspace in migrated_workspaces:
+ workspace_id = r["Id"]
+ workspace_name = r["Name"]
+ if workspace_name in migrated_workspaces:
  assign_workspace_to_capacity(
- capacity_name=source_capacity, workspace=workspace
+ capacity=source_capacity, workspace=workspace_id
  )
  print(
  f"{icons.green_dot} Rollback of the workspaces to the '{source_capacity}' capacity is complete."
sempy_labs/_connections.py CHANGED
@@ -1,4 +1,3 @@
- import sempy.fabric as fabric
  import pandas as pd
  from typing import Optional
  from sempy_labs._helper_functions import (
@@ -7,6 +6,7 @@ from sempy_labs._helper_functions import (
  _update_dataframe_datatypes,
  _base_api,
  _create_dataframe,
+ resolve_item_id,
  )
  from uuid import UUID
  import sempy_labs._icons as icons
@@ -230,9 +230,7 @@ def list_item_connections(

  (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
  item_type = item_type[0].upper() + item_type[1:]
- item_id = fabric.resolve_item_id(
- item_name=item_name, type=item_type, workspace=workspace_id
- )
+ item_id = resolve_item_id(item=item_name, type=item_type, workspace=workspace_id)

  columns = {
  "Connection Name": "string",
sempy_labs/_dataflows.py CHANGED
@@ -1,4 +1,3 @@
- import sempy.fabric as fabric
  import pandas as pd
  from sempy_labs._helper_functions import (
  resolve_workspace_name_and_id,
@@ -6,6 +5,7 @@ from sempy_labs._helper_functions import (
  _update_dataframe_datatypes,
  _base_api,
  _create_dataframe,
+ resolve_workspace_name,
  )
  from typing import Optional, Tuple
  import sempy_labs._icons as icons
@@ -187,7 +187,7 @@ def list_upstream_dataflows(
  for v in values:
  tgt_dataflow_id = v.get("targetDataflowId")
  tgt_workspace_id = v.get("groupId")
- tgt_workspace_name = fabric.resolve_workspace_name(tgt_workspace_id)
+ tgt_workspace_name = resolve_workspace_name(workspace_id=tgt_workspace_id)
  (tgt_dataflow_name, _) = _resolve_dataflow_name_and_id(
  dataflow=tgt_dataflow_id, workspace=tgt_workspace_id
  )
sempy_labs/_dax_query_view.py ADDED
@@ -0,0 +1,57 @@
+ from typing import Optional
+ from uuid import UUID
+ from sempy_labs._helper_functions import (
+ resolve_dataset_id,
+ _get_fabric_context_setting,
+ resolve_workspace_id,
+ )
+ from sempy._utils._log import log
+ import gzip
+ import base64
+ import urllib.parse
+
+
+ @log
+ def generate_dax_query_view_url(
+ dataset: str | UUID, dax_string: str, workspace: Optional[str | UUID] = None
+ ):
+ """
+ Prints a URL based on query provided. This URL opens `DAX query view <https://learn.microsoft.com/power-bi/transform-model/dax-query-view>`_ in the Power BI service, connected to the semantic model and using the query provided.
+
+ Parameters
+ ----------
+ dataset : str | uuid.UUID
+ The semantic model name or ID.
+ dax_string : str
+ The DAX query string.
+ workspace : str | uuid.UUID, default=None
+ The workspace name or ID.
+ Defaults to None which resolves to the workspace of the attached lakehouse
+ or if no lakehouse attached, resolves to the workspace of the notebook.
+ """
+
+ workspace_id = resolve_workspace_id(workspace=workspace)
+ dataset_id = resolve_dataset_id(dataset=dataset, workspace=workspace_id)
+
+ prefix = _get_fabric_context_setting(name="spark.trident.pbienv").lower()
+
+ if prefix == "prod":
+ prefix = "app"
+
+ def gzip_base64_urlsafe(input_string):
+ # Compress the string with gzip
+ compressed_data = gzip.compress(input_string.encode("utf-8"))
+
+ # Encode the compressed data in base64
+ base64_data = base64.b64encode(compressed_data)
+
+ # Make the base64 string URL-safe
+ urlsafe_data = urllib.parse.quote_plus(base64_data.decode("utf-8"))
+
+ return urlsafe_data
+
+ formatted_query = gzip_base64_urlsafe(dax_string)
+
+ url = f"https://{prefix}.powerbi.com/groups/{workspace_id}/modeling/{dataset_id}/daxQueryView?query={formatted_query}"
+
+ print(url)
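
The function gzip-compresses the DAX text, base64-encodes it, URL-escapes it, and embeds it in a DAX query view link for the resolved workspace and model. A hedged usage sketch; the model, workspace, table, and measure names are placeholders:

```python
from sempy_labs import generate_dax_query_view_url

# Prints a link that opens DAX query view in the Power BI service with this query preloaded.
generate_dax_query_view_url(
    dataset="Sales Model",     # placeholder semantic model name
    dax_string="EVALUATE SUMMARIZECOLUMNS('Date'[Year], \"Rows\", COUNTROWS('Sales'))",
    workspace="My Workspace",  # placeholder workspace name
)
```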
sempy_labs/_delta_analyzer.py CHANGED
@@ -52,6 +52,7 @@ def delta_analyzer(
  workspace: Optional[str | UUID] = None,
  column_stats: bool = True,
  skip_cardinality: bool = True,
+ schema: Optional[str] = None,
  ) -> Dict[str, pd.DataFrame]:
  """
  Analyzes a delta table and shows the results in dictionary containing a set of 5 dataframes. If 'export' is set to True, the results will be saved to delta tables in the lakehouse attached to the notebook.
@@ -85,6 +86,8 @@ def delta_analyzer(
  If True, collects data about column chunks and columns. If False, skips that step and only returns the other 3 dataframes.
  skip_cardinality : bool, default=True
  If True, skips the cardinality calculation for each column. If False, calculates the cardinality for each column.
+ schema : str, default=None
+ The name of the schema to which the table belongs (for schema-enabled lakehouses). If None, the default schema is used.

  Returns
  -------
@@ -96,25 +99,21 @@ def delta_analyzer(
  if not skip_cardinality:
  column_stats = True

- # display_toggle = notebookutils.common.configs.pandas_display
-
- # Turn off notebookutils display
- # if display_toggle is True:
- # notebookutils.common.configs.pandas_display = False
-
  prefix = "SLL_DeltaAnalyzer_"
  now = datetime.now()
  (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace=workspace)
  (lakehouse_name, lakehouse_id) = resolve_lakehouse_name_and_id(
  lakehouse=lakehouse, workspace=workspace
  )
- path = create_abfss_path(lakehouse_id, workspace_id, table_name)
- local_path = _mount(lakehouse=lakehouse, workspace=workspace)
- table_path = f"{local_path}/Tables/{table_name}"
- delta_table_path = create_abfss_path(lakehouse_id, workspace_id, table_name)

- # Set back to original value
- # notebookutils.common.configs.pandas_display = display_toggle
+ delta_table_path = create_abfss_path(
+ lakehouse_id, workspace_id, table_name, schema=schema
+ )
+ local_path = _mount(lakehouse=lakehouse, workspace=workspace)
+ if schema is not None:
+ table_path = f"{local_path}/Tables/{schema}/{table_name}"
+ else:
+ table_path = f"{local_path}/Tables/{table_name}"

  parquet_file_df_columns = {
  # "Dataset": "string",
@@ -183,7 +182,7 @@ def delta_analyzer(
  # min_reader_version = table_details.get("minReaderVersion")
  # min_writer_version = table_details.get("minWriterVersion")

- latest_files = _read_delta_table(path).inputFiles()
+ latest_files = _read_delta_table(delta_table_path).inputFiles()
  # file_paths = [f.split("/")[-1] for f in latest_files]
  all_parquet_files = get_parquet_file_infos(delta_table_path)
  common_file_paths = set(
@@ -430,6 +429,7 @@ def get_delta_table_history(
  table_name: str,
  lakehouse: Optional[str | UUID] = None,
  workspace: Optional[str | UUID] = None,
+ schema: Optional[str] = None,
  ) -> pd.DataFrame:
  """
  Returns the history of a delta table as a pandas dataframe.
@@ -445,6 +445,8 @@ def get_delta_table_history(
  The Fabric workspace name or ID used by the lakehouse.
  Defaults to None which resolves to the workspace of the attached lakehouse
  or if no lakehouse attached, resolves to the workspace of the notebook.
+ schema : str, default=None
+ The name of the schema to which the table belongs (for schema-enabled lakehouses). If None, the default schema is used.

  Returns
  -------
@@ -461,7 +463,7 @@ def get_delta_table_history(
  (lakehouse_name, lakehouse_id) = resolve_lakehouse_name_and_id(
  lakehouse=lakehouse, workspace=workspace
  )
- path = create_abfss_path(lakehouse_id, workspace_id, table_name)
+ path = create_abfss_path(lakehouse_id, workspace_id, table_name, schema)

  from delta import DeltaTable
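
Both `delta_analyzer` and `get_delta_table_history` now accept an optional `schema` argument for schema-enabled lakehouses. A minimal sketch, assuming a placeholder table and schema name:

```python
from sempy_labs._delta_analyzer import delta_analyzer, get_delta_table_history

# Analyze a delta table that lives under a named schema in a schema-enabled lakehouse.
results = delta_analyzer(
    table_name="FactSales",  # placeholder table name
    schema="dbo",            # placeholder schema; leave as None for the default schema
)

# The same schema argument applies to the table history helper.
history = get_delta_table_history(table_name="FactSales", schema="dbo")
print(history.head())
```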