semantic-link-labs 0.9.2-py3-none-any.whl → 0.9.3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of semantic-link-labs might be problematic.

semantic_link_labs-0.9.2.dist-info/METADATA → semantic_link_labs-0.9.3.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: semantic-link-labs
-Version: 0.9.2
+Version: 0.9.3
 Summary: Semantic Link Labs for Microsoft Fabric
 Author: Microsoft Corporation
 License: MIT License
@@ -26,7 +26,7 @@ Requires-Dist: pytest>=8.2.1; extra == "test"
 # Semantic Link Labs
 
 [![PyPI version](https://badge.fury.io/py/semantic-link-labs.svg)](https://badge.fury.io/py/semantic-link-labs)
-[![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.9.2&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
+[![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.9.3&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
 [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
 [![Downloads](https://static.pepy.tech/badge/semantic-link-labs)](https://pepy.tech/project/semantic-link-labs)
 
@@ -144,6 +144,7 @@ An even better way to ensure the semantic-link-labs library is available in your
 2. Select your newly created environment within the 'Environment' drop down in the navigation bar at the top of the notebook
 
 ## Version History
+* [0.9.3](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.3) (February 13, 2025)
 * [0.9.2](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.2) (February 5, 2025)
 * [0.9.1](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.1) (January 22, 2025)
 * [0.9.0](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.0) (January 22, 2025)
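The only METADATA changes are the version bump and the matching badge and Version History entries. For readers tracking the bump in a Fabric notebook, a minimal check that the interpreter actually picked up the new release (the `%pip` line follows the README's own install step; the rest is standard library):

```python
# %pip install semantic-link-labs==0.9.3   # notebook magic, per the README's install step

# Confirm the installed distribution matches the bumped METADATA version.
from importlib.metadata import version

assert version("semantic-link-labs") == "0.9.3"
```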
semantic_link_labs-0.9.2.dist-info/RECORD → semantic_link_labs-0.9.3.dist-info/RECORD
@@ -1,39 +1,39 @@
 sempy_labs/__init__.py,sha256=l2Vnhvr_zSC9xnCQjuLdHCA2tBOj2pRTht1GPwmOvbo,13933
-sempy_labs/_ai.py,sha256=CzsNw6Wpd2B5Rd0RcY250-_p0L-0gFoMNLEc_KmrobU,16177
+sempy_labs/_ai.py,sha256=rhVohfwrU1mvWH0EN_vCTnldb8xJNfGHZGba34k1JVw,16174
 sempy_labs/_authentication.py,sha256=GjtN5XqIyWXbR5Ni4hfYiUNwgFa-ySX8e-BrqE1vgGc,6903
 sempy_labs/_capacities.py,sha256=7rR4BL5XuWr5UO25jp8_5moymvX35AY_FG27H4HUcqQ,43949
 sempy_labs/_capacity_migration.py,sha256=wzOD14W7lCTx539GvkBSROMnTHEYNeFkz4L4C-0CXOQ,24762
 sempy_labs/_clear_cache.py,sha256=5z73I4Zdr3C0Bd4zyxrQdcGG2VOzsXWukzB_evm4bRs,12731
-sempy_labs/_connections.py,sha256=vPvyDUYsxNWX-sePyWVyP-pUD8MctB4vpKAjxL5KdJc,16953
+sempy_labs/_connections.py,sha256=aLMneYcw9on-GXr6KIGmDIvo8cuMevbkmtlmB3uWXhU,18693
 sempy_labs/_data_pipelines.py,sha256=pL6hkYxHifZk6OCBt3MJm-eG2lK4iaYXs0FbbGGbXEc,5564
 sempy_labs/_dataflows.py,sha256=SZThUDRQcWujK30nNw1YI06y1L6-piNHLiPBb72s9Bk,8049
 sempy_labs/_dax.py,sha256=2FCN2SInk8E5LMNMO5fnrWhQ-gC9S-B4FG3bDYUk_Jk,16452
-sempy_labs/_delta_analyzer.py,sha256=W-tnic6gG_bCF4441Qjx7vSWbY4QzkhgZseUdIpwV_Q,10480
+sempy_labs/_delta_analyzer.py,sha256=_o8DvD7wWxKTGpGEZ5F1mIM2yp28_-U0v0Y-eNPIHZ8,11608
 sempy_labs/_deployment_pipelines.py,sha256=SDQYkCAhOAlxBr58jYxtLFOVySiRXO0_WhfOKGDeYZQ,6254
 sempy_labs/_documentation.py,sha256=yVA8VPEzx_fmljtcvSxtB7-BeupYsfdMXXjp6Fpnyo8,5007
 sempy_labs/_environments.py,sha256=F0lbOqkBGFINIB49C9M_bv-F3A5OtwyYbCFKdCHkkQc,5433
 sempy_labs/_eventhouses.py,sha256=bxbcCIq0qH2_k5A1x7z-BDfzPMoE8f1sFm9SUIHJ9Go,3948
 sempy_labs/_eventstreams.py,sha256=TY0HBWRjizonKr3g8doKrffvOqlGY4l1VomcBnUUh-g,3996
 sempy_labs/_external_data_shares.py,sha256=MP0nx9uFzjiV04xpq09UAM8n_1ckPBae2uhI6Vk0ZoY,6537
-sempy_labs/_gateways.py,sha256=3_0zjMS1vaL00B3M-1jT2Yq-5fJY_DF9_gcHu8Wrw0k,15328
+sempy_labs/_gateways.py,sha256=tbSEOGCc-mnvAYTVwJeDv8nWZkokf_OUOGfh3z8ePbw,17495
 sempy_labs/_generate_semantic_model.py,sha256=OtzWWJ1EeDCYfEKn5TKnAUfbXOvqNI3cNkoJow3KkmE,17865
 sempy_labs/_git.py,sha256=lB0mJMGecvS1scPTbMolk0Tg_0skdbaj0-9W_2cA1K0,15014
 sempy_labs/_graphQL.py,sha256=7zyXXzsA_l2NeumeSrg5J7JMAexHFzeKFr59XTJWfrk,2906
-sempy_labs/_helper_functions.py,sha256=AiXwnyebS7jFhjBubrKpiZRtUXN3hKMIkSxygk2hcI0,46792
+sempy_labs/_helper_functions.py,sha256=3v3pY9dLe8KBLXztaauzrsjXDvWJciF3C2It1pawX-M,47624
 sempy_labs/_icons.py,sha256=ez2dx_LCti71S_-eB6WYQ-kOMyiBL8ZJN12-ev5dcmA,3579
 sempy_labs/_job_scheduler.py,sha256=kVuBiUhzaNJJ7d_HiP3CNhMe3lHa6u87MyYAWslgghA,7522
 sempy_labs/_kql_databases.py,sha256=ed5w9k9iACsy7hMr0pBQB3ETKW3rucVZuRc-FVb_ihY,4524
 sempy_labs/_kql_querysets.py,sha256=7E9FpV_wlZ1F_S2GncAzRiHoryqDQFvZaRftde0_oKc,4036
-sempy_labs/_list_functions.py,sha256=OqbNTseY8rwfqdNm62jLO_Eb5oOf97-nkcMZxuHRB8g,66633
+sempy_labs/_list_functions.py,sha256=O6EDE8-MdPkkb6VWlN9gl4IxkT5s1oL5muggqd_0ZlY,66614
 sempy_labs/_managed_private_endpoints.py,sha256=Po5ki9jQ5Wg3uxvHkAWuhtPHAkgOYspv19ZoAYzg9JM,6350
 sempy_labs/_mirrored_databases.py,sha256=OrxHlMjahW0Bx5GfTwAHGxDO5hy-9Pt5e9N5HKiRjLo,14028
 sempy_labs/_mirrored_warehouses.py,sha256=Q3WlRjUwCLz8KW1eN8MiTPeY0P52Vkuz5kgnv4GvQ3k,1739
-sempy_labs/_ml_experiments.py,sha256=cMJCgNMwpovvQgZt4vSQ7x4HM3f_P5ZSqA74uF86_KE,4054
+sempy_labs/_ml_experiments.py,sha256=pTpc7vx5z7dvjaViB8DMbiIEN7thVO4cLhyUKP-RYpE,4057
 sempy_labs/_ml_models.py,sha256=BGnYC0-K1cb9VikCmqods5bxckzYplkJkDO3jwttpqI,3924
 sempy_labs/_model_auto_build.py,sha256=PTQo3dufzLSFcQ5shFkmBWAVSdP7cTJgpUclrcXyNbg,5105
-sempy_labs/_model_bpa.py,sha256=7NIKlF7Mhxm0RxLK42Pi3aLOiRmxFFb_20aPTHLM9W4,21413
+sempy_labs/_model_bpa.py,sha256=SWKVe5WwfmU7OZVyCbsUlK0w6VdJ08MLklQbPJ98Z64,21376
 sempy_labs/_model_bpa_bulk.py,sha256=F_AzebdpMm7QvYuSuSj52NTg6jYE9H1FJJDpdnyNF-g,16039
-sempy_labs/_model_bpa_rules.py,sha256=rYDIsGcoynntcdNA1O3sLf-e6R6dspIBByhWrziV59Q,45555
+sempy_labs/_model_bpa_rules.py,sha256=L1xu5T5LsTxsk-KyrCB561BKzgAkj7izDbSiyMA6upY,45651
 sempy_labs/_model_dependencies.py,sha256=0xGgubrq76zIvBdEqmEX_Pd6WdizXFVECBW6BPl2DZo,13162
 sempy_labs/_notebooks.py,sha256=Ev8E4kxJHAnpqUacBxmov227pZH54CwbCP-C6OpjWD0,8458
 sempy_labs/_one_lake_integration.py,sha256=3RYJ6det_oM3NcAqwrONATTggkNAXRZFvVbrOA1Pg94,6274
@@ -41,9 +41,9 @@ sempy_labs/_query_scale_out.py,sha256=nra1q8s-PKpZTlI_L0lMGO1GmdBk6sqETsBQShF1yP
 sempy_labs/_refresh_semantic_model.py,sha256=4w_uaYLbaZptmEFY7QHWzOgXcgc2ctGx8HQvt2aguxk,17360
 sempy_labs/_spark.py,sha256=SuSTjjmtzj7suDgN8Njk_pNBaStDLgIJB_1yk_e2H1Y,19340
 sempy_labs/_sql.py,sha256=cXPjmb3OiOucdWi8QU73lPUiNnZ3LOeoBpoaRwjj1l4,5337
-sempy_labs/_translations.py,sha256=CxpfnyCmn6hAiImxTOvz6x1HG4ppZORHCemJogKPV-M,16193
-sempy_labs/_vertipaq.py,sha256=jpaRnqRT-Hks0d79SCVs3BKs3dMF63tom5TwMXs1Iig,38084
-sempy_labs/_warehouses.py,sha256=ym_AnJIekGLOEpeIK_iVtzfYehf-iyc1eW6JCW6ZhzU,7302
+sempy_labs/_translations.py,sha256=i4K2PFk6-TcmAnUpqz-z_GuDv9XEp1cBs0KY-x6ja1w,16168
+sempy_labs/_vertipaq.py,sha256=xssYOQeVsEbptzWZAfhIuVb4n9WQKn64F329ONal1FI,38052
+sempy_labs/_warehouses.py,sha256=EKUrgPXmQEvdZxY84CazKdbz6Sd8vqaV7sq7UF4odl0,7302
 sempy_labs/_workloads.py,sha256=uuTl-_7D_nubwCY5xrTfWF1apTpFbgRKwdY0oNFPDSk,4216
 sempy_labs/_workspace_identity.py,sha256=ZtaaYyFUmRabdxCYw9xoUNRwKvXi1YibryI9Ww9MzxU,2155
 sempy_labs/_workspaces.py,sha256=_4cwd5lg4JMEGuHaKyLHo7JQGJDb3v93eOO3Q8RYo44,11004
@@ -87,11 +87,11 @@ sempy_labs/_bpa_translation/_model/_translations_zh-CN.po,sha256=ipMbnet7ZI5mZoC
 sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po,sha256=5v6tVKGruqneAeMoa6F3tyg_JBL8qOpqOJofWpq2W3U,31518
 sempy_labs/admin/__init__.py,sha256=ZH4hnVyBpMZbgtcVeeagsJ7J0ihE7QES3WxIL4z78g4,1995
 sempy_labs/admin/_basic_functions.py,sha256=vrHszPvZdHF2UIYkpkYPDKKc1YfX5ZHlakYC6kDup4Q,39495
-sempy_labs/admin/_domains.py,sha256=Eo7QIkf4TixupC7cbiMlB2M-UVBbzH0-44bO8YIkxt8,15127
+sempy_labs/admin/_domains.py,sha256=pw9_szJEiclYH__v7cMeAHMLgYBhFTYTixK-b-5x3Jc,15064
 sempy_labs/admin/_external_data_share.py,sha256=q4gw5iYZJDH-9xIM6L0b2CU9ebUIdE-ZVrFsulRHyUU,3364
 sempy_labs/admin/_git.py,sha256=gsbDQKd66knCI_Zh8vHSfHK-uQVJjVmhKKvfMMYKZyA,2264
 sempy_labs/admin/_items.py,sha256=8RT3CqFQiDCs-XEgAtQeD3YSFezOz3_E_MArTZAiV-o,8705
-sempy_labs/admin/_scanner.py,sha256=c8m4k3s_CHFg8qsMgv0jGUdwIjoIbVDT27z5cEkI_-E,4781
+sempy_labs/admin/_scanner.py,sha256=58mez0RchcPd3aelD7bJKtg9lI1uqDDmJ_z_ATYiwvs,4453
 sempy_labs/directlake/__init__.py,sha256=etaj-3wqe5t93mu74tGYjEOQ6gtHWUogidOygiVvlq8,2131
 sempy_labs/directlake/_directlake_schema_compare.py,sha256=In3Ac07GI6T3eLDvQK7Xt9bXwJLI7MgNAk83rOKsYKc,5040
 sempy_labs/directlake/_directlake_schema_sync.py,sha256=5nDyE-8ApeaUJO9aJLasop8G9bG9TjPamDQvgAlCUew,4671
@@ -110,19 +110,19 @@ sempy_labs/graph/_groups.py,sha256=2axQ__eHNgJfb0ITOjexysz2Tq4AQ7xSejH4zG-QCFc,1
 sempy_labs/graph/_teams.py,sha256=IxYaYxD3L6RGf4XFiC6VCLH5tqNWGEijtulEG8RwdRM,3043
 sempy_labs/graph/_users.py,sha256=5leq_Htox_rfbGQvK4T7NvNasur-HuoFey26qrSHjYM,5652
 sempy_labs/lakehouse/__init__.py,sha256=k4offCjolrw8cpe1mj5AmPq5JcwsxohBwqR37nNc_7E,757
-sempy_labs/lakehouse/_get_lakehouse_columns.py,sha256=_XgEF6BJHp6jxRTqb0jmDG1OkdkKpmriKSVPrAB6o3E,2706
-sempy_labs/lakehouse/_get_lakehouse_tables.py,sha256=SSMRfxgeTquYspEiiBcjwEJNOcWqAekShcHZ_-OiCHs,8565
-sempy_labs/lakehouse/_lakehouse.py,sha256=XEl23FxGDgeP9Yr5rlJ2W9tzfF75OLqJ2dBuAgcUGOQ,8770
-sempy_labs/lakehouse/_shortcuts.py,sha256=wJfld4rbmHgSfPzejmfE1GAABjV2zbupiyNxk1PoN4A,8812
+sempy_labs/lakehouse/_get_lakehouse_columns.py,sha256=FWCyJhqqnZg837eK-S5st0xZpxbhxkcS8aTguazxjjY,2685
+sempy_labs/lakehouse/_get_lakehouse_tables.py,sha256=OmgYLNiegc8HwJP7hEPo5JMWFPyDIPSoqewBxsDkucc,8544
+sempy_labs/lakehouse/_lakehouse.py,sha256=sD1bQ2mEnSmRJLd1D1yMeXOZHRvq8ZlyAGxjLCwFi0s,8734
+sempy_labs/lakehouse/_shortcuts.py,sha256=ASlHqJR5mGgRTjQkKvZXKXT7-hHjgeKVqhtXh5joYqk,8819
 sempy_labs/migration/__init__.py,sha256=142n01VAqlcx4E0mGGRtUfVOEwAXVdiHI_XprmUm7As,1175
 sempy_labs/migration/_create_pqt_file.py,sha256=eRK0Jz9ZeV_7jV3kNRze0bTAIqxsAZXLKMGE_loKOaY,9677
 sempy_labs/migration/_direct_lake_to_import.py,sha256=uMqvElwkCHMyraP9t2nGNgstRobiHPFo4AMuS60dXyU,2732
-sempy_labs/migration/_migrate_calctables_to_lakehouse.py,sha256=QzQ5X7lrLD3lzlXfMtXdlkciT46nVJyZ5DunuO8vvBA,18019
+sempy_labs/migration/_migrate_calctables_to_lakehouse.py,sha256=XzOPphiUmDIgv1ruhMyhAOs80hOfXCTKCrBGRP3PKtE,17998
 sempy_labs/migration/_migrate_calctables_to_semantic_model.py,sha256=Qt4WfmllCtSl-xkWzWWL5sTzi3lQDaJp43lVEXQisVY,6303
 sempy_labs/migration/_migrate_model_objects_to_semantic_model.py,sha256=RD0ttWcBratAzpPKjFF6jpEnZEd6M7m8OfEUFbkInbA,22950
 sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py,sha256=HYi2vn7yYDsBCTAXFTi6UiB86kdSlhQKPdwAt1nTKEE,7169
 sempy_labs/migration/_migration_validation.py,sha256=pl5Yv4BwRHZfAL0p2soj_Gk8FL3UcwiqKbX23pJe1oQ,2788
-sempy_labs/migration/_refresh_calc_tables.py,sha256=lKkwVBN7eyDoMJd0cv4jkLyO6iGjsuh96RPv6_Iiwrc,5537
+sempy_labs/migration/_refresh_calc_tables.py,sha256=W-lYdUZZcoYyLRIpMdpgaz03PEMM6Zf7E1vzT6MmMAE,5516
 sempy_labs/report/_BPAReportTemplate.json,sha256=9Uh-7E6d2ooxQ7j5JRayv_ayEULc7Gzg42kZGKdOqH8,63920
 sempy_labs/report/__init__.py,sha256=esE_i1lL2AdcwWs7cE8AKSXZy_7w_4jJJtFULFkDPcU,1244
 sempy_labs/report/_download_report.py,sha256=hX6BlIN1dCE6wiDpTZymGus9ZC4UnnI32OiUeMQ7PN0,2709
@@ -130,10 +130,10 @@ sempy_labs/report/_generate_report.py,sha256=Ir4xDldITvWWJebbY1Ljj5m04cKQ8oPTQta
 sempy_labs/report/_paginated.py,sha256=rsElE0IQ9qxRDuEp6qNF1EcD5XEgfTc7WsWEQsalsuI,2156
 sempy_labs/report/_report_bpa.py,sha256=bMZlvFVKypRCC7uC0QmkqV9i4n5nMj3X-tI2N6OOxLc,13861
 sempy_labs/report/_report_bpa_rules.py,sha256=tPVGA0hmE6QMLlWtig7Va7Ksr2yXWl_Lndq--tWWd6w,4959
-sempy_labs/report/_report_functions.py,sha256=_na5023utr9RsWuqI5C1CQ1ScSKQDZc0zydnHvuc0jM,29909
+sempy_labs/report/_report_functions.py,sha256=DlfGtDtTS3kM0v039UOHEHQ8_7I2jj2jBaQYoHJpfKY,29884
 sempy_labs/report/_report_helper.py,sha256=NcdWgFuh1GjDwVPzy6QWwg3ecaJKoWzZdhbxT6hbbdA,10599
 sempy_labs/report/_report_list_functions.py,sha256=K9tMDQKhIZhelHvfMMW0lsxbVHekJ-5dAQveoD7PUDA,3980
-sempy_labs/report/_report_rebind.py,sha256=TyLXerciHD9MexBT9SMOpfx5a9wXgRx1jGDMPG9gfPs,4987
+sempy_labs/report/_report_rebind.py,sha256=pOzg_XWbip8ledM79APsLxUjLJNS9aTDKK9dEx1F7Ds,4990
 sempy_labs/report/_reportwrapper.py,sha256=gwilmrE_QqWLQankc1rFlbp1_bexbdR5K5pGyx0N3Go,82945
 sempy_labs/report/_bpareporttemplate/.platform,sha256=kWRa6B_KwSYLsvVFDx372mQriQO8v7dJ_YzQV_cfD-Q,303
 sempy_labs/report/_bpareporttemplate/definition.pbir,sha256=bttyHZYKqjA8OBb_cezGlX4H82cDvGZVCl1QB3fij4E,343
@@ -165,9 +165,9 @@ sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visua
 sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json,sha256=wBVuNc8S2NaUA0FC708w6stmR2djNZp8nAsHMqesgsc,293
 sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json,sha256=mlY6t9OlSe-Y6_QmXJpS1vggU6Y3FjISUKECL8FVSg8,931
 sempy_labs/tom/__init__.py,sha256=Qbs8leW0fjzvWwOjyWK3Hjeehu7IvpB1beASGsi28bk,121
-sempy_labs/tom/_model.py,sha256=NRRrrCdOwVgJW8ZHq2KqpEZ-zMS6ug6U6TNZZSXQhOo,178711
-semantic_link_labs-0.9.2.dist-info/LICENSE,sha256=ws_MuBL-SCEBqPBFl9_FqZkaaydIJmxHrJG2parhU4M,1141
-semantic_link_labs-0.9.2.dist-info/METADATA,sha256=ugHqM-11M0HTwUT5F6m1rILorGHozw86igz7nH28yKU,24650
-semantic_link_labs-0.9.2.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
-semantic_link_labs-0.9.2.dist-info/top_level.txt,sha256=kiQX1y42Dbein1l3Q8jMUYyRulDjdlc2tMepvtrvixQ,11
-semantic_link_labs-0.9.2.dist-info/RECORD,,
+sempy_labs/tom/_model.py,sha256=mZ6vCdngYFgDApcAmAZMnvJCYha3KyJOT8LidlguOKs,178793
+semantic_link_labs-0.9.3.dist-info/LICENSE,sha256=ws_MuBL-SCEBqPBFl9_FqZkaaydIJmxHrJG2parhU4M,1141
+semantic_link_labs-0.9.3.dist-info/METADATA,sha256=cwDtUO5grMFH9sHRkgjnII90UfveA5VAldDKm8PGmOs,24748
+semantic_link_labs-0.9.3.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+semantic_link_labs-0.9.3.dist-info/top_level.txt,sha256=kiQX1y42Dbein1l3Q8jMUYyRulDjdlc2tMepvtrvixQ,11
+semantic_link_labs-0.9.3.dist-info/RECORD,,
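Each RECORD line is `path,sha256=<digest>,<size>`, where the digest is the urlsafe-base64 SHA-256 of the file with the `=` padding stripped (the standard wheel RECORD convention). A minimal sketch for reproducing one of the hashes above from an unpacked wheel; the file path at the bottom is just an example:

```python
import base64
import hashlib
from pathlib import Path

def record_entry(path: str) -> str:
    """Build the RECORD line (path,sha256=...,size) for a file on disk."""
    data = Path(path).read_bytes()
    # urlsafe base64 of the raw digest, with trailing '=' padding removed
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=")
    return f"{path},sha256={digest.decode()},{len(data)}"

# e.g. after `unzip semantic_link_labs-0.9.3-py3-none-any.whl`:
# print(record_entry("sempy_labs/_ai.py"))  # should match the "+" line above
```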
sempy_labs/_ai.py CHANGED
@@ -1,10 +1,13 @@
 import sempy
 import sempy.fabric as fabric
 import pandas as pd
-from pyspark.sql import SparkSession
 from typing import List, Optional, Union
 from IPython.display import display
 import sempy_labs._icons as icons
+from sempy_labs._helper_functions import (
+    _read_delta_table,
+    _run_spark_sql_query,
+)
 
 
 def optimize_semantic_model(dataset: str, workspace: Optional[str] = None):
@@ -186,13 +189,13 @@ def generate_aggs(
 
     query = query[:-1]
 
-    spark = SparkSession.builder.getOrCreate()
     fromTablePath = create_abfss_path(
         lakehouse_id=lakehouse_id,
         lakehouse_workspace_id=lakehouse_workspace_id,
         delta_table_name=lakeTName,
     )
-    df = spark.read.format("delta").load(fromTablePath)
+
+    df = _read_delta_table(fromTablePath)
     tempTableName = f"delta_table_{lakeTName}"
     df.createOrReplaceTempView(tempTableName)
     sqlQuery = f"{query} \n FROM {tempTableName} {groupBy}"
@@ -201,7 +204,7 @@ def generate_aggs(
         print(sqlQuery)
 
     # Save query to spark dataframe
-    spark_df = spark.sql(sqlQuery)
+    spark_df = _run_spark_sql_query(sqlQuery)
     f"\nCreating/updating the '{aggLakeTName}' table in the lakehouse..."
     # Write spark dataframe to delta table
     aggFilePath = create_abfss_path(
@@ -419,7 +422,7 @@ def generate_aggs(
     # dfP = fabric.list_partitions(dataset = dataset, workspace = workspace)
     # isDirectLake = any(r['Mode'] == 'DirectLake' for i, r in dfP.iterrows())
 
-    # spark = SparkSession.builder.getOrCreate()
+    # spark = _create_spark_session()
     # views = spark.sql(f"SHOW VIEWS IN {lakehouse}").collect()
     # for view in views:
     #     viewName = view['viewName']
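The `_ai.py` change removes the direct `SparkSession` import and routes Spark access through helpers now imported from `_helper_functions` (whose hash and size also changed in the RECORD above). The diff shows only the helper names, so the bodies below are a sketch of what such wrappers plausibly look like, not the library's actual code; the `_create_spark_session` name is taken from the commented-out line in the diff:

```python
from pyspark.sql import DataFrame, SparkSession

def _create_spark_session() -> SparkSession:
    # One place to obtain a session, so callers stop touching SparkSession directly.
    return SparkSession.builder.getOrCreate()

def _read_delta_table(path: str) -> DataFrame:
    # Load a delta table from an abfss:// path, as generate_aggs now does.
    return _create_spark_session().read.format("delta").load(path)

def _run_spark_sql_query(query: str) -> DataFrame:
    # Execute a Spark SQL statement, replacing the old inline spark.sql(...) call.
    return _create_spark_session().sql(query)
```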
sempy_labs/_connections.py CHANGED
@@ -19,6 +19,8 @@ def delete_connection(connection: str | UUID):
 
     This is a wrapper function for the following API: `Connections - Delete Connection <https://learn.microsoft.com/rest/api/fabric/core/connections/delete-connection>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     connection : str | uuid.UUID
@@ -26,7 +28,9 @@ def delete_connection(connection: str | UUID):
     """
 
     connection_id = _resolve_connection_id(connection)
-    _base_api(request=f"/v1/connections/{connection_id}", method="delete")
+    _base_api(
+        request=f"/v1/connections/{connection_id}", client="fabric_sp", method="delete"
+    )
     print(f"{icons.green_dot} The '{connection}' connection has been deleted.")
 
 
@@ -36,6 +40,8 @@ def delete_connection_role_assignment(connection: str | UUID, role_assignment_id
 
     This is a wrapper function for the following API: `Connections - Delete Connection Role Assignment <https://learn.microsoft.com/rest/api/fabric/core/connections/delete-connection-role-assignment>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     connection : str | uuid.UUID
@@ -47,6 +53,7 @@ def delete_connection_role_assignment(connection: str | UUID, role_assignment_id
     connection_id = _resolve_connection_id(connection)
     _base_api(
         request=f"/v1/connections/{connection_id}/roleAssignments/{role_assignment_id}",
+        client="fabric_sp",
         method="delete",
     )
 
@@ -77,6 +84,8 @@ def list_connection_role_assignments(connection: str | UUID) -> pd.DataFrame:
 
     This is a wrapper function for the following API: `Connections - List Connection Role Assignments <https://learn.microsoft.com/rest/api/fabric/core/connections/list-connection-role-assignments>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     connection : str | uuid.UUID
@@ -100,7 +109,9 @@ def list_connection_role_assignments(connection: str | UUID) -> pd.DataFrame:
     df = _create_dataframe(columns=columns)
 
     responses = _base_api(
-        request=f"/v1/connections/{connection_id}/roleAssignments", uses_pagination=True
+        request=f"/v1/connections/{connection_id}/roleAssignments",
+        client="fabric_sp",
+        uses_pagination=True,
     )
 
     for r in responses:
@@ -121,6 +132,8 @@ def list_connections() -> pd.DataFrame:
     """
     Lists all available connections.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Returns
     -------
     pandas.DataFrame
@@ -136,13 +149,15 @@ def list_connections() -> pd.DataFrame:
         "Connection Type": "string",
         "Privacy Level": "string",
         "Credential Type": "string",
-        "Single Sign on Type": "string",
-        "Connection Encyrption": "string",
+        "Single Sign On Type": "string",
+        "Connection Encryption": "string",
         "Skip Test Connection": "bool",
     }
     df = _create_dataframe(columns=columns)
 
-    responses = _base_api(request="/v1/connections", uses_pagination=True)
+    responses = _base_api(
+        request="/v1/connections", client="fabric_sp", uses_pagination=True
+    )
 
     for r in responses:
         for i in r.get("value", []):
@@ -194,6 +209,8 @@ def list_item_connections(
 
     This is a wrapper function for the following API: `Items - List Item Connections <https://learn.microsoft.com/rest/api/fabric/core/items/list-item-connections>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     item_name : str
@@ -229,6 +246,7 @@ def list_item_connections(
 
     responses = _base_api(
         request=f"/v1/workspaces/{workspace_id}/items/{item_id}/connections",
+        client="fabric_sp",
         uses_pagination=True,
     )
 
@@ -267,7 +285,7 @@ def _list_supported_connection_types(
     df = _create_dataframe(columns=columns)
 
     url = url.rstrip("&")
-    responses = _base_api(request=url, uses_pagination=True)
+    responses = _base_api(request=url, client="fabric_sp", uses_pagination=True)
 
     records = []
     for r in responses:
@@ -309,6 +327,8 @@ def create_cloud_connection(
 
     This is a wrapper function for the following API: `Connections - Create Connection <https://learn.microsoft.com/rest/api/fabric/core/connections/create-connection>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     name : str
@@ -362,7 +382,11 @@ def create_cloud_connection(
     }
 
     _base_api(
-        request="/v1/connections", method="post", payload=payload, status_codes=201
+        request="/v1/connections",
+        client="fabric_sp",
+        method="post",
+        payload=payload,
+        status_codes=201,
     )
 
     print(f"{icons.green_dot} The '{name}' cloud connection has been created.")
@@ -383,6 +407,8 @@ def create_on_prem_connection(
 
     This is a wrapper function for the following API: `Connections - Create Connection <https://learn.microsoft.com/rest/api/fabric/core/connections/create-connection>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     name : str
@@ -440,7 +466,11 @@ def create_on_prem_connection(
     }
 
     _base_api(
-        request="/v1/connections", method="post", payload=payload, status_codes=201
+        request="/v1/connections",
+        client="fabric_sp",
+        method="post",
+        payload=payload,
+        status_codes=201,
    )
 
     print(f"{icons.green_dot} The '{name}' on-prem connection has been created.")
@@ -462,6 +492,8 @@ def create_vnet_connection(
 
     This is a wrapper function for the following API: `Connections - Create Connection <https://learn.microsoft.com/rest/api/fabric/core/connections/create-connection>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     name : str
@@ -520,7 +552,11 @@ def create_vnet_connection(
     }
 
     _base_api(
-        request="/v1/connections", method="post", payload=payload, status_codes=201
+        request="/v1/connections",
+        client="fabric_sp",
+        method="post",
+        payload=payload,
+        status_codes=201,
     )
 
     print(
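The recurring `_connections.py` change is threading `client="fabric_sp"` into every `_base_api` call, which is what the new "Service Principal Authentication is supported" docstring lines refer to. A usage sketch, assuming the `service_principal_authentication` context manager shown in the linked Service Principal notebook; the Key Vault URI and secret names below are placeholders:

```python
import sempy_labs as labs

# Placeholder Key Vault references -- substitute your own vault and secret names.
with labs.service_principal_authentication(
    key_vault_uri="https://contoso.vault.azure.net/",
    key_vault_tenant_id="tenant-id-secret",
    key_vault_client_id="client-id-secret",
    key_vault_client_secret="client-secret-secret",
):
    # Calls inside the block route through the "fabric_sp" client added here.
    connections = labs.list_connections()
```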
sempy_labs/_delta_analyzer.py CHANGED
@@ -1,9 +1,8 @@
 import pandas as pd
 import datetime
-from typing import Dict
+from typing import Dict, Optional
 import pyarrow.dataset as ds
 import pyarrow.parquet as pq
-from pyspark.sql import SparkSession
 from sempy_labs._helper_functions import (
     create_abfss_path,
     save_as_delta_table,
@@ -12,19 +11,24 @@ from sempy_labs._helper_functions import (
     _update_dataframe_datatypes,
     resolve_workspace_name_and_id,
     resolve_lakehouse_name_and_id,
+    _read_delta_table,
+    _delta_table_row_count,
 )
 from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables
 from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 import sempy_labs._icons as icons
+from uuid import UUID
 
 
 def delta_analyzer(
     table_name: str,
     approx_distinct_count: bool = True,
     export: bool = False,
+    lakehouse: Optional[str | UUID] = None,
+    workspace: Optional[str | UUID] = None,
 ) -> Dict[str, pd.DataFrame]:
     """
-    Analyzes a delta table and shows the results in dictionary containing a set of 5 dataframes. The table being analyzed must be in the lakehouse attached to the notebook.
+    Analyzes a delta table and shows the results in dictionary containing a set of 5 dataframes. If 'export' is set to True, the results will be saved to delta tables in the lakehouse attached to the notebook.
 
     The 5 dataframes returned by this function are:
 
@@ -44,26 +48,52 @@ def delta_analyzer(
         If True, uses approx_count_distinct to calculate the cardinality of each column. If False, uses COUNT(DISTINCT) instead.
     export : bool, default=False
         If True, exports the resulting dataframes to delta tables in the lakehouse attached to the notebook.
+    lakehouse : str | uuid.UUID, default=None
+        The Fabric lakehouse name or ID.
+        Defaults to None which resolves to the lakehouse attached to the notebook.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID used by the lakehouse.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
 
     Returns
     -------
     Dict[str, pandas.DataFrame]
         A dictionary of pandas dataframes showing semantic model objects which violated the best practice analyzer rules.
     """
+    import notebookutils
 
-    if not lakehouse_attached():
-        raise ValueError(
-            f"{icons.red_dot} No lakehouse is attached to this notebook. Please attach a lakehouse to the notebook before running the Delta Analyzer."
-        )
+    # display_toggle = notebookutils.common.configs.pandas_display
+
+    # Turn off notebookutils display
+    # if display_toggle is True:
+    #     notebookutils.common.configs.pandas_display = False
 
     prefix = "SLL_DeltaAnalyzer_"
     now = datetime.datetime.now()
-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace=None)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace=workspace)
     (lakehouse_name, lakehouse_id) = resolve_lakehouse_name_and_id(
-        lakehouse=None, workspace=None
+        lakehouse=lakehouse, workspace=workspace
     )
     path = create_abfss_path(lakehouse_id, workspace_id, table_name)
-    table_path = f"/lakehouse/default/Tables/{table_name}"
+    lake_path = create_abfss_path(lakehouse_id, workspace_id)
+    mounts = notebookutils.fs.mounts()
+    mount_point = f"/{workspace_name.replace(' ', '')}{lakehouse_name.replace(' ', '')}"
+    if not any(i.get("source") == lake_path for i in mounts):
+        # Mount lakehouse if not mounted
+        notebookutils.fs.mount(lake_path, mount_point)
+        print(
+            f"{icons.green_dot} Mounted the '{lakehouse_name}' lakehouse within the '{workspace_name}' to the notebook."
+        )
+
+    mounts = notebookutils.fs.mounts()
+    local_path = next(
+        i.get("localPath") for i in mounts if i.get("source") == lake_path
+    )
+    table_path = f"{local_path}/Tables/{table_name}"
+
+    # Set back to original value
+    # notebookutils.common.configs.pandas_display = display_toggle
 
     parquet_file_df_columns = {
         "ParquetFile": "string",
@@ -95,39 +125,21 @@ def delta_analyzer(
     row_group_df = _create_dataframe(columns=row_group_df_columns)
     column_chunk_df = _create_dataframe(columns=column_chunk_df_columns)
 
-    spark = SparkSession.builder.getOrCreate()
     # delta_table = DeltaTable.forPath(spark, path)
     # detail_df = spark.sql(f"DESCRIBE DETAIL `{table_name}`").collect()[0]
 
     # num_files = detail_df.numFiles
     # size_in_bytes = detail_df.sizeInBytes
 
-    latest_files = spark.read.format("delta").load(path).inputFiles()
+    latest_files = _read_delta_table(path).inputFiles()
     file_paths = [f.split("/")[-1] for f in latest_files]
-    row_count = spark.table(table_name).count()
+    row_count = _delta_table_row_count(table_name)
     row_groups = 0
     max_rows_per_row_group = 0
     min_rows_per_row_group = float("inf")
-    # dt = DeltaTable.forPath(spark, path)
-    # schema = dt.toDF().schema
-    # is_vorder = False
-    # if (
-    #     dt.detail()
-    #     .collect()[0]
-    #     .asDict()
-    #     .get("properties")
-    #     .get("delta.parquet.vorder.enabled")
-    #     == "true"
-    # ):
-    #     is_vorder = True
 
     schema = ds.dataset(table_path).schema.metadata
     is_vorder = any(b"vorder" in key for key in schema.keys())
-    # v_order_level = (
-    #     int(schema.get(b"com.microsoft.parquet.vorder.level").decode("utf-8"))
-    #     if is_vorder
-    #     else None
-    # )
 
     for file_name in file_paths:
         parquet_file = pq.ParquetFile(f"{table_path}/{file_name}")
@@ -235,14 +247,16 @@ def delta_analyzer(
                 table_name=table_name,
                 column_name=col_name,
                 function="approx",
-                lakehouse=lakehouse_name,
+                lakehouse=lakehouse,
+                workspace=workspace,
             )
         else:
             dc = _get_column_aggregate(
                 table_name=table_name,
                 column_name=col_name,
                 function="distinctcount",
-                lakehouse=lakehouse_name,
+                lakehouse=lakehouse,
+                workspace=workspace,
            )
 
         if "Cardinality" not in column_df.columns:
@@ -264,13 +278,17 @@ def delta_analyzer(
     save_table = f"{prefix}Summary"
 
     if export:
+        if not lakehouse_attached():
+            raise ValueError(
+                f"{icons.red_dot} No lakehouse is attached to this notebook. Please attach a lakehouse to the notebook before running the Delta Analyzer."
+            )
         dfL = get_lakehouse_tables()
         dfL_filt = dfL[dfL["Table Name"] == save_table]
         if dfL_filt.empty:
            runId = 1
        else:
            max_run_id = _get_column_aggregate(
-                lakehouse=lakehouse_name, table_name=save_table
+                table_name=save_table,
            )
            runId = max_run_id + 1
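Taken together, the `_delta_analyzer.py` changes mean the analyzed table no longer has to live in the attached lakehouse: the target lakehouse and workspace are resolved from the new parameters (and mounted via `notebookutils` if needed), and the attached-lakehouse check now fires only when `export=True`. A usage sketch matching the new signature; the lakehouse and workspace names are placeholders:

```python
from sempy_labs._delta_analyzer import delta_analyzer

# "SalesLakehouse" / "Analytics" are placeholders; any name or UUID is accepted.
results = delta_analyzer(
    table_name="fact_sales",
    approx_distinct_count=True,
    export=False,  # export=True still requires a lakehouse attached to the notebook
    lakehouse="SalesLakehouse",
    workspace="Analytics",
)
for name, df in results.items():  # five dataframes, keyed by result name
    print(name, df.shape)
```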