semantic-link-labs 0.9.11__py3-none-any.whl → 0.10.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic; see the package registry's advisory page for more details.

@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: semantic-link-labs
3
- Version: 0.9.11
3
+ Version: 0.10.1
4
4
  Summary: Semantic Link Labs for Microsoft Fabric
5
5
  Author: Microsoft Corporation
6
6
  License: MIT License
@@ -15,7 +15,7 @@ Classifier: Framework :: Jupyter
15
15
  Requires-Python: <3.12,>=3.10
16
16
  Description-Content-Type: text/markdown
17
17
  License-File: LICENSE
18
- Requires-Dist: semantic-link-sempy>=0.10.2
18
+ Requires-Dist: semantic-link-sempy>=0.11.0
19
19
  Requires-Dist: anytree
20
20
  Requires-Dist: powerbiclient
21
21
  Requires-Dist: polib
@@ -27,7 +27,7 @@ Dynamic: license-file
27
27
  # Semantic Link Labs
28
28
 
29
29
  [![PyPI version](https://badge.fury.io/py/semantic-link-labs.svg)](https://badge.fury.io/py/semantic-link-labs)
30
- [![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.9.11&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
30
+ [![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.10.1&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
31
31
  [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
32
32
  [![Downloads](https://static.pepy.tech/badge/semantic-link-labs)](https://pepy.tech/project/semantic-link-labs)
33
33
 
@@ -56,6 +56,7 @@ Check out the video below for an introduction to Semantic Link, Semantic Link La
56
56
  * [Migrating an import/DirectQuery semantic model to Direct Lake](https://github.com/microsoft/semantic-link-labs?tab=readme-ov-file#direct-lake-migration)
57
57
  * [Model Best Practice Analyzer (BPA)](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#model-best-practice-analyzer)
58
58
  * [Vertipaq Analyzer](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#vertipaq-analyzer)
59
+ * [Create a .vpax file](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#create-a-vpax-file)
59
60
  * [Tabular Object Model](https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Tabular%20Object%20Model.ipynb) [(TOM)](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.tom.html)
60
61
  * [Translate a semantic model's metadata](https://github.com/microsoft/semantic-link-labs/wiki/Code-Examples#translate-a-semantic-model)
61
62
  * [Check Direct Lake Guardrails](https://semantic-link-labs.readthedocs.io/en/stable/sempy_labs.lakehouse.html#sempy_labs.lakehouse.get_lakehouse_tables)
@@ -154,6 +155,8 @@ An even better way to ensure the semantic-link-labs library is available in your
154
155
  2. Select your newly created environment within the 'Environment' drop down in the navigation bar at the top of the notebook
155
156
 
156
157
  ## Version History
158
+ * [0.10.1](https://github.com/microsoft/semantic-link-labs/releases/tag/0.10.1) (June 10, 2025)
159
+ * [0.10.0](https://github.com/microsoft/semantic-link-labs/releases/tag/0.10.0) (May 30, 2025)
157
160
  * [0.9.11](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.11) (May 22, 2025)
158
161
  * [0.9.10](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.10) (April 24, 2025)
159
162
  * [0.9.9](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.9) (April 7, 2025)
@@ -1,5 +1,6 @@
1
- semantic_link_labs-0.9.11.dist-info/licenses/LICENSE,sha256=ws_MuBL-SCEBqPBFl9_FqZkaaydIJmxHrJG2parhU4M,1141
2
- sempy_labs/__init__.py,sha256=X39q1v8jygmKdGc0kfuRcn7V1MAHxzG5mDivJxON7Go,16255
1
+ semantic_link_labs-0.10.1.dist-info/licenses/LICENSE,sha256=ws_MuBL-SCEBqPBFl9_FqZkaaydIJmxHrJG2parhU4M,1141
2
+ sempy_labs/__init__.py,sha256=leWvgWaSALeu0s8hVUEIJs0K34L6lmk_fUtIvx4hfSQ,16553
3
+ sempy_labs/_a_lib_info.py,sha256=cYmuBB-6Ns9Ar3M5zdEBeM-NU3UOZn3DZDBpYSxLGog,53
3
4
  sempy_labs/_ai.py,sha256=BD1TdGOJ7T4m3x426OP-FLb7bevn-9gKY8BTEDAJDQU,16205
4
5
  sempy_labs/_authentication.py,sha256=GjtN5XqIyWXbR5Ni4hfYiUNwgFa-ySX8e-BrqE1vgGc,6903
5
6
  sempy_labs/_capacities.py,sha256=n48NYTY03zygRzcfyK1UOkSwTqKSyQefQ10IKQh-dfA,40426
@@ -11,9 +12,11 @@ sempy_labs/_data_pipelines.py,sha256=cW_WGmuWD4V9IgLprKL4TqFXgid4eTBXvEL3-IArS0w
11
12
  sempy_labs/_dataflows.py,sha256=xv-wRDUq4Bzz-BOs1Jdb4bgS9HbPLpa1GqexfA6H0mg,8053
12
13
  sempy_labs/_dax.py,sha256=Q_GylKeuHFnRB_sztZS1ON5v5tr6ua6lc9elyJYKbV8,17219
13
14
  sempy_labs/_dax_query_view.py,sha256=_zSvgystZzBj5euNTLKTg7-G77XVk0vqyqrDT72VvoM,1892
15
+ sempy_labs/_daxformatter.py,sha256=RenhoLcdGygESAQ0hk8gu9hEt7XCy_Dj6Fx6fnIjXgY,2998
14
16
  sempy_labs/_delta_analyzer.py,sha256=d6qxZrEhn3Hfg5qMQODt7dDG5mYSY18xeXUkW_NyMgw,17281
15
17
  sempy_labs/_delta_analyzer_history.py,sha256=A50dlBd2d3ILKV7Fwj4pfIRtXKmCFslhk1gpeEw4inc,10765
16
18
  sempy_labs/_deployment_pipelines.py,sha256=SDQYkCAhOAlxBr58jYxtLFOVySiRXO0_WhfOKGDeYZQ,6254
19
+ sempy_labs/_dictionary_diffs.py,sha256=DCXuASmt45gshsBO1FgSZDqxm68DnojuDcO-H35EH7Q,9003
17
20
  sempy_labs/_documentation.py,sha256=yVA8VPEzx_fmljtcvSxtB7-BeupYsfdMXXjp6Fpnyo8,5007
18
21
  sempy_labs/_environments.py,sha256=c_9uU6zhVmZVTLZWuD-OdcicBJvmRQQVmqHW7EqUn_Q,5839
19
22
  sempy_labs/_eventhouses.py,sha256=WEf33difBOTGTZGh1QFmY4gv-e43uwO1V54nrsjGGyY,5376
@@ -23,13 +26,13 @@ sempy_labs/_gateways.py,sha256=6JE6VeGFPKF617sf2mMkxXVOz57YHI5jAQLAF-BzRLc,17527
23
26
  sempy_labs/_generate_semantic_model.py,sha256=F2NVW6kT1HnrZTqWnDZ4BRApbsUfLFDSsDbRzCJnc0o,18429
24
27
  sempy_labs/_git.py,sha256=RyaT4XzrSi-4NLJZWiWZnnNpMgrKzRNxhyY8b1O2I6c,17819
25
28
  sempy_labs/_graphQL.py,sha256=truXeIUPRKLwc4skhs3FZYNcKP9FCGKly9en0YkR4NE,2690
26
- sempy_labs/_helper_functions.py,sha256=SbFhIT_6j8xSkFflzuWWoo_NIH_uqPuINUGvcblPy7s,68762
29
+ sempy_labs/_helper_functions.py,sha256=9HhCH3xdEMjS50K7UBAjwnLiQPS4whVZco3v__k8bQU,74249
27
30
  sempy_labs/_icons.py,sha256=SB9EQeoFCfD4bO6fcYuJOoPRSYenSrW0rI9G5RFsH28,3579
28
31
  sempy_labs/_job_scheduler.py,sha256=_-Pifkttk1oPNxewxwWcQ4QC_Hr24GSi6nmrEXwc0pc,15814
29
32
  sempy_labs/_kql_databases.py,sha256=UtpYVBsxwWQDnqwdjq186bZzw5IlkD2S9KHA6Kw75U0,4738
30
33
  sempy_labs/_kql_querysets.py,sha256=Jjcs4SkjeirnDkG6zfsl0KRUXVzMyWii0Yn0JMWwln8,3502
31
34
  sempy_labs/_kusto.py,sha256=g3Up4j1KNdIGC2DDbvoduCdX1Pp8fAPGAlBAqOtaBeg,4544
32
- sempy_labs/_list_functions.py,sha256=wqaeCpsYPG3SW52ipB-uTCrjWQ1xcVkcazakWtPAsHo,61035
35
+ sempy_labs/_list_functions.py,sha256=L09erDM43XcFt_k3t66fbmrtiqjbwlVg_Z_cP89hDLc,59752
33
36
  sempy_labs/_managed_private_endpoints.py,sha256=Vqicp_EiGg_m8aA2F__gaJiB9cwjbxQOSOi7hkS6FvQ,6907
34
37
  sempy_labs/_mirrored_databases.py,sha256=-9ZV2PdPeIc4lvFNkpPMm_9wkGIY1QLZXspYdSev5oQ,13147
35
38
  sempy_labs/_mirrored_warehouses.py,sha256=Q3WlRjUwCLz8KW1eN8MiTPeY0P52Vkuz5kgnv4GvQ3k,1739
@@ -41,20 +44,22 @@ sempy_labs/_model_bpa_bulk.py,sha256=hRY3dRBUtecrbscCZsEGv6TpCVqg_zAi8NmRq6dVMiE
41
44
  sempy_labs/_model_bpa_rules.py,sha256=ZK16VqWcITiTKdd9T5Xnu-AMgodLVx0ZpanZjsC88-U,46260
42
45
  sempy_labs/_model_dependencies.py,sha256=0xGgubrq76zIvBdEqmEX_Pd6WdizXFVECBW6BPl2DZo,13162
43
46
  sempy_labs/_mounted_data_factories.py,sha256=-IBxE5XurYyeeQg7BvpXSSR1MW3rRGmue6UGpqlo96U,3906
44
- sempy_labs/_notebooks.py,sha256=QbDmvxvYZEFE90lQ3Rqi70yjc5Xxg7D3ySemPPVGflY,9102
47
+ sempy_labs/_notebooks.py,sha256=bWE9VtzPj6BNdV2QQgkT_aINptImdlvKxdSgOb7ZZIg,9101
45
48
  sempy_labs/_one_lake_integration.py,sha256=9ub75-ueEFqn1iRgRd5y97SYujalsWW6ufs1du4PbDs,6303
46
49
  sempy_labs/_query_scale_out.py,sha256=nra1q8s-PKpZTlI_L0lMGO1GmdBk6sqETsBQShF1yPY,15352
47
50
  sempy_labs/_refresh_semantic_model.py,sha256=4w_uaYLbaZptmEFY7QHWzOgXcgc2ctGx8HQvt2aguxk,17360
48
- sempy_labs/_semantic_models.py,sha256=F9v964IiXqx2qNPtNBzYrWPtXIoQH5-FI5csWJGofoQ,7934
51
+ sempy_labs/_semantic_models.py,sha256=tdjifeziLA1Sx8fs7nnUsbR9NRv4EEBIYtmQEflXH4o,11567
49
52
  sempy_labs/_spark.py,sha256=SuSTjjmtzj7suDgN8Njk_pNBaStDLgIJB_1yk_e2H1Y,19340
50
- sempy_labs/_sql.py,sha256=6mtX0I2VTpmpMbAiqdQGPyLiLN3q3pVDTP9IW7Z3JfA,8276
53
+ sempy_labs/_sql.py,sha256=BnL7Syd9vJZFysSiILYhqwTFS4y30nvkhDLQXGjtveE,8281
54
+ sempy_labs/_sql_endpoints.py,sha256=hb-eD5R8xgPuXYIciTpPLOsvC6VKUOfTsubb4Tr2Bxw,5930
51
55
  sempy_labs/_sqldatabase.py,sha256=8HV3UtsLiwexmPSjYnhnYnD6xEvgFpTG13jcOuGheuI,6470
52
56
  sempy_labs/_tags.py,sha256=7DvSc3wah26DxHwUhr-yr_JhZiplrePkFaDaVIAQfV4,5666
53
57
  sempy_labs/_translations.py,sha256=i4K2PFk6-TcmAnUpqz-z_GuDv9XEp1cBs0KY-x6ja1w,16168
58
+ sempy_labs/_user_delegation_key.py,sha256=5Qre0ZB_atajtwWfFQqD12q413Fz313GK9nrA9IIwjI,1414
54
59
  sempy_labs/_utils.py,sha256=aKGnUiXSLLRQRXXv8TH_XhGcOtDsnrs0RHmQ6YZMn3o,1786
55
60
  sempy_labs/_variable_libraries.py,sha256=t97gj8Mo-YjahKx6XObqh5HkhUMHUke5GdWpSzkC5ZM,3008
56
61
  sempy_labs/_vertipaq.py,sha256=1UvB79xOxeGdRFINsUsreXxtZtiatHlACAfbQhv45as,38536
57
- sempy_labs/_vpax.py,sha256=k1UalPGdwmhL8eqH_WeOx1IkPu0Zz2xGWSBuAp4Sq0M,15432
62
+ sempy_labs/_vpax.py,sha256=4rtXXGVoadvdu7uiU9PVsgKszST3XH-K56zmWdMmZEg,15471
58
63
  sempy_labs/_warehouses.py,sha256=wF38YP4-39KPsXPyexJahZPrYAyLc5xfrerJvS7My5Q,7286
59
64
  sempy_labs/_workloads.py,sha256=ifQ6Jv0_MDzjfu993bU8V7thOdW5kFyp3MjA082rsrE,4687
60
65
  sempy_labs/_workspace_identity.py,sha256=plxgcqt2aBXgLpyn1dpHhzh_5Z-gFkLK8RtId2OIX5s,2561
@@ -126,7 +131,7 @@ sempy_labs/directlake/_get_shared_expression.py,sha256=qc85kXggkx_7Sz_rAAli_yPnL
126
131
  sempy_labs/directlake/_guardrails.py,sha256=wNVXpeiZckgLTly4cS5DU5DoV9x1S4DMxN5S08qAavE,2749
127
132
  sempy_labs/directlake/_list_directlake_model_calc_tables.py,sha256=EYT4ELmOZ3Uklzy6uMQMidc4WtBXm21NQqZu1Q5HTsg,2509
128
133
  sempy_labs/directlake/_show_unsupported_directlake_objects.py,sha256=nmrZrtDez7U8Ji76i9fxnnTx1zxMu2LCOZTMz4sFUEc,3504
129
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py,sha256=W3LDXDD2pLH5B43NI9ixSIW2MJIORAWu5ANHQRFKMBY,9215
134
+ sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py,sha256=o2oW5Wf0LpNVBRh2l2IxifDEZani7YU4V7J8kueIidQ,9230
130
135
  sempy_labs/directlake/_update_directlake_partition_entity.py,sha256=8YxrReJObtc7_Huq0qQrLKTVMhPO84guv8bQKtp__4c,9032
131
136
  sempy_labs/directlake/_warm_cache.py,sha256=xc7gG_OJY1rJYg79ztgcLATpnXHNqFaw-6CU1HgdlXk,9258
132
137
  sempy_labs/dotnet_lib/dotnet.runtime.config.json,sha256=syhDFQv6cEmZnE1WtFjNe3NwhsIsnd-CFULv-vEWOFI,167
@@ -134,8 +139,8 @@ sempy_labs/graph/__init__.py,sha256=AZ_IpOL06VvXrYmgbcrvQlxCxdDksvwXKf7JAGohCNI,
134
139
  sempy_labs/graph/_groups.py,sha256=j3YDeV6MzhRjGJRoD60SAaGyU8yb23x8QhXBzU2RWlE,12590
135
140
  sempy_labs/graph/_teams.py,sha256=SRFaFuxtB7ylC5WeXIdrW0aLCxc_JTJHeEmxOPG99r8,3089
136
141
  sempy_labs/graph/_users.py,sha256=dFOZ-jel6Aj4Um66f1jzQrgV0fOoI0cQnZfmR4OJSXo,5947
137
- sempy_labs/lakehouse/__init__.py,sha256=xuYQAxBEEahNA_twvYxeP6cUd8dCreNb0eKyuKZbtPU,1327
138
- sempy_labs/lakehouse/_blobs.py,sha256=N8s3hYa9dAOLpH9iTavR_FPKrb3j_RqXHJnC6UVeeW0,9745
142
+ sempy_labs/lakehouse/__init__.py,sha256=zKF6-rjy3UEClAlyW-koqrTK3_bAjU6WbDxKamsWCjs,1267
143
+ sempy_labs/lakehouse/_blobs.py,sha256=K2uwzUUkYN1rGfpyQrubxWNjtM6AIWM9VVHh2_LYCTY,8483
139
144
  sempy_labs/lakehouse/_get_lakehouse_columns.py,sha256=dF5rLkdD3PB8EiXQewRdnr7MzbDGkZWHrFfI01_a7K4,3710
140
145
  sempy_labs/lakehouse/_get_lakehouse_tables.py,sha256=AMQXk40YMN4daS0zILgZm-sc2llnvCaL7kS1v8dfYMA,9369
141
146
  sempy_labs/lakehouse/_helper.py,sha256=W9adTkZw9_f9voB3bA2JWkI4LqAcnvpY929vMQJw1xE,7401
@@ -151,8 +156,11 @@ sempy_labs/migration/_migrate_model_objects_to_semantic_model.py,sha256=RD0ttWcB
151
156
  sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py,sha256=HYi2vn7yYDsBCTAXFTi6UiB86kdSlhQKPdwAt1nTKEE,7169
152
157
  sempy_labs/migration/_migration_validation.py,sha256=AHURrWofb-U-L2Bdu36mcisVXOuZXi6Smgrrs2kjYBM,2650
153
158
  sempy_labs/migration/_refresh_calc_tables.py,sha256=qUBPZ5HAHyE5ev6STKDcmtEpRuLDX5RzYTKre4ZElj4,5443
159
+ sempy_labs/mirrored_azure_databricks_catalog/__init__.py,sha256=oQfKUOcDnssZ3m0fuyrugYhkFLVqaoHTkj2lDtIAlRo,373
160
+ sempy_labs/mirrored_azure_databricks_catalog/_discover.py,sha256=HK_2eja5YbVrwCHcsX6CIA_qeYVKa-nkVTB-R9z-a9o,6976
161
+ sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py,sha256=dNV7z0DnE3zAIsdyhWwpT5sWoPy3pIbrvWyPk8TFLI8,1604
154
162
  sempy_labs/report/_BPAReportTemplate.json,sha256=9Uh-7E6d2ooxQ7j5JRayv_ayEULc7Gzg42kZGKdOqH8,63920
155
- sempy_labs/report/__init__.py,sha256=bPZ_MMqKGokskjJwM3T89LxIVNa2AXJg8Lr-mvJhP0E,1392
163
+ sempy_labs/report/__init__.py,sha256=yuMGbP7rd_50M-CRfIYR7BK8mPzpXXYzOPh9sBV-aqw,1434
156
164
  sempy_labs/report/_download_report.py,sha256=01hI26UV_jb5RLPheXRQsIDNNf4i72xICm14slKqEFA,2704
157
165
  sempy_labs/report/_export_report.py,sha256=XCMsZzTBMgvQOe3Ltdod7my7_izpmP-3AVH6W5CExPE,10976
158
166
  sempy_labs/report/_generate_report.py,sha256=S830woeisjKCYNyacfvSx0fVHzLC7-aw2oPIU2sYiP8,13910
@@ -160,10 +168,10 @@ sempy_labs/report/_paginated.py,sha256=rsElE0IQ9qxRDuEp6qNF1EcD5XEgfTc7WsWEQsals
160
168
  sempy_labs/report/_report_bpa.py,sha256=ClETB8Q41sY1scCuknhpvalvuBaQ9ZwA4QX7F3sPcjc,13596
161
169
  sempy_labs/report/_report_bpa_rules.py,sha256=tPVGA0hmE6QMLlWtig7Va7Ksr2yXWl_Lndq--tWWd6w,4959
162
170
  sempy_labs/report/_report_functions.py,sha256=pSrsUfMJqmsn9CYb5AM0iYdPR-EmuUSprVnc0dGhO1s,19709
163
- sempy_labs/report/_report_helper.py,sha256=m23osIZMjvHhKbfhmTHyqHibXoWA9eP84TPanbH8kuE,10863
171
+ sempy_labs/report/_report_helper.py,sha256=L9wU0N0rvTUMglZHTxcowywrBDuZvZTv3DA4JrX84Os,7207
164
172
  sempy_labs/report/_report_list_functions.py,sha256=K9tMDQKhIZhelHvfMMW0lsxbVHekJ-5dAQveoD7PUDA,3980
165
173
  sempy_labs/report/_report_rebind.py,sha256=svyxUSdqgXJW1UDNcb-urJxU9erO3JM72uzmuJUWIT0,5090
166
- sempy_labs/report/_reportwrapper.py,sha256=itzDImW0XUJuQBL1gQpXjIRBk0Knic0VCf7MHw8Kt68,83421
174
+ sempy_labs/report/_reportwrapper.py,sha256=SlotekdZ_VUL2uNlxV324ftYKh5Y_XQbXr8R4aOkTUc,108916
167
175
  sempy_labs/report/_save_report.py,sha256=FAzScMQIXl89TgVSRvaJofzKT0TfZh_hhPNNvDiktaI,6033
168
176
  sempy_labs/report/_bpareporttemplate/definition.pbir,sha256=bttyHZYKqjA8OBb_cezGlX4H82cDvGZVCl1QB3fij4E,343
169
177
  sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json,sha256=4N6sT5nLlYBobGmZ1Xb68uOMVVCBEyheR535js_et28,13467
@@ -193,8 +201,8 @@ sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visua
193
201
  sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json,sha256=wBVuNc8S2NaUA0FC708w6stmR2djNZp8nAsHMqesgsc,293
194
202
  sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json,sha256=mlY6t9OlSe-Y6_QmXJpS1vggU6Y3FjISUKECL8FVSg8,931
195
203
  sempy_labs/tom/__init__.py,sha256=Qbs8leW0fjzvWwOjyWK3Hjeehu7IvpB1beASGsi28bk,121
196
- sempy_labs/tom/_model.py,sha256=64IJf2Pdag5ECWxJcf4Cg2paoMD0Pr6BHvdjgvW6pwo,197537
197
- semantic_link_labs-0.9.11.dist-info/METADATA,sha256=93cuq3I17_DH0fWpnYNqB8YF8rckYYICQdRBtzN_SZs,26736
198
- semantic_link_labs-0.9.11.dist-info/WHEEL,sha256=zaaOINJESkSfm_4HQVc5ssNzHCPXhJm0kEUakpsEHaU,91
199
- semantic_link_labs-0.9.11.dist-info/top_level.txt,sha256=kiQX1y42Dbein1l3Q8jMUYyRulDjdlc2tMepvtrvixQ,11
200
- semantic_link_labs-0.9.11.dist-info/RECORD,,
204
+ sempy_labs/tom/_model.py,sha256=g0XCzjLVVIe-Qa2K0gqKjxAykjjxLXuer4A9OvmSjeE,205123
205
+ semantic_link_labs-0.10.1.dist-info/METADATA,sha256=F4eW_Wqf4FFnc5CFOXjJZqn7N3Qzlw9e9fGHy0ZbAsY,27042
206
+ semantic_link_labs-0.10.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
207
+ semantic_link_labs-0.10.1.dist-info/top_level.txt,sha256=kiQX1y42Dbein1l3Q8jMUYyRulDjdlc2tMepvtrvixQ,11
208
+ semantic_link_labs-0.10.1.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (80.8.0)
2
+ Generator: setuptools (80.9.0)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5
 
sempy_labs/__init__.py CHANGED
@@ -1,3 +1,7 @@
1
+ from sempy_labs._sql_endpoints import (
2
+ list_sql_endpoints,
3
+ refresh_sql_endpoint_metadata,
4
+ )
1
5
  from sempy_labs._variable_libraries import (
2
6
  list_variable_libraries,
3
7
  delete_variable_library,
@@ -30,6 +34,7 @@ from sempy_labs._semantic_models import (
30
34
  enable_semantic_model_scheduled_refresh,
31
35
  delete_semantic_model,
32
36
  update_semantic_model_refresh_schedule,
37
+ list_semantic_model_datasources,
33
38
  )
34
39
  from sempy_labs._graphQL import (
35
40
  list_graphql_apis,
@@ -275,7 +280,6 @@ from sempy_labs._list_functions import (
275
280
  get_object_level_security,
276
281
  list_datamarts,
277
282
  list_lakehouses,
278
- list_sql_endpoints,
279
283
  update_item,
280
284
  list_server_properties,
281
285
  list_semantic_model_errors,
@@ -340,6 +344,9 @@ from sempy_labs._vertipaq import (
340
344
  vertipaq_analyzer,
341
345
  import_vertipaq_analyzer,
342
346
  )
347
+ from sempy_labs._user_delegation_key import (
348
+ get_user_delegation_key,
349
+ )
343
350
 
344
351
  __all__ = [
345
352
  "resolve_warehouse_id",
@@ -585,4 +592,7 @@ __all__ = [
585
592
  "update_semantic_model_refresh_schedule",
586
593
  "apply_tags",
587
594
  "unapply_tags",
595
+ "get_user_delegation_key",
596
+ "refresh_sql_endpoint_metadata",
597
+ "list_semantic_model_datasources",
588
598
  ]
@@ -0,0 +1,2 @@
1
# Library identity constants. These are sent as the CallerApp/CallerVersion
# HTTP headers when calling the DAX Formatter web service (see _daxformatter).
lib_name = "semanticlinklabs"
lib_version = "0.10.1"
@@ -0,0 +1,78 @@
1
+ import requests
2
+ from typing import List, Optional
3
+ from sempy_labs._a_lib_info import lib_name, lib_version
4
+
5
+
6
def _format_dax(
    expressions: str | List[str],
    skip_space_after_function_name: bool = False,
    metadata: Optional[List[dict]] = None,
) -> List[str]:
    """
    Format one or more DAX expressions via the DAX Formatter web service.

    Parameters
    ----------
    expressions : str | List[str]
        A single DAX expression or a list of DAX expressions.
    skip_space_after_function_name : bool, default=False
        Whether the formatter should omit the space after function names.
    metadata : List[dict], default=None
        Per-expression metadata used to build error messages. Each dict may
        contain 'name', 'table' and 'type' keys. When ``expressions`` is a
        single string, a single dict may be passed instead of a list.

    Returns
    -------
    List[str]
        The formatted expressions, in the same order as the input.

    Raises
    ------
    ValueError
        If the service reports formatting errors for a recognized object type.
    NotImplementedError
        If the service reports formatting errors for an unrecognized object type.
    """
    if isinstance(expressions, str):
        expressions = [expressions]
        # A single expression is paired with a single metadata dict.
        metadata = [metadata] if metadata else [{}]

    # Prefix each expression with a dummy variable assignment so the service
    # parses it as a complete DAX statement; the prefix is stripped below.
    expressions = [f"x :={item}" for item in expressions]

    url = "https://daxformatter.azurewebsites.net/api/daxformatter/daxtextformatmulti"

    payload = {
        "Dax": expressions,
        "MaxLineLength": 0,
        "SkipSpaceAfterFunctionName": skip_space_after_function_name,
        "ListSeparator": ",",
        "DecimalSeparator": ".",
    }

    headers = {
        "Accept": "application/json, text/javascript, */*; q=0.01",
        "Accept-Encoding": "gzip,deflate",
        "Accept-Language": "en-US,en;q=0.8",
        "Content-Type": "application/json; charset=UTF-8",
        "Host": "daxformatter.azurewebsites.net",
        "Expect": "100-continue",
        "Connection": "Keep-Alive",
        "CallerApp": lib_name,
        "CallerVersion": lib_version,
    }

    # Fix: add a timeout so a stalled service cannot hang the caller forever,
    # and surface HTTP errors explicitly instead of failing inside .json().
    response = requests.post(url, json=payload, headers=headers, timeout=60)
    response.raise_for_status()

    result = []
    for idx, dax in enumerate(response.json()):
        formatted_dax = dax.get("formatted")
        errors = dax.get("errors")
        if errors:
            meta = metadata[idx] if metadata and idx < len(metadata) else {}
            obj_name = meta.get("name", "Unknown")
            table_name = meta.get("table", "Unknown")
            obj_type = meta.get("type", "Unknown")
            if obj_type == "calculated_tables":
                raise ValueError(
                    f"DAX formatting failed for the '{obj_name}' calculated table: {errors}"
                )
            elif obj_type == "calculated_columns":
                raise ValueError(
                    f"DAX formatting failed for the '{table_name}'[{obj_name}] calculated column: {errors}"
                )
            elif obj_type == "calculation_items":
                raise ValueError(
                    f"DAX formatting failed for the '{table_name}'[{obj_name}] calculation item: {errors}"
                )
            elif obj_type == "measures":
                raise ValueError(
                    f"DAX formatting failed for the '{obj_name}' measure: {errors}"
                )
            elif obj_type == "rls":
                raise ValueError(
                    f"DAX formatting failed for the row level security expression on the '{table_name}' table within the '{obj_name}' role: {errors}"
                )
            else:
                # Fix: the original constructed NotImplementedError() without
                # raising it, silently skipping the failed expression and
                # shifting all later results out of alignment with the input.
                raise NotImplementedError(
                    f"DAX formatting failed for an object of type '{obj_type}': {errors}"
                )
        else:
            # Strip the "x :=" prefix added above (4 characters).
            if formatted_dax.startswith("x :="):
                formatted_dax = formatted_dax[4:]
            formatted_dax = formatted_dax.strip()
            result.append(formatted_dax)
    return result
@@ -0,0 +1,221 @@
1
+ import re
2
+ import json
3
+ import difflib
4
+ from collections import defaultdict
5
+
6
+
7
def color_text(text, color_code):
    """Wrap *text* in an ANSI escape sequence for the given SGR color code."""
    prefix = f"\033[{color_code}m"
    reset = "\033[0m"
    return f"{prefix}{text}{reset}"
9
+
10
+
11
def stringify(payload):
    """Render *payload* as a stable, human-readable JSON-like string.

    Lists are printed one element per line; dicts are printed with sorted
    keys. Anything that cannot be JSON-serialized falls back to ``str()``.
    """
    try:
        if isinstance(payload, list):
            body = ",\n".join("  " + json.dumps(entry) for entry in payload)
            return "[\n" + body + "\n]"
        return json.dumps(payload, indent=2, sort_keys=True)
    except Exception:
        return str(payload)
20
+
21
+
22
def extract_top_level_group(path):
    """Return *path* without its final segment.

    E.g. ``resourcePackages[1].items[1].name`` -> ``resourcePackages[1].items[1]``.
    A single-segment path is returned unchanged.
    """
    # Split only on dots that are not inside square brackets.
    parts = re.split(r"\.(?![^[]*\])", path)
    if len(parts) == 1:
        return parts[0]
    return ".".join(parts[:-1])
26
+
27
+
28
def get_by_path(obj, path):
    """Navigate into nested dict/list based on a dot/bracket path like: a.b[1].c"""
    for token in re.findall(r"\w+|\[\d+\]", path):
        if token[0] == "[":
            # Bracketed token: list index.
            obj = obj[int(token[1:-1])]
        else:
            # Bare token: dict key (missing keys resolve to None).
            obj = obj.get(token)
    return obj
38
+
39
+
40
def deep_diff(d1, d2, path=""):
    """Recursively compare two structures and return a list of differences.

    Each difference is a tuple ``(op, path, old, new)`` where *op* is ``'+'``
    for a key added in *d2*, ``'-'`` for a key removed from *d1*, and ``'~'``
    for a changed value. Lists are reported as a single whole-list change.
    """
    changes = []
    if isinstance(d1, dict) and isinstance(d2, dict):
        for key in sorted(set(d1) | set(d2)):
            child = f"{path}.{key}" if path else key
            if key not in d1:
                changes.append(("+", child, None, d2[key]))
            elif key not in d2:
                changes.append(("-", child, d1[key], None))
            else:
                changes.extend(deep_diff(d1[key], d2[key], child))
    elif isinstance(d1, list) and isinstance(d2, list):
        common = min(len(d1), len(d2))
        prefix_differs = any(d1[i] != d2[i] for i in range(common))
        if prefix_differs or len(d1) != len(d2):
            changes.append(("~", path, d1, d2))
    elif d1 != d2:
        changes.append(("~", path, d1, d2))
    return changes
64
+
65
+
66
def diff_parts(d1, d2):
    """Print a colorized, human-readable diff between two lists of parts.

    Each input is expected to be a list of dicts with 'path' and 'payload'
    keys; any input that does not fit that shape is used directly as a
    path -> payload mapping. Output goes to stdout with ANSI colors: green
    '+' for added parts, red '-' for removed parts, yellow '~' for changed
    parts, each followed by a unified diff of its changed sub-groups.

    Returns None; this function only prints.
    """

    def build_path_map(parts):
        # Index parts by 'path' so the two sides can be matched up.
        return {part["path"]: part["payload"] for part in parts}

    try:
        paths1 = build_path_map(d1)
    except Exception:
        # Input was not a list of part dicts; assume it is already a mapping.
        paths1 = d1
    try:
        paths2 = build_path_map(d2)
    except Exception:
        paths2 = d2
    all_paths = set(paths1) | set(paths2)

    for part_path in sorted(all_paths):
        # NOTE(review): .get() returns None both for a missing path and for a
        # present path whose payload is None, so a None payload on one side is
        # reported as an addition/removal — confirm this is intended.
        p1 = paths1.get(part_path)
        p2 = paths2.get(part_path)

        if p1 is None:
            print(color_text(f"+ {part_path}", "32"))  # Green
            continue
        elif p2 is None:
            print(color_text(f"- {part_path}", "31"))  # Red
            continue
        elif p1 == p2:
            continue

        # NOTE(review): unreachable — both None cases already 'continue' above.
        if p1 is None or p2 is None:
            print(
                color_text(f"+ {part_path}", "32")
                if p2 and not p1
                else color_text(f"- {part_path}", "31")
            )
            continue

        # Header for the changed part
        print(color_text(f"~ {part_path}", "33"))

        # Collect diffs
        diffs = deep_diff(p1, p2)
        # If the diff is only a change of a whole list (like appending to a list), group it under its key
        merged_list_diffs = []
        for change_type, full_path, old_val, new_val in diffs:
            if (
                change_type == "~"
                and isinstance(old_val, list)
                and isinstance(new_val, list)
            ):
                merged_list_diffs.append((change_type, full_path, old_val, new_val))

        # Replace individual item diffs with unified list diff
        if merged_list_diffs:
            diffs = merged_list_diffs

        # Group diffs by common parent path (e.g. items[1])
        grouped = defaultdict(list)
        for change_type, full_path, old_val, new_val in diffs:
            group_path = extract_top_level_group(full_path)
            grouped[group_path].append((change_type, full_path, old_val, new_val))

        # Print each group once with unified diff for the full substructure
        # NOTE(review): the short " " indent literals below appear collapsed
        # by the rendering this file came from — confirm intended indent widths.
        for group_path in sorted(grouped):
            print(" " + color_text(f"~ {group_path}", "33"))

            try:
                old_group = get_by_path(p1, group_path)
                new_group = get_by_path(p2, group_path)
            except Exception:
                old_group = new_group = None

            # Skip showing diffs for empty/null groups
            if isinstance(old_group, dict) and isinstance(new_group, dict):
                old_keys = set(old_group.keys())
                new_keys = set(new_group.keys())

                # Keys removed on the new side.
                for key in sorted(old_keys - new_keys):
                    print(
                        " "
                        + color_text(f"- {key}: {json.dumps(old_group[key])}", "31")
                    )
                # Keys added on the new side.
                for key in sorted(new_keys - old_keys):
                    print(
                        " "
                        + color_text(f"+ {key}: {json.dumps(new_group[key])}", "32")
                    )
                # Keys on both sides whose values changed: unified diff of the
                # stringified values.
                for key in sorted(old_keys & new_keys):
                    if old_group[key] != new_group[key]:
                        print(" " + color_text(f"~ {key}:", "33"))
                        old_val_str = stringify(old_group[key]).splitlines()
                        new_val_str = stringify(new_group[key]).splitlines()
                        for line in difflib.unified_diff(
                            old_val_str,
                            new_val_str,
                            fromfile="old",
                            tofile="new",
                            lineterm="",
                        ):
                            if line.startswith("@@"):
                                print(" " + color_text(line, "36"))
                            elif line.startswith("-") and not line.startswith("---"):
                                print(" " + color_text(line, "31"))
                            elif line.startswith("+") and not line.startswith("+++"):
                                print(" " + color_text(line, "32"))
            elif old_group is None and new_group is not None:
                # Entire group added on the new side.
                if isinstance(new_group, dict):
                    # print all added keys
                    for key, val in new_group.items():
                        print(" " + color_text(f"+ {key}: {json.dumps(val)}", "32"))
                elif isinstance(new_group, list):
                    old_str = []
                    new_str = stringify(new_group).splitlines()
                    for line in difflib.unified_diff(
                        old_str, new_str, fromfile="old", tofile="new", lineterm=""
                    ):
                        if line.startswith("@@"):
                            print(" " + color_text(line, "36"))
                        elif line.startswith("-") and not line.startswith("---"):
                            print(" " + color_text(line, "31"))
                        elif line.startswith("+") and not line.startswith("+++"):
                            print(" " + color_text(line, "32"))
                else:
                    print(" " + color_text(f"+ {json.dumps(new_group)}", "32"))

            elif new_group is None and old_group is not None:
                # Entire group removed on the new side.
                if isinstance(old_group, dict):
                    # print all removed keys
                    for key, val in old_group.items():
                        print(" " + color_text(f"- {key}: {json.dumps(val)}", "31"))
                elif isinstance(old_group, list):
                    old_str = stringify(old_group).splitlines()
                    new_str = []
                    for line in difflib.unified_diff(
                        old_str, new_str, fromfile="old", tofile="new", lineterm=""
                    ):
                        if line.startswith("@@"):
                            print(" " + color_text(line, "36"))
                        elif line.startswith("-") and not line.startswith("---"):
                            print(" " + color_text(line, "31"))
                        elif line.startswith("+") and not line.startswith("+++"):
                            print(" " + color_text(line, "32"))
                else:
                    print(" " + color_text(f"- {json.dumps(old_group)}", "31"))
            else:
                # Scalar or mixed-type change: unified diff of stringified values.
                old_str = stringify(old_group).splitlines()
                new_str = stringify(new_group).splitlines()

                for line in difflib.unified_diff(
                    old_str, new_str, fromfile="old", tofile="new", lineterm=""
                ):
                    if line.startswith("@@"):
                        print(" " + color_text(line, "36"))
                    elif line.startswith("-") and not line.startswith("---"):
                        print(" " + color_text(line, "31"))
                    elif line.startswith("+") and not line.startswith("+++"):
                        print(" " + color_text(line, "32"))