semantic-link-labs 0.9.11__py3-none-any.whl → 0.10.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic. Click here for more details.

@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: semantic-link-labs
3
- Version: 0.9.11
3
+ Version: 0.10.0
4
4
  Summary: Semantic Link Labs for Microsoft Fabric
5
5
  Author: Microsoft Corporation
6
6
  License: MIT License
@@ -27,7 +27,7 @@ Dynamic: license-file
27
27
  # Semantic Link Labs
28
28
 
29
29
  [![PyPI version](https://badge.fury.io/py/semantic-link-labs.svg)](https://badge.fury.io/py/semantic-link-labs)
30
- [![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.9.11&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
30
+ [![Read The Docs](https://readthedocs.org/projects/semantic-link-labs/badge/?version=0.10.0&style=flat)](https://readthedocs.org/projects/semantic-link-labs/)
31
31
  [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
32
32
  [![Downloads](https://static.pepy.tech/badge/semantic-link-labs)](https://pepy.tech/project/semantic-link-labs)
33
33
 
@@ -154,6 +154,7 @@ An even better way to ensure the semantic-link-labs library is available in your
154
154
  2. Select your newly created environment within the 'Environment' drop down in the navigation bar at the top of the notebook
155
155
 
156
156
  ## Version History
157
+ * [0.10.0](https://github.com/microsoft/semantic-link-labs/releases/tag/0.10.0) (May 30, 2025)
157
158
  * [0.9.11](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.11) (May 22, 2025)
158
159
  * [0.9.10](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.10) (April 24, 2025)
159
160
  * [0.9.9](https://github.com/microsoft/semantic-link-labs/releases/tag/0.9.9) (April 7, 2025)
@@ -1,5 +1,5 @@
1
- semantic_link_labs-0.9.11.dist-info/licenses/LICENSE,sha256=ws_MuBL-SCEBqPBFl9_FqZkaaydIJmxHrJG2parhU4M,1141
2
- sempy_labs/__init__.py,sha256=X39q1v8jygmKdGc0kfuRcn7V1MAHxzG5mDivJxON7Go,16255
1
+ semantic_link_labs-0.10.0.dist-info/licenses/LICENSE,sha256=ws_MuBL-SCEBqPBFl9_FqZkaaydIJmxHrJG2parhU4M,1141
2
+ sempy_labs/__init__.py,sha256=RzY_bcDROFRZHTNuEUXpmYOUDZzTSZT7DpT4k_cJbHw,16363
3
3
  sempy_labs/_ai.py,sha256=BD1TdGOJ7T4m3x426OP-FLb7bevn-9gKY8BTEDAJDQU,16205
4
4
  sempy_labs/_authentication.py,sha256=GjtN5XqIyWXbR5Ni4hfYiUNwgFa-ySX8e-BrqE1vgGc,6903
5
5
  sempy_labs/_capacities.py,sha256=n48NYTY03zygRzcfyK1UOkSwTqKSyQefQ10IKQh-dfA,40426
@@ -14,6 +14,7 @@ sempy_labs/_dax_query_view.py,sha256=_zSvgystZzBj5euNTLKTg7-G77XVk0vqyqrDT72VvoM
14
14
  sempy_labs/_delta_analyzer.py,sha256=d6qxZrEhn3Hfg5qMQODt7dDG5mYSY18xeXUkW_NyMgw,17281
15
15
  sempy_labs/_delta_analyzer_history.py,sha256=A50dlBd2d3ILKV7Fwj4pfIRtXKmCFslhk1gpeEw4inc,10765
16
16
  sempy_labs/_deployment_pipelines.py,sha256=SDQYkCAhOAlxBr58jYxtLFOVySiRXO0_WhfOKGDeYZQ,6254
17
+ sempy_labs/_dictionary_diffs.py,sha256=DCXuASmt45gshsBO1FgSZDqxm68DnojuDcO-H35EH7Q,9003
17
18
  sempy_labs/_documentation.py,sha256=yVA8VPEzx_fmljtcvSxtB7-BeupYsfdMXXjp6Fpnyo8,5007
18
19
  sempy_labs/_environments.py,sha256=c_9uU6zhVmZVTLZWuD-OdcicBJvmRQQVmqHW7EqUn_Q,5839
19
20
  sempy_labs/_eventhouses.py,sha256=WEf33difBOTGTZGh1QFmY4gv-e43uwO1V54nrsjGGyY,5376
@@ -23,7 +24,7 @@ sempy_labs/_gateways.py,sha256=6JE6VeGFPKF617sf2mMkxXVOz57YHI5jAQLAF-BzRLc,17527
23
24
  sempy_labs/_generate_semantic_model.py,sha256=F2NVW6kT1HnrZTqWnDZ4BRApbsUfLFDSsDbRzCJnc0o,18429
24
25
  sempy_labs/_git.py,sha256=RyaT4XzrSi-4NLJZWiWZnnNpMgrKzRNxhyY8b1O2I6c,17819
25
26
  sempy_labs/_graphQL.py,sha256=truXeIUPRKLwc4skhs3FZYNcKP9FCGKly9en0YkR4NE,2690
26
- sempy_labs/_helper_functions.py,sha256=SbFhIT_6j8xSkFflzuWWoo_NIH_uqPuINUGvcblPy7s,68762
27
+ sempy_labs/_helper_functions.py,sha256=9HhCH3xdEMjS50K7UBAjwnLiQPS4whVZco3v__k8bQU,74249
27
28
  sempy_labs/_icons.py,sha256=SB9EQeoFCfD4bO6fcYuJOoPRSYenSrW0rI9G5RFsH28,3579
28
29
  sempy_labs/_job_scheduler.py,sha256=_-Pifkttk1oPNxewxwWcQ4QC_Hr24GSi6nmrEXwc0pc,15814
29
30
  sempy_labs/_kql_databases.py,sha256=UtpYVBsxwWQDnqwdjq186bZzw5IlkD2S9KHA6Kw75U0,4738
@@ -47,14 +48,15 @@ sempy_labs/_query_scale_out.py,sha256=nra1q8s-PKpZTlI_L0lMGO1GmdBk6sqETsBQShF1yP
47
48
  sempy_labs/_refresh_semantic_model.py,sha256=4w_uaYLbaZptmEFY7QHWzOgXcgc2ctGx8HQvt2aguxk,17360
48
49
  sempy_labs/_semantic_models.py,sha256=F9v964IiXqx2qNPtNBzYrWPtXIoQH5-FI5csWJGofoQ,7934
49
50
  sempy_labs/_spark.py,sha256=SuSTjjmtzj7suDgN8Njk_pNBaStDLgIJB_1yk_e2H1Y,19340
50
- sempy_labs/_sql.py,sha256=6mtX0I2VTpmpMbAiqdQGPyLiLN3q3pVDTP9IW7Z3JfA,8276
51
+ sempy_labs/_sql.py,sha256=BnL7Syd9vJZFysSiILYhqwTFS4y30nvkhDLQXGjtveE,8281
51
52
  sempy_labs/_sqldatabase.py,sha256=8HV3UtsLiwexmPSjYnhnYnD6xEvgFpTG13jcOuGheuI,6470
52
53
  sempy_labs/_tags.py,sha256=7DvSc3wah26DxHwUhr-yr_JhZiplrePkFaDaVIAQfV4,5666
53
54
  sempy_labs/_translations.py,sha256=i4K2PFk6-TcmAnUpqz-z_GuDv9XEp1cBs0KY-x6ja1w,16168
55
+ sempy_labs/_user_delegation_key.py,sha256=5Qre0ZB_atajtwWfFQqD12q413Fz313GK9nrA9IIwjI,1414
54
56
  sempy_labs/_utils.py,sha256=aKGnUiXSLLRQRXXv8TH_XhGcOtDsnrs0RHmQ6YZMn3o,1786
55
57
  sempy_labs/_variable_libraries.py,sha256=t97gj8Mo-YjahKx6XObqh5HkhUMHUke5GdWpSzkC5ZM,3008
56
58
  sempy_labs/_vertipaq.py,sha256=1UvB79xOxeGdRFINsUsreXxtZtiatHlACAfbQhv45as,38536
57
- sempy_labs/_vpax.py,sha256=k1UalPGdwmhL8eqH_WeOx1IkPu0Zz2xGWSBuAp4Sq0M,15432
59
+ sempy_labs/_vpax.py,sha256=4rtXXGVoadvdu7uiU9PVsgKszST3XH-K56zmWdMmZEg,15471
58
60
  sempy_labs/_warehouses.py,sha256=wF38YP4-39KPsXPyexJahZPrYAyLc5xfrerJvS7My5Q,7286
59
61
  sempy_labs/_workloads.py,sha256=ifQ6Jv0_MDzjfu993bU8V7thOdW5kFyp3MjA082rsrE,4687
60
62
  sempy_labs/_workspace_identity.py,sha256=plxgcqt2aBXgLpyn1dpHhzh_5Z-gFkLK8RtId2OIX5s,2561
@@ -134,8 +136,8 @@ sempy_labs/graph/__init__.py,sha256=AZ_IpOL06VvXrYmgbcrvQlxCxdDksvwXKf7JAGohCNI,
134
136
  sempy_labs/graph/_groups.py,sha256=j3YDeV6MzhRjGJRoD60SAaGyU8yb23x8QhXBzU2RWlE,12590
135
137
  sempy_labs/graph/_teams.py,sha256=SRFaFuxtB7ylC5WeXIdrW0aLCxc_JTJHeEmxOPG99r8,3089
136
138
  sempy_labs/graph/_users.py,sha256=dFOZ-jel6Aj4Um66f1jzQrgV0fOoI0cQnZfmR4OJSXo,5947
137
- sempy_labs/lakehouse/__init__.py,sha256=xuYQAxBEEahNA_twvYxeP6cUd8dCreNb0eKyuKZbtPU,1327
138
- sempy_labs/lakehouse/_blobs.py,sha256=N8s3hYa9dAOLpH9iTavR_FPKrb3j_RqXHJnC6UVeeW0,9745
139
+ sempy_labs/lakehouse/__init__.py,sha256=zKF6-rjy3UEClAlyW-koqrTK3_bAjU6WbDxKamsWCjs,1267
140
+ sempy_labs/lakehouse/_blobs.py,sha256=K2uwzUUkYN1rGfpyQrubxWNjtM6AIWM9VVHh2_LYCTY,8483
139
141
  sempy_labs/lakehouse/_get_lakehouse_columns.py,sha256=dF5rLkdD3PB8EiXQewRdnr7MzbDGkZWHrFfI01_a7K4,3710
140
142
  sempy_labs/lakehouse/_get_lakehouse_tables.py,sha256=AMQXk40YMN4daS0zILgZm-sc2llnvCaL7kS1v8dfYMA,9369
141
143
  sempy_labs/lakehouse/_helper.py,sha256=W9adTkZw9_f9voB3bA2JWkI4LqAcnvpY929vMQJw1xE,7401
@@ -152,7 +154,7 @@ sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py,sha256=HYi2vn7
152
154
  sempy_labs/migration/_migration_validation.py,sha256=AHURrWofb-U-L2Bdu36mcisVXOuZXi6Smgrrs2kjYBM,2650
153
155
  sempy_labs/migration/_refresh_calc_tables.py,sha256=qUBPZ5HAHyE5ev6STKDcmtEpRuLDX5RzYTKre4ZElj4,5443
154
156
  sempy_labs/report/_BPAReportTemplate.json,sha256=9Uh-7E6d2ooxQ7j5JRayv_ayEULc7Gzg42kZGKdOqH8,63920
155
- sempy_labs/report/__init__.py,sha256=bPZ_MMqKGokskjJwM3T89LxIVNa2AXJg8Lr-mvJhP0E,1392
157
+ sempy_labs/report/__init__.py,sha256=yuMGbP7rd_50M-CRfIYR7BK8mPzpXXYzOPh9sBV-aqw,1434
156
158
  sempy_labs/report/_download_report.py,sha256=01hI26UV_jb5RLPheXRQsIDNNf4i72xICm14slKqEFA,2704
157
159
  sempy_labs/report/_export_report.py,sha256=XCMsZzTBMgvQOe3Ltdod7my7_izpmP-3AVH6W5CExPE,10976
158
160
  sempy_labs/report/_generate_report.py,sha256=S830woeisjKCYNyacfvSx0fVHzLC7-aw2oPIU2sYiP8,13910
@@ -160,10 +162,10 @@ sempy_labs/report/_paginated.py,sha256=rsElE0IQ9qxRDuEp6qNF1EcD5XEgfTc7WsWEQsals
160
162
  sempy_labs/report/_report_bpa.py,sha256=ClETB8Q41sY1scCuknhpvalvuBaQ9ZwA4QX7F3sPcjc,13596
161
163
  sempy_labs/report/_report_bpa_rules.py,sha256=tPVGA0hmE6QMLlWtig7Va7Ksr2yXWl_Lndq--tWWd6w,4959
162
164
  sempy_labs/report/_report_functions.py,sha256=pSrsUfMJqmsn9CYb5AM0iYdPR-EmuUSprVnc0dGhO1s,19709
163
- sempy_labs/report/_report_helper.py,sha256=m23osIZMjvHhKbfhmTHyqHibXoWA9eP84TPanbH8kuE,10863
165
+ sempy_labs/report/_report_helper.py,sha256=L9wU0N0rvTUMglZHTxcowywrBDuZvZTv3DA4JrX84Os,7207
164
166
  sempy_labs/report/_report_list_functions.py,sha256=K9tMDQKhIZhelHvfMMW0lsxbVHekJ-5dAQveoD7PUDA,3980
165
167
  sempy_labs/report/_report_rebind.py,sha256=svyxUSdqgXJW1UDNcb-urJxU9erO3JM72uzmuJUWIT0,5090
166
- sempy_labs/report/_reportwrapper.py,sha256=itzDImW0XUJuQBL1gQpXjIRBk0Knic0VCf7MHw8Kt68,83421
168
+ sempy_labs/report/_reportwrapper.py,sha256=YxGfhAXDkDyXuyyGYu5YpQf9S31L64Rpr3hHRE44fsk,108736
167
169
  sempy_labs/report/_save_report.py,sha256=FAzScMQIXl89TgVSRvaJofzKT0TfZh_hhPNNvDiktaI,6033
168
170
  sempy_labs/report/_bpareporttemplate/definition.pbir,sha256=bttyHZYKqjA8OBb_cezGlX4H82cDvGZVCl1QB3fij4E,343
169
171
  sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json,sha256=4N6sT5nLlYBobGmZ1Xb68uOMVVCBEyheR535js_et28,13467
@@ -194,7 +196,7 @@ sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.
194
196
  sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json,sha256=mlY6t9OlSe-Y6_QmXJpS1vggU6Y3FjISUKECL8FVSg8,931
195
197
  sempy_labs/tom/__init__.py,sha256=Qbs8leW0fjzvWwOjyWK3Hjeehu7IvpB1beASGsi28bk,121
196
198
  sempy_labs/tom/_model.py,sha256=64IJf2Pdag5ECWxJcf4Cg2paoMD0Pr6BHvdjgvW6pwo,197537
197
- semantic_link_labs-0.9.11.dist-info/METADATA,sha256=93cuq3I17_DH0fWpnYNqB8YF8rckYYICQdRBtzN_SZs,26736
198
- semantic_link_labs-0.9.11.dist-info/WHEEL,sha256=zaaOINJESkSfm_4HQVc5ssNzHCPXhJm0kEUakpsEHaU,91
199
- semantic_link_labs-0.9.11.dist-info/top_level.txt,sha256=kiQX1y42Dbein1l3Q8jMUYyRulDjdlc2tMepvtrvixQ,11
200
- semantic_link_labs-0.9.11.dist-info/RECORD,,
199
+ semantic_link_labs-0.10.0.dist-info/METADATA,sha256=vw-G81gYnj_kJXcwm752EAY2d6TpwQ3QzDnjGYMkC3I,26831
200
+ semantic_link_labs-0.10.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
201
+ semantic_link_labs-0.10.0.dist-info/top_level.txt,sha256=kiQX1y42Dbein1l3Q8jMUYyRulDjdlc2tMepvtrvixQ,11
202
+ semantic_link_labs-0.10.0.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (80.8.0)
2
+ Generator: setuptools (80.9.0)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5
 
sempy_labs/__init__.py CHANGED
@@ -340,6 +340,9 @@ from sempy_labs._vertipaq import (
340
340
  vertipaq_analyzer,
341
341
  import_vertipaq_analyzer,
342
342
  )
343
+ from sempy_labs._user_delegation_key import (
344
+ get_user_delegation_key,
345
+ )
343
346
 
344
347
  __all__ = [
345
348
  "resolve_warehouse_id",
@@ -585,4 +588,5 @@ __all__ = [
585
588
  "update_semantic_model_refresh_schedule",
586
589
  "apply_tags",
587
590
  "unapply_tags",
591
+ "get_user_delegation_key",
588
592
  ]
@@ -0,0 +1,221 @@
1
+ import re
2
+ import json
3
+ import difflib
4
+ from collections import defaultdict
5
+
6
+
7
def color_text(text, color_code):
    """Wrap *text* in an ANSI SGR escape sequence for terminal coloring."""
    return "\033[" + str(color_code) + "m" + str(text) + "\033[0m"
9
+
10
+
11
def stringify(payload):
    """
    Render *payload* as pretty-printed JSON text.

    Lists get one element per line (compact per-item dumps); everything else
    is dumped with a 2-space indent and sorted keys. Falls back to ``str()``
    when the payload is not JSON-serializable.
    """
    try:
        if isinstance(payload, list):
            rows = ",\n".join(f"  {json.dumps(item)}" for item in payload)
            return "[\n" + rows + "\n]"
        return json.dumps(payload, indent=2, sort_keys=True)
    except Exception:
        # Non-serializable payloads (sets, custom objects, ...) degrade to repr-ish text.
        return str(payload)
20
+
21
+
22
def extract_top_level_group(path):
    """
    Return the parent path of a dotted/bracketed path.

    For something like ``resourcePackages[1].items[1].name`` this yields
    ``resourcePackages[1].items[1]``; a single-segment path is returned as-is.
    """
    # Split on dots that are not inside [...] brackets.
    parts = re.split(r"\.(?![^[]*\])", path)
    if len(parts) == 1:
        return parts[0]
    return ".".join(parts[:-1])
26
+
27
+
28
def get_by_path(obj, path):
    """
    Navigate into nested dict/list based on a dot/bracket path like ``a.b[1].c``.

    Dict segments are read with ``.get`` (missing keys yield ``None``, which
    makes a subsequent segment raise — callers wrap this in try/except);
    ``[n]`` segments index into lists.
    """
    for token in re.findall(r"\w+|\[\d+\]", path):
        if token[0] == "[":
            obj = obj[int(token[1:-1])]
        else:
            obj = obj.get(token)
    return obj
38
+
39
+
40
def deep_diff(d1, d2, path=""):
    """
    Recursively diff two JSON-like values.

    Returns a list of ``(change_type, path, old, new)`` tuples where
    change_type is ``"+"`` (added key), ``"-"`` (removed key) or ``"~"``
    (changed value). Dicts are recursed key by key (sorted); lists that
    differ anywhere are reported as a single whole-list ``"~"`` entry.
    """
    if isinstance(d1, dict) and isinstance(d2, dict):
        changes = []
        for key in sorted(set(d1) | set(d2)):
            child = f"{path}.{key}" if path else key
            if key not in d1:
                changes.append(("+", child, None, d2[key]))
            elif key not in d2:
                changes.append(("-", child, d1[key], None))
            else:
                changes += deep_diff(d1[key], d2[key], child)
        return changes
    if isinstance(d1, list) and isinstance(d2, list):
        # Any element mismatch or a length difference flags the whole list.
        shared = min(len(d1), len(d2))
        differs = any(d1[i] != d2[i] for i in range(shared)) or len(d1) != len(d2)
        return [("~", path, d1, d2)] if differs else []
    return [("~", path, d1, d2)] if d1 != d2 else []
64
+
65
+
66
def diff_parts(d1, d2):
    """
    Print a colorized, human-readable diff between two collections of report
    "parts".

    Each input is expected to be an iterable of dicts shaped like
    ``{"path": ..., "payload": ...}`` (inputs that don't fit this shape are
    used as plain path→payload mappings). Output goes to stdout: added parts
    in green, removed parts in red, changed parts in yellow with a grouped,
    ``difflib``-style unified diff of the changed substructures.

    Fix over the original: an ``if p1 is None or p2 is None`` re-check that
    followed the three early ``continue`` branches was unreachable (both None
    cases had already continued) and has been removed.
    """

    def build_path_map(parts):
        # One dict lookup table per side: part path -> part payload.
        return {part["path"]: part["payload"] for part in parts}

    try:
        paths1 = build_path_map(d1)
    except Exception:
        paths1 = d1  # already a mapping (or close enough to iterate as one)
    try:
        paths2 = build_path_map(d2)
    except Exception:
        paths2 = d2
    all_paths = set(paths1) | set(paths2)

    for part_path in sorted(all_paths):
        p1 = paths1.get(part_path)
        p2 = paths2.get(part_path)

        if p1 is None:
            print(color_text(f"+ {part_path}", "32"))  # Green
            continue
        elif p2 is None:
            print(color_text(f"- {part_path}", "31"))  # Red
            continue
        elif p1 == p2:
            continue

        # Header for the changed part
        print(color_text(f"~ {part_path}", "33"))

        # Collect diffs
        diffs = deep_diff(p1, p2)
        # If the diff is only a change of a whole list (like appending to a list), group it under its key
        merged_list_diffs = []
        for change_type, full_path, old_val, new_val in diffs:
            if (
                change_type == "~"
                and isinstance(old_val, list)
                and isinstance(new_val, list)
            ):
                merged_list_diffs.append((change_type, full_path, old_val, new_val))

        # Replace individual item diffs with unified list diff
        if merged_list_diffs:
            diffs = merged_list_diffs

        # Group diffs by common parent path (e.g. items[1])
        grouped = defaultdict(list)
        for change_type, full_path, old_val, new_val in diffs:
            group_path = extract_top_level_group(full_path)
            grouped[group_path].append((change_type, full_path, old_val, new_val))

        # Print each group once with unified diff for the full substructure
        for group_path in sorted(grouped):
            print(" " + color_text(f"~ {group_path}", "33"))

            try:
                old_group = get_by_path(p1, group_path)
                new_group = get_by_path(p2, group_path)
            except Exception:
                old_group = new_group = None

            # Skip showing diffs for empty/null groups
            if isinstance(old_group, dict) and isinstance(new_group, dict):
                old_keys = set(old_group.keys())
                new_keys = set(new_group.keys())

                for key in sorted(old_keys - new_keys):
                    print(
                        " "
                        + color_text(f"- {key}: {json.dumps(old_group[key])}", "31")
                    )
                for key in sorted(new_keys - old_keys):
                    print(
                        " "
                        + color_text(f"+ {key}: {json.dumps(new_group[key])}", "32")
                    )
                for key in sorted(old_keys & new_keys):
                    if old_group[key] != new_group[key]:
                        print(" " + color_text(f"~ {key}:", "33"))
                        old_val_str = stringify(old_group[key]).splitlines()
                        new_val_str = stringify(new_group[key]).splitlines()
                        for line in difflib.unified_diff(
                            old_val_str,
                            new_val_str,
                            fromfile="old",
                            tofile="new",
                            lineterm="",
                        ):
                            if line.startswith("@@"):
                                print(" " + color_text(line, "36"))
                            elif line.startswith("-") and not line.startswith("---"):
                                print(" " + color_text(line, "31"))
                            elif line.startswith("+") and not line.startswith("+++"):
                                print(" " + color_text(line, "32"))
            elif old_group is None and new_group is not None:
                if isinstance(new_group, dict):
                    # print all added keys
                    for key, val in new_group.items():
                        print(" " + color_text(f"+ {key}: {json.dumps(val)}", "32"))
                elif isinstance(new_group, list):
                    old_str = []
                    new_str = stringify(new_group).splitlines()
                    for line in difflib.unified_diff(
                        old_str, new_str, fromfile="old", tofile="new", lineterm=""
                    ):
                        if line.startswith("@@"):
                            print(" " + color_text(line, "36"))
                        elif line.startswith("-") and not line.startswith("---"):
                            print(" " + color_text(line, "31"))
                        elif line.startswith("+") and not line.startswith("+++"):
                            print(" " + color_text(line, "32"))
                else:
                    print(" " + color_text(f"+ {json.dumps(new_group)}", "32"))

            elif new_group is None and old_group is not None:
                if isinstance(old_group, dict):
                    # print all removed keys
                    for key, val in old_group.items():
                        print(" " + color_text(f"- {key}: {json.dumps(val)}", "31"))
                elif isinstance(old_group, list):
                    old_str = stringify(old_group).splitlines()
                    new_str = []
                    for line in difflib.unified_diff(
                        old_str, new_str, fromfile="old", tofile="new", lineterm=""
                    ):
                        if line.startswith("@@"):
                            print(" " + color_text(line, "36"))
                        elif line.startswith("-") and not line.startswith("---"):
                            print(" " + color_text(line, "31"))
                        elif line.startswith("+") and not line.startswith("+++"):
                            print(" " + color_text(line, "32"))
                else:
                    print(" " + color_text(f"- {json.dumps(old_group)}", "31"))
            else:
                old_str = stringify(old_group).splitlines()
                new_str = stringify(new_group).splitlines()

                for line in difflib.unified_diff(
                    old_str, new_str, fromfile="old", tofile="new", lineterm=""
                ):
                    if line.startswith("@@"):
                        print(" " + color_text(line, "36"))
                    elif line.startswith("-") and not line.startswith("---"):
                        print(" " + color_text(line, "31"))
                    elif line.startswith("+") and not line.startswith("+++"):
                        print(" " + color_text(line, "32"))
@@ -17,6 +17,8 @@ import numpy as np
17
17
  from IPython.display import display, HTML
18
18
  import requests
19
19
  import sempy_labs._authentication as auth
20
+ from jsonpath_ng.ext import parse
21
+ from jsonpath_ng.jsonpath import Fields, Index
20
22
 
21
23
 
22
24
  def _build_url(url: str, params: dict) -> str:
@@ -2270,3 +2272,166 @@ def file_exists(file_path: str) -> bool:
2270
2272
  import notebookutils
2271
2273
 
2272
2274
  return len(notebookutils.fs.ls(file_path)) > 0
2275
+
2276
+
2277
def generate_number_guid():
    """Return a random 64-bit unsigned integer, as a decimal string, derived
    from the low 64 bits of a freshly generated UUID4."""
    low_bits = uuid.uuid4().int & 0xFFFFFFFFFFFFFFFF
    return str(low_bits)
2281
+
2282
+
2283
def get_url_content(url: str):
    """
    Download the content at *url* and return it, parsed as JSON when possible.

    GitHub "blob" page URLs are rewritten to their raw.githubusercontent.com
    equivalents first, so the file content (not the HTML page) is fetched.

    Parameters
    ----------
    url : str
        The URL to fetch.

    Returns
    -------
    The decoded JSON object when the response body is valid JSON, otherwise
    the raw response text. On a failed request this prints the status code
    and implicitly returns None (deliberate best-effort; it does not raise).
    """

    # Rewrite a GitHub web-UI file URL to the raw-content host.
    if "github.com" in url and "/blob/" in url:
        url = url.replace("github.com", "raw.githubusercontent.com")
        url = url.replace("/blob/", "/")

    # NOTE(review): no timeout is set — a hung server can block indefinitely;
    # consider requests.get(url, timeout=...).
    response = requests.get(url)
    if response.ok:
        try:
            data = response.json()  # Only works if the response is valid JSON
        except ValueError:
            data = response.text  # Fallback: get raw text content
        return data
    else:
        print(f"Failed to fetch raw content: {response.status_code}")
2298
+
2299
+
2300
def generate_hex(length: int = 10) -> str:
    """
    Generate a random hex string of the specified length. Used for generating IDs for report objects (page, visual, bookmark etc.).

    Note: *length* counts random bytes, so the returned string has
    ``2 * length`` hex characters (the default yields 20 characters).
    """
    import secrets

    return secrets.token_bytes(length).hex()
2307
+
2308
+
2309
def decode_payload(payload):
    """
    Normalize a part payload to a usable object.

    Base64-encoded strings are decoded — to a dict when the decoded bytes are
    valid UTF-8 JSON, otherwise to the raw decoded bytes. Dicts pass through
    unchanged; anything else is rejected.

    Raises
    ------
    ValueError
        If the payload is neither a dict nor a base64-encoded value.
    """
    if is_base64(payload):
        raw = base64.b64decode(payload)
        try:
            return json.loads(raw.decode("utf-8"))
        except Exception:
            return raw
    if isinstance(payload, dict):
        return payload
    raise ValueError("Payload must be a dictionary or a base64 encoded value.")
2322
+
2323
+
2324
def is_base64(s):
    """
    Best-effort check that *s* is a base64-encoded string.

    Missing padding is tolerated (it is added before decoding), and the
    round-trip re-encoding is compared (padding-insensitively) against the
    input. Any failure — including a non-string argument — yields False.
    """
    try:
        padded = s + "=" * (-len(s) % 4)  # add padding if needed
        raw = base64.b64decode(padded, validate=True)
        # Check that re-encoding gives the original (excluding padding).
        return base64.b64encode(raw).decode().rstrip("=") == s.rstrip("=")
    except Exception:
        return False
2333
+
2334
+
2335
def get_jsonpath_value(
    data, path, default=None, remove_quotes=False, fix_true: bool = False
):
    """
    Return the first value matching the JSONPath *path* in *data*.

    Falls back to *default* when there is no match. With ``remove_quotes``,
    a truthy string result wrapped in single quotes is unwrapped; with
    ``fix_true``, the strings "true"/"false" (any case) are converted to
    Python booleans.
    """
    found = parse(path).find(data)
    value = found[0].value if found else default
    if value and remove_quotes and isinstance(value, str):
        if value.startswith("'") and value.endswith("'"):
            value = value[1:-1]
    if fix_true and isinstance(value, str):
        lowered = value.lower()
        if lowered == "true":
            value = True
        elif lowered == "false":
            value = False
    return value
2349
+
2350
+
2351
def set_json_value(payload: dict, json_path: str, json_value: str | dict | List):
    """
    Set *json_value* at *json_path* inside *payload*, mutating it in place.

    When the JSONPath already matches, every match is updated. When nothing
    matches, the dotted path is created from scratch (intermediate dicts are
    materialized as needed; a ``key[*]`` segment creates ``[{}]``).

    Parameters
    ----------
    payload : dict
        The document to mutate.
    json_path : str
        A JSONPath expression (e.g. ``$.a.b[*].c``).
    json_value : str | dict | List
        The value to write.

    Returns
    -------
    dict
        The same (mutated) payload object.
    """
    jsonpath_expr = parse(json_path)
    matches = jsonpath_expr.find(payload)

    if matches:
        # Update all matches
        for match in matches:
            parent = match.context.value
            path = match.path
            if isinstance(path, Fields):
                parent[path.fields[0]] = json_value
            elif isinstance(path, Index):
                parent[path.index] = json_value
    else:
        # Handle creation
        parts = json_path.lstrip("$").strip(".").split(".")
        current = payload

        for i, part in enumerate(parts):
            is_last = i == len(parts) - 1

            # Detect list syntax like "lockAspect[*]"
            list_match = re.match(r"(\w+)\[\*\]", part)
            if list_match:
                list_key = list_match.group(1)
                if list_key not in current or not isinstance(current[list_key], list):
                    # Initialize with one dict element
                    current[list_key] = [{}]

                for item in current[list_key]:
                    if is_last:
                        # Last part, assign value
                        # NOTE(review): this rebinds the loop variable only —
                        # the list element itself is NOT updated. Looks like a
                        # bug; confirm intended behavior before relying on the
                        # "[*] as final segment" creation path.
                        item = json_value
                    else:
                        # Proceed to next level
                        if not isinstance(item, dict):
                            raise ValueError(
                                f"Expected dict in list for key '{list_key}', got {type(item)}"
                            )
                        next_part = ".".join(parts[i + 1 :])
                        set_json_value(item, "$." + next_part, json_value)
                return payload
            else:
                if part not in current or not isinstance(current[part], dict):
                    current[part] = {} if not is_last else json_value
                elif is_last:
                    current[part] = json_value
                current = current[part]

    return payload
2402
+
2403
+
2404
def remove_json_value(path: str, payload: dict, json_path: str, verbose: bool = True):
    """
    Remove every value matched by *json_path* from *payload*, mutating it in
    place.

    Parameters
    ----------
    path : str
        Identifier used only in the printed log messages (e.g. the file path
        the payload came from).
    payload : dict
        The document to mutate; must be a dict.
    json_path : str
        A JSONPath expression selecting the keys/indices to delete.
    verbose : bool, default=True
        Print a message for each removal (and when nothing matches).

    Returns
    -------
    dict
        The same (mutated) payload object.

    Raises
    ------
    ValueError
        If payload is not a dictionary.
    """
    if not isinstance(payload, dict):
        raise ValueError(
            f"{icons.red_dot} Cannot apply json_path to non-dictionary payload in '{path}'."
        )

    jsonpath_expr = parse(json_path)
    matches = jsonpath_expr.find(payload)

    # Only the verbose no-match case returns early; with verbose=False the
    # empty loop below falls through to the same result.
    if not matches and verbose:
        print(
            f"{icons.red_dot} No match found for '{json_path}' in '{path}'. Skipping."
        )
        return payload

    for match in matches:
        parent = match.context.value
        path_expr = match.path

        if isinstance(path_expr, Fields):
            key = path_expr.fields[0]
            if key in parent:
                del parent[key]
                if verbose:
                    print(f"{icons.green_dot} Removed key '{key}' from '{path}'.")
        elif isinstance(path_expr, Index):
            index = path_expr.index
            # NOTE(review): popping while iterating the match list shifts the
            # positions of later Index matches on the same parent list —
            # verify behavior for paths that match multiple indices.
            if isinstance(parent, list) and 0 <= index < len(parent):
                parent.pop(index)
                if verbose:
                    print(f"{icons.green_dot} Removed index [{index}] from '{path}'.")

    return payload
sempy_labs/_sql.py CHANGED
@@ -82,7 +82,7 @@ class ConnectBase:
82
82
  )
83
83
 
84
84
  # Set up the connection string
85
- access_token = SynapseTokenProvider()()
85
+ access_token = SynapseTokenProvider()("sql")
86
86
  tokenstruct = _bytes2mswin_bstr(access_token.encode())
87
87
  if endpoint_type == "sqldatabase":
88
88
  conn_str = f"DRIVER={{ODBC Driver 18 for SQL Server}};SERVER={tds_endpoint};DATABASE={resource_name}-{resource_id};Encrypt=Yes;"
@@ -0,0 +1,42 @@
1
+ from sempy_labs.lakehouse._blobs import _request_blob_api
2
+ from sempy_labs._helper_functions import (
3
+ _xml_to_dict,
4
+ )
5
+ from datetime import datetime, timedelta, timezone
6
+ import xml.etree.ElementTree as ET
7
+
8
+
9
def get_user_delegation_key():
    """
    Gets a key that can be used to sign a user delegation SAS (shared access signature). A user delegation SAS grants access to Azure Blob Storage resources by using Microsoft Entra credentials.

    This is a wrapper function for the following API: `Get User Delegation Key <https://learn.microsoft.com/rest/api/storageservices/get-user-delegation-key>`_.

    Returns
    -------
    str
        The user delegation key value (None if the response carries no
        ``UserDelegationKey/Value`` element).
    """

    # Key validity window: starts 2 minutes from now (small buffer against
    # clock skew between client and service) and lasts 60 minutes.
    utc_now = datetime.now(timezone.utc)
    start_time = utc_now + timedelta(minutes=2)
    expiry_time = start_time + timedelta(minutes=60)
    # ISO-8601 UTC timestamps, as required by the KeyInfo request body.
    start_str = start_time.strftime("%Y-%m-%dT%H:%M:%SZ")
    expiry_str = expiry_time.strftime("%Y-%m-%dT%H:%M:%SZ")

    # XML request body per the Get User Delegation Key REST API.
    payload = f"""<?xml version="1.0" encoding="utf-8"?>
    <KeyInfo>
        <Start>{start_str}</Start>
        <Expiry>{expiry_str}</Expiry>
    </KeyInfo>"""

    response = _request_blob_api(
        request="?restype=service&comp=userdelegationkey",
        method="post",
        payload=payload,
    )

    # The service replies with XML; flatten it to a dict and pull the key out.
    root = ET.fromstring(response.content)
    response_json = _xml_to_dict(root)

    return response_json.get("UserDelegationKey", {}).get("Value", None)
sempy_labs/_vpax.py CHANGED
@@ -16,6 +16,7 @@ from sempy_labs._helper_functions import (
16
16
  file_exists,
17
17
  create_abfss_path_from_path,
18
18
  )
19
+ from sempy._utils._log import log
19
20
  import sempy_labs._icons as icons
20
21
  import zipfile
21
22
  import requests
@@ -134,6 +135,7 @@ def init_vertipaq_analyzer():
134
135
  _vpa_initialized = True
135
136
 
136
137
 
138
+ @log
137
139
  def create_vpax(
138
140
  dataset: str | UUID,
139
141
  workspace: Optional[str | UUID] = None,
@@ -20,7 +20,6 @@ from sempy_labs.lakehouse._shortcuts import (
20
20
  from sempy_labs.lakehouse._blobs import (
21
21
  recover_lakehouse_object,
22
22
  list_blobs,
23
- get_user_delegation_key,
24
23
  )
25
24
  from sempy_labs.lakehouse._livy_sessions import (
26
25
  list_livy_sessions,
@@ -51,5 +50,4 @@ __all__ = [
51
50
  "delete_lakehouse",
52
51
  "update_lakehouse",
53
52
  "load_table",
54
- "get_user_delegation_key",
55
53
  ]
@@ -244,40 +244,3 @@ def recover_lakehouse_object(
244
244
  print(
245
245
  f"{icons.red_dot} An error occurred while recovering the '{blob_name}' blob: {e}"
246
246
  )
247
-
248
-
249
- def get_user_delegation_key():
250
- """
251
- Gets a key that can be used to sign a user delegation SAS (shared access signature). A user delegation SAS grants access to Azure Blob Storage resources by using Microsoft Entra credentials.
252
-
253
- This is a wrapper function for the following API: `Get User Delegation Key <https://learn.microsoft.com/rest/api/storageservices/get-user-delegation-key>`_.
254
-
255
- Returns
256
- -------
257
- str
258
- The user delegation key value.
259
- """
260
-
261
- from datetime import datetime, timedelta, timezone
262
-
263
- utc_now = datetime.now(timezone.utc)
264
- start_time = utc_now + timedelta(minutes=2)
265
- expiry_time = start_time + timedelta(minutes=60)
266
- start_str = start_time.strftime("%Y-%m-%dT%H:%M:%SZ")
267
- expiry_str = expiry_time.strftime("%Y-%m-%dT%H:%M:%SZ")
268
-
269
- payload = f"""<?xml version="1.0" encoding="utf-8"?>
270
- <KeyInfo>
271
- <Start>{start_str}</Start>
272
- <Expiry>{expiry_str}</Expiry>
273
- </KeyInfo>"""
274
-
275
- response = _request_blob_api(
276
- request="?restype=service&comp=userdelegationkey",
277
- method="post",
278
- payload=payload,
279
- )
280
-
281
- root = ET.fromstring(response.content)
282
- response_json = _xml_to_dict(root)
283
- return response_json.get("UserDelegationKey", {}).get("Value", None)