semantic-link-labs 0.10.1__py3-none-any.whl → 0.11.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {semantic_link_labs-0.10.1.dist-info → semantic_link_labs-0.11.1.dist-info}/METADATA +8 -6
- {semantic_link_labs-0.10.1.dist-info → semantic_link_labs-0.11.1.dist-info}/RECORD +94 -92
- sempy_labs/__init__.py +4 -0
- sempy_labs/_a_lib_info.py +1 -1
- sempy_labs/_capacities.py +2 -0
- sempy_labs/_connections.py +11 -0
- sempy_labs/_dashboards.py +9 -4
- sempy_labs/_data_pipelines.py +5 -0
- sempy_labs/_dataflows.py +284 -17
- sempy_labs/_daxformatter.py +2 -0
- sempy_labs/_delta_analyzer_history.py +4 -1
- sempy_labs/_deployment_pipelines.py +4 -0
- sempy_labs/_documentation.py +3 -0
- sempy_labs/_environments.py +10 -1
- sempy_labs/_eventhouses.py +12 -5
- sempy_labs/_eventstreams.py +11 -3
- sempy_labs/_external_data_shares.py +8 -2
- sempy_labs/_gateways.py +26 -5
- sempy_labs/_git.py +11 -0
- sempy_labs/_graphQL.py +10 -3
- sempy_labs/_helper_functions.py +62 -10
- sempy_labs/_job_scheduler.py +54 -7
- sempy_labs/_kql_databases.py +11 -2
- sempy_labs/_kql_querysets.py +11 -3
- sempy_labs/_list_functions.py +17 -2
- sempy_labs/_managed_private_endpoints.py +11 -2
- sempy_labs/_mirrored_databases.py +17 -3
- sempy_labs/_mirrored_warehouses.py +9 -3
- sempy_labs/_ml_experiments.py +11 -3
- sempy_labs/_ml_models.py +11 -3
- sempy_labs/_model_bpa_rules.py +2 -0
- sempy_labs/_mounted_data_factories.py +12 -8
- sempy_labs/_notebooks.py +3 -0
- sempy_labs/_refresh_semantic_model.py +1 -0
- sempy_labs/_semantic_models.py +6 -0
- sempy_labs/_spark.py +7 -0
- sempy_labs/_sql_endpoints.py +54 -31
- sempy_labs/_sqldatabase.py +13 -4
- sempy_labs/_tags.py +5 -1
- sempy_labs/_user_delegation_key.py +2 -0
- sempy_labs/_variable_libraries.py +3 -1
- sempy_labs/_warehouses.py +13 -3
- sempy_labs/_workloads.py +3 -0
- sempy_labs/_workspace_identity.py +3 -0
- sempy_labs/_workspaces.py +14 -1
- sempy_labs/admin/__init__.py +2 -0
- sempy_labs/admin/_activities.py +6 -5
- sempy_labs/admin/_apps.py +31 -31
- sempy_labs/admin/_artifacts.py +8 -3
- sempy_labs/admin/_basic_functions.py +5 -0
- sempy_labs/admin/_capacities.py +33 -20
- sempy_labs/admin/_datasets.py +51 -51
- sempy_labs/admin/_domains.py +17 -1
- sempy_labs/admin/_external_data_share.py +8 -2
- sempy_labs/admin/_git.py +14 -9
- sempy_labs/admin/_items.py +15 -2
- sempy_labs/admin/_reports.py +64 -65
- sempy_labs/admin/_shared.py +7 -1
- sempy_labs/admin/_tags.py +5 -0
- sempy_labs/admin/_tenant.py +5 -2
- sempy_labs/admin/_users.py +9 -3
- sempy_labs/admin/_workspaces.py +88 -0
- sempy_labs/directlake/_dl_helper.py +2 -0
- sempy_labs/directlake/_generate_shared_expression.py +2 -0
- sempy_labs/directlake/_get_directlake_lakehouse.py +2 -4
- sempy_labs/directlake/_get_shared_expression.py +2 -0
- sempy_labs/directlake/_guardrails.py +2 -0
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +2 -0
- sempy_labs/directlake/_warm_cache.py +1 -0
- sempy_labs/graph/_groups.py +22 -7
- sempy_labs/graph/_teams.py +7 -2
- sempy_labs/graph/_users.py +1 -0
- sempy_labs/lakehouse/_blobs.py +1 -0
- sempy_labs/lakehouse/_get_lakehouse_tables.py +88 -27
- sempy_labs/lakehouse/_helper.py +2 -0
- sempy_labs/lakehouse/_lakehouse.py +38 -5
- sempy_labs/lakehouse/_livy_sessions.py +2 -1
- sempy_labs/lakehouse/_shortcuts.py +7 -1
- sempy_labs/migration/_direct_lake_to_import.py +2 -0
- sempy_labs/mirrored_azure_databricks_catalog/_discover.py +4 -0
- sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +2 -0
- sempy_labs/report/_download_report.py +2 -1
- sempy_labs/report/_generate_report.py +2 -0
- sempy_labs/report/_paginated.py +2 -0
- sempy_labs/report/_report_bpa.py +110 -122
- sempy_labs/report/_report_bpa_rules.py +2 -0
- sempy_labs/report/_report_functions.py +7 -0
- sempy_labs/report/_reportwrapper.py +64 -31
- sempy_labs/theme/__init__.py +12 -0
- sempy_labs/theme/_org_themes.py +117 -0
- sempy_labs/tom/_model.py +494 -16
- {semantic_link_labs-0.10.1.dist-info → semantic_link_labs-0.11.1.dist-info}/WHEEL +0 -0
- {semantic_link_labs-0.10.1.dist-info → semantic_link_labs-0.11.1.dist-info}/licenses/LICENSE +0 -0
- {semantic_link_labs-0.10.1.dist-info → semantic_link_labs-0.11.1.dist-info}/top_level.txt +0 -0
sempy_labs/_gateways.py
CHANGED

@@ -47,6 +47,7 @@ def list_gateways() -> pd.DataFrame:
         request="/v1/gateways", client="fabric_sp", uses_pagination=True
     )

+    dfs = []
     for r in responses:
         for v in r.get("value", []):
             new_data = {
@@ -62,13 +63,16 @@ def list_gateways() -> pd.DataFrame:
                 "Allow Custom Connectors": v.get("allowCustomConnectors"),
             }

-
+            dfs.append(pd.DataFrame(new_data, index=[0]))

-
+    if dfs:
+        df = pd.concat(dfs, ignore_index=True)
+        _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df


+@log
 def _resolve_gateway_id(gateway: str | UUID) -> UUID:

     dfG = list_gateways()
@@ -83,6 +87,7 @@ def _resolve_gateway_id(gateway: str | UUID) -> UUID:
     return dfG_filt["Gateway Id"].iloc[0]


+@log
 def delete_gateway(gateway: str | UUID):
     """
     Deletes a gateway.
@@ -102,6 +107,7 @@ def delete_gateway(gateway: str | UUID):
     print(f"{icons.green_dot} The '{gateway}' gateway has been deleted.")


+@log
 def list_gateway_role_assigments(gateway: str | UUID) -> pd.DataFrame:
     """
     Returns a list of gateway role assignments.
@@ -135,6 +141,7 @@ def list_gateway_role_assigments(gateway: str | UUID) -> pd.DataFrame:
         uses_pagination=True,
     )

+    dfs = []
     for r in responses:
         for v in r.get("value", []):
             new_data = {
@@ -144,11 +151,15 @@ def list_gateway_role_assigments(gateway: str | UUID) -> pd.DataFrame:
                 "Role": v.get("role"),
             }

-
+            dfs.append(pd.DataFrame(new_data, index=[0]))
+
+    if dfs:
+        df = pd.concat(dfs, ignore_index=True)

     return df


+@log
 def delete_gateway_role_assignment(gateway: str | UUID, role_assignment_id: UUID):
     """
     Delete the specified role assignment for the gateway.
@@ -177,6 +188,7 @@ def delete_gateway_role_assignment(gateway: str | UUID, role_assignment_id: UUID
     )


+@log
 def _resolve_gateway_member_id(gateway: str | UUID, gateway_member: str | UUID) -> UUID:

     gateway_id = _resolve_gateway_id(gateway)
@@ -194,6 +206,7 @@ def _resolve_gateway_member_id(gateway: str | UUID, gateway_member: str | UUID)
     return dfM_filt["Member Id"].iloc[0]


+@log
 def delete_gateway_member(gateway: str | UUID, gateway_member: str | UUID):
     """
     Delete gateway member of an on-premises gateway.
@@ -225,6 +238,7 @@ def delete_gateway_member(gateway: str | UUID, gateway_member: str | UUID):
     )


+@log
 def list_gateway_members(gateway: str | UUID) -> pd.DataFrame:
     """
     Lists gateway members of an on-premises gateway.
@@ -260,6 +274,7 @@ def list_gateway_members(gateway: str | UUID) -> pd.DataFrame:
         request=f"/v1/gateways/{gateway_id}/members", client="fabric_sp"
     )

+    dfs = []
     for v in response.json().get("value", []):
         new_data = {
             "Member Id": v.get("id"),
@@ -270,13 +285,16 @@ def list_gateway_members(gateway: str | UUID) -> pd.DataFrame:
             "Enabled": v.get("enabled"),
         }

-
+        dfs.append(pd.DataFrame(new_data, index=[0]))

-
+    if dfs:
+        df = pd.concat(dfs, ignore_index=True)
+        _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df


+@log
 def create_vnet_gateway(
     name: str,
     capacity: str | UUID,
@@ -342,6 +360,7 @@ def create_vnet_gateway(
     )


+@log
 def update_on_premises_gateway(
     gateway: str | UUID,
     allow_cloud_connection_refresh: Optional[bool] = None,
@@ -395,6 +414,7 @@ def update_on_premises_gateway(
     print(f"{icons.green_dot} The '{gateway}' has been updated accordingly.")


+@log
 def update_vnet_gateway(
     gateway: str | UUID,
     capacity: str | UUID,
@@ -448,6 +468,7 @@ def update_vnet_gateway(
     print(f"{icons.green_dot} The '{gateway}' has been updated accordingly.")


+@log
 def bind_semantic_model_to_gateway(
     dataset: str | UUID, gateway: str | UUID, workspace: Optional[str | UUID] = None
 ):
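
The recurring change in this file (and repeated in the other list_* modules below) replaces row-by-row pd.concat calls with a collect-then-concat pattern: single-row frames are accumulated in a dfs list and concatenated once, which avoids repeated copying and keeps the empty schema frame when no rows come back. A minimal, self-contained sketch of the pattern, with made-up row data in place of the real gateway API response:

import pandas as pd

# Made-up records standing in for the parsed REST response values.
values = [{"Gateway Name": "gw-01"}, {"Gateway Name": "gw-02"}]

df = pd.DataFrame(columns=["Gateway Name"])  # empty frame preserves the schema if no rows arrive

dfs = []
for v in values:
    dfs.append(pd.DataFrame(v, index=[0]))  # one single-row frame per record

if dfs:
    df = pd.concat(dfs, ignore_index=True)  # a single concat instead of one per row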
sempy_labs/_git.py
CHANGED

@@ -7,8 +7,10 @@ from sempy_labs._helper_functions import (
     _create_dataframe,
 )
 from uuid import UUID
+from sempy._utils._log import log


+@log
 def connect_workspace_to_azure_dev_ops(
     organization_name: str,
     project_name: str,
@@ -64,6 +66,7 @@ def connect_workspace_to_azure_dev_ops(
     )


+@log
 def connect_workspace_to_github(
     owner_name: str,
     repository_name: str,
@@ -125,6 +128,7 @@ def connect_workspace_to_github(
     )


+@log
 def disconnect_workspace_from_git(workspace: Optional[str | UUID] = None):
     """
     Disconnects a workspace from a git repository.
@@ -148,6 +152,7 @@ def disconnect_workspace_from_git(workspace: Optional[str | UUID] = None):
     )


+@log
 def get_git_status(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     """
     Obtains the Git status of items in the workspace, that can be committed to Git.
@@ -209,6 +214,7 @@ def get_git_status(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     return df


+@log
 def get_git_connection(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     """
     Obtains the Git status of items in the workspace, that can be committed to Git.
@@ -265,6 +271,7 @@ def get_git_connection(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     return df


+@log
 def initialize_git_connection(workspace: Optional[str | UUID] = None) -> str:
     """
     Initializes a connection for a workspace that is connected to Git.
@@ -300,6 +307,7 @@ def initialize_git_connection(workspace: Optional[str | UUID] = None) -> str:
     return response_json.get("remoteCommitHash")


+@log
 def commit_to_git(
     comment: str,
     item_ids: UUID | List[UUID] = None,
@@ -368,6 +376,7 @@ def commit_to_git(
     )


+@log
 def update_from_git(
     remote_commit_hash: str,
     conflict_resolution_policy: str,
@@ -435,6 +444,7 @@ def update_from_git(
     )


+@log
 def get_my_git_credentials(
     workspace: Optional[str | UUID] = None,
 ) -> pd.DataFrame:
@@ -476,6 +486,7 @@ def get_my_git_credentials(
     return df


+@log
 def update_my_git_credentials(
     source: str,
     connection_id: Optional[UUID] = None,
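
Every edit in this module is the same: sempy's @log decorator is imported from sempy._utils._log and applied to each public function. The decorator's body is not part of this diff; purely as a hypothetical stand-in (not sempy's actual implementation), a decorator of this general shape wraps the call, preserves the function's metadata, and records entry:

import functools
import logging

def log(func):
    # Hypothetical stand-in for sempy._utils._log.log; the real body is not shown in this diff.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        logging.getLogger(func.__module__).debug("calling %s", func.__qualname__)
        return func(*args, **kwargs)
    return wrapper

@log
def disconnect_workspace_from_git(workspace=None):
    ...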
sempy_labs/_graphQL.py
CHANGED

@@ -4,11 +4,13 @@ from typing import Optional
 from sempy_labs._helper_functions import (
     _base_api,
     _create_dataframe,
-
+    resolve_workspace_id,
     create_item,
 )
+from sempy._utils._log import log


+@log
 def list_graphql_apis(workspace: Optional[str | UUID]) -> pd.DataFrame:
     """
     Shows the Graph QL APIs within a workspace.
@@ -37,7 +39,7 @@ def list_graphql_apis(workspace: Optional[str | UUID]) -> pd.DataFrame:
     }
     df = _create_dataframe(columns=columns)

-
+    workspace_id = resolve_workspace_id(workspace)

     responses = _base_api(
         request=f"/v1/workspaces/{workspace_id}/GraphQLApis",
@@ -45,6 +47,7 @@ def list_graphql_apis(workspace: Optional[str | UUID]) -> pd.DataFrame:
         client="fabric_sp",
     )

+    dfs = []
     for r in responses:
         for v in r.get("value", []):
             new_data = {
@@ -52,11 +55,15 @@ def list_graphql_apis(workspace: Optional[str | UUID]) -> pd.DataFrame:
                 "GraphQL API Id": v.get("id"),
                 "Description": v.get("description"),
             }
-
+            dfs.append(pd.DataFrame(new_data, index=[0]))
+
+    if dfs:
+        df = pd.concat(dfs, ignore_index=True)

     return df


+@log
 def create_graphql_api(
     name: str, description: Optional[str] = None, workspace: Optional[str | UUID] = None
 ):
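
Alongside the same @log and collect-then-concat edits, list_graphql_apis now resolves the workspace through resolve_workspace_id, so the workspace argument can be either a display name or a UUID. A usage sketch with illustrative values:

from uuid import UUID

df = list_graphql_apis(workspace="Sales Analytics")                             # by display name
df = list_graphql_apis(workspace=UUID("00000000-0000-0000-0000-000000000000"))  # or by workspace id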
sempy_labs/_helper_functions.py
CHANGED

@@ -19,6 +19,7 @@ import requests
 import sempy_labs._authentication as auth
 from jsonpath_ng.ext import parse
 from jsonpath_ng.jsonpath import Fields, Index
+from sempy._utils._log import log


 def _build_url(url: str, params: dict) -> str:
@@ -37,6 +38,7 @@ def _encode_user(user: str) -> str:
     return urllib.parse.quote(user, safe="@")


+@log
 def create_abfss_path(
     lakehouse_id: UUID,
     lakehouse_workspace_id: UUID,
@@ -76,6 +78,7 @@ def create_abfss_path(
     return path


+@log
 def create_abfss_path_from_path(
     lakehouse_id: UUID, workspace_id: UUID, file_path: str
 ) -> str:
@@ -103,6 +106,7 @@ def _split_abfss_path(path: str) -> Tuple[UUID, UUID, str]:
     return workspace_id, item_id, delta_table_name


+@log
 def format_dax_object_name(table: str, column: str) -> str:
     """
     Formats a table/column combination to the 'Table Name'[Column Name] format.
@@ -123,6 +127,7 @@ def format_dax_object_name(table: str, column: str) -> str:
     return "'" + table + "'[" + column + "]"


+@log
 def create_relationship_name(
     from_table: str, from_column: str, to_table: str, to_column: str
 ) -> str:
@@ -153,6 +158,7 @@ def create_relationship_name(
     )


+@log
 def resolve_report_id(
     report: str | UUID, workspace: Optional[str | UUID] = None
 ) -> UUID:
@@ -177,6 +183,7 @@ def resolve_report_id(
     return resolve_item_id(item=report, type="Report", workspace=workspace)


+@log
 def resolve_report_name(report_id: UUID, workspace: Optional[str | UUID] = None) -> str:
     """
     Obtains the name of the Power BI report.
@@ -199,6 +206,7 @@ def resolve_report_name(report_id: UUID, workspace: Optional[str | UUID] = None)
     return resolve_item_name(item_id=report_id, workspace=workspace)


+@log
 def delete_item(
     item: str | UUID, type: str, workspace: Optional[str | UUID] = None
 ) -> None:
@@ -230,6 +238,7 @@ def delete_item(
     )


+@log
 def create_item(
     name: str,
     type: str,
@@ -281,6 +290,7 @@ def create_item(
     )


+@log
 def get_item_definition(
     item: str | UUID,
     type: str,
@@ -319,6 +329,7 @@ def get_item_definition(
     return value


+@log
 def resolve_lakehouse_name_and_id(
     lakehouse: Optional[str | UUID] = None, workspace: Optional[str | UUID] = None
 ) -> Tuple[str, UUID]:
@@ -344,6 +355,7 @@ def resolve_lakehouse_name_and_id(
     return lakehouse_name, lakehouse_id


+@log
 def resolve_dataset_name_and_id(
     dataset: str | UUID, workspace: Optional[str | UUID] = None
 ) -> Tuple[str, UUID]:
@@ -355,6 +367,7 @@ def resolve_dataset_name_and_id(
     return dataset_name, dataset_id


+@log
 def resolve_dataset_id(
     dataset: str | UUID, workspace: Optional[str | UUID] = None
 ) -> UUID:
@@ -379,6 +392,7 @@ def resolve_dataset_id(
     return resolve_item_id(item=dataset, type="SemanticModel", workspace=workspace)


+@log
 def resolve_dataset_name(
     dataset_id: UUID, workspace: Optional[str | UUID] = None
 ) -> str:
@@ -403,6 +417,7 @@ def resolve_dataset_name(
     return resolve_item_name(item_id=dataset_id, workspace=workspace)


+@log
 def resolve_lakehouse_name(
     lakehouse_id: Optional[UUID] = None, workspace: Optional[str | UUID] = None
 ) -> str:
@@ -435,6 +450,7 @@ def resolve_lakehouse_name(
     return resolve_item_name(item_id=lakehouse_id, workspace=workspace)


+@log
 def resolve_lakehouse_id(
     lakehouse: Optional[str | UUID] = None, workspace: Optional[str | UUID] = None
 ) -> UUID:
@@ -470,6 +486,7 @@ def resolve_lakehouse_id(
     return lakehouse_id


+@log
 def get_direct_lake_sql_endpoint(
     dataset: str | UUID, workspace: Optional[str | UUID] = None
 ) -> UUID:
@@ -517,6 +534,7 @@ def get_direct_lake_sql_endpoint(
     return sqlEndpointId


+@log
 def generate_embedded_filter(filter: str) -> str:
     """
     Converts the filter expression to a filter expression which can be used by a Power BI embedded URL.
@@ -582,6 +600,7 @@ def generate_embedded_filter(filter: str) -> str:
     return revised_filter


+@log
 def save_as_delta_table(
     dataframe,
     delta_table_name: str,
@@ -735,6 +754,7 @@ def save_as_delta_table(
     )


+@log
 def language_validate(language: str):
     """
     Validateds that the language specified exists within the supported langauges.
@@ -771,6 +791,7 @@ def language_validate(language: str):
     return lang


+@log
 def resolve_workspace_id(
     workspace: Optional[str | UUID] = None,
 ) -> UUID:
@@ -803,6 +824,7 @@ def resolve_workspace_id(
     return workspace_id


+@log
 def resolve_workspace_name(workspace_id: Optional[UUID] = None) -> str:

     if workspace_id is None:
@@ -820,6 +842,7 @@ def resolve_workspace_name(workspace_id: Optional[UUID] = None) -> str:
     return response.get("displayName")


+@log
 def resolve_workspace_name_and_id(
     workspace: Optional[str | UUID] = None,
 ) -> Tuple[str, str]:
@@ -865,6 +888,7 @@ def resolve_workspace_name_and_id(
     return workspace_name, workspace_id


+@log
 def resolve_item_id(
     item: str | UUID, type: Optional[str] = None, workspace: Optional[str | UUID] = None
 ) -> UUID:
@@ -909,6 +933,7 @@ def resolve_item_id(
     return item_id


+@log
 def resolve_item_name_and_id(
     item: str | UUID, type: Optional[str] = None, workspace: Optional[str | UUID] = None
 ) -> Tuple[str, UUID]:
@@ -926,6 +951,7 @@ def resolve_item_name_and_id(
     return item_name, item_id


+@log
 def resolve_item_name(item_id: UUID, workspace: Optional[str | UUID] = None) -> str:

     workspace_id = resolve_workspace_id(workspace)
@@ -946,6 +972,7 @@ def resolve_item_name(item_id: UUID, workspace: Optional[str | UUID] = None) ->
     return item_name


+@log
 def _extract_json(dataframe: pd.DataFrame) -> dict:

     payload = dataframe["payload"].iloc[0]
@@ -954,19 +981,21 @@ def _extract_json(dataframe: pd.DataFrame) -> dict:
     return json.loads(json_file)


-
+@log
+def _conv_b64(file, json_dumps: bool = True):

-
-
-
-    return f
+    if json_dumps:
+        file = json.dumps(file)
+    return base64.b64encode(file.encode("utf-8")).decode("utf-8")


+@log
 def _decode_b64(file, format: Optional[str] = "utf-8"):

     return base64.b64decode(file).decode(format)


+@log
 def is_default_semantic_model(
     dataset: str, workspace: Optional[str | UUID] = None
 ) -> bool:
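
The rewritten _conv_b64 above gains a json_dumps flag, so callers that already hold a serialized string can skip the json.dumps step. A standalone sketch of the round trip together with _decode_b64, following the bodies shown in the diff (argument values are illustrative):

import base64
import json

def _conv_b64(file, json_dumps: bool = True):
    if json_dumps:
        file = json.dumps(file)  # serialize dicts/lists before encoding
    return base64.b64encode(file.encode("utf-8")).decode("utf-8")

def _decode_b64(file, format="utf-8"):
    return base64.b64decode(file).decode(format)

payload = _conv_b64({"name": "My Report"})        # dict -> JSON -> base64
raw = _conv_b64("table Sales", json_dumps=False)  # pre-serialized text is encoded as-is
assert json.loads(_decode_b64(payload)) == {"name": "My Report"}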
@@ -1000,6 +1029,7 @@ def is_default_semantic_model(
     return dataset in default_semantic_models


+@log
 def resolve_item_type(item_id: UUID, workspace: Optional[str | UUID] = None) -> str:
     """
     Obtains the item type for a given Fabric Item Id within a Fabric workspace.
@@ -1030,6 +1060,7 @@ def resolve_item_type(item_id: UUID, workspace: Optional[str | UUID] = None) ->
     return dfI_filt["Type"].iloc[0]


+@log
 def resolve_dataset_from_report(
     report: str | UUID, workspace: Optional[str | UUID] = None
 ) -> Tuple[UUID, str, UUID, str]:
@@ -1071,6 +1102,7 @@ def _add_part(target_dict, path, payload):
     target_dict["definition"]["parts"].append(part)


+@log
 def resolve_workspace_capacity(
     workspace: Optional[str | UUID] = None,
 ) -> Tuple[UUID, str]:
@@ -1105,6 +1137,7 @@ def resolve_workspace_capacity(
     return capacity_id, capacity_name


+@log
 def get_capacity_id(workspace: Optional[str | UUID] = None) -> UUID:
     """
     Obtains the Capacity Id for a given workspace.
@@ -1136,6 +1169,7 @@ def get_capacity_id(workspace: Optional[str | UUID] = None) -> UUID:
     return capacity_id


+@log
 def get_capacity_name(workspace: Optional[str | UUID] = None) -> str:
     """
     Obtains the capacity name for a given workspace.
@@ -1166,6 +1200,7 @@ def get_capacity_name(workspace: Optional[str | UUID] = None) -> str:
     return dfC_filt["Display Name"].iloc[0]


+@log
 def resolve_capacity_name(capacity_id: Optional[UUID] = None) -> str:
     """
     Obtains the capacity name for a given capacity Id.
@@ -1198,6 +1233,7 @@ def resolve_capacity_name(capacity_id: Optional[UUID] = None) -> str:
     return dfC_filt["Display Name"].iloc[0]


+@log
 def resolve_capacity_id(capacity: Optional[str | UUID] = None, **kwargs) -> UUID:
     """
     Obtains the capacity Id for a given capacity name.
@@ -1381,6 +1417,7 @@ def _get_blob_client(workspace_id: UUID, item_id: UUID):
     return BlobServiceClient(url, credential=FabricTokenCredential())


+@log
 def resolve_warehouse_id(
     warehouse: str | UUID, workspace: Optional[str | UUID]
 ) -> UUID:
@@ -1461,6 +1498,7 @@ def convert_to_alphanumeric_lowercase(input_string):
     return cleaned_string


+@log
 def resolve_environment_id(
     environment: str | UUID, workspace: Optional[str | UUID] = None
 ) -> UUID:
@@ -1490,6 +1528,7 @@ def _make_clickable(val):
     return f'<a target="_blank" href="{val}">{val}</a>'


+@log
 def convert_to_friendly_case(text: str) -> str:
     """
     Converts a string of pascal/camel/snake case to business-friendly case.
@@ -1514,6 +1553,7 @@ def convert_to_friendly_case(text: str) -> str:
     return text


+@log
 def resolve_notebook_id(
     notebook: str | UUID, workspace: Optional[str | UUID] = None
 ) -> UUID:
@@ -1543,6 +1583,7 @@ def generate_guid():
     return str(uuid.uuid4())


+@log
 def _get_column_aggregate(
     table_name: str,
     column_name: str | List[str] = "RunId",
@@ -1643,6 +1684,14 @@ def _get_column_aggregate(
     return result


+def _validate_weight(weight: float):
+
+    if weight is not None and (weight <= 0 or weight >= 1):
+        raise ValueError(
+            f"{icons.red_dot} Invalid weight parameter. Weight must be a value between 0 and 1."
+        )
+
+
 def _create_spark_dataframe(df: pd.DataFrame):

     spark = _create_spark_session()
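
The new _validate_weight helper above treats None as "not provided" and otherwise enforces an exclusive (0, 1) range, so optional weight parameters can be checked with one call. A behavior sketch (the icons.red_dot prefix from the real message is dropped here):

def _validate_weight(weight):
    if weight is not None and (weight <= 0 or weight >= 1):
        raise ValueError(
            "Invalid weight parameter. Weight must be a value between 0 and 1."
        )

_validate_weight(None)   # passes: weight is optional
_validate_weight(0.75)   # passes: strictly between 0 and 1
# _validate_weight(1.0)  # would raise ValueError: both bounds are exclusive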
@@ -1858,7 +1907,7 @@ def _get_fabric_context_setting(name: str):

 def get_tenant_id():

-    _get_fabric_context_setting(name="trident.tenant.id")
+    return _get_fabric_context_setting(name="trident.tenant.id")


 def _base_api(
@@ -2082,7 +2131,9 @@ def _run_spark_sql_query(query):


 def _mount(
-    lakehouse: Optional[str | UUID] = None,
+    lakehouse: Optional[str | UUID] = None,
+    workspace: Optional[str | UUID] = None,
+    verbose: bool = False,
 ) -> str:
     """
     Mounts a lakehouse to a notebook if it is not already mounted. Returns the local path to the lakehouse.
@@ -2111,9 +2162,10 @@ def _mount(
     if not any(i.get("source") == lake_path for i in mounts):
         # Mount lakehouse if not mounted
         notebookutils.fs.mount(lake_path, mount_point)
-
-
-
+        if verbose:
+            print(
+                f"{icons.green_dot} Mounted the '{lakehouse_name}' lakehouse within the '{workspace_name}' to the notebook."
+            )

     mounts = notebookutils.fs.mounts()
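
Two behavioral notes on these last hunks: get_tenant_id previously computed the context setting but never returned it, so it always yielded None; the added return makes the value usable. And _mount now takes workspace and verbose parameters, printing its confirmation only when asked. Illustrative calls (names are placeholders):

tenant_id = get_tenant_id()  # now returns the "trident.tenant.id" setting instead of None

# _mount resolves and mounts the lakehouse, returning the local mount path;
# the green-dot confirmation prints only when verbose=True.
local_path = _mount(lakehouse="MyLakehouse", workspace="My Workspace", verbose=True)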