semantic-link-labs 0.9.4__py3-none-any.whl → 0.9.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/METADATA +19 -2
- {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/RECORD +71 -64
- {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +33 -4
- sempy_labs/_capacities.py +59 -128
- sempy_labs/_capacity_migration.py +19 -21
- sempy_labs/_connections.py +2 -4
- sempy_labs/_dashboards.py +60 -0
- sempy_labs/_data_pipelines.py +5 -31
- sempy_labs/_dataflows.py +2 -2
- sempy_labs/_dax_query_view.py +55 -0
- sempy_labs/_delta_analyzer.py +16 -14
- sempy_labs/_environments.py +28 -49
- sempy_labs/_eventhouses.py +27 -53
- sempy_labs/_eventstreams.py +16 -34
- sempy_labs/_external_data_shares.py +4 -10
- sempy_labs/_gateways.py +4 -4
- sempy_labs/_generate_semantic_model.py +2 -2
- sempy_labs/_git.py +90 -1
- sempy_labs/_graphQL.py +8 -21
- sempy_labs/_helper_functions.py +440 -91
- sempy_labs/_kql_databases.py +24 -35
- sempy_labs/_kql_querysets.py +15 -32
- sempy_labs/_list_functions.py +17 -192
- sempy_labs/_managed_private_endpoints.py +9 -2
- sempy_labs/_mirrored_databases.py +17 -49
- sempy_labs/_ml_experiments.py +6 -31
- sempy_labs/_ml_models.py +4 -28
- sempy_labs/_model_bpa.py +4 -11
- sempy_labs/_model_bpa_bulk.py +23 -27
- sempy_labs/_mounted_data_factories.py +119 -0
- sempy_labs/_notebooks.py +16 -26
- sempy_labs/_one_lake_integration.py +2 -1
- sempy_labs/_semantic_models.py +20 -0
- sempy_labs/_sql.py +13 -8
- sempy_labs/_sqldatabase.py +61 -100
- sempy_labs/_utils.py +42 -0
- sempy_labs/_vertipaq.py +25 -13
- sempy_labs/_warehouses.py +19 -20
- sempy_labs/_workloads.py +23 -9
- sempy_labs/_workspace_identity.py +6 -0
- sempy_labs/_workspaces.py +55 -7
- sempy_labs/admin/__init__.py +21 -1
- sempy_labs/admin/_apps.py +1 -1
- sempy_labs/admin/_artifacts.py +62 -0
- sempy_labs/admin/_basic_functions.py +3 -54
- sempy_labs/admin/_capacities.py +61 -0
- sempy_labs/admin/_reports.py +74 -0
- sempy_labs/admin/_scanner.py +2 -2
- sempy_labs/admin/_shared.py +4 -2
- sempy_labs/admin/_users.py +133 -0
- sempy_labs/admin/_workspaces.py +148 -0
- sempy_labs/directlake/_directlake_schema_compare.py +2 -1
- sempy_labs/directlake/_directlake_schema_sync.py +65 -19
- sempy_labs/directlake/_dl_helper.py +0 -6
- sempy_labs/directlake/_generate_shared_expression.py +10 -11
- sempy_labs/directlake/_guardrails.py +2 -1
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +12 -25
- sempy_labs/directlake/_update_directlake_partition_entity.py +11 -3
- sempy_labs/lakehouse/__init__.py +2 -0
- sempy_labs/lakehouse/_lakehouse.py +6 -7
- sempy_labs/lakehouse/_shortcuts.py +198 -57
- sempy_labs/migration/_migration_validation.py +0 -4
- sempy_labs/report/_download_report.py +4 -6
- sempy_labs/report/_generate_report.py +15 -23
- sempy_labs/report/_report_bpa.py +12 -19
- sempy_labs/report/_report_functions.py +2 -1
- sempy_labs/report/_report_rebind.py +8 -6
- sempy_labs/tom/_model.py +34 -16
- {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/top_level.txt +0 -0
sempy_labs/_ml_experiments.py
CHANGED

@@ -1,12 +1,11 @@
-import sempy.fabric as fabric
 import pandas as pd
 from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _base_api,
-    _print_success,
-    resolve_item_id,
+    delete_item,
     _create_dataframe,
+    create_item,
 )
 from uuid import UUID

@@ -74,32 +73,15 @@ def create_ml_experiment(
     name: str
         Name of the ML experiment.
     description : str, default=None
-        A description of the
+        A description of the ML experiment.
     workspace : str | uuid.UUID, default=None
         The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    payload = {"displayName": name}
-
-    if description:
-        payload["description"] = description
-
-    _base_api(
-        request=f"/v1/workspaces/{workspace_id}/mlExperiments",
-        method="post",
-        payload=payload,
-        status_codes=[201, 202],
-        lro_return_status_code=True,
-    )
-    _print_success(
-        item_name=name,
-        item_type="ML experiment",
-        workspace_name=workspace_name,
-        action="created",
+    create_item(
+        name=name, description=description, type="MLExperiment", workspace=workspace
     )

@@ -119,11 +101,4 @@ def delete_ml_experiment(name: str, workspace: Optional[str | UUID] = None):
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    item_id = resolve_item_id(item=name, type="MLExperiment", workspace=workspace)
-    fabric.delete_item(item_id=item_id, workspace=workspace)
-    _print_success(
-        item_name=name,
-        item_type="ML Experiment",
-        workspace_name=workspace,
-        action="deleted",
-    )
+    delete_item(item=name, type="MLExperiment", workspace=workspace)
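With both functions now routed through the shared helpers, a minimal usage sketch from a Fabric notebook looks like the following. The experiment name is a placeholder, and the functions are assumed to remain exported from the package root as in 0.9.4:

# Usage sketch; the experiment name is a placeholder.
import sempy_labs as labs

# Create the experiment (now a single call into the shared create_item helper).
labs.create_ml_experiment(
    name="SalesForecastExperiment",
    description="Demand forecasting runs",  # omitted from the payload when None
    workspace=None,  # None resolves to the attached lakehouse's or notebook's workspace
)

# Delete it (now a single call into the shared delete_item helper).
labs.delete_ml_experiment(name="SalesForecastExperiment", workspace=None)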
sempy_labs/_ml_models.py
CHANGED

@@ -1,12 +1,11 @@
-import sempy.fabric as fabric
 import pandas as pd
 from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _base_api,
-    resolve_item_id,
-    _print_success,
+    delete_item,
     _create_dataframe,
+    create_item,
 )
 from uuid import UUID

@@ -81,26 +80,7 @@ def create_ml_model(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    payload = {"displayName": name}
-
-    if description:
-        payload["description"] = description
-
-    _base_api(
-        request=f"/v1/workspaces/{workspace_id}/mlModels",
-        method="post",
-        status_codes=[201, 202],
-        payload=payload,
-        lro_return_status_code=True,
-    )
-    _print_success(
-        item_name=name,
-        item_type="ML Model",
-        workspace_name=workspace_name,
-        action="created",
-    )
+    create_item(name=name, description=description, type="MLModel", workspace=workspace)


 def delete_ml_model(name: str | UUID, workspace: Optional[str | UUID] = None):

@@ -119,8 +99,4 @@ def delete_ml_model(name: str | UUID, workspace: Optional[str | UUID] = None):
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    item_id = resolve_item_id(item=name, type="MLModel", workspace=workspace)
-    fabric.delete_item(item_id=item_id, workspace=workspace)
-    _print_success(
-        item_name=name, item_type="ML Model", workspace_name=workspace, action="deleted"
-    )
+    delete_item(item=name, type="MLModel", workspace=workspace)
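The same refactor repeats here: bespoke _base_api POSTs and _print_success calls are collapsed into shared create_item/delete_item helpers whose implementations live in sempy_labs/_helper_functions.py and are not part of this diff. A rough sketch of what a helper of this shape might look like, inferred purely from the removed per-item code (the real implementation may differ):

# Illustrative sketch only: what a shared create_item helper of this shape
# might look like, inferred from the per-item code removed in this release.
# The real helper lives in sempy_labs/_helper_functions.py and may differ.
from typing import Optional
from uuid import UUID

from sempy_labs._helper_functions import resolve_workspace_name_and_id, _base_api

# Collection segments observed in the removed per-item code.
_ENDPOINTS = {"MLExperiment": "mlExperiments", "MLModel": "mlModels"}


def create_item(
    name: str,
    type: str,
    description: Optional[str] = None,
    workspace: Optional[str | UUID] = None,
):
    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

    payload = {"displayName": name}
    if description:
        payload["description"] = description

    # Mirrors the removed code: POST to the item collection endpoint and
    # accept 201 (created) or 202 (long-running operation started).
    _base_api(
        request=f"/v1/workspaces/{workspace_id}/{_ENDPOINTS[type]}",
        method="post",
        payload=payload,
        status_codes=[201, 202],
        lro_return_status_code=True,
    )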
sempy_labs/_model_bpa.py
CHANGED

@@ -6,7 +6,6 @@ from IPython.display import display, HTML
 from sempy_labs._model_dependencies import get_model_calc_dependencies
 from sempy_labs._helper_functions import (
     format_dax_object_name,
-    resolve_lakehouse_name,
     create_relationship_name,
     save_as_delta_table,
     resolve_workspace_capacity,

@@ -43,6 +42,8 @@ def run_model_bpa(
     """
     Displays an HTML visualization of the results of the Best Practice Analyzer scan for a semantic model.

+    The Best Practice Analyzer rules are based on the rules defined `here <https://github.com/microsoft/Analysis-Services/tree/master/BestPracticeRules>`_. The framework for the Best Practice Analyzer and rules are based on the foundation set by `Tabular Editor <https://github.com/TabularEditor/TabularEditor>`_.
+
     Parameters
     ----------
     dataset : str | uuid.UUID

@@ -387,13 +388,7 @@
     dfExport = finalDF.copy()
     delta_table_name = "modelbparesults"

-
-    lake_workspace = fabric.get_workspace_id()
-    lakehouse = resolve_lakehouse_name(
-        lakehouse_id=lakehouse_id, workspace=lake_workspace
-    )
-
-    lakeT = get_lakehouse_tables(lakehouse=lakehouse, workspace=lake_workspace)
+    lakeT = get_lakehouse_tables()
     lakeT_filt = lakeT[lakeT["Table Name"] == delta_table_name]

     dfExport["Severity"].replace(icons.severity_mapping, inplace=True)

@@ -401,9 +396,7 @@
     if len(lakeT_filt) == 0:
         runId = 1
     else:
-        max_run_id = _get_column_aggregate(
-            lakehouse=lakehouse, table_name=delta_table_name
-        )
+        max_run_id = _get_column_aggregate(table_name=delta_table_name)
         runId = max_run_id + 1

     now = datetime.datetime.now()
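For context, a minimal invocation of the scan from a Fabric notebook; with this change, result export targets the lakehouse attached to the notebook rather than an explicitly resolved one:

# Minimal invocation; 'AdventureWorks' is a placeholder model name.
import sempy_labs as labs

# Displays the Best Practice Analyzer results for the semantic model as HTML.
labs.run_model_bpa(dataset="AdventureWorks")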
sempy_labs/_model_bpa_bulk.py
CHANGED

@@ -7,6 +7,8 @@ from sempy_labs._helper_functions import (
     resolve_workspace_capacity,
     retry,
     _get_column_aggregate,
+    resolve_workspace_id,
+    resolve_lakehouse_name_and_id,
 )
 from sempy_labs.lakehouse import (
     get_lakehouse_tables,

@@ -16,6 +18,7 @@ from sempy_labs._model_bpa import run_model_bpa
 from typing import Optional, List
 from sempy._utils._log import log
 import sempy_labs._icons as icons
+from uuid import UUID


 @log

@@ -66,17 +69,12 @@ def run_model_bpa_bulk(

     now = datetime.datetime.now()
     output_table = "modelbparesults"
-
-    lakehouse_id = fabric.get_lakehouse_id()
-    lakehouse = resolve_lakehouse_name(
-        lakehouse_id=lakehouse_id, workspace=lakehouse_workspace
-    )
-    lakeT = get_lakehouse_tables(lakehouse=lakehouse, workspace=lakehouse_workspace)
+    lakeT = get_lakehouse_tables()
     lakeT_filt = lakeT[lakeT["Table Name"] == output_table]
     if len(lakeT_filt) == 0:
         runId = 1
     else:
-        max_run_id = _get_column_aggregate(
+        max_run_id = _get_column_aggregate(table_name=output_table)
         runId = max_run_id + 1

     if isinstance(workspace, str):

@@ -170,7 +168,7 @@

     # Append save results individually for each workspace (so as not to create a giant dataframe)
     print(
-        f"{icons.in_progress} Saving the Model BPA results of the '{wksp}' workspace to the '{output_table}' within the
+        f"{icons.in_progress} Saving the Model BPA results of the '{wksp}' workspace to the '{output_table}' within the lakehouse attached to this notebook..."
     )

     schema = {

@@ -195,8 +193,8 @@
 @log
 def create_model_bpa_semantic_model(
     dataset: Optional[str] = icons.model_bpa_name,
-    lakehouse: Optional[str] = None,
-    lakehouse_workspace: Optional[str] = None,
+    lakehouse: Optional[str | UUID] = None,
+    lakehouse_workspace: Optional[str | UUID] = None,
 ):
     """
     Dynamically generates a Direct Lake semantic model based on the 'modelbparesults' delta table which contains the Best Practice Analyzer results.

@@ -209,16 +207,15 @@
     ----------
     dataset : str, default='ModelBPA'
         Name of the semantic model to be created.
-    lakehouse : str, default=None
+    lakehouse : str | uuid.UUID, default=None
         Name of the Fabric lakehouse which contains the 'modelbparesults' delta table.
         Defaults to None which resolves to the default lakehouse attached to the notebook.
-    lakehouse_workspace : str, default=None
+    lakehouse_workspace : str | uuid.UUID, default=None
         The workspace in which the lakehouse resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    from sempy_labs._helper_functions import resolve_lakehouse_name
     from sempy_labs.directlake import (
         generate_shared_expression,
         add_table_to_direct_lake_semantic_model,

@@ -226,22 +223,21 @@
     from sempy_labs import create_blank_semantic_model, refresh_semantic_model
     from sempy_labs.tom import connect_semantic_model

-
-
-
-
-    lakehouse = resolve_lakehouse_name(
-        lakehouse_id=lakehouse_id, workspace=lakehouse_workspace
-    )
+    lakehouse_workspace_id = resolve_workspace_id(workspace=lakehouse_workspace)
+    (lakehouse_id, lakehouse_name) = resolve_lakehouse_name_and_id(
+        lakehouse=lakehouse, workspace=lakehouse_workspace_id
+    )

     # Generate the shared expression based on the lakehouse and lakehouse workspace
     expr = generate_shared_expression(
-        item_name=
+        item_name=lakehouse_name,
+        item_type="Lakehouse",
+        workspace=lakehouse_workspace_id,
     )

     # Create blank model
     create_blank_semantic_model(
-        dataset=dataset, workspace=
+        dataset=dataset, workspace=lakehouse_workspace_id, overwrite=True
     )

@@ -250,7 +246,7 @@ def create_model_bpa_semantic_model(
     )
     def dyn_connect():
         with connect_semantic_model(
-            dataset=dataset, readonly=True, workspace=
+            dataset=dataset, readonly=True, workspace=lakehouse_workspace_id
         ) as tom:

             tom.model

@@ -259,7 +255,7 @@ def create_model_bpa_semantic_model(
     icons.sll_tags.append("ModelBPABulk")
     table_exists = False
     with connect_semantic_model(
-        dataset=dataset, readonly=False, workspace=
+        dataset=dataset, readonly=False, workspace=lakehouse_workspace_id
     ) as tom:
         t_name = "BPAResults"
         t_name_full = f"'{t_name}'"

@@ -274,11 +270,11 @@
         dataset=dataset,
         table_name=t_name,
         lakehouse_table_name="modelbparesults",
-        workspace=
+        workspace=lakehouse_workspace_id,
         refresh=False,
     )
     with connect_semantic_model(
-        dataset=dataset, readonly=False, workspace=
+        dataset=dataset, readonly=False, workspace=lakehouse_workspace_id
     ) as tom:
         # Fix column names
         for c in tom.all_columns():

@@ -377,4 +373,4 @@ def create_model_bpa_semantic_model(
     # tom.add_measure(table_name=t_name, measure_name='Rules Followed', expression="[Rules] - [Rules Violated]")

     # Refresh the model
-    refresh_semantic_model(dataset=dataset, workspace=
+    refresh_semantic_model(dataset=dataset, workspace=lakehouse_workspace_id)
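A usage sketch of the updated signature, which now accepts a name or UUID for both the lakehouse and its workspace (both values below are placeholders):

# Usage sketch; both values below are placeholders.
import sempy_labs as labs

labs.create_model_bpa_semantic_model(
    dataset="ModelBPA",
    lakehouse="1f2a3b4c-5d6e-7f80-91a2-b3c4d5e6f708",  # UUIDs are now accepted
    lakehouse_workspace="BPA Workspace",               # names still work too
)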
sempy_labs/_mounted_data_factories.py
ADDED

@@ -0,0 +1,119 @@
+import pandas as pd
+import json
+from typing import Optional
+from sempy_labs._helper_functions import (
+    resolve_workspace_name_and_id,
+    _base_api,
+    _create_dataframe,
+    _update_dataframe_datatypes,
+    resolve_item_id,
+    _decode_b64,
+    delete_item,
+    get_item_definition,
+)
+
+from uuid import UUID
+
+
+def list_mounted_data_factories(
+    workspace: Optional[str | UUID] = None,
+) -> pd.DataFrame:
+    """
+    Shows a list of mounted data factories from the specified workspace.
+
+    This is a wrapper function for the following API: `Items - List Mounted Data Factories <https://learn.microsoft.com/rest/api/fabric/mounteddatafactory/items/list-mounted-data-factories>`_.
+
+    Parameters
+    ----------
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing a list of mounted data factories from the specified workspace.
+    """
+
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    columns = {
+        "Mounted Data Factory Name": "str",
+        "Mounted Data Factory Id": "str",
+        "Description": "str",
+    }
+
+    df = _create_dataframe(columns=columns)
+    responses = _base_api(
+        request=f"/v1/workspaces/{workspace_id}/mountedDataFactories",
+        uses_pagination=True,
+    )
+
+    for r in responses:
+        for v in r.get("value", []):
+            new_data = {
+                "Mounted Data Factory Name": v.get("displayName"),
+                "Mounted Data Factory Id": v.get("id"),
+                "Description": v.get("description"),
+            }
+
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    _update_dataframe_datatypes(dataframe=df, column_map=columns)
+
+    return df
+
+
+def get_mounted_data_factory_definition(
+    mounted_data_factory: str | UUID, workspace: Optional[str | UUID] = None
+) -> dict:
+    """
+    Returns the specified MountedDataFactory public definition.
+
+    This is a wrapper function for the following API: `Items - Get Mounted Data Factory Definition <https://learn.microsoft.com/rest/api/fabric/mounteddatafactory/items/get-mounted-data-factory-definition>`_.
+
+    Parameters
+    ----------
+    mounted_data_factory : str | uuid.UUID
+        The name or ID of the mounted data factory.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    dict
+        The 'mountedDataFactory-content.json' file from the mounted data factory definition.
+    """
+
+    return get_item_definition(
+        item=mounted_data_factory,
+        type="MountedDataFactory",
+        workspace=workspace,
+        return_dataframe=False,
+    )
+
+
+def delete_mounted_data_factory(
+    mounted_data_factory: str | UUID, workspace: Optional[str | UUID]
+):
+    """
+    Deletes the specified mounted data factory.
+
+    This is a wrapper function for the following API: `Items - Delete Mounted Data Factory <https://learn.microsoft.com/rest/api/fabric/mounteddatafactory/items/delete-mounted-data-factory>`_.
+
+    Parameters
+    ----------
+    mounted_data_factory : str | uuid.UUID
+        The name or ID of the mounted data factory.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    delete_item(
+        item=mounted_data_factory, type="MountedDataFactory", workspace=workspace
+    )
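A usage sketch of the new module. The item name and workspace are placeholders; the functions may also be re-exported from the package root via the __init__.py changes above, but the module path shown is what this diff confirms:

# Usage sketch; the item name and workspace are placeholders.
from sempy_labs._mounted_data_factories import (
    list_mounted_data_factories,
    get_mounted_data_factory_definition,
    delete_mounted_data_factory,
)

# List the mounted data factories in a workspace as a pandas DataFrame.
df = list_mounted_data_factories(workspace="My Workspace")

# Fetch the 'mountedDataFactory-content.json' definition as a dict.
definition = get_mounted_data_factory_definition(
    mounted_data_factory="MyMountedADF", workspace="My Workspace"
)

# Delete it; note that workspace has no default in this function's signature.
delete_mounted_data_factory(
    mounted_data_factory="MyMountedADF", workspace="My Workspace"
)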
sempy_labs/_notebooks.py
CHANGED

@@ -10,6 +10,7 @@ from sempy_labs._helper_functions import (
     _decode_b64,
     _base_api,
     resolve_item_id,
+    create_item,
 )
 from sempy.fabric.exceptions import FabricHTTPException
 import os

@@ -183,35 +184,24 @@ def create_notebook(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """

-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     notebook_payload = base64.b64encode(notebook_content).decode("utf-8")
-
-
-        "
-
-
-
-
-
-
-                "payloadType": "InlineBase64",
-            }
-        ],
-    },
+    definition_payload = {
+        "format": "ipynb",
+        "parts": [
+            {
+                "path": f"{_notebook_prefix}.{type}",
+                "payload": notebook_payload,
+                "payloadType": "InlineBase64",
+            }
+        ],
     }
-    if description is not None:
-        payload["description"] = description

-
-
-
-
-
-
-    )
-
-    print(
-        f"{icons.green_dot} The '{name}' notebook was created within the '{workspace_name}' workspace."
+    create_item(
+        name=name,
+        type="Notebook",
+        workspace=workspace,
+        description=description,
+        definition=definition_payload,
     )
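The notebook definition that create_notebook now hands to create_item follows Fabric's inline-definition shape. A standalone sketch of that payload; the notebook bytes and resolved path are placeholders, and in the source the path comes from f"{_notebook_prefix}.{type}":

# Standalone sketch of the definition payload; values marked as placeholders.
import base64

notebook_content = b'{"cells": [], "nbformat": 4, "nbformat_minor": 5}'  # placeholder bytes
notebook_payload = base64.b64encode(notebook_content).decode("utf-8")

definition_payload = {
    "format": "ipynb",
    "parts": [
        {
            # In the source this is f"{_notebook_prefix}.{type}"; shown resolved here.
            "path": "notebook-content.ipynb",
            "payload": notebook_payload,    # base64-encoded notebook file
            "payloadType": "InlineBase64",  # Fabric inline-definition encoding
        }
    ],
}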
sempy_labs/_one_lake_integration.py
CHANGED

@@ -5,6 +5,7 @@ from sempy._utils._log import log
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     resolve_dataset_name_and_id,
+    resolve_workspace_id,
 )
 import sempy_labs._icons as icons
 from uuid import UUID

@@ -43,7 +44,7 @@ def export_model_to_onelake(
         destination_workspace = workspace_name
         destination_workspace_id = workspace_id
     else:
-        destination_workspace_id =
+        destination_workspace_id = resolve_workspace_id(workspace=destination_workspace)

     tmsl = f"""
     {{
sempy_labs/_semantic_models.py
CHANGED

@@ -7,6 +7,7 @@ from sempy_labs._helper_functions import (
     _update_dataframe_datatypes,
     resolve_workspace_name_and_id,
     resolve_dataset_name_and_id,
+    delete_item,
 )
 import sempy_labs._icons as icons

@@ -115,3 +116,22 @@ def enable_semantic_model_scheduled_refresh(
     print(
         f"{icons.green_dot} Scheduled refresh for the '{dataset_name}' within the '{workspace_name}' workspace has been enabled."
     )
+
+
+def delete_semantic_model(dataset: str | UUID, workspace: Optional[str | UUID] = None):
+    """
+    Deletes a semantic model.
+
+    This is a wrapper function for the following API: `Items - Delete Semantic Model <https://learn.microsoft.com/rest/api/fabric/semanticmodel/items/delete-semantic-model>`_.
+
+    Parameters
+    ----------
+    dataset: str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    delete_item(item=dataset, type="SemanticModel", workspace=workspace)
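A usage sketch of the new function. Names are placeholders; importing from the package root assumes the __init__.py changes above re-export it, otherwise import from sempy_labs._semantic_models:

# Usage sketch; names are placeholders.
import sempy_labs as labs

# Accepts the model's name or UUID; workspace defaults to the notebook's.
labs.delete_semantic_model(dataset="AdventureWorks", workspace="My Workspace")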
sempy_labs/_sql.py
CHANGED

@@ -45,28 +45,33 @@ class ConnectBase:
         (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

         # Resolve the appropriate ID and name (warehouse or lakehouse)
-        if endpoint_type == "warehouse":
-            (resource_name, resource_id) = resolve_item_name_and_id(
-                item=item, type=endpoint_type.capitalize(), workspace=workspace_id
-            )
         if endpoint_type == "sqldatabase":
             # SQLDatabase is has special case for resolving the name and id
             (resource_name, resource_id) = resolve_item_name_and_id(
                 item=item, type="SQLDatabase", workspace=workspace_id
             )
-
+        elif endpoint_type == "lakehouse":
             (resource_name, resource_id) = resolve_lakehouse_name_and_id(
-                lakehouse=item,
+                lakehouse=item,
+                workspace=workspace_id,
+            )
+        else:
+            (resource_name, resource_id) = resolve_item_name_and_id(
+                item=item, workspace=workspace_id, type=endpoint_type.capitalize()
             )

+        endpoint_for_url = (
+            "sqlDatabases" if endpoint_type == "sqldatabase" else f"{endpoint_type}s"
+        )
+
         # Get the TDS endpoint
         response = _base_api(
-            request=f"v1/workspaces/{workspace_id}/{
+            request=f"v1/workspaces/{workspace_id}/{endpoint_for_url}/{resource_id}"
         )

         if endpoint_type == "warehouse":
             tds_endpoint = response.json().get("properties", {}).get("connectionString")
-
+        elif endpoint_type == "sqldatabase":
             tds_endpoint = response.json().get("properties", {}).get("serverFqdn")
         else:
             tds_endpoint = (
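The key change above is the endpoint_for_url mapping: the REST path segment differs per item type, with SQL databases needing the camelCase "sqlDatabases" segment while warehouses and lakehouses simply pluralize. A standalone restatement of that expression:

# Standalone restatement of the endpoint_for_url expression added in _sql.py.
def endpoint_for_url(endpoint_type: str) -> str:
    # "sqldatabase" maps to the camelCase collection segment; other item
    # types ("warehouse", "lakehouse") just take a plural "s".
    return "sqlDatabases" if endpoint_type == "sqldatabase" else f"{endpoint_type}s"

assert endpoint_for_url("warehouse") == "warehouses"
assert endpoint_for_url("lakehouse") == "lakehouses"
assert endpoint_for_url("sqldatabase") == "sqlDatabases"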
|