semantic-link-labs 0.8.3__py3-none-any.whl → 0.8.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of semantic-link-labs might be problematic. Click here for more details.
- {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/METADATA +38 -8
- {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/RECORD +109 -104
- {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +53 -1
- sempy_labs/_bpa_translation/_model/_translations_am-ET.po +24 -5
- sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +28 -4
- sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +33 -4
- sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_da-DK.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_de-DE.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_el-GR.po +36 -4
- sempy_labs/_bpa_translation/_model/_translations_es-ES.po +90 -58
- sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +31 -5
- sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_he-IL.po +28 -4
- sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_id-ID.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_is-IS.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_it-IT.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +24 -4
- sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +72 -56
- sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +95 -71
- sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +33 -4
- sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +34 -4
- sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_te-IN.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_th-TH.po +31 -4
- sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +32 -4
- sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +100 -72
- sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +23 -5
- sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +32 -4
- sempy_labs/_capacities.py +49 -14
- sempy_labs/_capacity_migration.py +1 -7
- sempy_labs/_data_pipelines.py +6 -0
- sempy_labs/_dataflows.py +118 -1
- sempy_labs/_dax.py +189 -3
- sempy_labs/_deployment_pipelines.py +13 -7
- sempy_labs/_environments.py +6 -0
- sempy_labs/_eventhouses.py +6 -0
- sempy_labs/_eventstreams.py +6 -0
- sempy_labs/_external_data_shares.py +6 -4
- sempy_labs/_generate_semantic_model.py +26 -3
- sempy_labs/_git.py +14 -14
- sempy_labs/_helper_functions.py +197 -1
- sempy_labs/_icons.py +55 -22
- sempy_labs/_kql_databases.py +6 -0
- sempy_labs/_kql_querysets.py +6 -0
- sempy_labs/_list_functions.py +1 -1
- sempy_labs/_managed_private_endpoints.py +166 -0
- sempy_labs/_mirrored_databases.py +428 -0
- sempy_labs/_mirrored_warehouses.py +2 -0
- sempy_labs/_ml_experiments.py +6 -0
- sempy_labs/_ml_models.py +7 -1
- sempy_labs/_model_bpa.py +215 -181
- sempy_labs/_model_bpa_bulk.py +46 -42
- sempy_labs/_model_bpa_rules.py +8 -3
- sempy_labs/_model_dependencies.py +41 -87
- sempy_labs/_notebooks.py +107 -12
- sempy_labs/_query_scale_out.py +8 -6
- sempy_labs/_refresh_semantic_model.py +299 -49
- sempy_labs/_spark.py +12 -5
- sempy_labs/_translations.py +2 -0
- sempy_labs/_vertipaq.py +89 -86
- sempy_labs/_warehouses.py +79 -0
- sempy_labs/_workloads.py +128 -0
- sempy_labs/_workspace_identity.py +4 -4
- sempy_labs/_workspaces.py +14 -1
- sempy_labs/admin/__init__.py +2 -0
- sempy_labs/admin/_basic_functions.py +131 -43
- sempy_labs/admin/_domains.py +18 -18
- sempy_labs/directlake/__init__.py +2 -0
- sempy_labs/directlake/_directlake_schema_sync.py +2 -1
- sempy_labs/directlake/_dl_helper.py +4 -1
- sempy_labs/directlake/_get_shared_expression.py +7 -1
- sempy_labs/directlake/_guardrails.py +2 -1
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +1 -7
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +78 -0
- sempy_labs/directlake/_update_directlake_partition_entity.py +13 -32
- sempy_labs/directlake/_warm_cache.py +10 -9
- sempy_labs/lakehouse/_get_lakehouse_tables.py +6 -2
- sempy_labs/lakehouse/_shortcuts.py +4 -0
- sempy_labs/migration/_create_pqt_file.py +5 -2
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +3 -2
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +2 -0
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +2 -8
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +17 -0
- sempy_labs/migration/_migration_validation.py +2 -0
- sempy_labs/migration/_refresh_calc_tables.py +1 -0
- sempy_labs/report/__init__.py +6 -1
- sempy_labs/report/_download_report.py +75 -0
- sempy_labs/report/_generate_report.py +6 -0
- sempy_labs/report/_paginated.py +74 -0
- sempy_labs/report/_report_functions.py +6 -0
- sempy_labs/report/_report_rebind.py +2 -0
- sempy_labs/report/_reportwrapper.py +4 -2
- sempy_labs/tom/_model.py +135 -68
- {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,166 @@
|
|
|
1
|
+
import sempy.fabric as fabric
|
|
2
|
+
import pandas as pd
|
|
3
|
+
import sempy_labs._icons as icons
|
|
4
|
+
from typing import Optional
|
|
5
|
+
from sempy_labs._helper_functions import (
|
|
6
|
+
resolve_workspace_name_and_id,
|
|
7
|
+
lro,
|
|
8
|
+
pagination,
|
|
9
|
+
)
|
|
10
|
+
from sempy.fabric.exceptions import FabricHTTPException
|
|
11
|
+
from uuid import UUID
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def create_managed_private_endpoint(
    name: str,
    target_private_link_resource_id: UUID,
    target_subresource_type: str,
    request_message: Optional[str] = None,
    workspace: Optional[str] = None,
):
    """
    Creates a managed private endpoint.

    This is a wrapper function for the following API: `Managed Private Endpoints - Create Workspace Managed Private Endpoint <https://learn.microsoft.com/rest/api/fabric/core/managed-private-endpoints/create-workspace-managed-private-endpoint>`.

    Parameters
    ----------
    name: str
        Name of the managed private endpoint.
    target_private_link_resource_id: UUID
        Resource Id of data source for which private endpoint needs to be created.
    target_subresource_type : str
        Sub-resource pointing to the Private-link resource.
    request_message : str, default=None
        Message to approve private endpoint request. Should not be more than 140 characters.
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    # Assemble the request payload; the approval message is optional and
    # length-limited by the service.
    payload = {
        "name": name,
        "targetPrivateLinkResourceId": target_private_link_resource_id,
        "targetSubresourceType": target_subresource_type,
    }

    if request_message is not None:
        if len(request_message) > 140:
            raise ValueError(
                f"{icons.red_dot} The request message cannot be more than 140 characters."
            )
        payload["requestMessage"] = request_message

    rest_client = fabric.FabricRestClient()
    post_response = rest_client.post(
        f"/v1/workspaces/{workspace_id}/managedPrivateEndpoints", json=payload
    )

    # Creation may complete asynchronously; accept immediate (201) or
    # long-running (202) completion.
    lro(rest_client, post_response, status_codes=[201, 202])

    print(
        f"{icons.green_dot} The '{name}' managed private endpoint has been created within the '{workspace}' workspace."
    )
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
def list_managed_private_endpoints(workspace: Optional[str] = None) -> pd.DataFrame:
    """
    Shows the managed private endpoints within a workspace.

    This is a wrapper function for the following API: `Managed Private Endpoints - List Workspace Managed Private Endpoints <https://learn.microsoft.com/rest/api/fabric/core/managed-private-endpoints/list-workspace-managed-private-endpoints>`.

    Parameters
    ----------
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the managed private endpoints within a workspace.
    """

    # Start from an empty frame so the column layout is stable even when the
    # workspace contains no managed private endpoints.
    df = pd.DataFrame(
        columns=[
            "Managed Private Endpoint Name",
            "Managed Private Endpoint Id",
            "Target Private Link Resource Id",
            "Provisioning State",
            "Connection Status",
            "Connection Description",
            "Target Subresource Type",
        ]
    )

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    rest_client = fabric.FabricRestClient()
    initial = rest_client.get(f"/v1/workspaces/{workspace_id}/managedPrivateEndpoints")
    if initial.status_code != 200:
        raise FabricHTTPException(initial)

    # Follow continuation tokens so every page of results is collected.
    pages = pagination(rest_client, initial)

    for page in pages:
        for endpoint in page.get("value", []):
            connection = endpoint.get("connectionState", {})
            row = {
                "Managed Private Endpoint Name": endpoint.get("name"),
                "Managed Private Endpoint Id": endpoint.get("id"),
                "Target Private Link Resource Id": endpoint.get("targetPrivateLinkResourceId"),
                "Provisioning State": endpoint.get("provisioningState"),
                "Connection Status": connection.get("status"),
                "Connection Description": connection.get("description"),
                "Target Subresource Type": endpoint.get("targetSubresourceType"),
            }
            df = pd.concat([df, pd.DataFrame(row, index=[0])], ignore_index=True)

    return df
|
|
124
|
+
|
|
125
|
+
|
|
126
|
+
def delete_managed_private_endpoint(
    managed_private_endpoint: str, workspace: Optional[str] = None
):
    """
    Deletes a Fabric managed private endpoint.

    This is a wrapper function for the following API: `Managed Private Endpoints - Delete Workspace Managed Private Endpoint <https://learn.microsoft.com/rest/api/fabric/core/managed-private-endpoints/delete-workspace-managed-private-endpoint>`.

    Parameters
    ----------
    managed_private_endpoint: str
        Name of the managed private endpoint.
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    # Resolve the endpoint name to its id via the list API; fail clearly when
    # no endpoint with that name exists in the workspace.
    endpoints = list_managed_private_endpoints(workspace=workspace)
    matches = endpoints[
        endpoints["Managed Private Endpoint Name"] == managed_private_endpoint
    ]

    if matches.empty:
        raise ValueError(
            f"{icons.red_dot} The '{managed_private_endpoint}' managed private endpoint does not exist within the '{workspace}' workspace."
        )

    endpoint_id = matches["Managed Private Endpoint Id"].iloc[0]

    rest_client = fabric.FabricRestClient()
    delete_response = rest_client.delete(
        f"/v1/workspaces/{workspace_id}/managedPrivateEndpoints/{endpoint_id}"
    )

    if delete_response.status_code != 200:
        raise FabricHTTPException(delete_response)

    print(
        f"{icons.green_dot} The '{managed_private_endpoint}' managed private endpoint within the '{workspace}' workspace has been deleted."
    )
|
|
@@ -0,0 +1,428 @@
|
|
|
1
|
+
import sempy.fabric as fabric
|
|
2
|
+
import pandas as pd
|
|
3
|
+
from typing import Optional
|
|
4
|
+
from sempy_labs._helper_functions import (
|
|
5
|
+
resolve_workspace_name_and_id,
|
|
6
|
+
pagination,
|
|
7
|
+
lro,
|
|
8
|
+
_decode_b64,
|
|
9
|
+
)
|
|
10
|
+
from sempy.fabric.exceptions import FabricHTTPException
|
|
11
|
+
import sempy_labs._icons as icons
|
|
12
|
+
import base64
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def list_mirrored_databases(workspace: Optional[str] = None) -> pd.DataFrame:
    """
    Shows the mirrored databases within a workspace.

    This is a wrapper function for the following API: `Items - List Mirrored Databases <https://learn.microsoft.com/rest/api/fabric/mirroredwarehouse/items/list-mirrored-databases>`_.

    Parameters
    ----------
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the mirrored databases within a workspace.
    """

    # Empty frame up front keeps the column layout stable for empty workspaces.
    df = pd.DataFrame(
        columns=[
            "Mirrored Database Name",
            "Mirrored Database Id",
            "Description",
            "OneLake Tables Path",
            "SQL Endpoint Connection String",
            "SQL Endpoint Id",
            "Provisioning Status",
            "Default Schema",
        ]
    )

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    rest_client = fabric.FabricRestClient()
    initial = rest_client.get(f"/v1/workspaces/{workspace_id}/mirroredDatabases")
    if initial.status_code != 200:
        raise FabricHTTPException(initial)
    pages = pagination(rest_client, initial)

    for page in pages:
        for item in page.get("value", []):
            properties = item.get("properties", {})
            sql_endpoint = properties.get("sqlEndpointProperties", {})
            row = {
                "Mirrored Database Name": item.get("displayName"),
                "Mirrored Database Id": item.get("id"),
                "Description": item.get("description"),
                "OneLake Tables Path": properties.get("oneLakeTablesPath"),
                "SQL Endpoint Connection String": sql_endpoint.get("connectionString"),
                "SQL Endpoint Id": sql_endpoint.get("id"),
                "Provisioning Status": sql_endpoint.get("provisioningStatus"),
                "Default Schema": properties.get("defaultSchema"),
            }
            df = pd.concat([df, pd.DataFrame(row, index=[0])], ignore_index=True)

    return df
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
def create_mirrored_database(
    name: str, description: Optional[str] = None, workspace: Optional[str] = None
):
    """
    Creates a Fabric mirrored database.

    This is a wrapper function for the following API: `Items - Create Mirrored Database <https://learn.microsoft.com/rest/api/fabric/mirroreddatabase/items/create-mirrored-database>`_.

    Parameters
    ----------
    name: str
        Name of the mirrored database.
    description : str, default=None
        A description of the mirrored database.
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    # Description is only sent when provided (truthy), matching the API's
    # optional field.
    payload = {"displayName": name}
    if description:
        payload["description"] = description

    rest_client = fabric.FabricRestClient()
    create_response = rest_client.post(
        f"/v1/workspaces/{workspace_id}/mirroredDatabases", json=payload
    )

    if create_response.status_code != 201:
        raise FabricHTTPException(create_response)

    print(
        f"{icons.green_dot} The '{name}' mirrored database has been created within the '{workspace}' workspace."
    )
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
def delete_mirrored_database(mirrored_database: str, workspace: Optional[str] = None):
    """
    Deletes a mirrored database.

    This is a wrapper function for the following API: `Items - Delete Mirrored Database <https://learn.microsoft.com/rest/api/fabric/mirroreddatabase/items/delete-mirrored-database>`_.

    Parameters
    ----------
    mirrored_database: str
        Name of the mirrored database.
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    # Translate the display name into the item id required by the REST path.
    database_id = fabric.resolve_item_id(
        item_name=mirrored_database, type="MirroredDatabase", workspace=workspace
    )

    rest_client = fabric.FabricRestClient()
    delete_response = rest_client.delete(
        f"/v1/workspaces/{workspace_id}/mirroredDatabases/{database_id}"
    )

    if delete_response.status_code != 200:
        raise FabricHTTPException(delete_response)

    print(
        f"{icons.green_dot} The '{mirrored_database}' mirrored database within the '{workspace}' workspace has been deleted."
    )
|
|
147
|
+
|
|
148
|
+
|
|
149
|
+
def get_mirroring_status(
    mirrored_database: str, workspace: Optional[str] = None
) -> str:
    """
    Get the status of the mirrored database.

    This is a wrapper function for the following API: `Mirroring - Get Mirroring Status <https://learn.microsoft.com/rest/api/fabric/mirroreddatabase/mirroring/get-mirroring-status>`_.

    Parameters
    ----------
    mirrored_database: str
        Name of the mirrored database.
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    str
        The status of a mirrored database.
    """

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    # Translate the display name into the item id required by the REST path.
    item_id = fabric.resolve_item_id(
        item_name=mirrored_database, type="MirroredDatabase", workspace=workspace
    )

    client = fabric.FabricRestClient()
    response = client.post(
        f"/v1/workspaces/{workspace_id}/mirroredDatabases/{item_id}/getMirroringStatus"
    )

    if response.status_code != 200:
        raise FabricHTTPException(response)

    # Bug fix: the previous default was an empty dict, which contradicts the
    # declared `-> str` return type. Fall back to an empty string instead
    # (both defaults are falsy, so truthiness checks by callers are unaffected).
    return response.json().get("status", "")
|
|
187
|
+
|
|
188
|
+
|
|
189
|
+
def get_tables_mirroring_status(
    mirrored_database: str, workspace: Optional[str] = None
) -> pd.DataFrame:
    """
    Gets the mirroring status of the tables.

    This is a wrapper function for the following API: `Mirroring - Get Tables Mirroring Status <https://learn.microsoft.com/rest/api/fabric/mirroreddatabase/mirroring/get-tables-mirroring-status>`_.

    Parameters
    ----------
    mirrored_database: str
        Name of the mirrored database.
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the mirroring status of the tables.
    """

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    database_id = fabric.resolve_item_id(
        item_name=mirrored_database, type="MirroredDatabase", workspace=workspace
    )

    rest_client = fabric.FabricRestClient()
    status_response = rest_client.post(
        f"/v1/workspaces/{workspace_id}/mirroredDatabases/{database_id}/getTablesMirroringStatus"
    )

    if status_response.status_code != 200:
        raise FabricHTTPException(status_response)

    # Collect all pages of per-table status records.
    pages = pagination(rest_client, status_response)

    df = pd.DataFrame(
        columns=[
            "Source Schema Name",
            "Source Table Name",
            "Status",
            "Processed Bytes",
            "Processed Rows",
            "Last Sync Date",
        ]
    )

    for page in pages:
        for table in page.get("data", []):
            metrics = table.get("metrics", {})
            row = {
                "Source Schema Name": table.get("sourceSchemaName"),
                "Source Table Name": table.get("sourceTableName"),
                "Status": table.get("status"),
                "Processed Bytes": metrics.get("processedBytes"),
                "Processed Rows": metrics.get("processedRows"),
                "Last Sync Date": metrics.get("lastSyncDateTime"),
            }

            df = pd.concat([df, pd.DataFrame(row, index=[0])], ignore_index=True)

    # Normalize column dtypes: metric counters to int, sync timestamp to
    # datetime. NOTE(review): assumes the API always populates the metric
    # fields — astype(int) would raise on missing values; confirm with service.
    int_cols = ["Processed Bytes", "Processed Rows"]
    df[int_cols] = df[int_cols].astype(int)
    df["Last Sync Date"] = pd.to_datetime(df["Last Sync Date"])

    return df
|
|
258
|
+
|
|
259
|
+
|
|
260
|
+
def start_mirroring(mirrored_database: str, workspace: Optional[str] = None):
    """
    Starts the mirroring for a database.

    This is a wrapper function for the following API: `Mirroring - Start Mirroring <https://learn.microsoft.com/rest/api/fabric/mirroreddatabase/mirroring/start-mirroring>`_.

    Parameters
    ----------
    mirrored_database: str
        Name of the mirrored database.
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    database_id = fabric.resolve_item_id(
        item_name=mirrored_database, type="MirroredDatabase", workspace=workspace
    )

    rest_client = fabric.FabricRestClient()
    start_response = rest_client.post(
        f"/v1/workspaces/{workspace_id}/mirroredDatabases/{database_id}/startMirroring"
    )

    if start_response.status_code != 200:
        raise FabricHTTPException(start_response)

    print(
        f"{icons.green_dot} Mirroring has started for the '{mirrored_database}' database within the '{workspace}' workspace."
    )
|
|
293
|
+
|
|
294
|
+
|
|
295
|
+
def stop_mirroring(mirrored_database: str, workspace: Optional[str] = None):
    """
    Stops the mirroring for a database.

    This is a wrapper function for the following API: `Mirroring - Stop Mirroring <https://learn.microsoft.com/rest/api/fabric/mirroreddatabase/mirroring/stop-mirroring>`_.

    Parameters
    ----------
    mirrored_database: str
        Name of the mirrored database.
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    database_id = fabric.resolve_item_id(
        item_name=mirrored_database, type="MirroredDatabase", workspace=workspace
    )

    rest_client = fabric.FabricRestClient()
    stop_response = rest_client.post(
        f"/v1/workspaces/{workspace_id}/mirroredDatabases/{database_id}/stopMirroring"
    )

    if stop_response.status_code != 200:
        raise FabricHTTPException(stop_response)

    print(
        f"{icons.green_dot} Mirroring has stopped for the '{mirrored_database}' database within the '{workspace}' workspace."
    )
|
|
328
|
+
|
|
329
|
+
|
|
330
|
+
def get_mirrored_database_definition(
    mirrored_database: str, workspace: Optional[str] = None, decode: bool = True
) -> str:
    """
    Obtains the mirrored database definition.

    This is a wrapper function for the following API: `Items - Get Mirrored Database Definition <https://learn.microsoft.com/rest/api/fabric/mirroreddatabase/items/get-mirrored-database-definition>`_.

    Parameters
    ----------
    mirrored_database : str
        The name of the mirrored database.
    workspace : str, default=None
        The name of the workspace.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    decode : bool, default=True
        If True, decodes the mirrored database definition file into .json format.
        If False, obtains the mirrored database definition file in base64 format.

    Returns
    -------
    str
        The mirrored database definition.
    """

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
    item_id = fabric.resolve_item_id(
        item_name=mirrored_database, type="MirroredDatabase", workspace=workspace
    )
    client = fabric.FabricRestClient()
    response = client.post(
        f"v1/workspaces/{workspace_id}/mirroredDatabases/{item_id}/getDefinition",
    )

    # getDefinition is a long-running operation; wait for it, then pull the
    # payload of the 'mirroredDatabase.json' definition part.
    definition = lro(client, response).json()
    parts = pd.json_normalize(definition["definition"]["parts"])
    database_part = parts[parts["path"] == "mirroredDatabase.json"]
    encoded_payload = database_part["payload"].iloc[0]

    return _decode_b64(encoded_payload) if decode else encoded_payload
|
|
376
|
+
|
|
377
|
+
|
|
378
|
+
def update_mirrored_database_definition(
    mirrored_database: str,
    mirrored_database_content: dict,
    workspace: Optional[str] = None,
):
    """
    Updates an existing mirrored database with a new definition.

    This is a wrapper function for the following API: `Items - Update Mirrored Database Definition <https://learn.microsoft.com/rest/api/fabric/mirroreddatabase/items/update-mirrored-database-definition>`_.

    Parameters
    ----------
    mirrored_database : str
        The name of the mirrored database to be updated.
    mirrored_database_content : dict
        The mirrored database definition (not in Base64 format).
    workspace : str, default=None
        The name of the workspace.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """
    import json

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
    client = fabric.FabricRestClient()

    # Bug fix: base64.b64encode requires bytes, but the caller supplies a dict;
    # serialize to JSON bytes first, and decode the result back to str so the
    # request body is JSON-serializable.
    payload = base64.b64encode(
        json.dumps(mirrored_database_content).encode("utf-8")
    ).decode("utf-8")

    item_id = fabric.resolve_item_id(
        item_name=mirrored_database, type="MirroredDatabase", workspace=workspace
    )

    # Bug fix: removed the stray '"format": "ipynb"' key — a copy-paste from
    # the notebook API; a mirrored database definition is a single
    # 'mirroredDatabase.json' part.
    request_body = {
        "displayName": mirrored_database,
        "definition": {
            "parts": [
                {
                    "path": "mirroredDatabase.json",
                    "payload": payload,
                    "payloadType": "InlineBase64",
                }
            ],
        },
    }

    response = client.post(
        f"v1/workspaces/{workspace_id}/mirroredDatabases/{item_id}/updateDefinition",
        json=request_body,
    )

    # updateDefinition may run as a long-running operation.
    lro(client, response, return_status_code=True)

    print(
        f"{icons.green_dot} The '{mirrored_database}' mirrored database was updated within the '{workspace}' workspace."
    )
|
|
@@ -12,6 +12,8 @@ def list_mirrored_warehouses(workspace: Optional[str] = None) -> pd.DataFrame:
|
|
|
12
12
|
"""
|
|
13
13
|
Shows the mirrored warehouses within a workspace.
|
|
14
14
|
|
|
15
|
+
This is a wrapper function for the following API: `Items - List Mirrored Warehouses <https://learn.microsoft.com/rest/api/fabric/mirroredwarehouse/items/list-mirrored-warehouses>`_.
|
|
16
|
+
|
|
15
17
|
Parameters
|
|
16
18
|
----------
|
|
17
19
|
workspace : str, default=None
|
sempy_labs/_ml_experiments.py
CHANGED
|
@@ -14,6 +14,8 @@ def list_ml_experiments(workspace: Optional[str] = None) -> pd.DataFrame:
|
|
|
14
14
|
"""
|
|
15
15
|
Shows the ML experiments within a workspace.
|
|
16
16
|
|
|
17
|
+
This is a wrapper function for the following API: `Items - List ML Experiments <https://learn.microsoft.com/rest/api/fabric/mlexperiment/items/list-ml-experiments>`_.
|
|
18
|
+
|
|
17
19
|
Parameters
|
|
18
20
|
----------
|
|
19
21
|
workspace : str, default=None
|
|
@@ -60,6 +62,8 @@ def create_ml_experiment(
|
|
|
60
62
|
"""
|
|
61
63
|
Creates a Fabric ML experiment.
|
|
62
64
|
|
|
65
|
+
This is a wrapper function for the following API: `Items - Create ML Experiment <https://learn.microsoft.com/rest/api/fabric/mlexperiment/items/create-ml-experiment>`_.
|
|
66
|
+
|
|
63
67
|
Parameters
|
|
64
68
|
----------
|
|
65
69
|
name: str
|
|
@@ -95,6 +99,8 @@ def delete_ml_experiment(name: str, workspace: Optional[str] = None):
|
|
|
95
99
|
"""
|
|
96
100
|
Deletes a Fabric ML experiment.
|
|
97
101
|
|
|
102
|
+
This is a wrapper function for the following API: `Items - Delete ML Experiment <https://learn.microsoft.com/rest/api/fabric/mlexperiment/items/delete-ml-experiment>`_.
|
|
103
|
+
|
|
98
104
|
Parameters
|
|
99
105
|
----------
|
|
100
106
|
name: str
|
sempy_labs/_ml_models.py
CHANGED
|
@@ -14,6 +14,8 @@ def list_ml_models(workspace: Optional[str] = None) -> pd.DataFrame:
|
|
|
14
14
|
"""
|
|
15
15
|
Shows the ML models within a workspace.
|
|
16
16
|
|
|
17
|
+
This is a wrapper function for the following API: `Items - List ML Models <https://learn.microsoft.com/rest/api/fabric/mlmodel/items/list-ml-models>`_.
|
|
18
|
+
|
|
17
19
|
Parameters
|
|
18
20
|
----------
|
|
19
21
|
workspace : str, default=None
|
|
@@ -60,12 +62,14 @@ def create_ml_model(
|
|
|
60
62
|
"""
|
|
61
63
|
Creates a Fabric ML model.
|
|
62
64
|
|
|
65
|
+
This is a wrapper function for the following API: `Items - Create ML Model <https://learn.microsoft.com/rest/api/fabric/mlmodel/items/create-ml-model>`_.
|
|
66
|
+
|
|
63
67
|
Parameters
|
|
64
68
|
----------
|
|
65
69
|
name: str
|
|
66
70
|
Name of the ML model.
|
|
67
71
|
description : str, default=None
|
|
68
|
-
A description of the
|
|
72
|
+
A description of the ML model.
|
|
69
73
|
workspace : str, default=None
|
|
70
74
|
The Fabric workspace name.
|
|
71
75
|
Defaults to None which resolves to the workspace of the attached lakehouse
|
|
@@ -93,6 +97,8 @@ def delete_ml_model(name: str, workspace: Optional[str] = None):
|
|
|
93
97
|
"""
|
|
94
98
|
Deletes a Fabric ML model.
|
|
95
99
|
|
|
100
|
+
This is a wrapper function for the following API: `Items - Delete ML Model <https://learn.microsoft.com/rest/api/fabric/mlmodel/items/delete-ml-model>`_.
|
|
101
|
+
|
|
96
102
|
Parameters
|
|
97
103
|
----------
|
|
98
104
|
name: str
|