semantic-link-labs 0.7.2__py3-none-any.whl → 0.7.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of semantic-link-labs might be problematic.
- {semantic_link_labs-0.7.2.dist-info → semantic_link_labs-0.7.4.dist-info}/METADATA +15 -3
- semantic_link_labs-0.7.4.dist-info/RECORD +134 -0
- {semantic_link_labs-0.7.2.dist-info → semantic_link_labs-0.7.4.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +120 -24
- sempy_labs/_bpa_translation/{_translations_am-ET.po → _model/_translations_am-ET.po} +22 -0
- sempy_labs/_bpa_translation/{_translations_ar-AE.po → _model/_translations_ar-AE.po} +24 -0
- sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +938 -0
- sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +934 -0
- sempy_labs/_bpa_translation/{_translations_cs-CZ.po → _model/_translations_cs-CZ.po} +179 -157
- sempy_labs/_bpa_translation/{_translations_da-DK.po → _model/_translations_da-DK.po} +24 -0
- sempy_labs/_bpa_translation/{_translations_de-DE.po → _model/_translations_de-DE.po} +77 -52
- sempy_labs/_bpa_translation/{_translations_el-GR.po → _model/_translations_el-GR.po} +25 -0
- sempy_labs/_bpa_translation/{_translations_es-ES.po → _model/_translations_es-ES.po} +67 -43
- sempy_labs/_bpa_translation/{_translations_fa-IR.po → _model/_translations_fa-IR.po} +24 -0
- sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +915 -0
- sempy_labs/_bpa_translation/{_translations_fr-FR.po → _model/_translations_fr-FR.po} +83 -57
- sempy_labs/_bpa_translation/{_translations_ga-IE.po → _model/_translations_ga-IE.po} +25 -0
- sempy_labs/_bpa_translation/{_translations_he-IL.po → _model/_translations_he-IL.po} +23 -0
- sempy_labs/_bpa_translation/{_translations_hi-IN.po → _model/_translations_hi-IN.po} +24 -0
- sempy_labs/_bpa_translation/{_translations_hu-HU.po → _model/_translations_hu-HU.po} +25 -0
- sempy_labs/_bpa_translation/_model/_translations_id-ID.po +918 -0
- sempy_labs/_bpa_translation/{_translations_is-IS.po → _model/_translations_is-IS.po} +25 -0
- sempy_labs/_bpa_translation/{_translations_it-IT.po → _model/_translations_it-IT.po} +25 -0
- sempy_labs/_bpa_translation/{_translations_ja-JP.po → _model/_translations_ja-JP.po} +21 -0
- sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +823 -0
- sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +937 -0
- sempy_labs/_bpa_translation/{_translations_nl-NL.po → _model/_translations_nl-NL.po} +80 -56
- sempy_labs/_bpa_translation/{_translations_pl-PL.po → _model/_translations_pl-PL.po} +101 -76
- sempy_labs/_bpa_translation/{_translations_pt-BR.po → _model/_translations_pt-BR.po} +25 -0
- sempy_labs/_bpa_translation/{_translations_pt-PT.po → _model/_translations_pt-PT.po} +25 -0
- sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +939 -0
- sempy_labs/_bpa_translation/{_translations_ru-RU.po → _model/_translations_ru-RU.po} +25 -0
- sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +925 -0
- sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +922 -0
- sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +914 -0
- sempy_labs/_bpa_translation/{_translations_ta-IN.po → _model/_translations_ta-IN.po} +26 -0
- sempy_labs/_bpa_translation/{_translations_te-IN.po → _model/_translations_te-IN.po} +24 -0
- sempy_labs/_bpa_translation/{_translations_th-TH.po → _model/_translations_th-TH.po} +24 -0
- sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +925 -0
- sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +933 -0
- sempy_labs/_bpa_translation/{_translations_zh-CN.po → _model/_translations_zh-CN.po} +116 -97
- sempy_labs/_bpa_translation/{_translations_zu-ZA.po → _model/_translations_zu-ZA.po} +25 -0
- sempy_labs/_capacities.py +541 -0
- sempy_labs/_clear_cache.py +298 -3
- sempy_labs/_connections.py +138 -0
- sempy_labs/_dataflows.py +130 -0
- sempy_labs/_deployment_pipelines.py +171 -0
- sempy_labs/_environments.py +156 -0
- sempy_labs/_generate_semantic_model.py +148 -27
- sempy_labs/_git.py +380 -0
- sempy_labs/_helper_functions.py +203 -8
- sempy_labs/_icons.py +43 -0
- sempy_labs/_list_functions.py +170 -1012
- sempy_labs/_model_bpa.py +90 -112
- sempy_labs/_model_bpa_bulk.py +3 -1
- sempy_labs/_model_bpa_rules.py +788 -800
- sempy_labs/_notebooks.py +143 -0
- sempy_labs/_query_scale_out.py +28 -7
- sempy_labs/_spark.py +465 -0
- sempy_labs/_sql.py +120 -0
- sempy_labs/_translations.py +3 -1
- sempy_labs/_vertipaq.py +160 -99
- sempy_labs/_workspace_identity.py +66 -0
- sempy_labs/_workspaces.py +294 -0
- sempy_labs/directlake/__init__.py +2 -0
- sempy_labs/directlake/_directlake_schema_compare.py +1 -2
- sempy_labs/directlake/_directlake_schema_sync.py +1 -2
- sempy_labs/directlake/_dl_helper.py +4 -7
- sempy_labs/directlake/_generate_shared_expression.py +85 -0
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +1 -2
- sempy_labs/lakehouse/_get_lakehouse_tables.py +7 -3
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +5 -0
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +5 -0
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +6 -2
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +6 -5
- sempy_labs/migration/_migration_validation.py +6 -0
- sempy_labs/report/_report_functions.py +21 -42
- sempy_labs/report/_report_rebind.py +5 -0
- sempy_labs/tom/_model.py +95 -52
- semantic_link_labs-0.7.2.dist-info/RECORD +0 -111
- {semantic_link_labs-0.7.2.dist-info → semantic_link_labs-0.7.4.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.7.2.dist-info → semantic_link_labs-0.7.4.dist-info}/top_level.txt +0 -0
sempy_labs/_deployment_pipelines.py (new file)

@@ -0,0 +1,171 @@
+import sempy.fabric as fabric
+import pandas as pd
+from sempy_labs._helper_functions import (
+    pagination,
+)
+import sempy_labs._icons as icons
+from sempy.fabric.exceptions import FabricHTTPException
+
+
+def list_deployment_pipelines() -> pd.DataFrame:
+    """
+    Shows a list of deployment pipelines the user can access.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing a list of deployment pipelines the user can access.
+    """
+
+    df = pd.DataFrame(
+        columns=["Deployment Pipeline Id", "Deployment Pipeline Name", "Description"]
+    )
+
+    client = fabric.FabricRestClient()
+    response = client.get("/v1/deploymentPipelines")
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    responses = pagination(client, response)
+
+    for r in responses:
+        for v in r.get("value", []):
+            new_data = {
+                "Deployment Pipeline Id": v.get("id"),
+                "Deployment Pipeline Name": v.get("displayName"),
+                "Description": v.get("description"),
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    return df
+
+
+def list_deployment_pipeline_stages(deployment_pipeline: str) -> pd.DataFrame:
+    """
+    Shows the specified deployment pipeline stages.
+
+    Parameters
+    ----------
+    deployment_pipeline : str
+        The deployment pipeline name.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the specified deployment pipeline stages.
+    """
+
+    from sempy_labs._helper_functions import resolve_deployment_pipeline_id
+
+    df = pd.DataFrame(
+        columns=[
+            "Deployment Pipeline Stage Id",
+            "Deployment Pipeline Stage Name",
+            "Order",
+            "Description",
+            "Workspace Id",
+            "Workspace Name",
+            "Public",
+        ]
+    )
+
+    deployment_pipeline_id = resolve_deployment_pipeline_id(
+        deployment_pipeline=deployment_pipeline
+    )
+    client = fabric.FabricRestClient()
+    response = client.get(f"/v1/deploymentPipelines/{deployment_pipeline_id}/stages")
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    responses = pagination(client, response)
+
+    for r in responses:
+        for v in r.get("value", []):
+            new_data = {
+                "Deployment Pipeline Stage Id": v["id"],
+                "Deployment Pipeline Stage Name": v["displayName"],
+                "Description": v["description"],
+                "Order": v["order"],
+                "Workspace Id": v["workspaceId"],
+                "Workspace Name": v["workspaceName"],
+                "Public": v["isPublic"],
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    df["Order"] = df["Order"].astype(int)
+    df["Public"] = df["Public"].astype(bool)
+
+    return df
+
+
+def list_deployment_pipeline_stage_items(
+    deployment_pipeline: str, stage_name: str
+) -> pd.DataFrame:
+    """
+    Shows the supported items from the workspace assigned to the specified stage of the specified deployment pipeline.
+
+    Parameters
+    ----------
+    deployment_pipeline : str
+        The deployment pipeline name.
+    stage_name : str
+        The deployment pipeline stage name.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the supported items from the workspace assigned to the specified stage of the specified deployment pipeline.
+    """
+
+    from sempy_labs._helper_functions import resolve_deployment_pipeline_id
+
+    df = pd.DataFrame(
+        columns=[
+            "Deployment Pipeline Stage Item Id",
+            "Deployment Pipeline Stage Item Name",
+            "Item Type",
+            "Source Item Id",
+            "Target Item Id",
+            "Last Deployment Time",
+        ]
+    )
+
+    deployment_pipeline_id = resolve_deployment_pipeline_id(
+        deployment_pipeline=deployment_pipeline
+    )
+    dfPS = list_deployment_pipeline_stages(deployment_pipeline=deployment_pipeline)
+    dfPS_filt = dfPS[dfPS["Deployment Pipeline Stage Name"] == stage_name]
+
+    if len(dfPS_filt) == 0:
+        raise ValueError(
+            f"{icons.red_dot} The '{stage_name}' stage does not exist within the '{deployment_pipeline}' deployment pipeline."
+        )
+    stage_id = dfPS_filt["Deployment Pipeline Stage ID"].iloc[0]
+
+    client = fabric.FabricRestClient()
+    response = client.get(
+        f"/v1/deploymentPipelines/{deployment_pipeline_id}/stages/{stage_id}/items"
+    )
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    responses = pagination(client, response)
+
+    for r in responses:
+        for v in r.get("value", []):
+            new_data = {
+                "Deployment Pipeline Stage Item Id": v.get("itemId"),
+                "Deployment Pipeline Stage Item Name": v.get("itemDisplayName"),
+                "Item Type": v.get("itemType"),
+                "Source Item Id": v.get("sourceItemId"),
+                "Target Item Id": v.get("targetItemId"),
+                "Last Deployment Time": v.get("lastDeploymentTime"),
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    df["Last Deployment Time"] = pd.to_datetime(df["Last Deployment Time"])
+
+    return df
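
For orientation, a minimal usage sketch of the new deployment-pipeline helpers added by this file. It assumes a Microsoft Fabric notebook session where sempy can authenticate, uses placeholder pipeline and stage names, and imports from the private module shown above (the functions may also be re-exported from the package root, given the large additions to sempy_labs/__init__.py):

from sempy_labs._deployment_pipelines import (
    list_deployment_pipelines,
    list_deployment_pipeline_stages,
    list_deployment_pipeline_stage_items,
)

# Enumerate the pipelines the current user can access.
pipelines = list_deployment_pipelines()

# Drill into one pipeline's stages and the items assigned to a single stage.
# "Sales Pipeline" and "Development" are placeholder names.
stages = list_deployment_pipeline_stages(deployment_pipeline="Sales Pipeline")
items = list_deployment_pipeline_stage_items(
    deployment_pipeline="Sales Pipeline", stage_name="Development"
)

Note that list_deployment_pipeline_stage_items reads the stage id from a "Deployment Pipeline Stage ID" column while list_deployment_pipeline_stages creates that column as "Deployment Pipeline Stage Id", so as written the stage lookup would raise a KeyError.
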
sempy_labs/_environments.py (new file)

@@ -0,0 +1,156 @@
+import sempy.fabric as fabric
+import pandas as pd
+import sempy_labs._icons as icons
+from typing import Optional
+from sempy_labs._helper_functions import (
+    resolve_workspace_name_and_id,
+    lro,
+    pagination,
+)
+from sempy.fabric.exceptions import FabricHTTPException
+
+
+def create_environment(
+    environment: str, description: Optional[str] = None, workspace: Optional[str] = None
+):
+    """
+    Creates a Fabric environment.
+
+    Parameters
+    ----------
+    environment: str
+        Name of the environment.
+    description : str, default=None
+        A description of the environment.
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    request_body = {"displayName": environment}
+
+    if description:
+        request_body["description"] = description
+
+    client = fabric.FabricRestClient()
+    response = client.post(
+        f"/v1/workspaces/{workspace_id}/environments", json=request_body
+    )
+
+    lro(client, response, status_codes=[201, 202])
+
+    print(
+        f"{icons.green_dot} The '{environment}' environment has been created within the '{workspace}' workspace."
+    )
+
+
+def list_environments(workspace: Optional[str] = None) -> pd.DataFrame:
+    """
+    Shows the environments within a workspace.
+
+    Parameters
+    ----------
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the environments within a workspace.
+    """
+
+    df = pd.DataFrame(columns=["Environment Name", "Environment Id", "Description"])
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    client = fabric.FabricRestClient()
+    response = client.get(f"/v1/workspaces/{workspace_id}/environments")
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    responses = pagination(client, response)
+
+    for r in responses:
+        for v in r.get("value", []):
+            new_data = {
+                "Environment Name": v.get("displayName"),
+                "Environment Id": v.get("id"),
+                "Description": v.get("description"),
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    return df
+
+
+def delete_environment(environment: str, workspace: Optional[str] = None):
+    """
+    Deletes a Fabric environment.
+
+    Parameters
+    ----------
+    environment: str
+        Name of the environment.
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    from sempy_labs._helper_functions import resolve_environment_id
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    environment_id = resolve_environment_id(
+        environment=environment, workspace=workspace
+    )
+
+    client = fabric.FabricRestClient()
+    response = client.delete(
+        f"/v1/workspaces/{workspace_id}/environments/{environment_id}"
+    )
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    print(
+        f"{icons.green_dot} The '{environment}' environment within the '{workspace}' workspace has been deleted."
+    )
+
+
+def publish_environment(environment: str, workspace: Optional[str] = None):
+    """
+    Publishes a Fabric environment.
+
+    Parameters
+    ----------
+    environment: str
+        Name of the environment.
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    # https://learn.microsoft.com/en-us/rest/api/fabric/environment/spark-libraries/publish-environment?tabs=HTTP
+
+    from sempy_labs._helper_functions import resolve_environment_id
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    environment_id = resolve_environment_id(
+        environment=environment, workspace=workspace
+    )
+
+    client = fabric.FabricRestClient()
+    response = client.post(
+        f"/v1/workspaces/{workspace_id}/environments/{environment_id}/staging/publish"
+    )
+
+    lro(client, response)
+
+    print(
+        f"{icons.green_dot} The '{environment}' environment within the '{workspace}' workspace has been published."
+    )
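
A minimal sketch of how the new environment helpers might be called from a Fabric notebook. The environment and workspace names are placeholders, and the imports use the private module shown above:

from sempy_labs._environments import (
    create_environment,
    list_environments,
    publish_environment,
    delete_environment,
)

# Placeholder environment and workspace names.
create_environment(
    environment="SparkEnv", description="Team Spark settings", workspace="Dev Workspace"
)
publish_environment(environment="SparkEnv", workspace="Dev Workspace")

# List what exists in the workspace, then clean up.
print(list_environments(workspace="Dev Workspace"))
delete_environment(environment="SparkEnv", workspace="Dev Workspace")
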
sempy_labs/_generate_semantic_model.py

@@ -13,12 +13,14 @@ from sempy_labs._helper_functions import (
 )
 from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 import sempy_labs._icons as icons
+from sempy_labs._refresh_semantic_model import refresh_semantic_model


 def create_blank_semantic_model(
     dataset: str,
     compatibility_level: int = 1605,
     workspace: Optional[str] = None,
+    overwrite: Optional[bool] = True,
 ):
     """
     Creates a new blank semantic model (no tables/columns etc.).
@@ -33,34 +35,42 @@ def create_blank_semantic_model(
         The Fabric workspace name.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
+    overwrite : bool, default=False
+        If set to True, overwrites the existing semantic model in the workspace if it exists.
     """

     workspace = fabric.resolve_workspace_name(workspace)
+    dfD = fabric.list_datasets(workspace=workspace, mode="rest")
+    dfD_filt = dfD[dfD["Dataset Name"] == dataset]

-
+    if len(dfD_filt) > 0 and not overwrite:
+        raise ValueError(
+            f"{icons.warning} The '{dataset}' semantic model already exists within the '{workspace}' workspace. The 'overwrite' parameter is set to False so the blank new semantic model was not created."
+        )

+    min_compat = 1500
     if compatibility_level < min_compat:
         raise ValueError(
             f"{icons.red_dot} Compatiblity level must be at least {min_compat}."
         )

     tmsl = f"""
-
-
-
-
-
-
-
-
-
-
-
+    {{
+        "createOrReplace": {{
+            "object": {{
+                "database": '{dataset}'
+            }},
+            "database": {{
+                "name": '{dataset}',
+                "compatibilityLevel": {compatibility_level},
+                "model": {{
+                    "culture": "en-US",
+                    "defaultPowerBIDataSourceVersion": "powerBI_V3"
+                }}
+            }}
         }}
-}}
     }}
-
-"""
+    """

     fabric.execute_tmsl(script=tmsl, workspace=workspace)

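
A small sketch of the updated create_blank_semantic_model call, with placeholder names. Note that the new parameter defaults to True in the signature while the docstring added in this hunk says default=False, so passing overwrite explicitly is the safer reading of this change:

from sempy_labs._generate_semantic_model import create_blank_semantic_model

# Placeholder dataset and workspace names; overwrite passed explicitly.
create_blank_semantic_model(
    dataset="Blank Model", workspace="Dev Workspace", overwrite=False
)
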
@@ -89,12 +99,12 @@ def create_semantic_model_from_bim(

     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

-    dfI = fabric.
-    dfI_filt = dfI[(dfI["
+    dfI = fabric.list_datasets(workspace=workspace, mode="rest")
+    dfI_filt = dfI[(dfI["Dataset Name"] == dataset)]

     if len(dfI_filt) > 0:
         raise ValueError(
-            f"{icons.red_dot} '{dataset}' already exists as a semantic model in the '{workspace}' workspace."
+            f"{icons.red_dot} The '{dataset}' semantic model already exists as a semantic model in the '{workspace}' workspace."
         )

     client = fabric.FabricRestClient()
@@ -133,12 +143,77 @@ def create_semantic_model_from_bim(
     )


+def update_semantic_model_from_bim(
+    dataset: str, bim_file: dict, workspace: Optional[str] = None
+):
+    """
+    Updates a semantic model definition based on a Model.bim file.
+
+    Parameters
+    ----------
+    dataset : str
+        Name of the semantic model.
+    bim_file : dict
+        The model.bim file.
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    dfD = fabric.list_datasets(workspace=workspace, mode="rest")
+    dfD_filt = dfD[dfD["Dataset Name"] == dataset]
+    if len(dfD_filt) == 0:
+        raise ValueError(
+            f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace does not exist."
+        )
+    dataset_id = dfD_filt["Dataset Id"].iloc[0]
+
+    client = fabric.FabricRestClient()
+    defPBIDataset = {"version": "1.0", "settings": {}}
+
+    payloadPBIDefinition = _conv_b64(defPBIDataset)
+    payloadBim = _conv_b64(bim_file)
+
+    request_body = {
+        "displayName": dataset,
+        "definition": {
+            "parts": [
+                {
+                    "path": "model.bim",
+                    "payload": payloadBim,
+                    "payloadType": "InlineBase64",
+                },
+                {
+                    "path": "definition.pbidataset",
+                    "payload": payloadPBIDefinition,
+                    "payloadType": "InlineBase64",
+                },
+            ]
+        },
+    }
+
+    response = client.post(
+        f"/v1/workspaces/{workspace_id}/semanticModels/{dataset_id}/updateDefinition",
+        json=request_body,
+    )
+
+    lro(client, response, status_codes=[200, 202])
+
+    print(
+        f"{icons.green_dot} The '{dataset}' semantic model has been updated within the '{workspace}' workspace."
+    )
+
+
 def deploy_semantic_model(
     source_dataset: str,
     source_workspace: Optional[str] = None,
     target_dataset: Optional[str] = None,
     target_workspace: Optional[str] = None,
     refresh_target_dataset: Optional[bool] = True,
+    overwrite: Optional[bool] = False,
 ):
     """
     Deploys a semantic model based on an existing semantic model.
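
A sketch of round-tripping a model definition with the new update_semantic_model_from_bim, paired with get_semantic_model_bim from the same module. Dataset and workspace names are placeholders, and the target model must already exist or the function raises a ValueError:

from sempy_labs._generate_semantic_model import (
    get_semantic_model_bim,
    update_semantic_model_from_bim,
)

# Export the definition of a source model, then push it onto an existing target model.
bim = get_semantic_model_bim(dataset="Sales Model", workspace="Dev Workspace")
update_semantic_model_from_bim(
    dataset="Sales Model", bim_file=bim, workspace="Test Workspace"
)
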
@@ -159,14 +234,10 @@ def deploy_semantic_model(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     refresh_target_dataset : bool, default=True
         If set to True, this will initiate a full refresh of the target semantic model in the target workspace.
-
-
-    -------
-
+    overwrite : bool, default=False
+        If set to True, overwrites the existing semantic model in the workspace if it exists.
     """

-    from sempy_labs import refresh_semantic_model
-
     source_workspace = fabric.resolve_workspace_name(source_workspace)

     if target_workspace is None:
@@ -181,11 +252,28 @@ def deploy_semantic_model(
             f"parameters have the same value. At least one of these must be different. Please update the parameters."
         )

+    dfD = fabric.list_datasets(workspace=target_workspace, mode="rest")
+    dfD_filt = dfD[dfD["Dataset Name"] == target_dataset]
+    if len(dfD_filt) > 0 and not overwrite:
+        raise ValueError(
+            f"{icons.warning} The '{target_dataset}' semantic model already exists within the '{target_workspace}' workspace. The 'overwrite' parameter is set to False so the source semantic model was not deployed to the target destination."
+        )
+
     bim = get_semantic_model_bim(dataset=source_dataset, workspace=source_workspace)

-
-
-
+    # Create the semantic model if the model does not exist
+    if len(dfD_filt) == 0:
+        create_semantic_model_from_bim(
+            dataset=target_dataset,
+            bim_file=bim,
+            workspace=target_workspace,
+            overwrite=overwrite,
+        )
+    # Update the semantic model if the model exists
+    else:
+        update_semantic_model_from_bim(
+            dataset=target_dataset, bim_file=bim, workspace=target_workspace
+        )

     if refresh_target_dataset:
         refresh_semantic_model(dataset=target_dataset, workspace=target_workspace)
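
Putting the pieces together, a sketch of deploy_semantic_model after this change, with placeholder names. With the default overwrite=False the call now raises if the target model already exists; with overwrite=True an existing target is updated in place via update_semantic_model_from_bim instead of being recreated:

from sempy_labs._generate_semantic_model import deploy_semantic_model

deploy_semantic_model(
    source_dataset="Sales Model",
    source_workspace="Dev Workspace",
    target_dataset="Sales Model",
    target_workspace="Prod Workspace",
    refresh_target_dataset=True,
    overwrite=True,
)
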
@@ -257,3 +345,36 @@ def get_semantic_model_bim(
     )

     return bimJson
+
+
+def get_semantic_model_size(dataset: str, workspace: Optional[str] = None):
+
+    workspace = fabric.resolve_workspace_name(workspace)
+
+    dict = fabric.evaluate_dax(
+        dataset=dataset,
+        workspace=workspace,
+        dax_string="""
+        EVALUATE SELECTCOLUMNS(FILTER(INFO.STORAGETABLECOLUMNS(), [COLUMN_TYPE] = "BASIC_DATA"),[DICTIONARY_SIZE])
+        """,
+    )
+
+    used_size = fabric.evaluate_dax(
+        dataset=dataset,
+        workspace=workspace,
+        dax_string="""
+        EVALUATE SELECTCOLUMNS(INFO.STORAGETABLECOLUMNSEGMENTS(),[USED_SIZE])
+        """,
+    )
+    dict_size = dict["[DICTIONARY_SIZE]"].sum()
+    used_size = used_size["[USED_SIZE]"].sum()
+    model_size = dict_size + used_size
+    # Calculate proper bytes size by dividing by 1024 and multiplying by 1000 - per 1000
+    if model_size >= 10**9:
+        result = model_size / (1024**3) * 10**9
+    elif model_size >= 10**6:
+        result = model_size / (1024**2) * 10**6
+    elif model_size >= 10**3:
+        result = model_size / (1024) * 10**3
+
+    return result
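
Finally, a sketch of calling the new get_semantic_model_size, with placeholder names. The function sums the dictionary sizes and used segment sizes reported by the model's INFO DAX functions and rescales the total as in the branches above; note that a model smaller than 1,000 bytes falls through every branch, leaving result unassigned:

from sempy_labs._generate_semantic_model import get_semantic_model_size

size = get_semantic_model_size(dataset="Sales Model", workspace="Dev Workspace")
print(size)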