semantic-link-labs 0.4.1 (semantic_link_labs-0.4.1-py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of semantic-link-labs has been flagged as potentially problematic.
- semantic_link_labs-0.4.1.dist-info/LICENSE +21 -0
- semantic_link_labs-0.4.1.dist-info/METADATA +22 -0
- semantic_link_labs-0.4.1.dist-info/RECORD +52 -0
- semantic_link_labs-0.4.1.dist-info/WHEEL +5 -0
- semantic_link_labs-0.4.1.dist-info/top_level.txt +1 -0
- sempy_labs/__init__.py +154 -0
- sempy_labs/_ai.py +496 -0
- sempy_labs/_clear_cache.py +39 -0
- sempy_labs/_connections.py +234 -0
- sempy_labs/_dax.py +70 -0
- sempy_labs/_generate_semantic_model.py +280 -0
- sempy_labs/_helper_functions.py +506 -0
- sempy_labs/_icons.py +4 -0
- sempy_labs/_list_functions.py +1372 -0
- sempy_labs/_model_auto_build.py +143 -0
- sempy_labs/_model_bpa.py +1354 -0
- sempy_labs/_model_dependencies.py +341 -0
- sempy_labs/_one_lake_integration.py +155 -0
- sempy_labs/_query_scale_out.py +447 -0
- sempy_labs/_refresh_semantic_model.py +184 -0
- sempy_labs/_tom.py +3766 -0
- sempy_labs/_translations.py +378 -0
- sempy_labs/_vertipaq.py +893 -0
- sempy_labs/directlake/__init__.py +45 -0
- sempy_labs/directlake/_directlake_schema_compare.py +110 -0
- sempy_labs/directlake/_directlake_schema_sync.py +128 -0
- sempy_labs/directlake/_fallback.py +62 -0
- sempy_labs/directlake/_get_directlake_lakehouse.py +69 -0
- sempy_labs/directlake/_get_shared_expression.py +59 -0
- sempy_labs/directlake/_guardrails.py +84 -0
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +54 -0
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +89 -0
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +81 -0
- sempy_labs/directlake/_update_directlake_partition_entity.py +64 -0
- sempy_labs/directlake/_warm_cache.py +210 -0
- sempy_labs/lakehouse/__init__.py +24 -0
- sempy_labs/lakehouse/_get_lakehouse_columns.py +81 -0
- sempy_labs/lakehouse/_get_lakehouse_tables.py +250 -0
- sempy_labs/lakehouse/_lakehouse.py +85 -0
- sempy_labs/lakehouse/_shortcuts.py +296 -0
- sempy_labs/migration/__init__.py +29 -0
- sempy_labs/migration/_create_pqt_file.py +239 -0
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +429 -0
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +150 -0
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +524 -0
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +165 -0
- sempy_labs/migration/_migration_validation.py +227 -0
- sempy_labs/migration/_refresh_calc_tables.py +129 -0
- sempy_labs/report/__init__.py +35 -0
- sempy_labs/report/_generate_report.py +253 -0
- sempy_labs/report/_report_functions.py +855 -0
- sempy_labs/report/_report_rebind.py +131 -0
sempy_labs/_connections.py
ADDED
@@ -0,0 +1,234 @@
+import sempy
+import sempy.fabric as fabric
+import pandas as pd
+from typing import List, Optional, Union
+
+
+def create_connection_cloud(
+    name: str,
+    server_name: str,
+    database_name: str,
+    user_name: str,
+    password: str,
+    privacy_level: str,
+):
+
+    # https://review.learn.microsoft.com/en-us/rest/api/fabric/core/connections/create-connection?branch=features%2Fdmts&tabs=HTTP
+
+    df = pd.DataFrame(
+        columns=[
+            "Connection ID",
+            "Connection Name",
+            "Connectivity Type",
+            "Connection Type",
+            "Connection Path",
+            "Privacy Level",
+            "Credential Type",
+            "Single Sign On Type",
+            "Connection Encryption",
+            "Skip Test Connection",
+        ]
+    )
+
+    client = fabric.FabricRestClient()
+
+    request_body = {
+        "connectivityType": "ShareableCloud",
+        "name": name,
+        "connectionDetails": {
+            "type": "SQL",
+            "parameters": [
+                {"name": "server", "value": server_name},
+                {"name": "database", "value": database_name},
+            ],
+        },
+        "privacyLevel": privacy_level,
+        "credentialDetails": {
+            "singleSignOnType": "None",
+            "connectionEncryption": "NotEncrypted",
+            "skipTestConnection": False,
+            "credentials": {
+                "credentialType": "Basic",
+                "username": user_name,
+                "password": password,
+            },
+        },
+    }
+
+    response = client.post(f"/v1/connections", json=request_body)
+
+    if response.status_code == 200:
+        o = response.json()
+        new_data = {
+            "Connection Id": o["id"],
+            "Connection Name": o["name"],
+            "Connectivity Type": o["connectivityType"],
+            "Connection Type": o["connectionDetails"]["type"],
+            "Connection Path": o["connectionDetails"]["path"],
+            "Privacy Level": o["privacyLevel"],
+            "Credential Type": o["credentialDetails"]["credentialType"],
+            "Single Sign On Type": o["credentialDetails"]["singleSignOnType"],
+            "Connection Encryption": o["credentialDetails"]["connectionEncryption"],
+            "Skip Test Connection": o["credentialDetails"]["skipTestConnection"],
+        }
+        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+        df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool)
+
+        return df
+    else:
+        print(response.status_code)
+
+
+def create_connection_on_prem(
+    name: str,
+    gateway_id: str,
+    server_name: str,
+    database_name: str,
+    credentials: str,
+    privacy_level: str,
+):
+
+    df = pd.DataFrame(
+        columns=[
+            "Connection ID",
+            "Connection Name",
+            "Gateway ID",
+            "Connectivity Type",
+            "Connection Type",
+            "Connection Path",
+            "Privacy Level",
+            "Credential Type",
+            "Single Sign On Type",
+            "Connection Encryption",
+            "Skip Test Connection",
+        ]
+    )
+
+    client = fabric.FabricRestClient()
+
+    request_body = {
+        "connectivityType": "OnPremisesDataGateway",
+        "gatewayId": gateway_id,
+        "name": name,
+        "connectionDetails": {
+            "type": "SQL",
+            "parameters": [
+                {"name": "server", "value": server_name},
+                {"name": "database", "value": database_name},
+            ],
+        },
+        "privacyLevel": privacy_level,
+        "credentialDetails": {
+            "singleSignOnType": "None",
+            "connectionEncryption": "NotEncrypted",
+            "skipTestConnection": False,
+            "credentials": {
+                "credentialType": "Windows",
+                "values": [{"gatewayId": gateway_id, "credentials": credentials}],
+            },
+        },
+    }
+
+    response = client.post(f"/v1/connections", json=request_body)
+
+    if response.status_code == 200:
+        o = response.json()
+        new_data = {
+            "Connection Id": o["id"],
+            "Connection Name": o["name"],
+            "Gateway ID": o["gatewayId"],
+            "Connectivity Type": o["connectivityType"],
+            "Connection Type": o["connectionDetails"]["type"],
+            "Connection Path": o["connectionDetails"]["path"],
+            "Privacy Level": o["privacyLevel"],
+            "Credential Type": o["credentialDetails"]["credentialType"],
+            "Single Sign On Type": o["credentialDetails"]["singleSignOnType"],
+            "Connection Encryption": o["credentialDetails"]["connectionEncryption"],
+            "Skip Test Connection": o["credentialDetails"]["skipTestConnection"],
+        }
+        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+        df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool)
+
+        return df
+    else:
+        print(response.status_code)
+
+
+def create_connection_vnet(
+    name: str,
+    gateway_id: str,
+    server_name: str,
+    database_name: str,
+    user_name: str,
+    password: str,
+    privacy_level: str,
+):
+
+    df = pd.DataFrame(
+        columns=[
+            "Connection ID",
+            "Connection Name",
+            "Gateway ID",
+            "Connectivity Type",
+            "Connection Type",
+            "Connection Path",
+            "Privacy Level",
+            "Credential Type",
+            "Single Sign On Type",
+            "Connection Encryption",
+            "Skip Test Connection",
+        ]
+    )
+
+    client = fabric.FabricRestClient()
+
+    request_body = {
+        "connectivityType": "VirtualNetworkDataGateway",
+        "gatewayId": gateway_id,
+        "name": name,
+        "connectionDetails": {
+            "type": "SQL",
+            "parameters": [
+                {"name": "server", "value": server_name},
+                {"name": "database", "value": database_name},
+            ],
+        },
+        "privacyLevel": privacy_level,
+        "credentialDetails": {
+            "singleSignOnType": "None",
+            "connectionEncryption": "Encrypted",
+            "skipTestConnection": False,
+            "credentials": {
+                "credentialType": "Basic",
+                "username": user_name,
+                "password": password,
+            },
+        },
+    }
+
+    response = client.post(f"/v1/connections", json=request_body)
+
+    if response.status_code == 200:
+        o = response.json()
+        new_data = {
+            "Connection Id": o["id"],
+            "Connection Name": o["name"],
+            "Gateway ID": o["gatewayId"],
+            "Connectivity Type": o["connectivityType"],
+            "Connection Type": o["connectionDetails"]["type"],
+            "Connection Path": o["connectionDetails"]["path"],
+            "Privacy Level": o["privacyLevel"],
+            "Credential Type": o["credentialDetails"]["credentialType"],
+            "Single Sign On Type": o["credentialDetails"]["singleSignOnType"],
+            "Connection Encryption": o["credentialDetails"]["connectionEncryption"],
+            "Skip Test Connection": o["credentialDetails"]["skipTestConnection"],
+        }
+        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+        df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool)
+
+        return df
+    else:
+        print(response.status_code)
sempy_labs/_dax.py
ADDED
@@ -0,0 +1,70 @@
+import sempy
+import sempy.fabric as fabric
+import pandas as pd
+from sempy_labs._helper_functions import resolve_dataset_id
+from typing import List, Optional, Union
+from sempy._utils._log import log
+
+
+@log
+def run_dax(
+    dataset: str,
+    dax_query: str,
+    user_name: Optional[str] = None,
+    workspace: Optional[str] = None,
+):
+    """
+    Runs a DAX query against a semantic model using the `REST API <https://learn.microsoft.com/en-us/rest/api/power-bi/datasets/execute-queries-in-group>`_.
+
+    Compared to evaluate_dax this allows passing the user name for impersonation.
+    Note that the REST API has significant limitations compared to the XMLA endpoint.
+
+    Parameters
+    ----------
+    dataset : str
+        Name of the semantic model.
+    dax_query : str
+        The DAX query.
+    user_name : str
+        The user name (i.e. hello@goodbye.com).
+        Defaults to None which resolves to no user impersonation.
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe holding the result of the DAX query.
+    """
+
+    # https://learn.microsoft.com/en-us/rest/api/power-bi/datasets/execute-queries-in-group
+
+    if workspace is None:
+        workspace_id = fabric.get_workspace_id()
+        workspace = fabric.resolve_workspace_name(workspace_id)
+    else:
+        workspace_id = fabric.resolve_workspace_id(workspace)
+
+    dataset_id = resolve_dataset_id(dataset=dataset, workspace=workspace)
+
+    if user_name is None:
+        request_body = {"queries": [{"query": dax_query}]}
+    else:
+        request_body = {
+            "queries": [{"query": dax_query}],
+            "impersonatedUserName": user_name,
+        }
+
+    client = fabric.PowerBIRestClient()
+    response = client.post(
+        f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/executeQueries",
+        json=request_body,
+    )
+    data = response.json()["results"][0]["tables"]
+    column_names = data[0]["rows"][0].keys()
+    data_rows = [row.values() for item in data for row in item["rows"]]
+    df = pd.DataFrame(data_rows, columns=column_names)
+
+    return df
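A similar sketch for run_dax as defined above; the dataset name, DAX query, and user name are placeholders, and user_name and workspace may be omitted as the docstring describes:

from sempy_labs._dax import run_dax

df = run_dax(
    dataset="Sales Model",
    dax_query="EVALUATE SUMMARIZECOLUMNS('Date'[Year])",
    user_name="hello@goodbye.com",  # optional impersonation via the REST API
)
print(df)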
sempy_labs/_generate_semantic_model.py
ADDED
@@ -0,0 +1,280 @@
+import sempy
+import sempy.fabric as fabric
+import pandas as pd
+import json, base64, time, os
+from typing import List, Optional, Union
+from sempy_labs._helper_functions import (
+    resolve_lakehouse_name,
+    resolve_workspace_name_and_id,
+)
+from sempy_labs.lakehouse._lakehouse import lakehouse_attached
+import sempy_labs._icons as icons
+
+
+def create_blank_semantic_model(
+    dataset: str,
+    compatibility_level: int = 1605,
+    workspace: Optional[str] = None,
+):
+    """
+    Creates a new blank semantic model (no tables/columns etc.).
+
+    Parameters
+    ----------
+    dataset : str
+        Name of the semantic model.
+    compatibility_level : int
+        The compatibility level of the semantic model.
+        Defaults to 1605.
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    if workspace == None:
+        workspace_id = fabric.get_workspace_id()
+        workspace = fabric.resolve_workspace_name(workspace_id)
+
+    if compatibility_level < 1500:
+        print(f"{icons.red_dot} Compatiblity level must be at least 1500.")
+        return
+
+    tmsl = f"""
+    {{
+        "createOrReplace": {{
+            "object": {{
+                "database": '{dataset}'
+            }},
+            "database": {{
+                "name": '{dataset}',
+                "compatibilityLevel": {compatibility_level},
+                "model": {{
+                    "culture": "en-US",
+                    "defaultPowerBIDataSourceVersion": "powerBI_V3"
+                }}
+            }}
+        }}
+    }}
+    """
+
+    fabric.execute_tmsl(script=tmsl, workspace=workspace)
+
+    return print(
+        f"{icons.green_dot} The '{dataset}' semantic model was created within the '{workspace}' workspace."
+    )
+
+
+def create_semantic_model_from_bim(
+    dataset: str, bim_file: str, workspace: Optional[str] = None
+):
+    """
+    Creates a new semantic model based on a Model.bim file.
+
+    Parameters
+    ----------
+    dataset : str
+        Name of the semantic model.
+    bim_file : str
+        The model.bim file.
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    objectType = "SemanticModel"
+
+    dfI = fabric.list_items(workspace=workspace, type=objectType)
+    dfI_filt = dfI[(dfI["Display Name"] == dataset)]
+
+    if len(dfI_filt) > 0:
+        print(
+            f"WARNING: '{dataset}' already exists as a semantic model in the '{workspace}' workspace."
+        )
+        return
+
+    client = fabric.FabricRestClient()
+    defPBIDataset = {"version": "1.0", "settings": {}}
+
+    def conv_b64(file):
+
+        loadJson = json.dumps(file)
+        f = base64.b64encode(loadJson.encode("utf-8")).decode("utf-8")
+
+        return f
+
+    payloadPBIDefinition = conv_b64(defPBIDataset)
+    payloadBim = conv_b64(bim_file)
+
+    request_body = {
+        "displayName": dataset,
+        "type": objectType,
+        "definition": {
+            "parts": [
+                {
+                    "path": "model.bim",
+                    "payload": payloadBim,
+                    "payloadType": "InlineBase64",
+                },
+                {
+                    "path": "definition.pbidataset",
+                    "payload": payloadPBIDefinition,
+                    "payloadType": "InlineBase64",
+                },
+            ]
+        },
+    }
+
+    response = client.post(f"/v1/workspaces/{workspace_id}/items", json=request_body)
+
+    if response.status_code == 201:
+        print(
+            f"The '{dataset}' semantic model has been created within the '{workspace}' workspace."
+        )
+        print(response.json())
+    elif response.status_code == 202:
+        operationId = response.headers["x-ms-operation-id"]
+        response = client.get(f"/v1/operations/{operationId}")
+        response_body = json.loads(response.content)
+        while response_body["status"] != "Succeeded":
+            time.sleep(3)
+            response = client.get(f"/v1/operations/{operationId}")
+            response_body = json.loads(response.content)
+        response = client.get(f"/v1/operations/{operationId}/result")
+        print(
+            f"The '{dataset}' semantic model has been created within the '{workspace}' workspace."
+        )
+        print(response.json())
+
+
+def deploy_semantic_model(
+    dataset: str,
+    new_dataset: Optional[str] = None,
+    workspace: Optional[str] = None,
+    new_dataset_workspace: Optional[str] = None,
+):
+    """
+    Deploys a semantic model based on an existing semantic model.
+
+    Parameters
+    ----------
+    dataset : str
+        Name of the semantic model to deploy.
+    new_dataset: str
+        Name of the new semantic model to be created.
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    new_dataset_workspace : str, default=None
+        The Fabric workspace name in which the new semantic model will be deployed.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+
+    """
+
+    if workspace == None:
+        workspace_id = fabric.get_workspace_id()
+        workspace = fabric.resolve_workspace_name(workspace_id)
+
+    if new_dataset_workspace == None:
+        new_dataset_workspace = workspace
+
+    if new_dataset is None:
+        new_dataset = dataset
+
+    if new_dataset == dataset and new_dataset_workspace == workspace:
+        print(
+            f"The 'dataset' and 'new_dataset' parameters have the same value. And, the 'workspace' and 'new_dataset_workspace' parameters have the same value. At least one of these must be different. Please update the parameters."
+        )
+        return
+
+    bim = get_semantic_model_bim(dataset=dataset, workspace=workspace)
+
+    create_semantic_model_from_bim(
+        dataset=new_dataset, bim_file=bim, workspace=new_dataset_workspace
+    )
+
+
+def get_semantic_model_bim(
+    dataset: str,
+    workspace: Optional[str] = None,
+    save_to_file_name: Optional[str] = None,
+):
+    """
+    Extracts the Model.bim file for a given semantic model.
+
+    Parameters
+    ----------
+    dataset : str
+        Name of the semantic model.
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    save_to_file_name : str, default=None
+        If specified, saves the Model.bim as a file in the lakehouse attached to the notebook.
+
+    Returns
+    -------
+    str
+        The Model.bim file for the semantic model.
+    """
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    objType = "SemanticModel"
+    client = fabric.FabricRestClient()
+    itemList = fabric.list_items(workspace=workspace, type=objType)
+    itemListFilt = itemList[(itemList["Display Name"] == dataset)]
+    itemId = itemListFilt["Id"].iloc[0]
+    response = client.post(
+        f"/v1/workspaces/{workspace_id}/items/{itemId}/getDefinition"
+    )
+
+    if response.status_code == 200:
+        res = response.json()
+    elif response.status_code == 202:
+        operationId = response.headers["x-ms-operation-id"]
+        response = client.get(f"/v1/operations/{operationId}")
+        response_body = json.loads(response.content)
+        while response_body["status"] != "Succeeded":
+            time.sleep(3)
+            response = client.get(f"/v1/operations/{operationId}")
+            response_body = json.loads(response.content)
+        response = client.get(f"/v1/operations/{operationId}/result")
+        res = response.json()
+    df_items = pd.json_normalize(res["definition"]["parts"])
+    df_items_filt = df_items[df_items["path"] == "model.bim"]
+    payload = df_items_filt["payload"].iloc[0]
+    bimFile = base64.b64decode(payload).decode("utf-8")
+    bimJson = json.loads(bimFile)
+
+    if save_to_file_name is not None:
+        lakeAttach = lakehouse_attached()
+        if lakeAttach == False:
+            print(
+                f"In order to save the model.bim file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
+            )
+            return
+
+        lakehouse_id = fabric.get_lakehouse_id()
+        lakehouse = resolve_lakehouse_name(lakehouse_id, workspace)
+        folderPath = "/lakehouse/default/Files"
+        fileExt = ".bim"
+        if not save_to_file_name.endswith(fileExt):
+            save_to_file_name = save_to_file_name + fileExt
+        filePath = os.path.join(folderPath, save_to_file_name)
+        with open(filePath, "w") as json_file:
+            json.dump(bimJson, json_file, indent=4)
+        print(
+            f"The .bim file for the '{dataset}' semantic model has been saved to the '{lakehouse}' in this location: '{filePath}'.\n\n"
+        )
+
+    return bimJson
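Finally, a minimal sketch of how the semantic-model helpers above might be combined, again with placeholder dataset and workspace names:

from sempy_labs._generate_semantic_model import (
    create_blank_semantic_model,
    deploy_semantic_model,
    get_semantic_model_bim,
)

# Create an empty model, copy an existing model to another workspace,
# and extract a Model.bim (optionally saving it to the attached lakehouse).
create_blank_semantic_model(dataset="Scratch Model")
deploy_semantic_model(
    dataset="Sales Model",
    new_dataset="Sales Model (copy)",
    new_dataset_workspace="Test Workspace",
)
bim = get_semantic_model_bim(dataset="Sales Model", save_to_file_name="SalesModel")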