semantic-link-labs 0.7.3__py3-none-any.whl → 0.7.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of semantic-link-labs has been flagged as potentially problematic; see the linked advisory for details.
- {semantic_link_labs-0.7.3.dist-info → semantic_link_labs-0.7.4.dist-info}/METADATA +14 -3
- {semantic_link_labs-0.7.3.dist-info → semantic_link_labs-0.7.4.dist-info}/RECORD +60 -44
- {semantic_link_labs-0.7.3.dist-info → semantic_link_labs-0.7.4.dist-info}/WHEEL +1 -1
- sempy_labs/__init__.py +63 -24
- sempy_labs/_bpa_translation/{_translations_am-ET.po → _model/_translations_am-ET.po} +22 -0
- sempy_labs/_bpa_translation/{_translations_ar-AE.po → _model/_translations_ar-AE.po} +24 -0
- sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +938 -0
- sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +934 -0
- sempy_labs/_bpa_translation/{_translations_cs-CZ.po → _model/_translations_cs-CZ.po} +179 -157
- sempy_labs/_bpa_translation/{_translations_da-DK.po → _model/_translations_da-DK.po} +24 -0
- sempy_labs/_bpa_translation/{_translations_de-DE.po → _model/_translations_de-DE.po} +77 -52
- sempy_labs/_bpa_translation/{_translations_el-GR.po → _model/_translations_el-GR.po} +25 -0
- sempy_labs/_bpa_translation/{_translations_es-ES.po → _model/_translations_es-ES.po} +67 -43
- sempy_labs/_bpa_translation/{_translations_fa-IR.po → _model/_translations_fa-IR.po} +24 -0
- sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +915 -0
- sempy_labs/_bpa_translation/{_translations_fr-FR.po → _model/_translations_fr-FR.po} +83 -57
- sempy_labs/_bpa_translation/{_translations_ga-IE.po → _model/_translations_ga-IE.po} +25 -0
- sempy_labs/_bpa_translation/{_translations_he-IL.po → _model/_translations_he-IL.po} +23 -0
- sempy_labs/_bpa_translation/{_translations_hi-IN.po → _model/_translations_hi-IN.po} +24 -0
- sempy_labs/_bpa_translation/{_translations_hu-HU.po → _model/_translations_hu-HU.po} +25 -0
- sempy_labs/_bpa_translation/_model/_translations_id-ID.po +918 -0
- sempy_labs/_bpa_translation/{_translations_is-IS.po → _model/_translations_is-IS.po} +25 -0
- sempy_labs/_bpa_translation/{_translations_it-IT.po → _model/_translations_it-IT.po} +25 -0
- sempy_labs/_bpa_translation/{_translations_ja-JP.po → _model/_translations_ja-JP.po} +21 -0
- sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +823 -0
- sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +937 -0
- sempy_labs/_bpa_translation/{_translations_nl-NL.po → _model/_translations_nl-NL.po} +80 -56
- sempy_labs/_bpa_translation/{_translations_pl-PL.po → _model/_translations_pl-PL.po} +101 -76
- sempy_labs/_bpa_translation/{_translations_pt-BR.po → _model/_translations_pt-BR.po} +25 -0
- sempy_labs/_bpa_translation/{_translations_pt-PT.po → _model/_translations_pt-PT.po} +25 -0
- sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +939 -0
- sempy_labs/_bpa_translation/{_translations_ru-RU.po → _model/_translations_ru-RU.po} +25 -0
- sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +925 -0
- sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +922 -0
- sempy_labs/_bpa_translation/{_translations_ta-IN.po → _model/_translations_ta-IN.po} +26 -0
- sempy_labs/_bpa_translation/{_translations_te-IN.po → _model/_translations_te-IN.po} +24 -0
- sempy_labs/_bpa_translation/{_translations_th-TH.po → _model/_translations_th-TH.po} +24 -0
- sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +925 -0
- sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +933 -0
- sempy_labs/_bpa_translation/{_translations_zh-CN.po → _model/_translations_zh-CN.po} +116 -97
- sempy_labs/_bpa_translation/{_translations_zu-ZA.po → _model/_translations_zu-ZA.po} +25 -0
- sempy_labs/_capacities.py +541 -0
- sempy_labs/_connections.py +138 -0
- sempy_labs/_environments.py +156 -0
- sempy_labs/_helper_functions.py +146 -8
- sempy_labs/_icons.py +43 -0
- sempy_labs/_list_functions.py +35 -900
- sempy_labs/_model_bpa.py +8 -32
- sempy_labs/_notebooks.py +143 -0
- sempy_labs/_query_scale_out.py +28 -7
- sempy_labs/_spark.py +465 -0
- sempy_labs/_sql.py +35 -11
- sempy_labs/_translations.py +3 -0
- sempy_labs/_vertipaq.py +160 -99
- sempy_labs/_workspaces.py +294 -0
- sempy_labs/directlake/_directlake_schema_sync.py +1 -2
- sempy_labs/tom/_model.py +5 -1
- {semantic_link_labs-0.7.3.dist-info → semantic_link_labs-0.7.4.dist-info}/LICENSE +0 -0
- {semantic_link_labs-0.7.3.dist-info → semantic_link_labs-0.7.4.dist-info}/top_level.txt +0 -0
- /sempy_labs/_bpa_translation/{_translations_sv-SE.po → _model/_translations_sv-SE.po} +0 -0
sempy_labs/_model_bpa.py
CHANGED
|
@@ -12,6 +12,7 @@ from sempy_labs._helper_functions import (
|
|
|
12
12
|
save_as_delta_table,
|
|
13
13
|
resolve_workspace_capacity,
|
|
14
14
|
resolve_dataset_id,
|
|
15
|
+
get_language_codes,
|
|
15
16
|
)
|
|
16
17
|
from sempy_labs.lakehouse import get_lakehouse_tables, lakehouse_attached
|
|
17
18
|
from sempy_labs.tom import connect_semantic_model
|
|
@@ -63,7 +64,7 @@ def run_model_bpa(
|
|
|
63
64
|
pandas.DataFrame
|
|
64
65
|
A pandas dataframe in HTML format showing semantic model objects which violated the best practice analyzer rules.
|
|
65
66
|
"""
|
|
66
|
-
|
|
67
|
+
|
|
67
68
|
import polib
|
|
68
69
|
|
|
69
70
|
if "extend" in kwargs:
|
|
@@ -80,35 +81,9 @@ def run_model_bpa(
|
|
|
80
81
|
"ignore", category=UserWarning, message=".*Arrow optimization.*"
|
|
81
82
|
)
|
|
82
83
|
|
|
83
|
-
language_list =
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
"he-IL",
|
|
87
|
-
"pt-PT",
|
|
88
|
-
"zh-CN",
|
|
89
|
-
"fr-FR",
|
|
90
|
-
"da-DK",
|
|
91
|
-
"cs-CZ",
|
|
92
|
-
"de-DE",
|
|
93
|
-
"el-GR",
|
|
94
|
-
"fa-IR",
|
|
95
|
-
"ga-IE",
|
|
96
|
-
"hi-IN",
|
|
97
|
-
"hu-HU",
|
|
98
|
-
"is-IS",
|
|
99
|
-
"ja-JP",
|
|
100
|
-
"nl-NL",
|
|
101
|
-
"pl-PL",
|
|
102
|
-
"pt-BR",
|
|
103
|
-
"ru-RU",
|
|
104
|
-
"te-IN",
|
|
105
|
-
"ta-IN",
|
|
106
|
-
"th-TH",
|
|
107
|
-
"zu-ZA",
|
|
108
|
-
"am-ET",
|
|
109
|
-
"ar-AE",
|
|
110
|
-
"sv-SE",
|
|
111
|
-
]
|
|
84
|
+
language_list = list(icons.language_map.keys())
|
|
85
|
+
if language is not None:
|
|
86
|
+
language = get_language_codes(languages=language)[0]
|
|
112
87
|
|
|
113
88
|
# Map languages to the closest language (first 2 letters matching)
|
|
114
89
|
def map_language(language, language_list):
|
|
@@ -152,7 +127,7 @@ def run_model_bpa(
|
|
|
152
127
|
def translate_using_po(rule_file):
|
|
153
128
|
current_dir = os.path.dirname(os.path.abspath(__file__))
|
|
154
129
|
translation_file = (
|
|
155
|
-
f"{current_dir}/_bpa_translation/_translations_{language}.po"
|
|
130
|
+
f"{current_dir}/_bpa_translation/_model/_translations_{language}.po"
|
|
156
131
|
)
|
|
157
132
|
for c in ["Category", "Description", "Rule Name"]:
|
|
158
133
|
po = polib.pofile(translation_file)
|
|
@@ -530,7 +505,8 @@ def run_model_bpa(
|
|
|
530
505
|
content_html += f'<td>{row["Rule Name"]}</td>'
|
|
531
506
|
content_html += f'<td>{row["Object Type"]}</td>'
|
|
532
507
|
content_html += f'<td>{row["Object Name"]}</td>'
|
|
533
|
-
content_html += f'<td>{row["Severity"]}</td>'
|
|
508
|
+
content_html += f'<td style="text-align: center;">{row["Severity"]}</td>'
|
|
509
|
+
#content_html += f'<td>{row["Severity"]}</td>'
|
|
534
510
|
content_html += "</tr>"
|
|
535
511
|
content_html += "</table>"
|
|
536
512
|
|
sempy_labs/_notebooks.py
ADDED
|
@@ -0,0 +1,143 @@
|
|
|
1
|
+
import sempy.fabric as fabric
|
|
2
|
+
import pandas as pd
|
|
3
|
+
import sempy_labs._icons as icons
|
|
4
|
+
from typing import Optional
|
|
5
|
+
import base64
|
|
6
|
+
import requests
|
|
7
|
+
from sempy_labs._helper_functions import (
|
|
8
|
+
resolve_workspace_name_and_id,
|
|
9
|
+
lro,
|
|
10
|
+
_decode_b64,
|
|
11
|
+
)
|
|
12
|
+
from sempy.fabric.exceptions import FabricHTTPException
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def get_notebook_definition(
    notebook_name: str, workspace: Optional[str] = None, decode: Optional[bool] = True
):
    """
    Obtains the notebook definition.

    Parameters
    ----------
    notebook_name : str
        The name of the notebook.
    workspace : str, default=None
        The name of the workspace.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    decode : bool, default=True
        If True, decodes the notebook definition file into .ipynb format.
        If False, obtains the notebook definition file in base64 format.

    Returns
    -------
    ipynb
        The notebook definition.
    """

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    # Locate the notebook by display name within the workspace.
    notebooks = fabric.list_items(workspace=workspace, type="Notebook")
    matching = notebooks[notebooks["Display Name"] == notebook_name]

    if matching.empty:
        raise ValueError(
            f"{icons.red_dot} The '{notebook_name}' notebook does not exist within the '{workspace}' workspace."
        )

    notebook_id = matching["Id"].iloc[0]

    # getDefinition is a long-running operation; lro() polls it to completion.
    client = fabric.FabricRestClient()
    response = client.post(
        f"v1/workspaces/{workspace_id}/notebooks/{notebook_id}/getDefinition",
    )
    definition = lro(client, response).json()

    # The definition is a list of parts; the notebook content lives in
    # the 'notebook-content.py' part as a base64 payload.
    parts = pd.json_normalize(definition["definition"]["parts"])
    content_part = parts[parts["path"] == "notebook-content.py"]
    payload = content_part["payload"].iloc[0]

    return _decode_b64(payload) if decode else payload
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
def import_notebook_from_web(
    notebook_name: str,
    url: str,
    description: Optional[str] = None,
    workspace: Optional[str] = None,
):
    """
    Creates a new notebook within a workspace based on a Jupyter notebook hosted in the web.

    Note: When specifying a notebook from GitHub, please use the raw file path. Note that if the non-raw file path is specified, the url will be
    converted to the raw URL as the raw URL is needed to obtain the notebook content.

    Parameters
    ----------
    notebook_name : str
        The name of the notebook to be created.
    url : str
        The url of the Jupyter Notebook (.ipynb)
    description : str, default=None
        The description of the notebook.
        Defaults to None which does not place a description.
    workspace : str, default=None
        The name of the workspace.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Raises
    ------
    ValueError
        If a notebook with the same display name already exists in the workspace.
    FabricHTTPException
        If the notebook content cannot be downloaded from the given url.
    """

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
    client = fabric.FabricRestClient()

    # Guard against clobbering an existing notebook with the same name.
    dfI = fabric.list_items(workspace=workspace, type="Notebook")
    dfI_filt = dfI[dfI["Display Name"] == notebook_name]
    if len(dfI_filt) > 0:
        raise ValueError(
            f"{icons.red_dot} The '{notebook_name}' already exists within the '{workspace}' workspace."
        )

    # Fix links to go to the raw github file
    starting_text = "https://github.com/"
    starting_text_len = len(starting_text)
    if url.startswith(starting_text):
        url = f"https://raw.githubusercontent.com/{url[starting_text_len:]}".replace(
            "/blob/", "/"
        )

    response = requests.get(url)
    if response.status_code != 200:
        raise FabricHTTPException(response)
    file_content = response.content
    # BUGFIX: b64encode returns bytes, which json.dumps cannot serialize when the
    # request body is sent as json=...; decode to a UTF-8 string for InlineBase64.
    notebook_payload = base64.b64encode(file_content).decode("utf-8")

    request_body = {
        "displayName": notebook_name,
        "definition": {
            "format": "ipynb",
            "parts": [
                {
                    "path": "notebook-content.py",
                    "payload": notebook_payload,
                    "payloadType": "InlineBase64",
                }
            ],
        },
    }
    if description is not None:
        request_body["description"] = description

    response = client.post(f"v1/workspaces/{workspace_id}/notebooks", json=request_body)

    # Notebook creation may return 201 (created) or 202 (accepted / long-running).
    lro(client, response, status_codes=[201, 202])

    print(
        f"{icons.green_dot} The '{notebook_name}' notebook was created within the '{workspace}' workspace."
    )
|
sempy_labs/_query_scale_out.py
CHANGED
|
@@ -293,13 +293,24 @@ def set_semantic_model_storage_format(
|
|
|
293
293
|
f"{icons.red_dot} Invalid storage format value. Valid options: {storageFormats}."
|
|
294
294
|
)
|
|
295
295
|
|
|
296
|
+
dfL = list_qso_settings(dataset=dataset, workspace=workspace)
|
|
297
|
+
current_storage_format = dfL["Storage Mode"].iloc[0]
|
|
298
|
+
|
|
299
|
+
if current_storage_format == storage_format:
|
|
300
|
+
print(
|
|
301
|
+
f"{icons.info} The '{dataset}' semantic model within the '{workspace}' workspace is already set to '{storage_format.lower()}' storage format."
|
|
302
|
+
)
|
|
303
|
+
return
|
|
304
|
+
|
|
296
305
|
client = fabric.PowerBIRestClient()
|
|
297
306
|
response = client.patch(
|
|
298
307
|
f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}", json=request_body
|
|
299
308
|
)
|
|
300
309
|
if response.status_code != 200:
|
|
301
310
|
raise FabricHTTPException(response)
|
|
302
|
-
print(
|
|
311
|
+
print(
|
|
312
|
+
f"{icons.green_dot} The semantic model storage format for the '{dataset}' semantic model within the '{workspace}' workspace has been set to '{storage_format}'."
|
|
313
|
+
)
|
|
303
314
|
|
|
304
315
|
|
|
305
316
|
def list_qso_settings(
|
|
@@ -383,10 +394,6 @@ def set_workspace_default_storage_format(
|
|
|
383
394
|
The Fabric workspace name.
|
|
384
395
|
Defaults to None which resolves to the workspace of the attached lakehouse
|
|
385
396
|
or if no lakehouse attached, resolves to the workspace of the notebook.
|
|
386
|
-
|
|
387
|
-
Returns
|
|
388
|
-
-------
|
|
389
|
-
|
|
390
397
|
"""
|
|
391
398
|
|
|
392
399
|
# https://learn.microsoft.com/en-us/rest/api/power-bi/groups/update-group#defaultdatasetstorageformat
|
|
@@ -396,19 +403,33 @@ def set_workspace_default_storage_format(
|
|
|
396
403
|
storage_format = storage_format.capitalize()
|
|
397
404
|
|
|
398
405
|
if storage_format not in storageFormats:
|
|
399
|
-
|
|
406
|
+
raise ValueError(
|
|
400
407
|
f"{icons.red_dot} Invalid storage format. Please choose from these options: {storageFormats}."
|
|
401
408
|
)
|
|
402
409
|
|
|
403
410
|
(workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
|
|
404
411
|
|
|
405
|
-
|
|
412
|
+
# Check current storage format
|
|
413
|
+
dfW = fabric.list_workspaces(filter=f"name eq '{workspace}'")
|
|
414
|
+
if len(dfW) == 0:
|
|
415
|
+
raise ValueError()
|
|
416
|
+
current_storage_format = dfW['Default Dataset Storage Format'].iloc[0]
|
|
417
|
+
|
|
418
|
+
if current_storage_format == storage_format:
|
|
419
|
+
print(f"{icons.info} The '{workspace}' is already set to a default storage format of '{current_storage_format}'.")
|
|
420
|
+
return
|
|
421
|
+
|
|
422
|
+
request_body = {
|
|
423
|
+
"name": workspace,
|
|
424
|
+
"defaultDatasetStorageFormat": storage_format,
|
|
425
|
+
}
|
|
406
426
|
|
|
407
427
|
client = fabric.PowerBIRestClient()
|
|
408
428
|
response = client.patch(f"/v1.0/myorg/groups/{workspace_id}", json=request_body)
|
|
409
429
|
|
|
410
430
|
if response.status_code != 200:
|
|
411
431
|
raise FabricHTTPException(response)
|
|
432
|
+
|
|
412
433
|
print(
|
|
413
434
|
f"{icons.green_dot} The default storage format for the '{workspace}' workspace has been updated to '{storage_format}."
|
|
414
435
|
)
|