semantic-link-labs 0.7.3__py3-none-any.whl → 0.8.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic. Click here for more details.

Files changed (75) hide show
  1. {semantic_link_labs-0.7.3.dist-info → semantic_link_labs-0.8.0.dist-info}/METADATA +19 -4
  2. {semantic_link_labs-0.7.3.dist-info → semantic_link_labs-0.8.0.dist-info}/RECORD +75 -50
  3. {semantic_link_labs-0.7.3.dist-info → semantic_link_labs-0.8.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +109 -31
  5. sempy_labs/_bpa_translation/{_translations_am-ET.po → _model/_translations_am-ET.po} +22 -0
  6. sempy_labs/_bpa_translation/{_translations_ar-AE.po → _model/_translations_ar-AE.po} +24 -0
  7. sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +938 -0
  8. sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +934 -0
  9. sempy_labs/_bpa_translation/{_translations_cs-CZ.po → _model/_translations_cs-CZ.po} +179 -157
  10. sempy_labs/_bpa_translation/{_translations_da-DK.po → _model/_translations_da-DK.po} +24 -0
  11. sempy_labs/_bpa_translation/{_translations_de-DE.po → _model/_translations_de-DE.po} +77 -52
  12. sempy_labs/_bpa_translation/{_translations_el-GR.po → _model/_translations_el-GR.po} +25 -0
  13. sempy_labs/_bpa_translation/{_translations_es-ES.po → _model/_translations_es-ES.po} +67 -43
  14. sempy_labs/_bpa_translation/{_translations_fa-IR.po → _model/_translations_fa-IR.po} +24 -0
  15. sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +915 -0
  16. sempy_labs/_bpa_translation/{_translations_fr-FR.po → _model/_translations_fr-FR.po} +83 -57
  17. sempy_labs/_bpa_translation/{_translations_ga-IE.po → _model/_translations_ga-IE.po} +25 -0
  18. sempy_labs/_bpa_translation/{_translations_he-IL.po → _model/_translations_he-IL.po} +23 -0
  19. sempy_labs/_bpa_translation/{_translations_hi-IN.po → _model/_translations_hi-IN.po} +24 -0
  20. sempy_labs/_bpa_translation/{_translations_hu-HU.po → _model/_translations_hu-HU.po} +25 -0
  21. sempy_labs/_bpa_translation/_model/_translations_id-ID.po +918 -0
  22. sempy_labs/_bpa_translation/{_translations_is-IS.po → _model/_translations_is-IS.po} +25 -0
  23. sempy_labs/_bpa_translation/{_translations_it-IT.po → _model/_translations_it-IT.po} +25 -0
  24. sempy_labs/_bpa_translation/{_translations_ja-JP.po → _model/_translations_ja-JP.po} +21 -0
  25. sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +823 -0
  26. sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +937 -0
  27. sempy_labs/_bpa_translation/{_translations_nl-NL.po → _model/_translations_nl-NL.po} +80 -56
  28. sempy_labs/_bpa_translation/{_translations_pl-PL.po → _model/_translations_pl-PL.po} +101 -76
  29. sempy_labs/_bpa_translation/{_translations_pt-BR.po → _model/_translations_pt-BR.po} +25 -0
  30. sempy_labs/_bpa_translation/{_translations_pt-PT.po → _model/_translations_pt-PT.po} +25 -0
  31. sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +939 -0
  32. sempy_labs/_bpa_translation/{_translations_ru-RU.po → _model/_translations_ru-RU.po} +25 -0
  33. sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +925 -0
  34. sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +922 -0
  35. sempy_labs/_bpa_translation/{_translations_ta-IN.po → _model/_translations_ta-IN.po} +26 -0
  36. sempy_labs/_bpa_translation/{_translations_te-IN.po → _model/_translations_te-IN.po} +24 -0
  37. sempy_labs/_bpa_translation/{_translations_th-TH.po → _model/_translations_th-TH.po} +24 -0
  38. sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +925 -0
  39. sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +933 -0
  40. sempy_labs/_bpa_translation/{_translations_zh-CN.po → _model/_translations_zh-CN.po} +116 -97
  41. sempy_labs/_bpa_translation/{_translations_zu-ZA.po → _model/_translations_zu-ZA.po} +25 -0
  42. sempy_labs/_capacities.py +577 -0
  43. sempy_labs/_capacity_migration.py +624 -0
  44. sempy_labs/_clear_cache.py +8 -8
  45. sempy_labs/_connections.py +140 -0
  46. sempy_labs/_environments.py +156 -0
  47. sempy_labs/_git.py +20 -21
  48. sempy_labs/_helper_functions.py +151 -10
  49. sempy_labs/_icons.py +62 -0
  50. sempy_labs/_list_functions.py +232 -887
  51. sempy_labs/_model_bpa.py +8 -32
  52. sempy_labs/_notebooks.py +143 -0
  53. sempy_labs/_query_scale_out.py +30 -8
  54. sempy_labs/_spark.py +460 -0
  55. sempy_labs/_sql.py +88 -19
  56. sempy_labs/_translations.py +3 -0
  57. sempy_labs/_vertipaq.py +162 -99
  58. sempy_labs/_workspaces.py +294 -0
  59. sempy_labs/admin/__init__.py +53 -0
  60. sempy_labs/admin/_basic_functions.py +806 -0
  61. sempy_labs/admin/_domains.py +411 -0
  62. sempy_labs/directlake/_directlake_schema_sync.py +1 -2
  63. sempy_labs/directlake/_generate_shared_expression.py +11 -14
  64. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +14 -24
  65. sempy_labs/report/__init__.py +9 -6
  66. sempy_labs/report/_report_bpa.py +359 -0
  67. sempy_labs/report/_report_bpa_rules.py +113 -0
  68. sempy_labs/report/_report_helper.py +254 -0
  69. sempy_labs/report/_report_list_functions.py +95 -0
  70. sempy_labs/report/_report_rebind.py +0 -4
  71. sempy_labs/report/_reportwrapper.py +2039 -0
  72. sempy_labs/tom/_model.py +83 -5
  73. {semantic_link_labs-0.7.3.dist-info → semantic_link_labs-0.8.0.dist-info}/LICENSE +0 -0
  74. {semantic_link_labs-0.7.3.dist-info → semantic_link_labs-0.8.0.dist-info}/top_level.txt +0 -0
  75. /sempy_labs/_bpa_translation/{_translations_sv-SE.po → _model/_translations_sv-SE.po} +0 -0
sempy_labs/_model_bpa.py CHANGED
@@ -12,6 +12,7 @@ from sempy_labs._helper_functions import (
12
12
  save_as_delta_table,
13
13
  resolve_workspace_capacity,
14
14
  resolve_dataset_id,
15
+ get_language_codes,
15
16
  )
16
17
  from sempy_labs.lakehouse import get_lakehouse_tables, lakehouse_attached
17
18
  from sempy_labs.tom import connect_semantic_model
@@ -63,7 +64,7 @@ def run_model_bpa(
63
64
  pandas.DataFrame
64
65
  A pandas dataframe in HTML format showing semantic model objects which violated the best practice analyzer rules.
65
66
  """
66
-
67
+
67
68
  import polib
68
69
 
69
70
  if "extend" in kwargs:
@@ -80,35 +81,9 @@ def run_model_bpa(
80
81
  "ignore", category=UserWarning, message=".*Arrow optimization.*"
81
82
  )
82
83
 
83
- language_list = [
84
- "it-IT",
85
- "es-ES",
86
- "he-IL",
87
- "pt-PT",
88
- "zh-CN",
89
- "fr-FR",
90
- "da-DK",
91
- "cs-CZ",
92
- "de-DE",
93
- "el-GR",
94
- "fa-IR",
95
- "ga-IE",
96
- "hi-IN",
97
- "hu-HU",
98
- "is-IS",
99
- "ja-JP",
100
- "nl-NL",
101
- "pl-PL",
102
- "pt-BR",
103
- "ru-RU",
104
- "te-IN",
105
- "ta-IN",
106
- "th-TH",
107
- "zu-ZA",
108
- "am-ET",
109
- "ar-AE",
110
- "sv-SE",
111
- ]
84
+ language_list = list(icons.language_map.keys())
85
+ if language is not None:
86
+ language = get_language_codes(languages=language)[0]
112
87
 
113
88
  # Map languages to the closest language (first 2 letters matching)
114
89
  def map_language(language, language_list):
@@ -152,7 +127,7 @@ def run_model_bpa(
152
127
  def translate_using_po(rule_file):
153
128
  current_dir = os.path.dirname(os.path.abspath(__file__))
154
129
  translation_file = (
155
- f"{current_dir}/_bpa_translation/_translations_{language}.po"
130
+ f"{current_dir}/_bpa_translation/_model/_translations_{language}.po"
156
131
  )
157
132
  for c in ["Category", "Description", "Rule Name"]:
158
133
  po = polib.pofile(translation_file)
@@ -530,7 +505,8 @@ def run_model_bpa(
530
505
  content_html += f'<td>{row["Rule Name"]}</td>'
531
506
  content_html += f'<td>{row["Object Type"]}</td>'
532
507
  content_html += f'<td>{row["Object Name"]}</td>'
533
- content_html += f'<td>{row["Severity"]}</td>'
508
+ content_html += f'<td style="text-align: center;">{row["Severity"]}</td>'
509
+ # content_html += f'<td>{row["Severity"]}</td>'
534
510
  content_html += "</tr>"
535
511
  content_html += "</table>"
536
512
 
@@ -0,0 +1,143 @@
1
+ import sempy.fabric as fabric
2
+ import pandas as pd
3
+ import sempy_labs._icons as icons
4
+ from typing import Optional
5
+ import base64
6
+ import requests
7
+ from sempy_labs._helper_functions import (
8
+ resolve_workspace_name_and_id,
9
+ lro,
10
+ _decode_b64,
11
+ )
12
+ from sempy.fabric.exceptions import FabricHTTPException
13
+
14
+
15
def get_notebook_definition(
    notebook_name: str, workspace: Optional[str] = None, decode: bool = True
):
    """
    Obtains the notebook definition.

    Parameters
    ----------
    notebook_name : str
        The name of the notebook.
    workspace : str, default=None
        The name of the workspace.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    decode : bool, default=True
        If True, decodes the notebook definition file into .ipynb format.
        If False, obtains the notebook definition file in base64 format.

    Returns
    -------
    ipynb
        The notebook definition.

    Raises
    ------
    ValueError
        If the notebook does not exist in the workspace, or if the returned
        definition contains no 'notebook-content.py' part.
    """

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    dfI = fabric.list_items(workspace=workspace, type="Notebook")
    dfI_filt = dfI[dfI["Display Name"] == notebook_name]

    if len(dfI_filt) == 0:
        raise ValueError(
            f"{icons.red_dot} The '{notebook_name}' notebook does not exist within the '{workspace}' workspace."
        )

    notebook_id = dfI_filt["Id"].iloc[0]
    client = fabric.FabricRestClient()
    # getDefinition is a long-running operation; lro() polls it to completion.
    response = client.post(
        f"v1/workspaces/{workspace_id}/notebooks/{notebook_id}/getDefinition",
    )

    result = lro(client, response).json()
    df_items = pd.json_normalize(result["definition"]["parts"])
    df_items_filt = df_items[df_items["path"] == "notebook-content.py"]

    # Fail with a clear message instead of an opaque IndexError when the
    # definition unexpectedly lacks the notebook content part.
    if len(df_items_filt) == 0:
        raise ValueError(
            f"{icons.red_dot} The definition of the '{notebook_name}' notebook does not contain a 'notebook-content.py' part."
        )

    payload = df_items_filt["payload"].iloc[0]

    if decode:
        result = _decode_b64(payload)
    else:
        result = payload

    return result
66
+
67
+
68
def import_notebook_from_web(
    notebook_name: str,
    url: str,
    description: Optional[str] = None,
    workspace: Optional[str] = None,
):
    """
    Creates a new notebook within a workspace based on a Jupyter notebook hosted in the web.

    Note: When specifying a notebook from GitHub, please use the raw file path. Note that if the non-raw file path is specified, the url will be
    converted to the raw URL as the raw URL is needed to obtain the notebook content.

    Parameters
    ----------
    notebook_name : str
        The name of the notebook to be created.
    url : str
        The url of the Jupyter Notebook (.ipynb)
    description : str, default=None
        The description of the notebook.
        Defaults to None which does not place a description.
    workspace : str, default=None
        The name of the workspace.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Raises
    ------
    ValueError
        If a notebook with the same name already exists in the workspace.
    FabricHTTPException
        If the notebook content cannot be downloaded from the given url.
    """

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
    client = fabric.FabricRestClient()
    dfI = fabric.list_items(workspace=workspace, type="Notebook")
    dfI_filt = dfI[dfI["Display Name"] == notebook_name]
    if len(dfI_filt) > 0:
        raise ValueError(
            f"{icons.red_dot} The '{notebook_name}' notebook already exists within the '{workspace}' workspace."
        )

    # Fix links to go to the raw github file
    starting_text = "https://github.com/"
    starting_text_len = len(starting_text)
    if url.startswith(starting_text):
        url = f"https://raw.githubusercontent.com/{url[starting_text_len:]}".replace(
            "/blob/", "/"
        )

    response = requests.get(url)
    if response.status_code != 200:
        raise FabricHTTPException(response)
    file_content = response.content
    # b64encode returns bytes, which json serialization rejects; the Fabric
    # API expects the InlineBase64 payload as a string, so decode to str.
    notebook_payload = base64.b64encode(file_content).decode("utf-8")

    request_body = {
        "displayName": notebook_name,
        "definition": {
            "format": "ipynb",
            "parts": [
                {
                    "path": "notebook-content.py",
                    "payload": notebook_payload,
                    "payloadType": "InlineBase64",
                }
            ],
        },
    }
    if description is not None:
        request_body["description"] = description

    response = client.post(f"v1/workspaces/{workspace_id}/notebooks", json=request_body)

    # Notebook creation may complete synchronously (201) or as an LRO (202).
    lro(client, response, status_codes=[201, 202])

    print(
        f"{icons.green_dot} The '{notebook_name}' notebook was created within the '{workspace}' workspace."
    )
@@ -293,13 +293,24 @@ def set_semantic_model_storage_format(
293
293
  f"{icons.red_dot} Invalid storage format value. Valid options: {storageFormats}."
294
294
  )
295
295
 
296
+ dfL = list_qso_settings(dataset=dataset, workspace=workspace)
297
+ current_storage_format = dfL["Storage Mode"].iloc[0]
298
+
299
+ if current_storage_format == storage_format:
300
+ print(
301
+ f"{icons.info} The '{dataset}' semantic model within the '{workspace}' workspace is already set to '{storage_format.lower()}' storage format."
302
+ )
303
+ return
304
+
296
305
  client = fabric.PowerBIRestClient()
297
306
  response = client.patch(
298
307
  f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}", json=request_body
299
308
  )
300
309
  if response.status_code != 200:
301
310
  raise FabricHTTPException(response)
302
- print(f"{icons.green_dot} Semantic model storage format set to '{storage_format}'.")
311
+ print(
312
+ f"{icons.green_dot} The semantic model storage format for the '{dataset}' semantic model within the '{workspace}' workspace has been set to '{storage_format}'."
313
+ )
303
314
 
304
315
 
305
316
  def list_qso_settings(
@@ -328,7 +339,6 @@ def list_qso_settings(
328
339
  if dataset is not None:
329
340
  dataset_id = resolve_dataset_id(dataset, workspace)
330
341
 
331
- workspace_id = fabric.get_workspace_id()
332
342
  df = pd.DataFrame(
333
343
  columns=[
334
344
  "Dataset Id",
@@ -383,10 +393,6 @@ def set_workspace_default_storage_format(
383
393
  The Fabric workspace name.
384
394
  Defaults to None which resolves to the workspace of the attached lakehouse
385
395
  or if no lakehouse attached, resolves to the workspace of the notebook.
386
-
387
- Returns
388
- -------
389
-
390
396
  """
391
397
 
392
398
  # https://learn.microsoft.com/en-us/rest/api/power-bi/groups/update-group#defaultdatasetstorageformat
@@ -396,19 +402,35 @@ def set_workspace_default_storage_format(
396
402
  storage_format = storage_format.capitalize()
397
403
 
398
404
  if storage_format not in storageFormats:
399
- print(
405
+ raise ValueError(
400
406
  f"{icons.red_dot} Invalid storage format. Please choose from these options: {storageFormats}."
401
407
  )
402
408
 
403
409
  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
404
410
 
405
- request_body = {"name": workspace, "defaultDatasetStorageFormat": storage_format}
411
+ # Check current storage format
412
+ dfW = fabric.list_workspaces(filter=f"name eq '{workspace}'")
413
+ if len(dfW) == 0:
414
+ raise ValueError()
415
+ current_storage_format = dfW["Default Dataset Storage Format"].iloc[0]
416
+
417
+ if current_storage_format == storage_format:
418
+ print(
419
+ f"{icons.info} The '{workspace}' is already set to a default storage format of '{current_storage_format}'."
420
+ )
421
+ return
422
+
423
+ request_body = {
424
+ "name": workspace,
425
+ "defaultDatasetStorageFormat": storage_format,
426
+ }
406
427
 
407
428
  client = fabric.PowerBIRestClient()
408
429
  response = client.patch(f"/v1.0/myorg/groups/{workspace_id}", json=request_body)
409
430
 
410
431
  if response.status_code != 200:
411
432
  raise FabricHTTPException(response)
433
+
412
434
  print(
413
435
  f"{icons.green_dot} The default storage format for the '{workspace}' workspace has been updated to '{storage_format}."
414
436
  )