semantic-link-labs 0.6.0-py3-none-any.whl → 0.7.0-py3-none-any.whl

This diff shows the contents of two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.

Files changed (103)
  1. semantic_link_labs-0.7.0.dist-info/METADATA +148 -0
  2. semantic_link_labs-0.7.0.dist-info/RECORD +111 -0
  3. {semantic_link_labs-0.6.0.dist-info → semantic_link_labs-0.7.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +26 -2
  5. sempy_labs/_ai.py +3 -65
  6. sempy_labs/_bpa_translation/_translations_am-ET.po +828 -0
  7. sempy_labs/_bpa_translation/_translations_ar-AE.po +860 -0
  8. sempy_labs/_bpa_translation/_translations_cs-CZ.po +894 -0
  9. sempy_labs/_bpa_translation/_translations_da-DK.po +894 -0
  10. sempy_labs/_bpa_translation/_translations_de-DE.po +933 -0
  11. sempy_labs/_bpa_translation/_translations_el-GR.po +936 -0
  12. sempy_labs/_bpa_translation/_translations_es-ES.po +915 -0
  13. sempy_labs/_bpa_translation/_translations_fa-IR.po +883 -0
  14. sempy_labs/_bpa_translation/_translations_fr-FR.po +938 -0
  15. sempy_labs/_bpa_translation/_translations_ga-IE.po +912 -0
  16. sempy_labs/_bpa_translation/_translations_he-IL.po +855 -0
  17. sempy_labs/_bpa_translation/_translations_hi-IN.po +892 -0
  18. sempy_labs/_bpa_translation/_translations_hu-HU.po +910 -0
  19. sempy_labs/_bpa_translation/_translations_is-IS.po +887 -0
  20. sempy_labs/_bpa_translation/_translations_it-IT.po +931 -0
  21. sempy_labs/_bpa_translation/_translations_ja-JP.po +805 -0
  22. sempy_labs/_bpa_translation/_translations_nl-NL.po +924 -0
  23. sempy_labs/_bpa_translation/_translations_pl-PL.po +913 -0
  24. sempy_labs/_bpa_translation/_translations_pt-BR.po +909 -0
  25. sempy_labs/_bpa_translation/_translations_pt-PT.po +904 -0
  26. sempy_labs/_bpa_translation/_translations_ru-RU.po +909 -0
  27. sempy_labs/_bpa_translation/_translations_ta-IN.po +922 -0
  28. sempy_labs/_bpa_translation/_translations_te-IN.po +896 -0
  29. sempy_labs/_bpa_translation/_translations_th-TH.po +873 -0
  30. sempy_labs/_bpa_translation/_translations_zh-CN.po +767 -0
  31. sempy_labs/_bpa_translation/_translations_zu-ZA.po +916 -0
  32. sempy_labs/_clear_cache.py +9 -4
  33. sempy_labs/_generate_semantic_model.py +30 -56
  34. sempy_labs/_helper_functions.py +358 -14
  35. sempy_labs/_icons.py +10 -1
  36. sempy_labs/_list_functions.py +478 -237
  37. sempy_labs/_model_bpa.py +194 -18
  38. sempy_labs/_model_bpa_bulk.py +363 -0
  39. sempy_labs/_model_bpa_rules.py +4 -4
  40. sempy_labs/_model_dependencies.py +12 -10
  41. sempy_labs/_one_lake_integration.py +7 -7
  42. sempy_labs/_query_scale_out.py +45 -66
  43. sempy_labs/_refresh_semantic_model.py +7 -0
  44. sempy_labs/_translations.py +154 -1
  45. sempy_labs/_vertipaq.py +103 -90
  46. sempy_labs/directlake/__init__.py +5 -1
  47. sempy_labs/directlake/_directlake_schema_compare.py +27 -31
  48. sempy_labs/directlake/_directlake_schema_sync.py +55 -66
  49. sempy_labs/directlake/_dl_helper.py +233 -0
  50. sempy_labs/directlake/_get_directlake_lakehouse.py +6 -7
  51. sempy_labs/directlake/_get_shared_expression.py +1 -1
  52. sempy_labs/directlake/_guardrails.py +17 -13
  53. sempy_labs/directlake/_update_directlake_partition_entity.py +54 -30
  54. sempy_labs/directlake/_warm_cache.py +1 -1
  55. sempy_labs/lakehouse/_get_lakehouse_tables.py +61 -69
  56. sempy_labs/lakehouse/_lakehouse.py +3 -2
  57. sempy_labs/lakehouse/_shortcuts.py +1 -1
  58. sempy_labs/migration/_create_pqt_file.py +174 -182
  59. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +236 -268
  60. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +75 -73
  61. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +442 -426
  62. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +91 -97
  63. sempy_labs/migration/_refresh_calc_tables.py +92 -101
  64. sempy_labs/report/_BPAReportTemplate.json +232 -0
  65. sempy_labs/report/__init__.py +6 -2
  66. sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
  67. sempy_labs/report/_bpareporttemplate/.platform +11 -0
  68. sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
  69. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
  70. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
  71. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
  72. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
  73. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
  74. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
  75. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
  76. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
  77. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
  78. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
  79. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
  80. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
  81. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
  82. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
  83. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
  84. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
  85. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
  86. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
  87. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
  88. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
  89. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
  90. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
  91. sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
  92. sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
  93. sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
  94. sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
  95. sempy_labs/report/_generate_report.py +255 -139
  96. sempy_labs/report/_report_functions.py +26 -33
  97. sempy_labs/report/_report_rebind.py +31 -26
  98. sempy_labs/tom/_model.py +75 -58
  99. semantic_link_labs-0.6.0.dist-info/METADATA +0 -22
  100. semantic_link_labs-0.6.0.dist-info/RECORD +0 -54
  101. sempy_labs/directlake/_fallback.py +0 -60
  102. {semantic_link_labs-0.6.0.dist-info → semantic_link_labs-0.7.0.dist-info}/LICENSE +0 -0
  103. {semantic_link_labs-0.6.0.dist-info → semantic_link_labs-0.7.0.dist-info}/top_level.txt +0 -0
sempy_labs/_clear_cache.py CHANGED

@@ -1,5 +1,5 @@
  import sempy.fabric as fabric
- from ._helper_functions import resolve_dataset_id
+ from ._helper_functions import resolve_dataset_id, is_default_semantic_model
  from typing import Optional
  import sempy_labs._icons as icons

@@ -20,13 +20,18 @@ def clear_cache(dataset: str, workspace: Optional[str] = None):
      """

      workspace = fabric.resolve_workspace_name(workspace)
+     if is_default_semantic_model(dataset=dataset, workspace=workspace):
+         raise ValueError(
+             f"{icons.red_dot} Cannot run XMLA operations against a default semantic model. Please choose a different semantic model. "
+             "See here for more information: https://learn.microsoft.com/fabric/data-warehouse/semantic-models"
+         )

-     datasetID = resolve_dataset_id(dataset=dataset, workspace=workspace)
+     dataset_id = resolve_dataset_id(dataset=dataset, workspace=workspace)

      xmla = f"""
      <ClearCache xmlns="http://schemas.microsoft.com/analysisservices/2003/engine">
-         <Object>
-         <DatabaseID>{datasetID}</DatabaseID>
+         <Object>
+         <DatabaseID>{dataset_id}</DatabaseID>
      </Object>
      </ClearCache>
      """
sempy_labs/_generate_semantic_model.py CHANGED

@@ -1,17 +1,18 @@
  import sempy.fabric as fabric
  import pandas as pd
  import json
- import base64
- import time
  import os
  from typing import Optional
  from sempy_labs._helper_functions import (
      resolve_lakehouse_name,
      resolve_workspace_name_and_id,
+     resolve_dataset_id,
+     _conv_b64,
+     _decode_b64,
+     lro,
  )
  from sempy_labs.lakehouse._lakehouse import lakehouse_attached
  import sempy_labs._icons as icons
- from sempy.fabric.exceptions import FabricHTTPException


  def create_blank_semantic_model(
@@ -34,8 +35,7 @@ def create_blank_semantic_model(
      or if no lakehouse attached, resolves to the workspace of the notebook.
      """

-     if workspace is None:
-         workspace = fabric.resolve_workspace_name()
+     workspace = fabric.resolve_workspace_name(workspace)

      min_compat = 1500

@@ -89,9 +89,7 @@ def create_semantic_model_from_bim(

      (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

-     objectType = "SemanticModel"
-
-     dfI = fabric.list_items(workspace=workspace, type=objectType)
+     dfI = fabric.list_items(workspace=workspace, type="SemanticModel")
      dfI_filt = dfI[(dfI["Display Name"] == dataset)]

      if len(dfI_filt) > 0:
@@ -102,19 +100,11 @@ def create_semantic_model_from_bim(
      client = fabric.FabricRestClient()
      defPBIDataset = {"version": "1.0", "settings": {}}

-     def conv_b64(file):
-
-         loadJson = json.dumps(file)
-         f = base64.b64encode(loadJson.encode("utf-8")).decode("utf-8")
-
-         return f
-
-     payloadPBIDefinition = conv_b64(defPBIDataset)
-     payloadBim = conv_b64(bim_file)
+     payloadPBIDefinition = _conv_b64(defPBIDataset)
+     payloadBim = _conv_b64(bim_file)

      request_body = {
          "displayName": dataset,
-         "type": objectType,
          "definition": {
              "parts": [
                  {
@@ -131,26 +121,16 @@ def create_semantic_model_from_bim(
          },
      }

-     response = client.post(f"/v1/workspaces/{workspace_id}/items", json=request_body)
+     response = client.post(
+         f"/v1/workspaces/{workspace_id}/semanticModels",
+         json=request_body,
+     )

-     if response.status_code == 201:
-         print(
-             f"{icons.green_dot} The '{dataset}' semantic model has been created within the '{workspace}' workspace."
-         )
-         print(response.json())
-     elif response.status_code == 202:
-         operationId = response.headers["x-ms-operation-id"]
-         response = client.get(f"/v1/operations/{operationId}")
-         response_body = json.loads(response.content)
-         while response_body["status"] != "Succeeded":
-             time.sleep(3)
-             response = client.get(f"/v1/operations/{operationId}")
-             response_body = json.loads(response.content)
-         response = client.get(f"/v1/operations/{operationId}/result")
-         print(
-             f"{icons.green_dot} The '{dataset}' semantic model has been created within the '{workspace}' workspace."
-         )
-         print(response.json())
+     lro(client, response, status_codes=[201, 202])
+
+     print(
+         f"{icons.green_dot} The '{dataset}' semantic model has been created within the '{workspace}' workspace."
+     )


  def deploy_semantic_model(
@@ -196,11 +176,10 @@ def deploy_semantic_model(
          target_dataset = source_dataset

      if target_dataset == source_dataset and target_workspace == source_workspace:
-         print(
+         raise ValueError(
              f"{icons.red_dot} The 'dataset' and 'new_dataset' parameters have the same value. And, the 'workspace' and 'new_dataset_workspace' "
              f"parameters have the same value. At least one of these must be different. Please update the parameters."
          )
-         return

      bim = get_semantic_model_bim(dataset=source_dataset, workspace=source_workspace)

@@ -217,7 +196,7 @@ def get_semantic_model_bim(
      workspace: Optional[str] = None,
      save_to_file_name: Optional[str] = None,
      lakehouse_workspace: Optional[str] = None,
- ):
+ ) -> dict:
      """
      Extracts the Model.bim file for a given semantic model.

@@ -238,48 +217,43 @@ def get_semantic_model_bim(

      Returns
      -------
-     str
+     dict
          The Model.bim file for the semantic model.
      """

      (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
-     if lakehouse_workspace is None:
-         lakehouse_workspace = workspace

      fmt = "TMSL"
      client = fabric.FabricRestClient()
-     itemList = fabric.list_items(workspace=workspace, type="SemanticModel")
-     itemListFilt = itemList[(itemList["Display Name"] == dataset)]
-     itemId = itemListFilt["Id"].iloc[0]
+     dataset_id = resolve_dataset_id(dataset=dataset, workspace=workspace)
      response = client.post(
-         f"/v1/workspaces/{workspace_id}/items/{itemId}/getDefinition?format={fmt}",
-         lro_wait=True,
+         f"/v1/workspaces/{workspace_id}/semanticModels/{dataset_id}/getDefinition?format={fmt}",
      )
-
-     df_items = pd.json_normalize(response.json()["definition"]["parts"])
+     result = lro(client, response).json()
+     df_items = pd.json_normalize(result["definition"]["parts"])
      df_items_filt = df_items[df_items["path"] == "model.bim"]
      payload = df_items_filt["payload"].iloc[0]
-     bimFile = base64.b64decode(payload).decode("utf-8")
+     bimFile = _decode_b64(payload)
      bimJson = json.loads(bimFile)

      if save_to_file_name is not None:
-         lakeAttach = lakehouse_attached()
-         if lakeAttach is False:
+         if not lakehouse_attached():
              raise ValueError(
                  f"{icons.red_dot} In order to save the model.bim file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
              )

          lakehouse_id = fabric.get_lakehouse_id()
-         lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)
+         lake_workspace = fabric.resolve_workspace_name()
+         lakehouse = resolve_lakehouse_name(lakehouse_id, lake_workspace)
          folderPath = "/lakehouse/default/Files"
          fileExt = ".bim"
          if not save_to_file_name.endswith(fileExt):
-             save_to_file_name = save_to_file_name + fileExt
+             save_to_file_name = f"{save_to_file_name}{fileExt}"
          filePath = os.path.join(folderPath, save_to_file_name)
          with open(filePath, "w") as json_file:
              json.dump(bimJson, json_file, indent=4)
          print(
-             f"The .bim file for the '{dataset}' semantic model has been saved to the '{lakehouse}' in this location: '{filePath}'.\n\n"
+             f"{icons.green_dot} The .bim file for the '{dataset}' semantic model has been saved to the '{lakehouse}' in this location: '{filePath}'.\n\n"
          )

      return bimJson
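
Note: since get_semantic_model_bim now returns a dict (the parsed model.bim) rather than a str, its output can be passed straight to create_semantic_model_from_bim. A sketch of that round trip, with hypothetical model and workspace names, assuming both functions are re-exported from the package root:

    from sempy_labs import create_semantic_model_from_bim, get_semantic_model_bim

    # Extract the TMSL definition of one model and clone it into another workspace.
    bim = get_semantic_model_bim(dataset="AdventureWorks", workspace="Dev")
    create_semantic_model_from_bim(
        dataset="AdventureWorks", bim_file=bim, workspace="Test"
    )

This is essentially what deploy_semantic_model does internally, minus the same-name/same-workspace guard shown above.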
sempy_labs/_helper_functions.py CHANGED

@@ -1,10 +1,16 @@
  import sempy.fabric as fabric
  import re
+ import json
+ import base64
  import pandas as pd
+ from functools import wraps
+ import datetime
+ import time
  from pyspark.sql import SparkSession
- from typing import Optional, Tuple
+ from typing import Optional, Tuple, List
  from uuid import UUID
  import sempy_labs._icons as icons
+ from sempy.fabric.exceptions import FabricHTTPException


  def create_abfss_path(
@@ -284,26 +290,34 @@ def get_direct_lake_sql_endpoint(dataset: str, workspace: Optional[str] = None)
          The ID of SQL Endpoint.
      """

+     from sempy_labs.tom import connect_semantic_model
+
      if workspace is None:
          workspace_id = fabric.get_workspace_id()
          workspace = fabric.resolve_workspace_name(workspace_id)

-     dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
-     dfP_filt = dfP[dfP["Mode"] == "DirectLake"]
+     # dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
+     # dfP_filt = dfP[dfP["Mode"] == "DirectLake"]

-     if len(dfP_filt) == 0:
-         raise ValueError(
-             f"The '{dataset}' semantic model in the '{workspace}' workspace is not in Direct Lake mode."
-         )
+     # if len(dfP_filt) == 0:
+     #     raise ValueError(
+     #         f"The '{dataset}' semantic model in the '{workspace}' workspace is not in Direct Lake mode."
+     #     )

-     dfE = fabric.list_expressions(dataset=dataset, workspace=workspace)
-     dfE_filt = dfE[dfE["Name"] == "DatabaseQuery"]
-     expr = dfE_filt["Expression"].iloc[0]
+     with connect_semantic_model(
+         dataset=dataset, readonly=True, workspace=workspace
+     ) as tom:
+         sqlEndpointId = None
+         for e in tom.model.Expressions:
+             if e.Name == "DatabaseQuery":
+                 expr = e.Expression
+                 matches = re.findall(r'"([^"]+)"', expr)
+                 sqlEndpointId = matches[1]

-     matches = re.findall(r'"([^"]*)"', expr)
-     sqlEndpointId = matches[1]
+         if sqlEndpointId is None:
+             raise ValueError("SQL Endpoint not found.")

-     return sqlEndpointId
+         return sqlEndpointId


  def generate_embedded_filter(filter: str):
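
Note: the rewritten get_direct_lake_sql_endpoint now reads the SQL endpoint Id out of the model's DatabaseQuery M expression (the second quoted string) via the TOM wrapper, instead of going through fabric.list_expressions. A short sketch with hypothetical names (this is a private helper, so the import path may change):

    from sempy_labs._helper_functions import get_direct_lake_sql_endpoint

    # Raises ValueError if the model has no DatabaseQuery expression.
    sql_endpoint_id = get_direct_lake_sql_endpoint(
        dataset="Sales Model", workspace="Analytics"
    )
    print(sql_endpoint_id)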
@@ -375,6 +389,7 @@ def save_as_delta_table(
      dataframe,
      delta_table_name: str,
      write_mode: str,
+     merge_schema: Optional[bool] = False,
      lakehouse: Optional[str] = None,
      workspace: Optional[str] = None,
  ):
@@ -389,6 +404,8 @@
          The name of the delta table.
      write_mode : str
          The write mode for the save operation. Options: 'append', 'overwrite'.
+     merge_schema : bool, default=False
+         Merges the schema of the dataframe into the delta table.
      lakehouse : str, default=None
          The Fabric lakehouse used by the Direct Lake semantic model.
          Defaults to None which resolves to the lakehouse attached to the notebook.
@@ -440,7 +457,13 @@
          lakehouse_workspace_id=workspace_id,
          delta_table_name=delta_table_name,
      )
-     spark_df.write.mode(write_mode).format("delta").save(filePath)
+
+     if merge_schema:
+         spark_df.write.mode(write_mode).format("delta").option(
+             "mergeSchema", "true"
+         ).save(filePath)
+     else:
+         spark_df.write.mode(write_mode).format("delta").save(filePath)
      print(
          f"{icons.green_dot} The dataframe has been saved as the '{delta_table_name}' table in the '{lakehouse}' lakehouse within the '{workspace}' workspace."
      )
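
Note: the new merge_schema flag maps to Delta Lake's mergeSchema write option, letting an append add columns that do not yet exist in the target table. A minimal sketch, assuming a notebook with an attached lakehouse (so lakehouse and workspace can default to None) and that save_as_delta_table remains re-exported from the package root; the table name and data are hypothetical:

    import pandas as pd
    from sempy_labs import save_as_delta_table

    df = pd.DataFrame({"id": [1, 2], "region": ["East", "West"]})
    save_as_delta_table(
        dataframe=df,
        delta_table_name="sales_regions",
        write_mode="append",
        merge_schema=True,  # evolve the table schema instead of failing on new columns
    )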
@@ -506,3 +529,324 @@ def resolve_workspace_name_and_id(workspace: Optional[str] = None) -> Tuple[str,
      workspace_id = fabric.resolve_workspace_id(workspace)

      return str(workspace), str(workspace_id)
+
+
+ def _extract_json(dataframe: pd.DataFrame) -> dict:
+
+     payload = dataframe["payload"].iloc[0]
+     json_file = _decode_b64(payload)
+
+     return json.loads(json_file)
+
+
+ def _conv_b64(file):
+
+     loadJson = json.dumps(file)
+     f = base64.b64encode(loadJson.encode("utf-8")).decode("utf-8")
+
+     return f
+
+
+ def _decode_b64(file, format: Optional[str] = "utf-8"):
+
+     result = base64.b64decode(file).decode(format)
+
+     return result
+
+
+ def is_default_semantic_model(dataset: str, workspace: Optional[str] = None) -> bool:
+     """
+     Identifies whether a semantic model is a default semantic model.
+
+     Parameters
+     ----------
+     dataset : str
+         The name of the semantic model.
+     workspace : str, default=None
+         The Fabric workspace name.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     bool
+         A True/False value indicating whether the semantic model is a default semantic model.
+     """
+
+     workspace = fabric.resolve_workspace_name(workspace)
+
+     dfI = fabric.list_items(workspace=workspace)
+     filtered_df = dfI.groupby("Display Name").filter(
+         lambda x: set(["Warehouse", "SemanticModel"]).issubset(set(x["Type"]))
+         or set(["Lakehouse", "SemanticModel"]).issubset(set(x["Type"]))
+     )
+     default_semantic_models = filtered_df["Display Name"].unique().tolist()
+
+     return dataset in default_semantic_models
+
+
+ def resolve_item_type(item_id: UUID, workspace: Optional[str] = None) -> str:
+     """
+     Obtains the item type for a given Fabric Item Id within a Fabric workspace.
+
+     Parameters
+     ----------
+     item_id : UUID
+         The item/artifact Id.
+     workspace : str, default=None
+         The Fabric workspace name.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     str
+         The item type for the item Id.
+     """
+
+     workspace = fabric.resolve_workspace_name(workspace)
+     dfI = fabric.list_items(workspace=workspace)
+     dfI_filt = dfI[dfI["Id"] == item_id]
+
+     if len(dfI_filt) == 0:
+         raise ValueError(
+             f"Invalid 'item_id' parameter. The '{item_id}' item was not found in the '{workspace}' workspace."
+         )
+     item_type = dfI_filt["Type"].iloc[0]
+
+     return item_type
+
+
+ def resolve_dataset_from_report(
+     report: str, workspace: Optional[str] = None
+ ) -> Tuple[UUID, str, UUID, str]:
+     """
+     Obtains the basic semantic model properties from which the report's data is sourced.
+
+     Parameters
+     ----------
+     report : str
+         The name of the Power BI report.
+     workspace : str, default=None
+         The Fabric workspace name.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     Tuple[UUID, str, UUID, str]
+         The semantic model UUID, semantic model name, semantic model workspace UUID, semantic model workspace name.
+     """
+
+     workspace = fabric.resolve_workspace_name(workspace)
+
+     dfR = fabric.list_reports(workspace=workspace)
+     dfR_filt = dfR[dfR["Name"] == report]
+     if len(dfR_filt) == 0:
+         raise ValueError(
+             f"{icons.red_dot} The '{report}' report does not exist within the '{workspace}' workspace."
+         )
+     dataset_id = dfR_filt["Dataset Id"].iloc[0]
+     dataset_workspace_id = dfR_filt["Dataset Workspace Id"].iloc[0]
+     dataset_workspace = fabric.resolve_workspace_name(dataset_workspace_id)
+     dataset_name = resolve_dataset_name(
+         dataset_id=dataset_id, workspace=dataset_workspace
+     )
+
+     return dataset_id, dataset_name, dataset_workspace_id, dataset_workspace
+
+
+ def _add_part(target_dict, path, payload):
+
+     part = {"path": path, "payload": payload, "payloadType": "InlineBase64"}
+
+     target_dict["definition"]["parts"].append(part)
+
+
+ def resolve_workspace_capacity(workspace: Optional[str] = None) -> Tuple[UUID, str]:
+     """
+     Obtains the capacity Id and capacity name for a given workspace.
+
+     Parameters
+     ----------
+     workspace : str, default=None
+         The Fabric workspace name.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     Tuple[UUID, str]
+         Capacity Id; capacity name.
+     """
+
+     workspace = fabric.resolve_workspace_name(workspace)
+     dfW = fabric.list_workspaces(filter=f"name eq '{workspace}'")
+     capacity_id = dfW["Capacity Id"].iloc[0]
+     dfC = fabric.list_capacities()
+     dfC_filt = dfC[dfC["Id"] == capacity_id]
+     if len(dfC_filt) == 1:
+         capacity_name = dfC_filt["Display Name"].iloc[0]
+     else:
+         capacity_name = None
+
+     return capacity_id, capacity_name
+
+
+ def get_capacity_id(workspace: Optional[str] = None) -> UUID:
+     """
+     Obtains the Capacity Id for a given workspace.
+
+     Parameters
+     ----------
+     workspace : str, default=None
+         The Fabric workspace name.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     UUID
+         The capacity Id.
+     """
+
+     workspace = fabric.resolve_workspace_name(workspace)
+     dfW = fabric.list_workspaces(filter=f"name eq '{workspace}'")
+     if len(dfW) == 0:
+         raise ValueError(f"{icons.red_dot} The '{workspace}' workspace does not exist.")
+
+     return dfW["Capacity Id"].iloc[0]
+
+
+ def get_capacity_name(workspace: Optional[str] = None) -> str:
+     """
+     Obtains the capacity name for a given workspace.
+
+     Parameters
+     ----------
+     workspace : str, default=None
+         The Fabric workspace name.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     str
+         The capacity name.
+     """
+
+     capacity_id = get_capacity_id(workspace)
+     dfC = fabric.list_capacities()
+     dfC_filt = dfC[dfC["Id"] == capacity_id]
+     if len(dfC_filt) == 0:
+         raise ValueError(
+             f"{icons.red_dot} The '{capacity_id}' capacity Id does not exist."
+         )
+
+     return dfC_filt["Display Name"].iloc[0]
+
+
+ def resolve_capacity_name(capacity_id: Optional[UUID] = None) -> str:
+     """
+     Obtains the capacity name for a given capacity Id.
+
+     Parameters
+     ----------
+     capacity_id : UUID, default=None
+         The capacity Id.
+         Defaults to None which resolves to the capacity name of the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the capacity name of the workspace of the notebook.
+
+     Returns
+     -------
+     str
+         The capacity name.
+     """
+
+     if capacity_id is None:
+         return get_capacity_name()
+
+     dfC = fabric.list_capacities()
+     dfC_filt = dfC[dfC["Id"] == capacity_id]
+
+     if len(dfC_filt) == 0:
+         raise ValueError(
+             f"{icons.red_dot} The '{capacity_id}' capacity Id does not exist."
+         )
+
+     return dfC_filt["Display Name"].iloc[0]
+
+
+ def retry(sleep_time: int, timeout_error_message: str):
+     def decorator(func):
+         @wraps(func)
+         def wrapper(*args, **kwargs):
+             start_time = datetime.datetime.now()
+             timeout = datetime.timedelta(minutes=1)
+             while datetime.datetime.now() - start_time <= timeout:
+                 try:
+                     return func(*args, **kwargs)
+                 except Exception:
+                     time.sleep(sleep_time)
+             raise TimeoutError(timeout_error_message)
+
+         return wrapper
+
+     return decorator
+
+
+ def lro(
+     client,
+     response,
+     status_codes: Optional[List[int]] = [200, 202],
+     sleep_time: Optional[int] = 1,
+     return_status_code: Optional[bool] = False,
+ ):
+
+     if response.status_code not in status_codes:
+         raise FabricHTTPException(response)
+     if response.status_code == status_codes[0]:
+         if return_status_code:
+             result = response.status_code
+         else:
+             result = response
+     if response.status_code == status_codes[1]:
+         operationId = response.headers["x-ms-operation-id"]
+         response = client.get(f"/v1/operations/{operationId}")
+         response_body = json.loads(response.content)
+         while response_body["status"] not in ["Succeeded", "Failed"]:
+             time.sleep(sleep_time)
+             response = client.get(f"/v1/operations/{operationId}")
+             response_body = json.loads(response.content)
+         if response_body["status"] != "Succeeded":
+             raise FabricHTTPException(response)
+         if return_status_code:
+             result = response.status_code
+         else:
+             response = client.get(f"/v1/operations/{operationId}/result")
+             result = response
+
+     return result
+
+
+ def pagination(client, response):
+
+     responses = []
+     response_json = response.json()
+     responses.append(response_json)
+
+     # Check for pagination
+     continuation_token = response_json.get("continuationToken")
+     continuation_uri = response_json.get("continuationUri")
+
+     # Loop to handle pagination
+     while continuation_token is not None:
+         response = client.get(continuation_uri)
+         response_json = response.json()
+         responses.append(response_json)
+
+         # Update the continuation token and URI for the next iteration
+         continuation_token = response_json.get("continuationToken")
+         continuation_uri = response_json.get("continuationUri")
+
+     return responses
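
Note: lro and pagination centralize two Fabric REST patterns that were previously inlined at each call site: polling a 202 long-running operation via its x-ms-operation-id header, and following continuationToken/continuationUri pages. A sketch of both, with a hypothetical dataset Id (these are private helpers, so the import path may change):

    import sempy.fabric as fabric
    from sempy_labs._helper_functions import lro, pagination

    client = fabric.FabricRestClient()

    # Pagination: collect every page of a list endpoint into one list of payloads.
    pages = pagination(client, client.get("/v1/workspaces"))
    workspaces = [ws for page in pages for ws in page.get("value", [])]

    # LRO: getDefinition may answer 200 (done) or 202 (poll /v1/operations/{id}).
    workspace_id = fabric.get_workspace_id()
    dataset_id = "00000000-0000-0000-0000-000000000000"  # hypothetical model Id
    response = client.post(
        f"/v1/workspaces/{workspace_id}/semanticModels/{dataset_id}/getDefinition?format=TMSL"
    )
    definition = lro(client, response).json()["definition"]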
sempy_labs/_icons.py CHANGED

@@ -8,17 +8,26 @@ start_bold = "\033[1m"
  end_bold = "\033[0m"
  bullet = "\u2022"
  warning = "⚠️"
+ error = "\u274C"
+ info = "ℹ️"
  data_type_mapping = {
      "string": "String",
-     "bigint": "Int64",
      "int": "Int64",
+     "tinyint": "Int64",
      "smallint": "Int64",
+     "bigint": "Int64",
      "boolean": "Boolean",
      "timestamp": "DateTime",
      "date": "DateTime",
      "decimal(38,18)": "Decimal",
+     "decimal(19,4)": "Decimal",
      "double": "Double",
+     "float": "Double",
  }
  measure_icon = "\u2211"
  table_icon = "\u229E"
  column_icon = "\u229F"
+ model_bpa_name = "ModelBPA"
+ report_bpa_name = "ReportBPA"
+ severity_mapping = {warning: "Warning", error: "Error", info: "Info"}
+ special_characters = ['"', "/", '"', ":", "|", "<", ">", "*", "?", "'", "!"]
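
Note: these module-level constants back the BPA tooling elsewhere in the release: data_type_mapping translates Spark SQL column types into tabular model data types, and severity_mapping turns the emoji markers into readable labels. An illustrative lookup (the unmapped type is hypothetical):

    import sempy_labs._icons as icons

    # Map a lakehouse column type to the data type used in a semantic model.
    print(icons.data_type_mapping.get("decimal(19,4)"))  # Decimal
    print(icons.data_type_mapping.get("varchar(100)"))   # None: unmapped types need handling

    # Translate a rule's severity marker into its label.
    print(icons.severity_mapping[icons.warning])  # Warning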