semantic-link-labs 0.5.0__py3-none-any.whl → 0.6.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic. Click here for more details.

Files changed (54)
  1. {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.6.0.dist-info}/METADATA +2 -2
  2. semantic_link_labs-0.6.0.dist-info/RECORD +54 -0
  3. {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.6.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +19 -13
  5. sempy_labs/_ai.py +43 -24
  6. sempy_labs/_clear_cache.py +4 -5
  7. sempy_labs/_connections.py +77 -70
  8. sempy_labs/_dax.py +7 -9
  9. sempy_labs/_generate_semantic_model.py +55 -44
  10. sempy_labs/_helper_functions.py +13 -6
  11. sempy_labs/_icons.py +14 -0
  12. sempy_labs/_list_functions.py +491 -304
  13. sempy_labs/_model_auto_build.py +4 -3
  14. sempy_labs/_model_bpa.py +131 -1118
  15. sempy_labs/_model_bpa_rules.py +831 -0
  16. sempy_labs/_model_dependencies.py +14 -12
  17. sempy_labs/_one_lake_integration.py +11 -5
  18. sempy_labs/_query_scale_out.py +89 -81
  19. sempy_labs/_refresh_semantic_model.py +16 -10
  20. sempy_labs/_translations.py +213 -287
  21. sempy_labs/_vertipaq.py +53 -37
  22. sempy_labs/directlake/__init__.py +2 -0
  23. sempy_labs/directlake/_directlake_schema_compare.py +12 -5
  24. sempy_labs/directlake/_directlake_schema_sync.py +13 -19
  25. sempy_labs/directlake/_fallback.py +5 -3
  26. sempy_labs/directlake/_get_directlake_lakehouse.py +1 -1
  27. sempy_labs/directlake/_get_shared_expression.py +4 -2
  28. sempy_labs/directlake/_guardrails.py +3 -3
  29. sempy_labs/directlake/_list_directlake_model_calc_tables.py +17 -10
  30. sempy_labs/directlake/_show_unsupported_directlake_objects.py +3 -2
  31. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +10 -5
  32. sempy_labs/directlake/_update_directlake_partition_entity.py +132 -9
  33. sempy_labs/directlake/_warm_cache.py +6 -3
  34. sempy_labs/lakehouse/_get_lakehouse_columns.py +1 -1
  35. sempy_labs/lakehouse/_get_lakehouse_tables.py +5 -3
  36. sempy_labs/lakehouse/_lakehouse.py +2 -1
  37. sempy_labs/lakehouse/_shortcuts.py +19 -12
  38. sempy_labs/migration/__init__.py +1 -1
  39. sempy_labs/migration/_create_pqt_file.py +21 -15
  40. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +16 -13
  41. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +17 -18
  42. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +43 -40
  43. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +14 -14
  44. sempy_labs/migration/_migration_validation.py +2 -2
  45. sempy_labs/migration/_refresh_calc_tables.py +8 -5
  46. sempy_labs/report/__init__.py +2 -2
  47. sempy_labs/report/_generate_report.py +10 -5
  48. sempy_labs/report/_report_functions.py +67 -29
  49. sempy_labs/report/_report_rebind.py +9 -8
  50. sempy_labs/tom/__init__.py +1 -4
  51. sempy_labs/tom/_model.py +555 -152
  52. semantic_link_labs-0.5.0.dist-info/RECORD +0 -53
  53. {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.6.0.dist-info}/LICENSE +0 -0
  54. {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.6.0.dist-info}/top_level.txt +0 -0
@@ -1,14 +1,17 @@
1
- import sempy
2
1
  import sempy.fabric as fabric
3
2
  import pandas as pd
4
- import json, base64, time, os
5
- from typing import List, Optional, Union
3
+ import json
4
+ import base64
5
+ import time
6
+ import os
7
+ from typing import Optional
6
8
  from sempy_labs._helper_functions import (
7
9
  resolve_lakehouse_name,
8
10
  resolve_workspace_name_and_id,
9
11
  )
10
12
  from sempy_labs.lakehouse._lakehouse import lakehouse_attached
11
13
  import sempy_labs._icons as icons
14
+ from sempy.fabric.exceptions import FabricHTTPException
12
15
 
13
16
 
14
17
  def create_blank_semantic_model(
@@ -37,7 +40,9 @@ def create_blank_semantic_model(
37
40
  min_compat = 1500
38
41
 
39
42
  if compatibility_level < min_compat:
40
- raise ValueError(f"{icons.red_dot} Compatiblity level must be at least {min_compat}.")
43
+ raise ValueError(
44
+ f"{icons.red_dot} Compatiblity level must be at least {min_compat}."
45
+ )
41
46
 
42
47
  tmsl = f"""
43
48
  {{
@@ -90,7 +95,9 @@ def create_semantic_model_from_bim(
90
95
  dfI_filt = dfI[(dfI["Display Name"] == dataset)]
91
96
 
92
97
  if len(dfI_filt) > 0:
93
- raise ValueError(f"{icons.red_dot} '{dataset}' already exists as a semantic model in the '{workspace}' workspace.")
98
+ raise ValueError(
99
+ f"{icons.red_dot} '{dataset}' already exists as a semantic model in the '{workspace}' workspace."
100
+ )
94
101
 
95
102
  client = fabric.FabricRestClient()
96
103
  defPBIDataset = {"version": "1.0", "settings": {}}
@@ -147,59 +154,69 @@ def create_semantic_model_from_bim(
147
154
 
148
155
 
149
156
  def deploy_semantic_model(
150
- dataset: str,
151
- new_dataset: Optional[str] = None,
152
- workspace: Optional[str] = None,
153
- new_dataset_workspace: Optional[str] = None,
157
+ source_dataset: str,
158
+ source_workspace: Optional[str] = None,
159
+ target_dataset: Optional[str] = None,
160
+ target_workspace: Optional[str] = None,
161
+ refresh_target_dataset: Optional[bool] = True,
154
162
  ):
155
163
  """
156
164
  Deploys a semantic model based on an existing semantic model.
157
165
 
158
166
  Parameters
159
167
  ----------
160
- dataset : str
168
+ source_dataset : str
161
169
  Name of the semantic model to deploy.
162
- new_dataset: str
163
- Name of the new semantic model to be created.
164
- workspace : str, default=None
170
+ source_workspace : str, default=None
165
171
  The Fabric workspace name.
166
172
  Defaults to None which resolves to the workspace of the attached lakehouse
167
173
  or if no lakehouse attached, resolves to the workspace of the notebook.
168
- new_dataset_workspace : str, default=None
174
+ target_dataset: str
175
+ Name of the new semantic model to be created.
176
+ target_workspace : str, default=None
169
177
  The Fabric workspace name in which the new semantic model will be deployed.
170
178
  Defaults to None which resolves to the workspace of the attached lakehouse
171
179
  or if no lakehouse attached, resolves to the workspace of the notebook.
180
+ refresh_target_dataset : bool, default=True
181
+ If set to True, this will initiate a full refresh of the target semantic model in the target workspace.
172
182
 
173
183
  Returns
174
184
  -------
175
185
 
176
186
  """
177
187
 
178
- workspace = fabric.resolve_workspace_name(workspace)
188
+ from sempy_labs import refresh_semantic_model
189
+
190
+ source_workspace = fabric.resolve_workspace_name(source_workspace)
179
191
 
180
- if new_dataset_workspace is None:
181
- new_dataset_workspace = workspace
192
+ if target_workspace is None:
193
+ target_workspace = source_workspace
182
194
 
183
- if new_dataset is None:
184
- new_dataset = dataset
195
+ if target_dataset is None:
196
+ target_dataset = source_dataset
185
197
 
186
- if new_dataset == dataset and new_dataset_workspace == workspace:
198
+ if target_dataset == source_dataset and target_workspace == source_workspace:
187
199
  print(
188
- f"The 'dataset' and 'new_dataset' parameters have the same value. And, the 'workspace' and 'new_dataset_workspace' parameters have the same value. At least one of these must be different. Please update the parameters."
200
+ f"{icons.red_dot} The 'dataset' and 'new_dataset' parameters have the same value. And, the 'workspace' and 'new_dataset_workspace' "
201
+ f"parameters have the same value. At least one of these must be different. Please update the parameters."
189
202
  )
190
203
  return
191
204
 
192
- bim = get_semantic_model_bim(dataset=dataset, workspace=workspace)
205
+ bim = get_semantic_model_bim(dataset=source_dataset, workspace=source_workspace)
193
206
 
194
207
  create_semantic_model_from_bim(
195
- dataset=new_dataset, bim_file=bim, workspace=new_dataset_workspace
208
+ dataset=target_dataset, bim_file=bim, workspace=target_workspace
196
209
  )
197
210
 
211
+ if refresh_target_dataset:
212
+ refresh_semantic_model(dataset=target_dataset, workspace=target_workspace)
213
+
198
214
 
199
215
  def get_semantic_model_bim(
200
216
  dataset: str,
201
217
  workspace: Optional[str] = None,
202
218
  save_to_file_name: Optional[str] = None,
219
+ lakehouse_workspace: Optional[str] = None,
203
220
  ):
204
221
  """
205
222
  Extracts the Model.bim file for a given semantic model.
@@ -209,11 +226,15 @@ def get_semantic_model_bim(
209
226
  dataset : str
210
227
  Name of the semantic model.
211
228
  workspace : str, default=None
212
- The Fabric workspace name.
229
+ The Fabric workspace name in which the semantic model resides.
213
230
  Defaults to None which resolves to the workspace of the attached lakehouse
214
231
  or if no lakehouse attached, resolves to the workspace of the notebook.
215
232
  save_to_file_name : str, default=None
216
233
  If specified, saves the Model.bim as a file in the lakehouse attached to the notebook.
234
+ lakehouse_workspace : str, default=None
235
+ The Fabric workspace name in which the lakehouse attached to the workspace resides.
236
+ Defaults to None which resolves to the workspace of the attached lakehouse
237
+ or if no lakehouse attached, resolves to the workspace of the notebook.
217
238
 
218
239
  Returns
219
240
  -------
@@ -222,29 +243,20 @@ def get_semantic_model_bim(
222
243
  """
223
244
 
224
245
  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
246
+ if lakehouse_workspace is None:
247
+ lakehouse_workspace = workspace
225
248
 
226
- objType = "SemanticModel"
249
+ fmt = "TMSL"
227
250
  client = fabric.FabricRestClient()
228
- itemList = fabric.list_items(workspace=workspace, type=objType)
251
+ itemList = fabric.list_items(workspace=workspace, type="SemanticModel")
229
252
  itemListFilt = itemList[(itemList["Display Name"] == dataset)]
230
253
  itemId = itemListFilt["Id"].iloc[0]
231
254
  response = client.post(
232
- f"/v1/workspaces/{workspace_id}/items/{itemId}/getDefinition"
255
+ f"/v1/workspaces/{workspace_id}/items/{itemId}/getDefinition?format={fmt}",
256
+ lro_wait=True,
233
257
  )
234
258
 
235
- if response.status_code == 200:
236
- res = response.json()
237
- elif response.status_code == 202:
238
- operationId = response.headers["x-ms-operation-id"]
239
- response = client.get(f"/v1/operations/{operationId}")
240
- response_body = json.loads(response.content)
241
- while response_body["status"] != "Succeeded":
242
- time.sleep(3)
243
- response = client.get(f"/v1/operations/{operationId}")
244
- response_body = json.loads(response.content)
245
- response = client.get(f"/v1/operations/{operationId}/result")
246
- res = response.json()
247
- df_items = pd.json_normalize(res["definition"]["parts"])
259
+ df_items = pd.json_normalize(response.json()["definition"]["parts"])
248
260
  df_items_filt = df_items[df_items["path"] == "model.bim"]
249
261
  payload = df_items_filt["payload"].iloc[0]
250
262
  bimFile = base64.b64decode(payload).decode("utf-8")
@@ -253,13 +265,12 @@ def get_semantic_model_bim(
253
265
  if save_to_file_name is not None:
254
266
  lakeAttach = lakehouse_attached()
255
267
  if lakeAttach is False:
256
- print(
257
- f"In order to save the model.bim file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
268
+ raise ValueError(
269
+ f"{icons.red_dot} In order to save the model.bim file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
258
270
  )
259
- return
260
271
 
261
272
  lakehouse_id = fabric.get_lakehouse_id()
262
- lakehouse = resolve_lakehouse_name(lakehouse_id, workspace)
273
+ lakehouse = resolve_lakehouse_name(lakehouse_id, lakehouse_workspace)
263
274
  folderPath = "/lakehouse/default/Files"
264
275
  fileExt = ".bim"
265
276
  if not save_to_file_name.endswith(fileExt):
@@ -1,4 +1,3 @@
1
- import sempy
2
1
  import sempy.fabric as fabric
3
2
  import re
4
3
  import pandas as pd
@@ -200,7 +199,9 @@ def resolve_dataset_name(dataset_id: UUID, workspace: Optional[str] = None):
200
199
  return obj
201
200
 
202
201
 
203
- def resolve_lakehouse_name(lakehouse_id: Optional[UUID] = None, workspace: Optional[str] = None):
202
+ def resolve_lakehouse_name(
203
+ lakehouse_id: Optional[UUID] = None, workspace: Optional[str] = None
204
+ ):
204
205
  """
205
206
  Obtains the name of the Fabric lakehouse.
206
207
 
@@ -223,7 +224,7 @@ def resolve_lakehouse_name(lakehouse_id: Optional[UUID] = None, workspace: Optio
223
224
  if workspace is None:
224
225
  workspace_id = fabric.get_workspace_id()
225
226
  workspace = fabric.resolve_workspace_name(workspace_id)
226
-
227
+
227
228
  if lakehouse_id is None:
228
229
  lakehouse_id = fabric.get_lakehouse_id()
229
230
 
@@ -420,10 +421,14 @@ def save_as_delta_table(
420
421
  write_mode = write_mode.lower()
421
422
 
422
423
  if write_mode not in writeModes:
423
- raise ValueError(f"{icons.red_dot} Invalid 'write_type' parameter. Choose from one of the following values: {writeModes}.")
424
+ raise ValueError(
425
+ f"{icons.red_dot} Invalid 'write_type' parameter. Choose from one of the following values: {writeModes}."
426
+ )
424
427
 
425
428
  if " " in delta_table_name:
426
- raise ValueError(f"{icons.red_dot} Invalid 'delta_table_name'. Delta tables in the lakehouse cannot have spaces in their names.")
429
+ raise ValueError(
430
+ f"{icons.red_dot} Invalid 'delta_table_name'. Delta tables in the lakehouse cannot have spaces in their names."
431
+ )
427
432
 
428
433
  dataframe.columns = dataframe.columns.str.replace(" ", "_")
429
434
 
@@ -470,7 +475,9 @@ def language_validate(language: str):
470
475
  elif len(df_filt2) == 1:
471
476
  lang = df_filt2["Language"].iloc[0]
472
477
  else:
473
- raise ValueError(f"{icons.red_dot} The '{language}' language is not a valid language code. Please refer to this link for a list of valid language codes: {url}.")
478
+ raise ValueError(
479
+ f"{icons.red_dot} The '{language}' language is not a valid language code. Please refer to this link for a list of valid language codes: {url}."
480
+ )
474
481
 
475
482
  return lang
476
483
 
sempy_labs/_icons.py CHANGED
@@ -8,3 +8,17 @@ start_bold = "\033[1m"
8
8
  end_bold = "\033[0m"
9
9
  bullet = "\u2022"
10
10
  warning = "⚠️"
11
+ data_type_mapping = {
12
+ "string": "String",
13
+ "bigint": "Int64",
14
+ "int": "Int64",
15
+ "smallint": "Int64",
16
+ "boolean": "Boolean",
17
+ "timestamp": "DateTime",
18
+ "date": "DateTime",
19
+ "decimal(38,18)": "Decimal",
20
+ "double": "Double",
21
+ }
22
+ measure_icon = "\u2211"
23
+ table_icon = "\u229E"
24
+ column_icon = "\u229F"