semantic-link-labs 0.4.1__py3-none-any.whl → 0.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic. Click here for more details.

Files changed (53)
  1. {semantic_link_labs-0.4.1.dist-info → semantic_link_labs-0.5.0.dist-info}/METADATA +1 -1
  2. semantic_link_labs-0.5.0.dist-info/RECORD +53 -0
  3. {semantic_link_labs-0.4.1.dist-info → semantic_link_labs-0.5.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +51 -27
  5. sempy_labs/_ai.py +32 -51
  6. sempy_labs/_clear_cache.py +2 -3
  7. sempy_labs/_connections.py +39 -38
  8. sempy_labs/_dax.py +5 -9
  9. sempy_labs/_generate_semantic_model.py +15 -21
  10. sempy_labs/_helper_functions.py +20 -25
  11. sempy_labs/_icons.py +6 -0
  12. sempy_labs/_list_functions.py +1172 -392
  13. sempy_labs/_model_auto_build.py +3 -5
  14. sempy_labs/_model_bpa.py +20 -24
  15. sempy_labs/_model_dependencies.py +7 -14
  16. sempy_labs/_one_lake_integration.py +14 -24
  17. sempy_labs/_query_scale_out.py +13 -31
  18. sempy_labs/_refresh_semantic_model.py +8 -18
  19. sempy_labs/_translations.py +5 -5
  20. sempy_labs/_vertipaq.py +11 -18
  21. sempy_labs/directlake/_directlake_schema_compare.py +11 -15
  22. sempy_labs/directlake/_directlake_schema_sync.py +35 -40
  23. sempy_labs/directlake/_fallback.py +3 -7
  24. sempy_labs/directlake/_get_directlake_lakehouse.py +3 -4
  25. sempy_labs/directlake/_get_shared_expression.py +5 -11
  26. sempy_labs/directlake/_guardrails.py +5 -7
  27. sempy_labs/directlake/_list_directlake_model_calc_tables.py +28 -26
  28. sempy_labs/directlake/_show_unsupported_directlake_objects.py +3 -4
  29. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +11 -16
  30. sempy_labs/directlake/_update_directlake_partition_entity.py +25 -15
  31. sempy_labs/directlake/_warm_cache.py +10 -15
  32. sempy_labs/lakehouse/__init__.py +0 -2
  33. sempy_labs/lakehouse/_get_lakehouse_columns.py +4 -3
  34. sempy_labs/lakehouse/_get_lakehouse_tables.py +12 -11
  35. sempy_labs/lakehouse/_lakehouse.py +6 -7
  36. sempy_labs/lakehouse/_shortcuts.py +10 -111
  37. sempy_labs/migration/__init__.py +4 -2
  38. sempy_labs/migration/_create_pqt_file.py +5 -14
  39. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +7 -7
  40. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +4 -4
  41. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +3 -8
  42. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +6 -6
  43. sempy_labs/migration/_migration_validation.py +5 -164
  44. sempy_labs/migration/_refresh_calc_tables.py +5 -5
  45. sempy_labs/report/__init__.py +2 -2
  46. sempy_labs/report/_generate_report.py +14 -19
  47. sempy_labs/report/_report_functions.py +41 -83
  48. sempy_labs/report/_report_rebind.py +43 -44
  49. sempy_labs/tom/__init__.py +6 -0
  50. sempy_labs/{_tom.py → tom/_model.py} +274 -337
  51. semantic_link_labs-0.4.1.dist-info/RECORD +0 -52
  52. {semantic_link_labs-0.4.1.dist-info → semantic_link_labs-0.5.0.dist-info}/LICENSE +0 -0
  53. {semantic_link_labs-0.4.1.dist-info → semantic_link_labs-0.5.0.dist-info}/top_level.txt +0 -0
sempy_labs/_dax.py CHANGED
@@ -7,7 +7,7 @@ from sempy._utils._log import log
7
7
 
8
8
 
9
9
  @log
10
- def run_dax(
10
+ def evaluate_dax_impersonation(
11
11
  dataset: str,
12
12
  dax_query: str,
13
13
  user_name: Optional[str] = None,
@@ -27,7 +27,6 @@ def run_dax(
27
27
  The DAX query.
28
28
  user_name : str
29
29
  The user name (i.e. hello@goodbye.com).
30
- Defaults to None which resolves to no user impersonation.
31
30
  workspace : str, default=None
32
31
  The Fabric workspace name.
33
32
  Defaults to None which resolves to the workspace of the attached lakehouse
@@ -49,13 +48,10 @@ def run_dax(
49
48
 
50
49
  dataset_id = resolve_dataset_id(dataset=dataset, workspace=workspace)
51
50
 
52
- if user_name is None:
53
- request_body = {"queries": [{"query": dax_query}]}
54
- else:
55
- request_body = {
56
- "queries": [{"query": dax_query}],
57
- "impersonatedUserName": user_name,
58
- }
51
+ request_body = {
52
+ "queries": [{"query": dax_query}],
53
+ "impersonatedUserName": user_name
54
+ }
59
55
 
60
56
  client = fabric.PowerBIRestClient()
61
57
  response = client.post(
@@ -23,22 +23,21 @@ def create_blank_semantic_model(
23
23
  ----------
24
24
  dataset : str
25
25
  Name of the semantic model.
26
- compatibility_level : int
26
+ compatibility_level : int, default=1605
27
27
  The compatibility level of the semantic model.
28
- Defaults to 1605.
29
28
  workspace : str, default=None
30
29
  The Fabric workspace name.
31
30
  Defaults to None which resolves to the workspace of the attached lakehouse
32
31
  or if no lakehouse attached, resolves to the workspace of the notebook.
33
32
  """
34
33
 
35
- if workspace == None:
36
- workspace_id = fabric.get_workspace_id()
37
- workspace = fabric.resolve_workspace_name(workspace_id)
34
+ if workspace is None:
35
+ workspace = fabric.resolve_workspace_name()
38
36
 
39
- if compatibility_level < 1500:
40
- print(f"{icons.red_dot} Compatiblity level must be at least 1500.")
41
- return
37
+ min_compat = 1500
38
+
39
+ if compatibility_level < min_compat:
40
+ raise ValueError(f"{icons.red_dot} Compatiblity level must be at least {min_compat}.")
42
41
 
43
42
  tmsl = f"""
44
43
  {{
@@ -66,7 +65,7 @@ def create_blank_semantic_model(
66
65
 
67
66
 
68
67
  def create_semantic_model_from_bim(
69
- dataset: str, bim_file: str, workspace: Optional[str] = None
68
+ dataset: str, bim_file: dict, workspace: Optional[str] = None
70
69
  ):
71
70
  """
72
71
  Creates a new semantic model based on a Model.bim file.
@@ -75,7 +74,7 @@ def create_semantic_model_from_bim(
75
74
  ----------
76
75
  dataset : str
77
76
  Name of the semantic model.
78
- bim_file : str
77
+ bim_file : dict
79
78
  The model.bim file.
80
79
  workspace : str, default=None
81
80
  The Fabric workspace name.
@@ -91,10 +90,7 @@ def create_semantic_model_from_bim(
91
90
  dfI_filt = dfI[(dfI["Display Name"] == dataset)]
92
91
 
93
92
  if len(dfI_filt) > 0:
94
- print(
95
- f"WARNING: '{dataset}' already exists as a semantic model in the '{workspace}' workspace."
96
- )
97
- return
93
+ raise ValueError(f"{icons.red_dot} '{dataset}' already exists as a semantic model in the '{workspace}' workspace.")
98
94
 
99
95
  client = fabric.FabricRestClient()
100
96
  defPBIDataset = {"version": "1.0", "settings": {}}
@@ -132,7 +128,7 @@ def create_semantic_model_from_bim(
132
128
 
133
129
  if response.status_code == 201:
134
130
  print(
135
- f"The '{dataset}' semantic model has been created within the '{workspace}' workspace."
131
+ f"{icons.green_dot} The '{dataset}' semantic model has been created within the '{workspace}' workspace."
136
132
  )
137
133
  print(response.json())
138
134
  elif response.status_code == 202:
@@ -145,7 +141,7 @@ def create_semantic_model_from_bim(
145
141
  response_body = json.loads(response.content)
146
142
  response = client.get(f"/v1/operations/{operationId}/result")
147
143
  print(
148
- f"The '{dataset}' semantic model has been created within the '{workspace}' workspace."
144
+ f"{icons.green_dot} The '{dataset}' semantic model has been created within the '{workspace}' workspace."
149
145
  )
150
146
  print(response.json())
151
147
 
@@ -179,11 +175,9 @@ def deploy_semantic_model(
179
175
 
180
176
  """
181
177
 
182
- if workspace == None:
183
- workspace_id = fabric.get_workspace_id()
184
- workspace = fabric.resolve_workspace_name(workspace_id)
178
+ workspace = fabric.resolve_workspace_name(workspace)
185
179
 
186
- if new_dataset_workspace == None:
180
+ if new_dataset_workspace is None:
187
181
  new_dataset_workspace = workspace
188
182
 
189
183
  if new_dataset is None:
@@ -258,7 +252,7 @@ def get_semantic_model_bim(
258
252
 
259
253
  if save_to_file_name is not None:
260
254
  lakeAttach = lakehouse_attached()
261
- if lakeAttach == False:
255
+ if lakeAttach is False:
262
256
  print(
263
257
  f"In order to save the model.bim file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
264
258
  )
@@ -1,3 +1,4 @@
1
+ import sempy
1
2
  import sempy.fabric as fabric
2
3
  import re
3
4
  import pandas as pd
@@ -100,7 +101,7 @@ def resolve_report_id(report: str, workspace: Optional[str] = None):
100
101
  The ID of the Power BI report.
101
102
  """
102
103
 
103
- if workspace == None:
104
+ if workspace is None:
104
105
  workspace_id = fabric.get_workspace_id()
105
106
  workspace = fabric.resolve_workspace_name(workspace_id)
106
107
 
@@ -128,7 +129,7 @@ def resolve_report_name(report_id: UUID, workspace: Optional[str] = None):
128
129
  The name of the Power BI report.
129
130
  """
130
131
 
131
- if workspace == None:
132
+ if workspace is None:
132
133
  workspace_id = fabric.get_workspace_id()
133
134
  workspace = fabric.resolve_workspace_name(workspace_id)
134
135
 
@@ -158,7 +159,7 @@ def resolve_dataset_id(dataset: str, workspace: Optional[str] = None):
158
159
  The ID of the semantic model.
159
160
  """
160
161
 
161
- if workspace == None:
162
+ if workspace is None:
162
163
  workspace_id = fabric.get_workspace_id()
163
164
  workspace = fabric.resolve_workspace_name(workspace_id)
164
165
 
@@ -188,7 +189,7 @@ def resolve_dataset_name(dataset_id: UUID, workspace: Optional[str] = None):
188
189
  The name of the semantic model.
189
190
  """
190
191
 
191
- if workspace == None:
192
+ if workspace is None:
192
193
  workspace_id = fabric.get_workspace_id()
193
194
  workspace = fabric.resolve_workspace_name(workspace_id)
194
195
 
@@ -199,14 +200,15 @@ def resolve_dataset_name(dataset_id: UUID, workspace: Optional[str] = None):
199
200
  return obj
200
201
 
201
202
 
202
- def resolve_lakehouse_name(lakehouse_id: UUID, workspace: Optional[str] = None):
203
+ def resolve_lakehouse_name(lakehouse_id: Optional[UUID] = None, workspace: Optional[str] = None):
203
204
  """
204
205
  Obtains the name of the Fabric lakehouse.
205
206
 
206
207
  Parameters
207
208
  ----------
208
- lakehouse_id : UUID
209
+ lakehouse_id : UUID, default=None
209
210
  The name of the Fabric lakehouse.
211
+ Defaults to None which resolves to the lakehouse attached to the notebook.
210
212
  workspace : str, default=None
211
213
  The Fabric workspace name.
212
214
  Defaults to None which resolves to the workspace of the attached lakehouse
@@ -218,9 +220,12 @@ def resolve_lakehouse_name(lakehouse_id: UUID, workspace: Optional[str] = None):
218
220
  The name of the Fabric lakehouse.
219
221
  """
220
222
 
221
- if workspace == None:
223
+ if workspace is None:
222
224
  workspace_id = fabric.get_workspace_id()
223
225
  workspace = fabric.resolve_workspace_name(workspace_id)
226
+
227
+ if lakehouse_id is None:
228
+ lakehouse_id = fabric.get_lakehouse_id()
224
229
 
225
230
  obj = fabric.resolve_item_name(
226
231
  item_id=lakehouse_id, type="Lakehouse", workspace=workspace
@@ -248,7 +253,7 @@ def resolve_lakehouse_id(lakehouse: str, workspace: Optional[str] = None):
248
253
  The ID of the Fabric lakehouse.
249
254
  """
250
255
 
251
- if workspace == None:
256
+ if workspace is None:
252
257
  workspace_id = fabric.get_workspace_id()
253
258
  workspace = fabric.resolve_workspace_name(workspace_id)
254
259
 
@@ -278,7 +283,7 @@ def get_direct_lake_sql_endpoint(dataset: str, workspace: Optional[str] = None)
278
283
  The ID of SQL Endpoint.
279
284
  """
280
285
 
281
- if workspace == None:
286
+ if workspace is None:
282
287
  workspace_id = fabric.get_workspace_id()
283
288
  workspace = fabric.resolve_workspace_name(workspace_id)
284
289
 
@@ -286,10 +291,9 @@ def get_direct_lake_sql_endpoint(dataset: str, workspace: Optional[str] = None)
286
291
  dfP_filt = dfP[dfP["Mode"] == "DirectLake"]
287
292
 
288
293
  if len(dfP_filt) == 0:
289
- print(
294
+ raise ValueError(
290
295
  f"The '{dataset}' semantic model in the '{workspace}' workspace is not in Direct Lake mode."
291
296
  )
292
- return
293
297
 
294
298
  dfE = fabric.list_expressions(dataset=dataset, workspace=workspace)
295
299
  dfE_filt = dfE[dfE["Name"] == "DatabaseQuery"]
@@ -416,16 +420,10 @@ def save_as_delta_table(
416
420
  write_mode = write_mode.lower()
417
421
 
418
422
  if write_mode not in writeModes:
419
- print(
420
- f"{icons.red_dot} Invalid 'write_type' parameter. Choose from one of the following values: {writeModes}."
421
- )
422
- return
423
+ raise ValueError(f"{icons.red_dot} Invalid 'write_type' parameter. Choose from one of the following values: {writeModes}.")
423
424
 
424
425
  if " " in delta_table_name:
425
- print(
426
- f"{icons.red_dot} Invalid 'delta_table_name'. Delta tables in the lakehouse cannot have spaces in their names."
427
- )
428
- return
426
+ raise ValueError(f"{icons.red_dot} Invalid 'delta_table_name'. Delta tables in the lakehouse cannot have spaces in their names.")
429
427
 
430
428
  dataframe.columns = dataframe.columns.str.replace(" ", "_")
431
429
 
@@ -472,10 +470,7 @@ def language_validate(language: str):
472
470
  elif len(df_filt2) == 1:
473
471
  lang = df_filt2["Language"].iloc[0]
474
472
  else:
475
- print(
476
- f"The '{language}' language is not a valid language code. Please refer to this link for a list of valid language codes: {url}."
477
- )
478
- return
473
+ raise ValueError(f"{icons.red_dot} The '{language}' language is not a valid language code. Please refer to this link for a list of valid language codes: {url}.")
479
474
 
480
475
  return lang
481
476
 
@@ -497,10 +492,10 @@ def resolve_workspace_name_and_id(workspace: Optional[str] = None) -> Tuple[str,
497
492
  The name and ID of the Fabric workspace.
498
493
  """
499
494
 
500
- if workspace == None:
495
+ if workspace is None:
501
496
  workspace_id = fabric.get_workspace_id()
502
497
  workspace = fabric.resolve_workspace_name(workspace_id)
503
498
  else:
504
499
  workspace_id = fabric.resolve_workspace_id(workspace)
505
500
 
506
- return workspace, workspace_id
501
+ return str(workspace), str(workspace_id)
sempy_labs/_icons.py CHANGED
@@ -2,3 +2,9 @@ green_dot = "\U0001F7E2"
2
2
  yellow_dot = "\U0001F7E1"
3
3
  red_dot = "\U0001F534"
4
4
  in_progress = "⌛"
5
+ checked = "\u2611"
6
+ unchecked = "\u2610"
7
+ start_bold = "\033[1m"
8
+ end_bold = "\033[0m"
9
+ bullet = "\u2022"
10
+ warning = "⚠️"