semantic-link-labs 0.4.1__py3-none-any.whl → 0.4.2__py3-none-any.whl

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.

Potentially problematic release.

This version of semantic-link-labs might be problematic.

Files changed (52)
  1. {semantic_link_labs-0.4.1.dist-info → semantic_link_labs-0.4.2.dist-info}/METADATA +1 -1
  2. semantic_link_labs-0.4.2.dist-info/RECORD +53 -0
  3. sempy_labs/__init__.py +25 -25
  4. sempy_labs/_ai.py +28 -27
  5. sempy_labs/_clear_cache.py +2 -1
  6. sempy_labs/_dax.py +5 -9
  7. sempy_labs/_generate_semantic_model.py +7 -8
  8. sempy_labs/_helper_functions.py +17 -13
  9. sempy_labs/_icons.py +5 -0
  10. sempy_labs/_list_functions.py +273 -17
  11. sempy_labs/_model_auto_build.py +1 -1
  12. sempy_labs/_model_bpa.py +37 -37
  13. sempy_labs/_model_dependencies.py +11 -12
  14. sempy_labs/_one_lake_integration.py +15 -22
  15. sempy_labs/_query_scale_out.py +1 -1
  16. sempy_labs/_refresh_semantic_model.py +4 -4
  17. sempy_labs/_translations.py +5 -5
  18. sempy_labs/_vertipaq.py +11 -11
  19. sempy_labs/directlake/_directlake_schema_compare.py +11 -9
  20. sempy_labs/directlake/_directlake_schema_sync.py +36 -37
  21. sempy_labs/directlake/_fallback.py +3 -3
  22. sempy_labs/directlake/_get_directlake_lakehouse.py +3 -4
  23. sempy_labs/directlake/_get_shared_expression.py +3 -3
  24. sempy_labs/directlake/_guardrails.py +3 -3
  25. sempy_labs/directlake/_list_directlake_model_calc_tables.py +28 -25
  26. sempy_labs/directlake/_show_unsupported_directlake_objects.py +4 -4
  27. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +10 -11
  28. sempy_labs/directlake/_update_directlake_partition_entity.py +25 -9
  29. sempy_labs/directlake/_warm_cache.py +5 -7
  30. sempy_labs/lakehouse/__init__.py +0 -2
  31. sempy_labs/lakehouse/_get_lakehouse_columns.py +3 -2
  32. sempy_labs/lakehouse/_get_lakehouse_tables.py +10 -7
  33. sempy_labs/lakehouse/_lakehouse.py +6 -5
  34. sempy_labs/lakehouse/_shortcuts.py +8 -106
  35. sempy_labs/migration/__init__.py +4 -2
  36. sempy_labs/migration/_create_pqt_file.py +2 -2
  37. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +7 -7
  38. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +4 -4
  39. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +5 -6
  40. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +6 -6
  41. sempy_labs/migration/_migration_validation.py +1 -164
  42. sempy_labs/migration/_refresh_calc_tables.py +3 -5
  43. sempy_labs/report/__init__.py +2 -2
  44. sempy_labs/report/_generate_report.py +14 -15
  45. sempy_labs/report/_report_functions.py +11 -10
  46. sempy_labs/report/_report_rebind.py +6 -7
  47. sempy_labs/tom/__init__.py +6 -0
  48. sempy_labs/{_tom.py → tom/_model.py} +166 -187
  49. semantic_link_labs-0.4.1.dist-info/RECORD +0 -52
  50. {semantic_link_labs-0.4.1.dist-info → semantic_link_labs-0.4.2.dist-info}/LICENSE +0 -0
  51. {semantic_link_labs-0.4.1.dist-info → semantic_link_labs-0.4.2.dist-info}/WHEEL +0 -0
  52. {semantic_link_labs-0.4.1.dist-info → semantic_link_labs-0.4.2.dist-info}/top_level.txt +0 -0
{semantic_link_labs-0.4.1.dist-info → semantic_link_labs-0.4.2.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: semantic-link-labs
- Version: 0.4.1
+ Version: 0.4.2
  Summary: Semantic Link Labs project
  Author: Microsoft Corporation
  License: MIT License
semantic_link_labs-0.4.2.dist-info/RECORD ADDED
@@ -0,0 +1,53 @@
+ sempy_labs/__init__.py,sha256=aKnIa8tgtzqT939USn5rjCYnMNRu-Aku4mJSEFG75mc,4126
+ sempy_labs/_ai.py,sha256=WXWnVJ9g5Xq5ctiMWFNXbrv2WdAhKv5EsO6tCBy8ceE,17930
+ sempy_labs/_clear_cache.py,sha256=yahzQ-QojCtAReFT3zK9lrr5LJhkONUr49GC1qXf6Uw,1476
+ sempy_labs/_connections.py,sha256=DNgiMbswZM-vv6qnEnZ2RhIuo1HhGobh47X2UYh2br8,7396
+ sempy_labs/_dax.py,sha256=u4qVxsu2dVaOJmso-ErScNZ5yI4lGQTlon_jmrAzvGs,2148
+ sempy_labs/_generate_semantic_model.py,sha256=PgqqAk3ptVTXHlW6lU5dqGNZUAOjA3xrqmvZX2GlXx8,9273
+ sempy_labs/_helper_functions.py,sha256=mzzMFv6SKottNGBXjBe2-cCTgTQXDf6fXT7MmQ05DLA,14313
+ sempy_labs/_icons.py,sha256=mREeT-3eJCAbuKwaYf1IZGWYssy6WQ-mBCZ6nFnKGPU,196
+ sempy_labs/_list_functions.py,sha256=mxacAOu2MwBBLqOTbwb_Zqmx6liSItbc8fhQgkxkgqQ,53459
+ sempy_labs/_model_auto_build.py,sha256=xv4n4pjBAFAxz9Otu0ggx9QJsgRHzOFf_h4Fyou9p8E,5149
+ sempy_labs/_model_bpa.py,sha256=6fvUeZ0isLpmeTF4XC6lK9C5NreIJhoAgWpYIoD236s,63250
+ sempy_labs/_model_dependencies.py,sha256=e0QhwQs4KuWllbrYdGQnqQ5YwbbC-KKYxvsqT5x7Y6I,13194
+ sempy_labs/_one_lake_integration.py,sha256=RRjyDFitYzsC9vvovlYmvame9lRlV0myABAcFZxYffw,6210
+ sempy_labs/_query_scale_out.py,sha256=5X6Q8-hskf6VY_VY02VdyMlmBuV77bHWTTKqMdbGLBg,15296
+ sempy_labs/_refresh_semantic_model.py,sha256=YUtSnMyln7_8iPY3owG5uwfm8RqRdCPIKh0fTChuiIM,6805
+ sempy_labs/_translations.py,sha256=zW2HmJPqDs5p0Mj-1dTIlgee1rSmfZYMcQoPdNsZCmM,18044
+ sempy_labs/_vertipaq.py,sha256=18YT-3zDZnsY9DhPh9iQIjuCW8T91zZjliW8cUtbEAY,33385
+ sempy_labs/directlake/__init__.py,sha256=HbfHvDvGE4H-xSbV6JO7TUb4HoLGJf2AeuqeQxIuuJ4,1689
+ sempy_labs/directlake/_directlake_schema_compare.py,sha256=1N2NHL9XwjdLz-SPFpGAGkGGFsaEAqAfEdNzfUgli-E,4748
+ sempy_labs/directlake/_directlake_schema_sync.py,sha256=Fkg8qJdiPF4ppgG2ED9lN40MbobIleT_W5wCTv190rw,5166
+ sempy_labs/directlake/_fallback.py,sha256=StqIL9WS6NWHMIBkgqM14my37Foz_zzkCryZtn_Qjj4,2091
+ sempy_labs/directlake/_get_directlake_lakehouse.py,sha256=vEY1QBU7gAdoVQIGl59T_1TTYuXbHgk3pZA41EkVkl8,2358
+ sempy_labs/directlake/_get_shared_expression.py,sha256=IyjBSIvmhb5Nri1Xf8QoK8owGFQ2znKP60N0U1VDLg0,2012
+ sempy_labs/directlake/_guardrails.py,sha256=NJxT4AStorzNimYIRkLW2iu3T2f_QfGZOhIRdW4glVI,2330
+ sempy_labs/directlake/_list_directlake_model_calc_tables.py,sha256=a5YOj3FrMj6DGmnNi9iSriredtti8_802L_Da3pBmrI,2079
+ sempy_labs/directlake/_show_unsupported_directlake_objects.py,sha256=9e5RSge91Cg_9qGCUqRlIhq2xvPA_ut_bE20SwZHk50,3386
+ sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py,sha256=Qq6iYTvMnimNNa67y3Q_Qpigpez49euK2fNXc05Xg4w,3224
+ sempy_labs/directlake/_update_directlake_partition_entity.py,sha256=Egr_NH7XoDDEDc8tz2e3JJ5UKXU0H0EgxTAIcpGr5_M,3207
+ sempy_labs/directlake/_warm_cache.py,sha256=fkAw7Lpnoh6dnkdFqtKUeH4fhi6Qzk4ydzHQI-KTqEM,8204
+ sempy_labs/lakehouse/__init__.py,sha256=i6VRx4dR1SIN-1GxioiNwhC4FxbozRCIz5TfXjb9rKc,587
+ sempy_labs/lakehouse/_get_lakehouse_columns.py,sha256=Ot49AuYtoNwsp-gHWqBplclrSD2rscudwXf_PyFU0mY,2578
+ sempy_labs/lakehouse/_get_lakehouse_tables.py,sha256=eUeRloAXC5DUEiPNIwcjyRQRnJID_iDzMqIGZM5QJT0,9094
+ sempy_labs/lakehouse/_lakehouse.py,sha256=gkLBpxizTAm-ti8OXqbI9VUgurIG4VjxljxKO3xTtE0,2856
+ sempy_labs/lakehouse/_shortcuts.py,sha256=k6p4FMW-tLjGR9vRql7vBQf_bDmNYlax6sf47TUIb34,6910
+ sempy_labs/migration/__init__.py,sha256=l5v8pC2INdNwbAKVmvWpuVxs6lpb6omim_4BPOmNo4E,1042
+ sempy_labs/migration/_create_pqt_file.py,sha256=MNFxF7XVQBT_-iklsvgm41vG2fyZ0WS42jJ9iFrZVRc,9195
+ sempy_labs/migration/_migrate_calctables_to_lakehouse.py,sha256=ajFvHauFdEsbgxvn9JXj2kiXaRtJLEjwX4hWQG7FQy0,20609
+ sempy_labs/migration/_migrate_calctables_to_semantic_model.py,sha256=Co2f579vSwkWZ95SlBStS-XJ73YwgcdfAMlJbUv_pkk,6343
+ sempy_labs/migration/_migrate_model_objects_to_semantic_model.py,sha256=iCfbyJuharFnHvwviuoRM_caQX8AKmaX47HYafI6Wtg,23955
+ sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py,sha256=qohJC6ARjM8NiMH7nZEKqUEXMrh-IqhdeUzgrBNZ1DQ,7028
+ sempy_labs/migration/_migration_validation.py,sha256=IdSNuK2EVz-egX7glvcocvRlWZrnKj5_nOz-l20yZdk,2370
+ sempy_labs/migration/_refresh_calc_tables.py,sha256=MjtBBoDA8W6J-6HPD736Voocp5AK2E1glXCy79C57ts,5985
+ sempy_labs/report/__init__.py,sha256=fkjbkAXZuH7VnAn-k3iB4dngWZKaX-k0bxS6mBa9iAs,846
+ sempy_labs/report/_generate_report.py,sha256=D1u_XzbvgYzUO4NXU5k9huMNh0n7bEjreW4SSoaa2vE,8532
+ sempy_labs/report/_report_functions.py,sha256=Ctx9kCTOC40ibBvkzGt2DAZG9GNa_iE8_T4ruERsk7A,30016
+ sempy_labs/report/_report_rebind.py,sha256=sWaj6aw8sh0njZ0__ULeGD4CZrg7oPLKtOlv4ETKGOk,4635
+ sempy_labs/tom/__init__.py,sha256=hFwkmWk5AZ7GK1LWqoqaK1g4gDmu9mZMkfLQvLsR_eE,130
+ sempy_labs/tom/_model.py,sha256=hRnIL1rkdYBTFt3hg2zcsEmvsoMhUCAeepmFyzyfzhA,135257
+ semantic_link_labs-0.4.2.dist-info/LICENSE,sha256=ws_MuBL-SCEBqPBFl9_FqZkaaydIJmxHrJG2parhU4M,1141
+ semantic_link_labs-0.4.2.dist-info/METADATA,sha256=d4DjYbKoZum30lR9bqhV-X1CHR5DZCfsdnay1UOz-w4,764
+ semantic_link_labs-0.4.2.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+ semantic_link_labs-0.4.2.dist-info/top_level.txt,sha256=kiQX1y42Dbein1l3Q8jMUYyRulDjdlc2tMepvtrvixQ,11
+ semantic_link_labs-0.4.2.dist-info/RECORD,,
sempy_labs/__init__.py CHANGED
@@ -5,7 +5,7 @@ from sempy_labs._clear_cache import clear_cache
  # create_connection_vnet,
  # create_connection_on_prem
  # )
- from sempy_labs._dax import run_dax
+ from sempy_labs._dax import evaluate_dax_impersonation
  from sempy_labs._generate_semantic_model import (
      create_blank_semantic_model,
      create_semantic_model_from_bim,
@@ -13,6 +13,8 @@ from sempy_labs._generate_semantic_model import (
      get_semantic_model_bim,
  )
  from sempy_labs._list_functions import (
+     list_semantic_model_objects,
+     list_shortcuts,
      get_object_level_security,
      # list_annotations,
      # list_columns,
@@ -50,9 +52,9 @@ from sempy_labs._helper_functions import (
      resolve_dataset_name,
      resolve_report_id,
      resolve_report_name,
-     # language_validate
+     # language_validate
  )
- from sempy_labs._model_auto_build import model_auto_build
+ # from sempy_labs._model_auto_build import model_auto_build
  from sempy_labs._model_bpa import model_bpa_rules, run_model_bpa
  from sempy_labs._model_dependencies import (
      measure_dependency_tree,
@@ -62,16 +64,15 @@ from sempy_labs._model_dependencies import (
  from sempy_labs._one_lake_integration import (
      export_model_to_onelake,
  )
-
- # from sempy_labs._query_scale_out import (
- # qso_sync,
- # qso_sync_status,
- # set_qso,
- # list_qso_settings,
- # disable_qso,
- # set_semantic_model_storage_format,
- # set_workspace_default_storage_format,
- # )
+ from sempy_labs._query_scale_out import (
+     qso_sync,
+     qso_sync_status,
+     set_qso,
+     list_qso_settings,
+     disable_qso,
+     set_semantic_model_storage_format,
+     set_workspace_default_storage_format,
+ )
  from sempy_labs._refresh_semantic_model import (
      refresh_semantic_model,
      cancel_dataset_refresh,
@@ -82,14 +83,13 @@ from sempy_labs._vertipaq import (
      # visualize_vertipaq,
      import_vertipaq_analyzer,
  )
- from sempy_labs._tom import TOMWrapper, connect_semantic_model

  __all__ = [
      "clear_cache",
      # create_connection_cloud,
      # create_connection_vnet,
      # create_connection_on_prem,
-     "run_dax",
+     "evaluate_dax_impersonation",
      "create_blank_semantic_model",
      "create_semantic_model_from_bim",
      #'deploy_semantic_model',
@@ -129,26 +129,26 @@ __all__ = [
      "resolve_report_id",
      "resolve_report_name",
      #'language_validate',
-     "model_auto_build",
+     #"model_auto_build",
      "model_bpa_rules",
      "run_model_bpa",
      "measure_dependency_tree",
      "get_measure_dependencies",
      "get_model_calc_dependencies",
      "export_model_to_onelake",
-     #'qso_sync',
-     #'qso_sync_status',
-     #'set_qso',
-     #'list_qso_settings',
-     #'disable_qso',
-     #'set_semantic_model_storage_format',
-     #'set_workspace_default_storage_format',
+     'qso_sync',
+     'qso_sync_status',
+     'set_qso',
+     'list_qso_settings',
+     'disable_qso',
+     'set_semantic_model_storage_format',
+     'set_workspace_default_storage_format',
      "refresh_semantic_model",
      "cancel_dataset_refresh",
      "translate_semantic_model",
      "vertipaq_analyzer",
      #'visualize_vertipaq',
      "import_vertipaq_analyzer",
-     "TOMWrapper",
-     "connect_semantic_model",
+     "list_semantic_model_objects",
+     "list_shortcuts"
  ]
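
Taken together, these __init__.py changes rename the public DAX entry point (run_dax → evaluate_dax_impersonation), promote the query scale-out helpers to the public API, and drop the TOM exports from the package root (a new sempy_labs/tom package appears in the file list). A hedged migration sketch for downstream code; the sempy_labs.tom import path is an assumption based on the new tom/__init__.py, whose contents are not shown in this diff:

    # 0.4.1:
    # from sempy_labs import run_dax, TOMWrapper, connect_semantic_model

    # 0.4.2 (per this diff):
    from sempy_labs import evaluate_dax_impersonation   # renamed from run_dax
    from sempy_labs import qso_sync, list_qso_settings  # QSO helpers are now exported
    from sempy_labs.tom import connect_semantic_model   # assumed new home of the TOM helpers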
sempy_labs/_ai.py CHANGED
@@ -6,6 +6,7 @@ from pyspark.sql.functions import col
  from pyspark.sql import SparkSession
  from typing import List, Optional, Union
  from IPython.display import display
+ import sempy_labs._icons as icons


  def optimize_semantic_model(dataset: str, workspace: Optional[str] = None):
@@ -39,7 +40,7 @@ def optimize_semantic_model(dataset: str, workspace: Optional[str] = None):

      if len(fallback_filt) > 0:
          print(
-             f"The '{dataset}' semantic model is a Direct Lake semantic model which contains views. Since views always fall back to DirectQuery, it is recommended to only use lakehouse tables and not views."
+             f"{icons.yellow_dot} The '{dataset}' semantic model is a Direct Lake semantic model which contains views. Since views always fall back to DirectQuery, it is recommended to only use lakehouse tables and not views."
          )

      # Potential model reduction estimate
@@ -56,11 +57,11 @@ def optimize_semantic_model(dataset: str, workspace: Optional[str] = None):
      totSize = df["Total Size"].sum()
      if len(df_filt) > 0:
          print(
-             f"Potential savings of {totSize} bytes from following the '{rule}' rule."
+             f"{icons.yellow_dot} Potential savings of {totSize} bytes from following the '{rule}' rule."
          )
          display(df_filt)
      else:
-         print(f"The '{rule}' rule has been followed.")
+         print(f"{icons.green_dot} The '{rule}' rule has been followed.")


  def generate_measure_descriptions(
@@ -78,7 +79,7 @@ def generate_measure_descriptions(
      validModels = ["gpt-35-turbo", "gpt-35-turbo-16k", "gpt-4"]
      if gpt_model not in validModels:
          print(
-             f"The '{gpt_model}' model is not a valid model. Enter a gpt_model from this list: {validModels}."
+             f"{icons.red_dot} The '{gpt_model}' model is not a valid model. Enter a gpt_model from this list: {validModels}."
          )
          return

@@ -92,7 +93,7 @@ def generate_measure_descriptions(
      df = dfM_filt[["Table Name", "Measure Name", "Measure Expression"]]

      df["prompt"] = (
-         f"The following is DAX code used by Microsoft Power BI. Please explain this code in simple terms:"
+         "The following is DAX code used by Microsoft Power BI. Please explain this code in simple terms:"
          + df["Measure Expression"]
      )

@@ -152,11 +153,11 @@ def generate_aggs(
      #'OrderDateKey': 'GroupBy'
      # }

-     if workspace == None:
+     if workspace is None:
          workspace_id = fabric.get_workspace_id()
          workspace = fabric.resolve_workspace_name(workspace_id)

-     if lakehouse_workspace == None:
+     if lakehouse_workspace is None:
          lakehouse_workspace = workspace
          lakehouse_workspace_id = workspace_id
      else:
@@ -173,7 +174,7 @@ def generate_aggs(

      if any(value not in aggTypes for value in columns.values()):
          print(
-             f"Invalid aggregation type(s) have been specified in the 'columns' parameter. Valid aggregation types: {aggTypes}."
+             f"{icons.red_dot} Invalid aggregation type(s) have been specified in the 'columns' parameter. Valid aggregation types: {aggTypes}."
          )
          return

@@ -183,7 +184,7 @@ def generate_aggs(
      dfR = fabric.list_relationships(dataset=dataset, workspace=workspace)
      if not any(r["Mode"] == "DirectLake" for i, r in dfP.iterrows()):
          print(
-             f"The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode. This function is only relevant for Direct Lake semantic models."
+             f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode. This function is only relevant for Direct Lake semantic models."
          )
          return

@@ -191,7 +192,7 @@ def generate_aggs(

      if len(dfC_filtT) == 0:
          print(
-             f"The '{table_name}' table does not exist in the '{dataset}' semantic model within the '{workspace}' workspace."
+             f"{icons.red_dot} The '{table_name}' table does not exist in the '{dataset}' semantic model within the '{workspace}' workspace."
          )
          return

@@ -201,7 +202,7 @@ def generate_aggs(

      if len(columns) != len(dfC_filt):
          print(
-             f"Columns listed in '{columnValues}' do not exist in the '{table_name}' table in the '{dataset}' semantic model within the '{workspace}' workspace."
+             f"{icons.red_dot} Columns listed in '{columnValues}' do not exist in the '{table_name}' table in the '{dataset}' semantic model within the '{workspace}' workspace."
          )
          return

@@ -211,7 +212,7 @@ def generate_aggs(
          dataType = dfC_col["Data Type"].iloc[0]
          if agg in aggTypesAggregate and dataType not in numericTypes:
              print(
-                 f"The '{col}' column in the '{table_name}' table is of '{dataType}' data type. Only columns of '{numericTypes}' data types can be aggregated as '{aggTypesAggregate}' aggregation types."
+                 f"{icons.red_dot} The '{col}' column in the '{table_name}' table is of '{dataType}' data type. Only columns of '{numericTypes}' data types can be aggregated as '{aggTypesAggregate}' aggregation types."
              )
              return

@@ -230,7 +231,7 @@ def generate_aggs(

      if len(dfI_filt) == 0:
          print(
-             f"The lakehouse (SQL Endpoint) used by the '{dataset}' semantic model does not reside in the '{lakehouse_workspace}' workspace. Please update the lakehouse_workspace parameter."
+             f"{icons.red_dot} The lakehouse (SQL Endpoint) used by the '{dataset}' semantic model does not reside in the '{lakehouse_workspace}' workspace. Please update the lakehouse_workspace parameter."
          )
          return

@@ -278,16 +279,16 @@ def generate_aggs(
          delta_table_name=aggLakeTName,
      )
      spark_df.write.mode("overwrite").format("delta").save(aggFilePath)
-     f"The '{aggLakeTName}' table has been created/updated in the lakehouse."
+     f"{icons.green_dot} The '{aggLakeTName}' table has been created/updated in the lakehouse."

      # Create/update semantic model agg table
      tom_server = fabric.create_tom_server(readonly=False, workspace=workspace)
      m = tom_server.Databases.GetByName(dataset).Model
-     f"\nUpdating the '{dataset}' semantic model..."
+     f"\n{icons.in_progress} Updating the '{dataset}' semantic model..."
      dfC_agg = dfC[dfC["Table Name"] == aggTableName]

      if len(dfC_agg) == 0:
-         print(f"Creating the '{aggTableName}' table...")
+         print(f"{icons.in_progress} Creating the '{aggTableName}' table...")
          exp = m.Expressions["DatabaseQuery"]
          tbl = TOM.Table()
          tbl.Name = aggTableName
@@ -318,15 +319,15 @@ def generate_aggs(

              tbl.Columns.Add(col)
              print(
-                 f"The '{aggTableName}'[{cName}] column has been added to the '{dataset}' semantic model."
+                 f"{icons.green_dot} The '{aggTableName}'[{cName}] column has been added to the '{dataset}' semantic model."
              )

          m.Tables.Add(tbl)
          print(
-             f"The '{aggTableName}' table has been added to the '{dataset}' semantic model."
+             f"{icons.green_dot} The '{aggTableName}' table has been added to the '{dataset}' semantic model."
          )
      else:
-         print(f"Updating the '{aggTableName}' table's columns...")
+         print(f"{icons.in_progress} Updating the '{aggTableName}' table's columns...")
          # Remove existing columns
          for t in m.Tables:
              tName = t.Name
@@ -347,12 +348,12 @@ def generate_aggs(
              col.DataType = System.Enum.Parse(TOM.DataType, dType)

              m.Tables[aggTableName].Columns.Add(col)
-             print(f"The '{aggTableName}'[{cName}] column has been added.")
+             print(f"{icons.green_dot} The '{aggTableName}'[{cName}] column has been added.")

      # Create relationships
      relMap = {"m": "Many", "1": "One", "0": "None"}

-     print(f"\nGenerating necessary relationships...")
+     print(f"\n{icons.in_progress} Generating necessary relationships...")
      for i, r in dfR.iterrows():
          fromTable = r["From Table"]
          fromColumn = r["From Column"]
@@ -384,27 +385,27 @@ def generate_aggs(
                  rel.FromColumn = m.Tables[aggTableName].Columns[fromColumn]
                  m.Relationships.Add(rel)
                  print(
-                     f"'{aggTableName}'[{fromColumn}] -> '{toTable}'[{toColumn}] relationship has been added."
+                     f"{icons.green_dot} '{aggTableName}'[{fromColumn}] -> '{toTable}'[{toColumn}] relationship has been added."
                  )
              except:
                  print(
-                     f"'{aggTableName}'[{fromColumn}] -> '{toTable}'[{toColumn}] relationship has not been created."
+                     f"{icons.red_dot} '{aggTableName}'[{fromColumn}] -> '{toTable}'[{toColumn}] relationship has not been created."
                  )
          elif toTable == table_name:
              try:
                  rel.ToColumn = m.Tables[aggTableName].Columns[toColumn]
                  m.Relationships.Add(rel)
                  print(
-                     f"'{fromTable}'[{fromColumn}] -> '{aggTableName}'[{toColumn}] relationship has been added."
+                     f"{icons.green_dot} '{fromTable}'[{fromColumn}] -> '{aggTableName}'[{toColumn}] relationship has been added."
                  )
              except:
                  print(
-                     f"'{fromTable}'[{fromColumn}] -> '{aggTableName}'[{toColumn}] relationship has not been created."
+                     f"{icons.red_dot} '{fromTable}'[{fromColumn}] -> '{aggTableName}'[{toColumn}] relationship has not been created."
                  )
      f"Relationship creation is complete."

      # Create IF measure
-     f"\nCreating measure to check if the agg table can be used..."
+     f"\n{icons.in_progress} Creating measure to check if the agg table can be used..."
      aggChecker = "IF("
      dfR_filt = dfR[
          (dfR["From Table"] == table_name) & (~dfR["From Column"].isin(columnValues))
@@ -419,7 +420,7 @@ def generate_aggs(
      print(aggChecker)

      # Todo: add IFISFILTERED clause for columns
-     f"\n Creating the base measures in the agg table..."
+     f"\n{icons.in_progress} Creating the base measures in the agg table..."
      # Create base agg measures
      dep = fabric.evaluate_dax(
          dataset=dataset,
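
The `workspace == None` → `workspace is None` substitutions recurring throughout this release follow the standard Python idiom: None is a singleton, so an identity check is the reliable (and PEP 8-recommended) comparison, immune to classes that override __eq__. A minimal illustration:

    class AlwaysEqual:
        def __eq__(self, other):
            return True  # claims equality with everything, including None

    obj = AlwaysEqual()
    print(obj == None)  # True, which is misleading
    print(obj is None)  # False, identity is checked rather than equality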
sempy_labs/_clear_cache.py CHANGED
@@ -8,6 +8,7 @@ import sempy_labs._icons as icons
  def clear_cache(dataset: str, workspace: Optional[str] = None):
      """
      Clears the cache of a semantic model.
+     See `here <https://learn.microsoft.com/analysis-services/instances/clear-the-analysis-services-caches?view=asallproducts-allversions>`_ for documentation.

      Parameters
      ----------
@@ -19,7 +20,7 @@ def clear_cache(dataset: str, workspace: Optional[str] = None):
          or if no lakehouse attached, resolves to the workspace of the notebook.
      """

-     if workspace == None:
+     if workspace is None:
          workspace_id = fabric.get_workspace_id()
          workspace = fabric.resolve_workspace_name(workspace_id)

sempy_labs/_dax.py CHANGED
@@ -7,7 +7,7 @@ from sempy._utils._log import log


  @log
- def run_dax(
+ def evaluate_dax_impersonation(
      dataset: str,
      dax_query: str,
      user_name: Optional[str] = None,
@@ -27,7 +27,6 @@
          The DAX query.
      user_name : str
          The user name (i.e. hello@goodbye.com).
-         Defaults to None which resolves to no user impersonation.
      workspace : str, default=None
          The Fabric workspace name.
          Defaults to None which resolves to the workspace of the attached lakehouse
@@ -49,13 +48,10 @@

      dataset_id = resolve_dataset_id(dataset=dataset, workspace=workspace)

-     if user_name is None:
-         request_body = {"queries": [{"query": dax_query}]}
-     else:
-         request_body = {
-             "queries": [{"query": dax_query}],
-             "impersonatedUserName": user_name,
-         }
+     request_body = {
+         "queries": [{"query": dax_query}],
+         "impersonatedUserName": user_name
+     }

      client = fabric.PowerBIRestClient()
      response = client.post(
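
A hedged usage sketch of the renamed function; the dataset name and query are hypothetical, and the handling of the return value is assumed from the REST call above rather than shown in this hunk. Note that after this change the request body always carries the impersonatedUserName key, even when user_name is None:

    from sempy_labs import evaluate_dax_impersonation

    result = evaluate_dax_impersonation(
        dataset="AdventureWorks",                   # hypothetical semantic model
        dax_query="EVALUATE VALUES('Date'[Year])",  # hypothetical DAX query
        user_name="hello@goodbye.com",              # the docstring's example user
    )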
sempy_labs/_generate_semantic_model.py CHANGED
@@ -23,16 +23,15 @@ def create_blank_semantic_model(
      ----------
      dataset : str
          Name of the semantic model.
-     compatibility_level : int
+     compatibility_level : int, default=1605
          The compatibility level of the semantic model.
-         Defaults to 1605.
      workspace : str, default=None
          The Fabric workspace name.
          Defaults to None which resolves to the workspace of the attached lakehouse
          or if no lakehouse attached, resolves to the workspace of the notebook.
      """

-     if workspace == None:
+     if workspace is None:
          workspace_id = fabric.get_workspace_id()
          workspace = fabric.resolve_workspace_name(workspace_id)

@@ -66,7 +65,7 @@


  def create_semantic_model_from_bim(
-     dataset: str, bim_file: str, workspace: Optional[str] = None
+     dataset: str, bim_file: dict, workspace: Optional[str] = None
  ):
      """
      Creates a new semantic model based on a Model.bim file.
@@ -75,7 +74,7 @@ def create_semantic_model_from_bim(
      ----------
      dataset : str
          Name of the semantic model.
-     bim_file : str
+     bim_file : dict
          The model.bim file.
      workspace : str, default=None
          The Fabric workspace name.
@@ -179,11 +178,11 @@ def deploy_semantic_model(

      """

-     if workspace == None:
+     if workspace is None:
          workspace_id = fabric.get_workspace_id()
          workspace = fabric.resolve_workspace_name(workspace_id)

-     if new_dataset_workspace == None:
+     if new_dataset_workspace is None:
          new_dataset_workspace = workspace

      if new_dataset is None:
@@ -258,7 +257,7 @@ def get_semantic_model_bim(

      if save_to_file_name is not None:
          lakeAttach = lakehouse_attached()
-         if lakeAttach == False:
+         if lakeAttach is False:
              print(
                  f"In order to save the model.bim file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
              )
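
With bim_file now typed as dict, callers presumably pass the parsed model.bim JSON rather than a string. A sketch under that assumption (the file path is hypothetical):

    import json
    from sempy_labs import create_semantic_model_from_bim

    # model.bim is a JSON document, so its parsed form is a dict
    with open("/lakehouse/default/Files/Model.bim", "r") as f:  # hypothetical path
        bim = json.load(f)

    create_semantic_model_from_bim(dataset="MyNewModel", bim_file=bim)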
sempy_labs/_helper_functions.py CHANGED
@@ -1,3 +1,4 @@
+ import sempy
  import sempy.fabric as fabric
  import re
  import pandas as pd
@@ -100,7 +101,7 @@ def resolve_report_id(report: str, workspace: Optional[str] = None):
          The ID of the Power BI report.
      """

-     if workspace == None:
+     if workspace is None:
          workspace_id = fabric.get_workspace_id()
          workspace = fabric.resolve_workspace_name(workspace_id)

@@ -128,7 +129,7 @@ def resolve_report_name(report_id: UUID, workspace: Optional[str] = None):
          The name of the Power BI report.
      """

-     if workspace == None:
+     if workspace is None:
          workspace_id = fabric.get_workspace_id()
          workspace = fabric.resolve_workspace_name(workspace_id)

@@ -158,7 +159,7 @@ def resolve_dataset_id(dataset: str, workspace: Optional[str] = None):
          The ID of the semantic model.
      """

-     if workspace == None:
+     if workspace is None:
          workspace_id = fabric.get_workspace_id()
          workspace = fabric.resolve_workspace_name(workspace_id)

@@ -188,7 +189,7 @@ def resolve_dataset_name(dataset_id: UUID, workspace: Optional[str] = None):
          The name of the semantic model.
      """

-     if workspace == None:
+     if workspace is None:
          workspace_id = fabric.get_workspace_id()
          workspace = fabric.resolve_workspace_name(workspace_id)

@@ -199,14 +200,15 @@ def resolve_dataset_name(dataset_id: UUID, workspace: Optional[str] = None):
      return obj


- def resolve_lakehouse_name(lakehouse_id: UUID, workspace: Optional[str] = None):
+ def resolve_lakehouse_name(lakehouse_id: Optional[UUID] = None, workspace: Optional[str] = None):
      """
      Obtains the name of the Fabric lakehouse.

      Parameters
      ----------
-     lakehouse_id : UUID
+     lakehouse_id : UUID, default=None
          The name of the Fabric lakehouse.
+         Defaults to None which resolves to the lakehouse attached to the notebook.
      workspace : str, default=None
          The Fabric workspace name.
          Defaults to None which resolves to the workspace of the attached lakehouse
@@ -218,9 +220,12 @@ def resolve_lakehouse_name(lakehouse_id: UUID, workspace: Optional[str] = None):
          The name of the Fabric lakehouse.
      """

-     if workspace == None:
+     if workspace is None:
          workspace_id = fabric.get_workspace_id()
          workspace = fabric.resolve_workspace_name(workspace_id)
+
+     if lakehouse_id is None:
+         lakehouse_id = fabric.get_lakehouse_id()

      obj = fabric.resolve_item_name(
          item_id=lakehouse_id, type="Lakehouse", workspace=workspace
@@ -248,7 +253,7 @@ def resolve_lakehouse_id(lakehouse: str, workspace: Optional[str] = None):
          The ID of the Fabric lakehouse.
      """

-     if workspace == None:
+     if workspace is None:
          workspace_id = fabric.get_workspace_id()
          workspace = fabric.resolve_workspace_name(workspace_id)

@@ -278,7 +283,7 @@ def get_direct_lake_sql_endpoint(dataset: str, workspace: Optional[str] = None)
          The ID of SQL Endpoint.
      """

-     if workspace == None:
+     if workspace is None:
          workspace_id = fabric.get_workspace_id()
          workspace = fabric.resolve_workspace_name(workspace_id)

@@ -286,10 +291,9 @@ def get_direct_lake_sql_endpoint(dataset: str, workspace: Optional[str] = None)
      dfP_filt = dfP[dfP["Mode"] == "DirectLake"]

      if len(dfP_filt) == 0:
-         print(
+         raise ValueError(
              f"The '{dataset}' semantic model in the '{workspace}' workspace is not in Direct Lake mode."
          )
-         return

      dfE = fabric.list_expressions(dataset=dataset, workspace=workspace)
      dfE_filt = dfE[dfE["Name"] == "DatabaseQuery"]
@@ -497,10 +501,10 @@ def resolve_workspace_name_and_id(workspace: Optional[str] = None) -> Tuple[str,
          The name and ID of the Fabric workspace.
      """

-     if workspace == None:
+     if workspace is None:
          workspace_id = fabric.get_workspace_id()
          workspace = fabric.resolve_workspace_name(workspace_id)
      else:
          workspace_id = fabric.resolve_workspace_id(workspace)

-     return workspace, workspace_id
+     return str(workspace), str(workspace_id)
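
With the new lakehouse_id default, resolve_lakehouse_name can be called with no arguments from a notebook that has a lakehouse attached. A sketch of the two now-equivalent calls:

    import sempy.fabric as fabric
    from sempy_labs._helper_functions import resolve_lakehouse_name

    name = resolve_lakehouse_name()  # resolves the attached lakehouse
    name = resolve_lakehouse_name(lakehouse_id=fabric.get_lakehouse_id())  # explicit equivalent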
sempy_labs/_icons.py CHANGED
@@ -2,3 +2,8 @@ green_dot = "\U0001F7E2"
  yellow_dot = "\U0001F7E1"
  red_dot = "\U0001F534"
  in_progress = "⌛"
+ checked = "\u2611"
+ unchecked = "\u2610"
+ start_bold = "\033[1m"
+ end_bold = "\033[0m"
+ bullet = "\u2022"
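
A small sketch of how the new constants compose status output in the style of the messages added throughout this release; the exact combinations shown are illustrative, not taken from the package:

    import sempy_labs._icons as icons

    print(f"{icons.start_bold}Direct Lake checklist{icons.end_bold}")
    print(f"{icons.checked} Uses lakehouse tables")
    print(f"{icons.unchecked} Avoids views (views fall back to DirectQuery)")
    print(f"{icons.bullet} See run_model_bpa for the full rule set")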