semantic-link-labs 0.5.0__py3-none-any.whl → 0.6.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of semantic-link-labs has been flagged as possibly problematic.

Files changed (54)
  1. {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.6.0.dist-info}/METADATA +2 -2
  2. semantic_link_labs-0.6.0.dist-info/RECORD +54 -0
  3. {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.6.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +19 -13
  5. sempy_labs/_ai.py +43 -24
  6. sempy_labs/_clear_cache.py +4 -5
  7. sempy_labs/_connections.py +77 -70
  8. sempy_labs/_dax.py +7 -9
  9. sempy_labs/_generate_semantic_model.py +55 -44
  10. sempy_labs/_helper_functions.py +13 -6
  11. sempy_labs/_icons.py +14 -0
  12. sempy_labs/_list_functions.py +491 -304
  13. sempy_labs/_model_auto_build.py +4 -3
  14. sempy_labs/_model_bpa.py +131 -1118
  15. sempy_labs/_model_bpa_rules.py +831 -0
  16. sempy_labs/_model_dependencies.py +14 -12
  17. sempy_labs/_one_lake_integration.py +11 -5
  18. sempy_labs/_query_scale_out.py +89 -81
  19. sempy_labs/_refresh_semantic_model.py +16 -10
  20. sempy_labs/_translations.py +213 -287
  21. sempy_labs/_vertipaq.py +53 -37
  22. sempy_labs/directlake/__init__.py +2 -0
  23. sempy_labs/directlake/_directlake_schema_compare.py +12 -5
  24. sempy_labs/directlake/_directlake_schema_sync.py +13 -19
  25. sempy_labs/directlake/_fallback.py +5 -3
  26. sempy_labs/directlake/_get_directlake_lakehouse.py +1 -1
  27. sempy_labs/directlake/_get_shared_expression.py +4 -2
  28. sempy_labs/directlake/_guardrails.py +3 -3
  29. sempy_labs/directlake/_list_directlake_model_calc_tables.py +17 -10
  30. sempy_labs/directlake/_show_unsupported_directlake_objects.py +3 -2
  31. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +10 -5
  32. sempy_labs/directlake/_update_directlake_partition_entity.py +132 -9
  33. sempy_labs/directlake/_warm_cache.py +6 -3
  34. sempy_labs/lakehouse/_get_lakehouse_columns.py +1 -1
  35. sempy_labs/lakehouse/_get_lakehouse_tables.py +5 -3
  36. sempy_labs/lakehouse/_lakehouse.py +2 -1
  37. sempy_labs/lakehouse/_shortcuts.py +19 -12
  38. sempy_labs/migration/__init__.py +1 -1
  39. sempy_labs/migration/_create_pqt_file.py +21 -15
  40. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +16 -13
  41. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +17 -18
  42. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +43 -40
  43. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +14 -14
  44. sempy_labs/migration/_migration_validation.py +2 -2
  45. sempy_labs/migration/_refresh_calc_tables.py +8 -5
  46. sempy_labs/report/__init__.py +2 -2
  47. sempy_labs/report/_generate_report.py +10 -5
  48. sempy_labs/report/_report_functions.py +67 -29
  49. sempy_labs/report/_report_rebind.py +9 -8
  50. sempy_labs/tom/__init__.py +1 -4
  51. sempy_labs/tom/_model.py +555 -152
  52. semantic_link_labs-0.5.0.dist-info/RECORD +0 -53
  53. {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.6.0.dist-info}/LICENSE +0 -0
  54. {semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.6.0.dist-info}/top_level.txt +0 -0
{semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.6.0.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: semantic-link-labs
- Version: 0.5.0
+ Version: 0.6.0
  Summary: Semantic Link Labs project
  Author: Microsoft Corporation
  License: MIT License
@@ -14,7 +14,7 @@ Classifier: Programming Language :: Python :: 3 :: Only
  Classifier: Framework :: Jupyter
  Requires-Python: <3.12,>=3.10
  License-File: LICENSE
- Requires-Dist: semantic-link-sempy >=0.7.5
+ Requires-Dist: semantic-link-sempy >=0.7.6
  Requires-Dist: anytree
  Requires-Dist: powerbiclient
  Provides-Extra: test
semantic_link_labs-0.6.0.dist-info/RECORD ADDED
@@ -0,0 +1,54 @@
+ sempy_labs/__init__.py,sha256=T_5thSoxVg83rUR_aac7P4kBKPrf5wylwRAxNMzYkjw,4976
+ sempy_labs/_ai.py,sha256=1sWyWxM_fAqdT5Ih2F3QjqWepJ0R48RvffNHSZKfuXs,18079
+ sempy_labs/_clear_cache.py,sha256=AfTCAwSK5EYhI2aBPsSun8FxJy-GHlByxH-rBxHfHzM,1362
+ sempy_labs/_connections.py,sha256=w1dFC4WeTNFmLGD2EL_Syk0Wb1Eij18we2FVn_VaCD8,7641
+ sempy_labs/_dax.py,sha256=dt1GgHceyM7f6phRBPxRKnmQy_KYKpcgFQHuOjGbpLo,2029
+ sempy_labs/_generate_semantic_model.py,sha256=qdWL6GDuCstzGLzChNRZ_nr9CndIl-sKmYG1WrZHXcw,9689
+ sempy_labs/_helper_functions.py,sha256=bcDDwy7LqOVV3x4scCSBxzMQuU2sLWBzgMSaluDrxEk,14306
+ sempy_labs/_icons.py,sha256=RfKaIzFHvu9mtQo6N10w9BQUa9JPHQ6NtJUyASZBdTo,536
+ sempy_labs/_list_functions.py,sha256=uNZsQD32jQqQYgOegLYO1m28pRqCpCqZMIm0fyA2QW0,82709
+ sempy_labs/_model_auto_build.py,sha256=fX3bCLFCOMQHuheKIoB48fUABG7XAT7qqsMbUiWSrY0,5071
+ sempy_labs/_model_bpa.py,sha256=PTC_vKKg-r66o0cXIGsx-0cW2ms-S1mWOLPZW8vGa2Q,14080
+ sempy_labs/_model_bpa_rules.py,sha256=aDNudcvBLaKaDLy82MF2D8gbrAtVvrOt9u5AqDkltnk,47291
+ sempy_labs/_model_dependencies.py,sha256=0uOhTMLpfhZ0mjIjhKacrvzOYs1uHoFfzc2hqTGAY50,12965
+ sempy_labs/_one_lake_integration.py,sha256=r9gnLoFjzfJroAzlJ5aWpKs-rxemzJxnN7tADML0IQc,6246
+ sempy_labs/_query_scale_out.py,sha256=-RMDO7T8QsIaDK3xXLyg1eOKXK5sKOBTrIISaKoSFRI,14901
+ sempy_labs/_refresh_semantic_model.py,sha256=TGGdqMzpR0POXgaXkqxfD2ene-SwEvU3aujsVlkWX7E,6734
+ sempy_labs/_translations.py,sha256=bWlLfz9ynFIQEBRHAWAyV1XZdYTUzLCTtipZKk6Fd_U,12920
+ sempy_labs/_vertipaq.py,sha256=pIRfJKjsShxVDdMBoeO97vDvurZ98iblcSdtowSJgxQ,33303
+ sempy_labs/directlake/__init__.py,sha256=yDoHDd4TyP7wTTjgMASTsjyXgRrTJwJjgml-IjVl460,1781
+ sempy_labs/directlake/_directlake_schema_compare.py,sha256=xljJ60aymEJeQV9w-0NQTzFGA8AUiYvsByX7QqHxglg,4716
+ sempy_labs/directlake/_directlake_schema_sync.py,sha256=VzTT9o6xceVfx_8wJkvXwy1jGwjH5HztIrQ_Vwh4B4Q,4940
+ sempy_labs/directlake/_fallback.py,sha256=o6FswFiisMVbDqRTCJ2hHa1M65KafkGjaqQl5PQ6szQ,1992
+ sempy_labs/directlake/_get_directlake_lakehouse.py,sha256=IkkpJe7iFbKT_9KCMdaLucuw_WCcdmEQd3bz6-Wmgx0,2346
+ sempy_labs/directlake/_get_shared_expression.py,sha256=3KIrsaCX-t2r4_anC8Z4MMlbu3Iaa_gCHCC8Ok7NDxA,1936
+ sempy_labs/directlake/_guardrails.py,sha256=pnLYM7ZO6_hq38UMNeJRx5Fp9XbcIpgPuAS3py24oA8,2259
+ sempy_labs/directlake/_list_directlake_model_calc_tables.py,sha256=_rpnbgsFAz2W16PpgIOB0Rj_Fs1ZKrDbz3DUaaR_bfU,2143
+ sempy_labs/directlake/_show_unsupported_directlake_objects.py,sha256=QNj2wHzFGtjnsAICmlc7BuhCYkw0An0XnditDTCG2JM,3358
+ sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py,sha256=b_Y5_GSfWC25wH6R7L37-AHO9fvKkmxRGaP6dVDC7-w,3233
+ sempy_labs/directlake/_update_directlake_partition_entity.py,sha256=nMh2CqQrp64NipmryuLKewljmFt6aVKaecIwSedIFIw,7687
+ sempy_labs/directlake/_warm_cache.py,sha256=RjpRjtzSaKw6gHKPw4O6phBwmnk7LHp4B1yrW8Z6VEY,8242
+ sempy_labs/lakehouse/__init__.py,sha256=i6VRx4dR1SIN-1GxioiNwhC4FxbozRCIz5TfXjb9rKc,587
+ sempy_labs/lakehouse/_get_lakehouse_columns.py,sha256=Bb_iCTlNwl0wdN4dW_E7tVnfbHhHwQT_l0SUqvcbYpo,2582
+ sempy_labs/lakehouse/_get_lakehouse_tables.py,sha256=c2pqEyhiEJhnRaE3Zlz3EHRmR9bFI6J7CZxqLVa87iE,9091
+ sempy_labs/lakehouse/_lakehouse.py,sha256=kUMiGi8SyY4oTWAB4OFzGQzlqUdAF2EolpZza15gFrE,2763
+ sempy_labs/lakehouse/_shortcuts.py,sha256=oDYexjMxZX2JoaEqSyO1R_PQ2c5qTUNVbHGtVQJZorg,6977
+ sempy_labs/migration/__init__.py,sha256=w4vvGk6wTWXVfofJDmio2yIFvSSJsxOpjv6mvNGmrOI,1043
+ sempy_labs/migration/_create_pqt_file.py,sha256=XTG1BQJbfgKpwT8jKWrlCfDuMYdecM94qTLzrpQ7wck,9035
+ sempy_labs/migration/_migrate_calctables_to_lakehouse.py,sha256=6Z1DIdZCi_LX0o-SfPO5zzqWa0hd_zEwcQzIsOer9SM,20551
+ sempy_labs/migration/_migrate_calctables_to_semantic_model.py,sha256=Ne-2_PwW_LUR8vG2pNY3lxa9iib1dIpYkDHRaqyqyY8,6420
+ sempy_labs/migration/_migrate_model_objects_to_semantic_model.py,sha256=T7Ijo9g9AmB8KMyqvYOTurSdabUKKaRcfdstSgL1QQ8,24399
+ sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py,sha256=1ZWQ9sawd2_ojTnWTsRaDEj7yMnUVOpCQZeI0YHhb-g,7066
+ sempy_labs/migration/_migration_validation.py,sha256=phbUUIYA5z4dZvEKrVbByMTPPwMscY7Dy9HVxE8z_HM,2483
+ sempy_labs/migration/_refresh_calc_tables.py,sha256=pImD8wMOMxs043e6s4G2TPrFzsGVHklK1aez_oVOgro,6086
+ sempy_labs/report/__init__.py,sha256=78lLE4UlmwfSY-6FxMIgnshMQF90Qfq_qM7A3vrNtHw,848
+ sempy_labs/report/_generate_report.py,sha256=fJUiUgsF2mS-dyaVjRvwAASvbSrSm8BUDB53Ru3rhQI,8545
+ sempy_labs/report/_report_functions.py,sha256=6xpLyVrRkA2ZwwCj2By8_HX2k-EHoQUZtuUaTy0d7vM,30004
+ sempy_labs/report/_report_rebind.py,sha256=PmZ3f3Rf3Yka8Eaea87JosSiFvEtzjfbUDQOzRx_vlQ,4614
+ sempy_labs/tom/__init__.py,sha256=Qbs8leW0fjzvWwOjyWK3Hjeehu7IvpB1beASGsi28bk,121
+ sempy_labs/tom/_model.py,sha256=kGRfpA6IHy8KX6QKVciekOa2Y97d6r0eRol5K8YMMIg,149938
+ semantic_link_labs-0.6.0.dist-info/LICENSE,sha256=ws_MuBL-SCEBqPBFl9_FqZkaaydIJmxHrJG2parhU4M,1141
+ semantic_link_labs-0.6.0.dist-info/METADATA,sha256=ZFdW982vsZ0btahdjKpAgiDMEXxnjn_nwiPY8iMMrdI,764
+ semantic_link_labs-0.6.0.dist-info/WHEEL,sha256=Wyh-_nZ0DJYolHNn1_hMa4lM7uDedD_RGVwbmTjyItk,91
+ semantic_link_labs-0.6.0.dist-info/top_level.txt,sha256=kiQX1y42Dbein1l3Q8jMUYyRulDjdlc2tMepvtrvixQ,11
+ semantic_link_labs-0.6.0.dist-info/RECORD,,
{semantic_link_labs-0.5.0.dist-info → semantic_link_labs-0.6.0.dist-info}/WHEEL CHANGED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (70.2.0)
+ Generator: setuptools (71.1.0)
  Root-Is-Purelib: true
  Tag: py3-none-any
 
sempy_labs/__init__.py CHANGED
@@ -9,13 +9,15 @@ from sempy_labs._dax import evaluate_dax_impersonation
  from sempy_labs._generate_semantic_model import (
  create_blank_semantic_model,
  create_semantic_model_from_bim,
- # deploy_semantic_model,
+ deploy_semantic_model,
  get_semantic_model_bim,
  )
  from sempy_labs._list_functions import (
+ delete_custom_pool,
  list_semantic_model_objects,
  list_shortcuts,
  get_object_level_security,
+ list_capacities,
  # list_annotations,
  # list_columns,
  list_dashboards,
@@ -66,8 +68,10 @@ from sempy_labs._helper_functions import (
  resolve_report_name,
  # language_validate
  )
+
  # from sempy_labs._model_auto_build import model_auto_build
- from sempy_labs._model_bpa import model_bpa_rules, run_model_bpa
+ from sempy_labs._model_bpa import run_model_bpa
+ from sempy_labs._model_bpa_rules import model_bpa_rules
  from sempy_labs._model_dependencies import (
  measure_dependency_tree,
  get_measure_dependencies,
@@ -97,6 +101,7 @@ from sempy_labs._vertipaq import (
  )
 
  __all__ = [
+ "delete_custom_pool",
  "clear_cache",
  # create_connection_cloud,
  # create_connection_vnet,
@@ -104,7 +109,7 @@ __all__ = [
  "evaluate_dax_impersonation",
  "create_blank_semantic_model",
  "create_semantic_model_from_bim",
- #'deploy_semantic_model',
+ "deploy_semantic_model",
  "get_semantic_model_bim",
  "get_object_level_security",
  #'list_annotations',
@@ -125,7 +130,7 @@ __all__ = [
  #'list_sqlendpoints',
  #'list_tables',
  "list_warehouses",
- 'list_workspace_role_assignments',
+ "list_workspace_role_assignments",
  "create_warehouse",
  "update_item",
  "create_abfss_path",
@@ -141,20 +146,20 @@ __all__ = [
  "resolve_report_id",
  "resolve_report_name",
  #'language_validate',
- #"model_auto_build",
+ # "model_auto_build",
  "model_bpa_rules",
  "run_model_bpa",
  "measure_dependency_tree",
  "get_measure_dependencies",
  "get_model_calc_dependencies",
  "export_model_to_onelake",
- 'qso_sync',
- 'qso_sync_status',
- 'set_qso',
- 'list_qso_settings',
- 'disable_qso',
- 'set_semantic_model_storage_format',
- 'set_workspace_default_storage_format',
+ "qso_sync",
+ "qso_sync_status",
+ "set_qso",
+ "list_qso_settings",
+ "disable_qso",
+ "set_semantic_model_storage_format",
+ "set_workspace_default_storage_format",
  "refresh_semantic_model",
  "cancel_dataset_refresh",
  "translate_semantic_model",
@@ -174,5 +179,6 @@ __all__ = [
  "delete_user_from_workspace",
  "update_workspace_user",
  "list_workspace_users",
- "assign_workspace_to_dataflow_storage"
+ "assign_workspace_to_dataflow_storage",
+ "list_capacities",
  ]
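
Net effect of the __init__.py changes on the public API: deploy_semantic_model is now exported (it was previously commented out), delete_custom_pool and list_capacities are new exports, and model_bpa_rules is now re-exported from the new sempy_labs._model_bpa_rules module. A minimal, illustrative check that the re-exported names resolve in 0.6.0 (their signatures are not part of this diff, so nothing is called):

    # Illustrative only; these names appear in __all__ above, but their
    # parameters are not shown in this diff, so the functions are not called.
    from sempy_labs import (
        deploy_semantic_model,  # newly exported in 0.6.0
        delete_custom_pool,     # new in 0.6.0
        list_capacities,        # new in 0.6.0
        run_model_bpa,
        model_bpa_rules,        # now sourced from sempy_labs._model_bpa_rules
    )

    print([f.__name__ for f in (deploy_semantic_model, delete_custom_pool, list_capacities)])
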
sempy_labs/_ai.py CHANGED
@@ -14,7 +14,6 @@ def optimize_semantic_model(dataset: str, workspace: Optional[str] = None):
  from ._model_bpa import run_model_bpa
  from .directlake._fallback import check_fallback_reason
  from ._helper_functions import format_dax_object_name
- from sempy_labs.tom import connect_semantic_model
 
  modelBPA = run_model_bpa(
  dataset=dataset, workspace=workspace, return_dataframe=True
@@ -41,7 +40,8 @@ def optimize_semantic_model(dataset: str, workspace: Optional[str] = None):
 
  if len(fallback_filt) > 0:
  print(
- f"{icons.yellow_dot} The '{dataset}' semantic model is a Direct Lake semantic model which contains views. Since views always fall back to DirectQuery, it is recommended to only use lakehouse tables and not views."
+ f"{icons.yellow_dot} The '{dataset}' semantic model is a Direct Lake semantic model which contains views. "
+ "Since views always fall back to DirectQuery, it is recommended to only use lakehouse tables and not views."
  )
 
  # Potential model reduction estimate
@@ -79,7 +79,9 @@ def generate_measure_descriptions(
 
  validModels = ["gpt-35-turbo", "gpt-35-turbo-16k", "gpt-4"]
  if gpt_model not in validModels:
- raise ValueError(f"{icons.red_dot} The '{gpt_model}' model is not a valid model. Enter a gpt_model from this list: {validModels}.")
+ raise ValueError(
+ f"{icons.red_dot} The '{gpt_model}' model is not a valid model. Enter a gpt_model from this list: {validModels}."
+ )
 
  dfM = fabric.list_measures(dataset=dataset, workspace=workspace)
 
@@ -114,8 +116,7 @@ def generate_measure_descriptions(
  )
 
  # Update the model to use the new descriptions
- #with connect_semantic_model(dataset=dataset, workspace=workspace, readonly=False) as tom:
-
+ # with connect_semantic_model(dataset=dataset, workspace=workspace, readonly=False) as tom:
 
  # for t in m.Tables:
  # tName = t.Name
@@ -146,10 +147,10 @@ def generate_aggs(
  import System
 
  # columns = {
- #'SalesAmount': 'Sum',
- #'ProductKey': 'GroupBy',
- #'OrderDateKey': 'GroupBy'
- # }
+ # 'SalesAmount': 'Sum',
+ # 'ProductKey': 'GroupBy',
+ # 'OrderDateKey': 'GroupBy'
+ # }
 
  if workspace is None:
  workspace_id = fabric.get_workspace_id()
@@ -171,33 +172,44 @@ def generate_aggs(
  numericTypes = ["Int64", "Double", "Decimal"]
 
  if any(value not in aggTypes for value in columns.values()):
- raise ValueError(f"{icons.red_dot} Invalid aggregation type(s) have been specified in the 'columns' parameter. Valid aggregation types: {aggTypes}.")
+ raise ValueError(
+ f"{icons.red_dot} Invalid aggregation type(s) have been specified in the 'columns' parameter. Valid aggregation types: {aggTypes}."
+ )
 
  dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
  dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
  dfM = fabric.list_measures(dataset=dataset, workspace=workspace)
  dfR = fabric.list_relationships(dataset=dataset, workspace=workspace)
  if not any(r["Mode"] == "DirectLake" for i, r in dfP.iterrows()):
- raise ValueError(f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode. This function is only relevant for Direct Lake semantic models.")
-
+ raise ValueError(
+ f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode. This function is only relevant for Direct Lake semantic models."
+ )
+
  dfC_filtT = dfC[dfC["Table Name"] == table_name]
 
  if len(dfC_filtT) == 0:
- raise ValueError(f"{icons.red_dot} The '{table_name}' table does not exist in the '{dataset}' semantic model within the '{workspace}' workspace.")
+ raise ValueError(
+ f"{icons.red_dot} The '{table_name}' table does not exist in the '{dataset}' semantic model within the '{workspace}' workspace."
+ )
 
  dfC_filt = dfC[
  (dfC["Table Name"] == table_name) & (dfC["Column Name"].isin(columnValues))
  ]
 
  if len(columns) != len(dfC_filt):
- raise ValueError(f"{icons.red_dot} Columns listed in '{columnValues}' do not exist in the '{table_name}' table in the '{dataset}' semantic model within the '{workspace}' workspace.")
+ raise ValueError(
+ f"{icons.red_dot} Columns listed in '{columnValues}' do not exist in the '{table_name}' table in the '{dataset}' semantic model within the '{workspace}' workspace."
+ )
 
  # Check if doing sum/count/min/max etc. on a non-number column
- for col, agg in columns.items():
- dfC_col = dfC_filt[dfC_filt["Column Name"] == col]
+ for cm, agg in columns.items():
+ dfC_col = dfC_filt[dfC_filt["Column Name"] == cm]
  dataType = dfC_col["Data Type"].iloc[0]
  if agg in aggTypesAggregate and dataType not in numericTypes:
- raise ValueError(f"{icons.red_dot} The '{col}' column in the '{table_name}' table is of '{dataType}' data type. Only columns of '{numericTypes}' data types can be aggregated as '{aggTypesAggregate}' aggregation types.")
+ raise ValueError(
+ f"{icons.red_dot} The '{cm}' column in the '{table_name}' table is of '{dataType}' data type. Only columns of '{numericTypes}' data types"
+ f" can be aggregated as '{aggTypesAggregate}' aggregation types."
+ )
 
  # Create/update lakehouse delta agg table
  aggSuffix = "_agg"
@@ -213,7 +225,10 @@ def generate_aggs(
  dfI_filt = dfI[(dfI["Id"] == sqlEndpointId)]
 
  if len(dfI_filt) == 0:
- raise ValueError(f"{icons.red_dot} The lakehouse (SQL Endpoint) used by the '{dataset}' semantic model does not reside in the '{lakehouse_workspace}' workspace. Please update the lakehouse_workspace parameter.")
+ raise ValueError(
+ f"{icons.red_dot} The lakehouse (SQL Endpoint) used by the '{dataset}' semantic model does not reside in"
+ f" the '{lakehouse_workspace}' workspace. Please update the lakehouse_workspace parameter."
+ )
 
  lakehouseName = dfI_filt["Display Name"].iloc[0]
  lakehouse_id = resolve_lakehouse_id(
@@ -223,8 +238,8 @@ def generate_aggs(
  # Generate SQL query
  query = "SELECT"
  groupBy = "\nGROUP BY"
- for col, agg in columns.items():
- colFilt = dfC_filt[dfC_filt["Column Name"] == col]
+ for cm, agg in columns.items():
+ colFilt = dfC_filt[dfC_filt["Column Name"] == cm]
  sourceCol = colFilt["Source"].iloc[0]
 
  if agg == "GroupBy":
@@ -328,7 +343,9 @@ def generate_aggs(
  col.DataType = System.Enum.Parse(TOM.DataType, dType)
 
  m.Tables[aggTableName].Columns.Add(col)
- print(f"{icons.green_dot} The '{aggTableName}'[{cName}] column has been added.")
+ print(
+ f"{icons.green_dot} The '{aggTableName}'[{cName}] column has been added."
+ )
 
  # Create relationships
  relMap = {"m": "Many", "1": "One", "0": "None"}
@@ -367,10 +384,11 @@ def generate_aggs(
  print(
  f"{icons.green_dot} '{aggTableName}'[{fromColumn}] -> '{toTable}'[{toColumn}] relationship has been added."
  )
- except:
+ except Exception as e:
  print(
  f"{icons.red_dot} '{aggTableName}'[{fromColumn}] -> '{toTable}'[{toColumn}] relationship has not been created."
  )
+ print(f"Exception occured: {e}")
  elif toTable == table_name:
  try:
  rel.ToColumn = m.Tables[aggTableName].Columns[toColumn]
@@ -378,11 +396,12 @@ def generate_aggs(
  print(
  f"{icons.green_dot} '{fromTable}'[{fromColumn}] -> '{aggTableName}'[{toColumn}] relationship has been added."
  )
- except:
+ except Exception as e:
  print(
  f"{icons.red_dot} '{fromTable}'[{fromColumn}] -> '{aggTableName}'[{toColumn}] relationship has not been created."
  )
- f"Relationship creation is complete."
+ print(f"Exception occured: {e}")
+ "Relationship creation is complete."
 
  # Create IF measure
  f"\n{icons.in_progress} Creating measure to check if the agg table can be used..."
sempy_labs/_clear_cache.py CHANGED
@@ -1,7 +1,6 @@
- import sempy
  import sempy.fabric as fabric
  from ._helper_functions import resolve_dataset_id
- from typing import List, Optional, Union
+ from typing import Optional
  import sempy_labs._icons as icons
 
 
@@ -25,10 +24,10 @@ def clear_cache(dataset: str, workspace: Optional[str] = None):
  datasetID = resolve_dataset_id(dataset=dataset, workspace=workspace)
 
  xmla = f"""
- <ClearCache xmlns="http://schemas.microsoft.com/analysisservices/2003/engine">
+ <ClearCache xmlns="http://schemas.microsoft.com/analysisservices/2003/engine">
  <Object>
- <DatabaseID>{datasetID}</DatabaseID>
- </Object>
+ <DatabaseID>{datasetID}</DatabaseID>
+ </Object>
  </ClearCache>
  """
  fabric.execute_xmla(dataset=dataset, xmla_command=xmla, workspace=workspace)
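
The clear_cache change above only re-indents the generated XMLA; behaviour is unchanged. For context, a hedged usage sketch (the dataset and workspace names are placeholders):

    from sempy_labs import clear_cache

    # Placeholder names; clear_cache sends the ClearCache XMLA command shown above
    # for the model's database id, and workspace defaults to the current workspace
    # when omitted.
    clear_cache(dataset="MySemanticModel", workspace="MyWorkspace")
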
sempy_labs/_connections.py CHANGED
@@ -1,8 +1,6 @@
- import sempy
  import sempy.fabric as fabric
  import pandas as pd
- from typing import List, Optional, Union
- import sempy_labs._icons as icons
+ from sempy.fabric.exceptions import FabricHTTPException
 
 
  def create_connection_cloud(
@@ -56,29 +54,32 @@ def create_connection_cloud(
  },
  }
 
- response = client.post(f"/v1/connections", json=request_body)
-
- if response.status_code == 200:
- o = response.json()
- new_data = {
- "Connection Id": o.get("id"),
- "Connection Name": o.get("name"),
- "Connectivity Type": o.get("connectivityType"),
- "Connection Type": o.get("connectionDetails",{}).get("type"),
- "Connection Path": o.get("connectionDetails",{}).get("path"),
- "Privacy Level": o.get("privacyLevel"),
- "Credential Type": o.get("credentialDetails",{}).get("credentialType"),
- "Single Sign On Type": o.get("credentialDetails",{}).get("singleSignOnType"),
- "Connection Encryption": o.get("credentialDetails",{}).get("connectionEncryption"),
- "Skip Test Connection": o.get("credentialDetails",{}).get("skipTestConnection"),
- }
- df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+ response = client.post("/v1/connections", json=request_body)
+
+ if response.status_code != 200:
+ raise FabricHTTPException(response)
+ o = response.json()
+ new_data = {
+ "Connection Id": o.get("id"),
+ "Connection Name": o.get("name"),
+ "Connectivity Type": o.get("connectivityType"),
+ "Connection Type": o.get("connectionDetails", {}).get("type"),
+ "Connection Path": o.get("connectionDetails", {}).get("path"),
+ "Privacy Level": o.get("privacyLevel"),
+ "Credential Type": o.get("credentialDetails", {}).get("credentialType"),
+ "Single Sign On Type": o.get("credentialDetails", {}).get("singleSignOnType"),
+ "Connection Encryption": o.get("credentialDetails", {}).get(
+ "connectionEncryption"
+ ),
+ "Skip Test Connection": o.get("credentialDetails", {}).get(
+ "skipTestConnection"
+ ),
+ }
+ df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
 
- df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool)
+ df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool)
 
- return df
- else:
- print(f"{icons.red_dot} {response.status_code}")
+ return df
 
 
  def create_connection_on_prem(
@@ -131,30 +132,33 @@ def create_connection_on_prem(
  },
  }
 
- response = client.post(f"/v1/connections", json=request_body)
-
- if response.status_code == 200:
- o = response.json()
- new_data = {
- "Connection Id": o.get("id"),
- "Connection Name": o.get("name"),
- "Gateway ID": o.get("gatewayId"),
- "Connectivity Type": o.get("connectivityType"),
- "Connection Type": o.get("connectionDetails",{}).get("type"),
- "Connection Path": o.get("connectionDetails",{}).get("path"),
- "Privacy Level": o.get("privacyLevel"),
- "Credential Type": o.get("credentialDetails",{}).get("credentialType"),
- "Single Sign On Type": o.get("credentialDetails",{}).get("singleSignOnType"),
- "Connection Encryption": o.get("credentialDetails",{}).get("connectionEncryption"),
- "Skip Test Connection": o.get("credentialDetails",{}).get("skipTestConnection"),
- }
- df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+ response = client.post("/v1/connections", json=request_body)
+
+ if response.status_code != 200:
+ raise FabricHTTPException(response)
+ o = response.json()
+ new_data = {
+ "Connection Id": o.get("id"),
+ "Connection Name": o.get("name"),
+ "Gateway ID": o.get("gatewayId"),
+ "Connectivity Type": o.get("connectivityType"),
+ "Connection Type": o.get("connectionDetails", {}).get("type"),
+ "Connection Path": o.get("connectionDetails", {}).get("path"),
+ "Privacy Level": o.get("privacyLevel"),
+ "Credential Type": o.get("credentialDetails", {}).get("credentialType"),
+ "Single Sign On Type": o.get("credentialDetails", {}).get("singleSignOnType"),
+ "Connection Encryption": o.get("credentialDetails", {}).get(
+ "connectionEncryption"
+ ),
+ "Skip Test Connection": o.get("credentialDetails", {}).get(
+ "skipTestConnection"
+ ),
+ }
+ df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
 
- df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool)
+ df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool)
 
- return df
- else:
- print(f"{icons.red_dot} {response.status_code}")
+ return df
 
 
  def create_connection_vnet(
@@ -209,27 +213,30 @@ def create_connection_vnet(
  },
  }
 
- response = client.post(f"/v1/connections", json=request_body)
-
- if response.status_code == 200:
- o = response.json()
- new_data = {
- "Connection Id": o.get("id"),
- "Connection Name": o.get("name"),
- "Gateway ID": o.get("gatewayId"),
- "Connectivity Type": o.get("connectivityType"),
- "Connection Type": o.get("connectionDetails",{}).get("type"),
- "Connection Path": o.get("connectionDetails",{}).get("path"),
- "Privacy Level": o.get("privacyLevel"),
- "Credential Type": o.get("credentialDetails",{}).get("credentialType"),
- "Single Sign On Type": o.get("credentialDetails",{}).get("singleSignOnType"),
- "Connection Encryption": o.get("credentialDetails",{}).get("connectionEncryption"),
- "Skip Test Connection": o.get("credentialDetails",{}).get("skipTestConnection"),
- }
- df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
-
- df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool)
-
- return df
- else:
- print(f"{icons.red_dot} {response.status_code}")
+ response = client.post("/v1/connections", json=request_body)
+
+ if response.status_code != 200:
+ raise FabricHTTPException(response)
+ o = response.json()
+ new_data = {
+ "Connection Id": o.get("id"),
+ "Connection Name": o.get("name"),
+ "Gateway ID": o.get("gatewayId"),
+ "Connectivity Type": o.get("connectivityType"),
+ "Connection Type": o.get("connectionDetails", {}).get("type"),
+ "Connection Path": o.get("connectionDetails", {}).get("path"),
+ "Privacy Level": o.get("privacyLevel"),
+ "Credential Type": o.get("credentialDetails", {}).get("credentialType"),
+ "Single Sign On Type": o.get("credentialDetails", {}).get("singleSignOnType"),
+ "Connection Encryption": o.get("credentialDetails", {}).get(
+ "connectionEncryption"
+ ),
+ "Skip Test Connection": o.get("credentialDetails", {}).get(
+ "skipTestConnection"
+ ),
+ }
+ df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+ df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool)
+
+ return df
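
All three connection helpers in _connections.py now share the same fail-fast shape: any non-200 response raises FabricHTTPException instead of printing the status code, and the nested o.get(...) lookups are reformatted. A condensed sketch of that shared shape follows; the REST path and field names are taken from the diff, but the client construction happens outside the excerpted hunks, so FabricRestClient and the helper name here are assumptions:

    import pandas as pd
    import sempy.fabric as fabric
    from sempy.fabric.exceptions import FabricHTTPException

    def _post_connection(request_body: dict) -> pd.DataFrame:
        # Condensed illustration of the 0.6.0 pattern, not the library's code.
        client = fabric.FabricRestClient()  # assumed; not shown in the excerpted hunks
        response = client.post("/v1/connections", json=request_body)
        if response.status_code != 200:
            # Fail fast instead of printing the status code (the 0.5.0 behaviour).
            raise FabricHTTPException(response)
        o = response.json()
        new_data = {
            "Connection Id": o.get("id"),
            "Connection Name": o.get("name"),
            "Credential Type": o.get("credentialDetails", {}).get("credentialType"),
            "Skip Test Connection": o.get("credentialDetails", {}).get("skipTestConnection"),
        }
        df = pd.DataFrame(new_data, index=[0])
        df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool)
        return df
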
sempy_labs/_dax.py CHANGED
@@ -1,8 +1,10 @@
- import sempy
  import sempy.fabric as fabric
  import pandas as pd
- from sempy_labs._helper_functions import resolve_dataset_id
- from typing import List, Optional, Union
+ from sempy_labs._helper_functions import (
+ resolve_dataset_id,
+ resolve_workspace_name_and_id,
+ )
+ from typing import Optional
  from sempy._utils._log import log
 
 
@@ -40,17 +42,13 @@ def evaluate_dax_impersonation(
 
  # https://learn.microsoft.com/en-us/rest/api/power-bi/datasets/execute-queries-in-group
 
- if workspace is None:
- workspace_id = fabric.get_workspace_id()
- workspace = fabric.resolve_workspace_name(workspace_id)
- else:
- workspace_id = fabric.resolve_workspace_id(workspace)
+ (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
 
  dataset_id = resolve_dataset_id(dataset=dataset, workspace=workspace)
 
  request_body = {
  "queries": [{"query": dax_query}],
- "impersonatedUserName": user_name
+ "impersonatedUserName": user_name,
  }
 
  client = fabric.PowerBIRestClient()
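
evaluate_dax_impersonation now delegates workspace resolution to resolve_workspace_name_and_id, imported from sempy_labs._helper_functions. That helper's implementation is not part of this excerpt; judging from the inline code it replaces, it presumably behaves roughly like this sketch:

    from typing import Optional, Tuple
    import sempy.fabric as fabric

    def resolve_workspace_name_and_id(workspace: Optional[str] = None) -> Tuple[str, str]:
        # Sketch inferred from the removed inline logic above, not the shipped helper:
        # default to the notebook's current workspace when none is given, otherwise
        # resolve the workspace id from the provided name.
        if workspace is None:
            workspace_id = fabric.get_workspace_id()
            workspace = fabric.resolve_workspace_name(workspace_id)
        else:
            workspace_id = fabric.resolve_workspace_id(workspace)
        return workspace, workspace_id
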