semantic-link-labs 0.4.2__py3-none-any.whl → 0.6.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of semantic-link-labs might be problematic.

Files changed (54)
  1. {semantic_link_labs-0.4.2.dist-info → semantic_link_labs-0.6.0.dist-info}/METADATA +2 -2
  2. semantic_link_labs-0.6.0.dist-info/RECORD +54 -0
  3. {semantic_link_labs-0.4.2.dist-info → semantic_link_labs-0.6.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +44 -14
  5. sempy_labs/_ai.py +31 -32
  6. sempy_labs/_clear_cache.py +5 -8
  7. sempy_labs/_connections.py +80 -72
  8. sempy_labs/_dax.py +7 -9
  9. sempy_labs/_generate_semantic_model.py +60 -54
  10. sempy_labs/_helper_functions.py +8 -10
  11. sempy_labs/_icons.py +15 -0
  12. sempy_labs/_list_functions.py +1139 -428
  13. sempy_labs/_model_auto_build.py +5 -6
  14. sempy_labs/_model_bpa.py +134 -1125
  15. sempy_labs/_model_bpa_rules.py +831 -0
  16. sempy_labs/_model_dependencies.py +21 -25
  17. sempy_labs/_one_lake_integration.py +10 -7
  18. sempy_labs/_query_scale_out.py +83 -93
  19. sempy_labs/_refresh_semantic_model.py +12 -16
  20. sempy_labs/_translations.py +214 -288
  21. sempy_labs/_vertipaq.py +51 -42
  22. sempy_labs/directlake/__init__.py +2 -0
  23. sempy_labs/directlake/_directlake_schema_compare.py +12 -11
  24. sempy_labs/directlake/_directlake_schema_sync.py +13 -23
  25. sempy_labs/directlake/_fallback.py +5 -7
  26. sempy_labs/directlake/_get_directlake_lakehouse.py +1 -1
  27. sempy_labs/directlake/_get_shared_expression.py +4 -8
  28. sempy_labs/directlake/_guardrails.py +6 -8
  29. sempy_labs/directlake/_list_directlake_model_calc_tables.py +18 -12
  30. sempy_labs/directlake/_show_unsupported_directlake_objects.py +4 -4
  31. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +9 -8
  32. sempy_labs/directlake/_update_directlake_partition_entity.py +129 -12
  33. sempy_labs/directlake/_warm_cache.py +5 -5
  34. sempy_labs/lakehouse/_get_lakehouse_columns.py +2 -2
  35. sempy_labs/lakehouse/_get_lakehouse_tables.py +4 -4
  36. sempy_labs/lakehouse/_lakehouse.py +3 -4
  37. sempy_labs/lakehouse/_shortcuts.py +17 -13
  38. sempy_labs/migration/__init__.py +1 -1
  39. sempy_labs/migration/_create_pqt_file.py +21 -24
  40. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +16 -13
  41. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +17 -18
  42. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +45 -46
  43. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +14 -14
  44. sempy_labs/migration/_migration_validation.py +6 -2
  45. sempy_labs/migration/_refresh_calc_tables.py +10 -5
  46. sempy_labs/report/__init__.py +2 -2
  47. sempy_labs/report/_generate_report.py +8 -7
  48. sempy_labs/report/_report_functions.py +47 -52
  49. sempy_labs/report/_report_rebind.py +38 -37
  50. sempy_labs/tom/__init__.py +1 -4
  51. sempy_labs/tom/_model.py +541 -180
  52. semantic_link_labs-0.4.2.dist-info/RECORD +0 -53
  53. {semantic_link_labs-0.4.2.dist-info → semantic_link_labs-0.6.0.dist-info}/LICENSE +0 -0
  54. {semantic_link_labs-0.4.2.dist-info → semantic_link_labs-0.6.0.dist-info}/top_level.txt +0 -0
{semantic_link_labs-0.4.2.dist-info → semantic_link_labs-0.6.0.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: semantic-link-labs
- Version: 0.4.2
+ Version: 0.6.0
  Summary: Semantic Link Labs project
  Author: Microsoft Corporation
  License: MIT License
@@ -14,7 +14,7 @@ Classifier: Programming Language :: Python :: 3 :: Only
  Classifier: Framework :: Jupyter
  Requires-Python: <3.12,>=3.10
  License-File: LICENSE
- Requires-Dist: semantic-link-sempy >=0.7.5
+ Requires-Dist: semantic-link-sempy >=0.7.6
  Requires-Dist: anytree
  Requires-Dist: powerbiclient
  Provides-Extra: test
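
The metadata change raises the semantic-link-sempy floor from 0.7.5 to 0.7.6 while keeping the supported Python window at >=3.10,<3.12. A minimal sketch (standard library only; the printout format is ours, not part of the package) for confirming what actually got resolved in an environment:

from importlib.metadata import version

# Both values should satisfy the constraints declared in the 0.6.0 METADATA:
# semantic-link-labs == 0.6.0 and semantic-link-sempy >= 0.7.6.
print("semantic-link-labs:", version("semantic-link-labs"))
print("semantic-link-sempy:", version("semantic-link-sempy"))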
semantic_link_labs-0.6.0.dist-info/RECORD ADDED
@@ -0,0 +1,54 @@
+ sempy_labs/__init__.py,sha256=T_5thSoxVg83rUR_aac7P4kBKPrf5wylwRAxNMzYkjw,4976
+ sempy_labs/_ai.py,sha256=1sWyWxM_fAqdT5Ih2F3QjqWepJ0R48RvffNHSZKfuXs,18079
+ sempy_labs/_clear_cache.py,sha256=AfTCAwSK5EYhI2aBPsSun8FxJy-GHlByxH-rBxHfHzM,1362
+ sempy_labs/_connections.py,sha256=w1dFC4WeTNFmLGD2EL_Syk0Wb1Eij18we2FVn_VaCD8,7641
+ sempy_labs/_dax.py,sha256=dt1GgHceyM7f6phRBPxRKnmQy_KYKpcgFQHuOjGbpLo,2029
+ sempy_labs/_generate_semantic_model.py,sha256=qdWL6GDuCstzGLzChNRZ_nr9CndIl-sKmYG1WrZHXcw,9689
+ sempy_labs/_helper_functions.py,sha256=bcDDwy7LqOVV3x4scCSBxzMQuU2sLWBzgMSaluDrxEk,14306
+ sempy_labs/_icons.py,sha256=RfKaIzFHvu9mtQo6N10w9BQUa9JPHQ6NtJUyASZBdTo,536
+ sempy_labs/_list_functions.py,sha256=uNZsQD32jQqQYgOegLYO1m28pRqCpCqZMIm0fyA2QW0,82709
+ sempy_labs/_model_auto_build.py,sha256=fX3bCLFCOMQHuheKIoB48fUABG7XAT7qqsMbUiWSrY0,5071
+ sempy_labs/_model_bpa.py,sha256=PTC_vKKg-r66o0cXIGsx-0cW2ms-S1mWOLPZW8vGa2Q,14080
+ sempy_labs/_model_bpa_rules.py,sha256=aDNudcvBLaKaDLy82MF2D8gbrAtVvrOt9u5AqDkltnk,47291
+ sempy_labs/_model_dependencies.py,sha256=0uOhTMLpfhZ0mjIjhKacrvzOYs1uHoFfzc2hqTGAY50,12965
+ sempy_labs/_one_lake_integration.py,sha256=r9gnLoFjzfJroAzlJ5aWpKs-rxemzJxnN7tADML0IQc,6246
+ sempy_labs/_query_scale_out.py,sha256=-RMDO7T8QsIaDK3xXLyg1eOKXK5sKOBTrIISaKoSFRI,14901
+ sempy_labs/_refresh_semantic_model.py,sha256=TGGdqMzpR0POXgaXkqxfD2ene-SwEvU3aujsVlkWX7E,6734
+ sempy_labs/_translations.py,sha256=bWlLfz9ynFIQEBRHAWAyV1XZdYTUzLCTtipZKk6Fd_U,12920
+ sempy_labs/_vertipaq.py,sha256=pIRfJKjsShxVDdMBoeO97vDvurZ98iblcSdtowSJgxQ,33303
+ sempy_labs/directlake/__init__.py,sha256=yDoHDd4TyP7wTTjgMASTsjyXgRrTJwJjgml-IjVl460,1781
+ sempy_labs/directlake/_directlake_schema_compare.py,sha256=xljJ60aymEJeQV9w-0NQTzFGA8AUiYvsByX7QqHxglg,4716
+ sempy_labs/directlake/_directlake_schema_sync.py,sha256=VzTT9o6xceVfx_8wJkvXwy1jGwjH5HztIrQ_Vwh4B4Q,4940
+ sempy_labs/directlake/_fallback.py,sha256=o6FswFiisMVbDqRTCJ2hHa1M65KafkGjaqQl5PQ6szQ,1992
+ sempy_labs/directlake/_get_directlake_lakehouse.py,sha256=IkkpJe7iFbKT_9KCMdaLucuw_WCcdmEQd3bz6-Wmgx0,2346
+ sempy_labs/directlake/_get_shared_expression.py,sha256=3KIrsaCX-t2r4_anC8Z4MMlbu3Iaa_gCHCC8Ok7NDxA,1936
+ sempy_labs/directlake/_guardrails.py,sha256=pnLYM7ZO6_hq38UMNeJRx5Fp9XbcIpgPuAS3py24oA8,2259
+ sempy_labs/directlake/_list_directlake_model_calc_tables.py,sha256=_rpnbgsFAz2W16PpgIOB0Rj_Fs1ZKrDbz3DUaaR_bfU,2143
+ sempy_labs/directlake/_show_unsupported_directlake_objects.py,sha256=QNj2wHzFGtjnsAICmlc7BuhCYkw0An0XnditDTCG2JM,3358
+ sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py,sha256=b_Y5_GSfWC25wH6R7L37-AHO9fvKkmxRGaP6dVDC7-w,3233
+ sempy_labs/directlake/_update_directlake_partition_entity.py,sha256=nMh2CqQrp64NipmryuLKewljmFt6aVKaecIwSedIFIw,7687
+ sempy_labs/directlake/_warm_cache.py,sha256=RjpRjtzSaKw6gHKPw4O6phBwmnk7LHp4B1yrW8Z6VEY,8242
+ sempy_labs/lakehouse/__init__.py,sha256=i6VRx4dR1SIN-1GxioiNwhC4FxbozRCIz5TfXjb9rKc,587
+ sempy_labs/lakehouse/_get_lakehouse_columns.py,sha256=Bb_iCTlNwl0wdN4dW_E7tVnfbHhHwQT_l0SUqvcbYpo,2582
+ sempy_labs/lakehouse/_get_lakehouse_tables.py,sha256=c2pqEyhiEJhnRaE3Zlz3EHRmR9bFI6J7CZxqLVa87iE,9091
+ sempy_labs/lakehouse/_lakehouse.py,sha256=kUMiGi8SyY4oTWAB4OFzGQzlqUdAF2EolpZza15gFrE,2763
+ sempy_labs/lakehouse/_shortcuts.py,sha256=oDYexjMxZX2JoaEqSyO1R_PQ2c5qTUNVbHGtVQJZorg,6977
+ sempy_labs/migration/__init__.py,sha256=w4vvGk6wTWXVfofJDmio2yIFvSSJsxOpjv6mvNGmrOI,1043
+ sempy_labs/migration/_create_pqt_file.py,sha256=XTG1BQJbfgKpwT8jKWrlCfDuMYdecM94qTLzrpQ7wck,9035
+ sempy_labs/migration/_migrate_calctables_to_lakehouse.py,sha256=6Z1DIdZCi_LX0o-SfPO5zzqWa0hd_zEwcQzIsOer9SM,20551
+ sempy_labs/migration/_migrate_calctables_to_semantic_model.py,sha256=Ne-2_PwW_LUR8vG2pNY3lxa9iib1dIpYkDHRaqyqyY8,6420
+ sempy_labs/migration/_migrate_model_objects_to_semantic_model.py,sha256=T7Ijo9g9AmB8KMyqvYOTurSdabUKKaRcfdstSgL1QQ8,24399
+ sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py,sha256=1ZWQ9sawd2_ojTnWTsRaDEj7yMnUVOpCQZeI0YHhb-g,7066
+ sempy_labs/migration/_migration_validation.py,sha256=phbUUIYA5z4dZvEKrVbByMTPPwMscY7Dy9HVxE8z_HM,2483
+ sempy_labs/migration/_refresh_calc_tables.py,sha256=pImD8wMOMxs043e6s4G2TPrFzsGVHklK1aez_oVOgro,6086
+ sempy_labs/report/__init__.py,sha256=78lLE4UlmwfSY-6FxMIgnshMQF90Qfq_qM7A3vrNtHw,848
+ sempy_labs/report/_generate_report.py,sha256=fJUiUgsF2mS-dyaVjRvwAASvbSrSm8BUDB53Ru3rhQI,8545
+ sempy_labs/report/_report_functions.py,sha256=6xpLyVrRkA2ZwwCj2By8_HX2k-EHoQUZtuUaTy0d7vM,30004
+ sempy_labs/report/_report_rebind.py,sha256=PmZ3f3Rf3Yka8Eaea87JosSiFvEtzjfbUDQOzRx_vlQ,4614
+ sempy_labs/tom/__init__.py,sha256=Qbs8leW0fjzvWwOjyWK3Hjeehu7IvpB1beASGsi28bk,121
+ sempy_labs/tom/_model.py,sha256=kGRfpA6IHy8KX6QKVciekOa2Y97d6r0eRol5K8YMMIg,149938
+ semantic_link_labs-0.6.0.dist-info/LICENSE,sha256=ws_MuBL-SCEBqPBFl9_FqZkaaydIJmxHrJG2parhU4M,1141
+ semantic_link_labs-0.6.0.dist-info/METADATA,sha256=ZFdW982vsZ0btahdjKpAgiDMEXxnjn_nwiPY8iMMrdI,764
+ semantic_link_labs-0.6.0.dist-info/WHEEL,sha256=Wyh-_nZ0DJYolHNn1_hMa4lM7uDedD_RGVwbmTjyItk,91
+ semantic_link_labs-0.6.0.dist-info/top_level.txt,sha256=kiQX1y42Dbein1l3Q8jMUYyRulDjdlc2tMepvtrvixQ,11
+ semantic_link_labs-0.6.0.dist-info/RECORD,,
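
Each RECORD line has the form path,sha256=<digest>,size, where the digest is an unpadded URL-safe base64 SHA-256 of the installed file. A small sketch (the file path is illustrative) for recomputing a digest to compare against a RECORD entry:

import base64
import hashlib

def record_digest(path: str) -> str:
    # Same encoding wheel RECORD uses: SHA-256, URL-safe base64, trailing '=' stripped.
    with open(path, "rb") as f:
        raw = hashlib.sha256(f.read()).digest()
    return "sha256=" + base64.urlsafe_b64encode(raw).rstrip(b"=").decode("ascii")

# Illustrative check against the entry for sempy_labs/_icons.py above.
print(record_digest("sempy_labs/_icons.py"))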
{semantic_link_labs-0.4.2.dist-info → semantic_link_labs-0.6.0.dist-info}/WHEEL CHANGED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: bdist_wheel (0.43.0)
+ Generator: setuptools (71.1.0)
  Root-Is-Purelib: true
  Tag: py3-none-any

sempy_labs/__init__.py CHANGED
@@ -9,13 +9,15 @@ from sempy_labs._dax import evaluate_dax_impersonation
  from sempy_labs._generate_semantic_model import (
  create_blank_semantic_model,
  create_semantic_model_from_bim,
- # deploy_semantic_model,
+ deploy_semantic_model,
  get_semantic_model_bim,
  )
  from sempy_labs._list_functions import (
+ delete_custom_pool,
  list_semantic_model_objects,
  list_shortcuts,
  get_object_level_security,
+ list_capacities,
  # list_annotations,
  # list_columns,
  list_dashboards,
@@ -34,9 +36,21 @@ from sempy_labs._list_functions import (
  # list_sqlendpoints,
  # list_tables,
  list_warehouses,
- # list_workspace_role_assignments,
+ list_workspace_role_assignments,
  create_warehouse,
  update_item,
+ list_custom_pools,
+ create_custom_pool,
+ update_custom_pool,
+ assign_workspace_to_capacity,
+ unassign_workspace_from_capacity,
+ get_spark_settings,
+ update_spark_settings,
+ add_user_to_workspace,
+ delete_user_from_workspace,
+ update_workspace_user,
+ list_workspace_users,
+ assign_workspace_to_dataflow_storage,
  )

  from sempy_labs._helper_functions import (
@@ -54,8 +68,10 @@ from sempy_labs._helper_functions import (
  resolve_report_name,
  # language_validate
  )
+
  # from sempy_labs._model_auto_build import model_auto_build
- from sempy_labs._model_bpa import model_bpa_rules, run_model_bpa
+ from sempy_labs._model_bpa import run_model_bpa
+ from sempy_labs._model_bpa_rules import model_bpa_rules
  from sempy_labs._model_dependencies import (
  measure_dependency_tree,
  get_measure_dependencies,
@@ -85,6 +101,7 @@ from sempy_labs._vertipaq import (
  )

  __all__ = [
+ "delete_custom_pool",
  "clear_cache",
  # create_connection_cloud,
  # create_connection_vnet,
@@ -92,7 +109,7 @@ __all__ = [
  "evaluate_dax_impersonation",
  "create_blank_semantic_model",
  "create_semantic_model_from_bim",
- #'deploy_semantic_model',
+ "deploy_semantic_model",
  "get_semantic_model_bim",
  "get_object_level_security",
  #'list_annotations',
@@ -113,7 +130,7 @@ __all__ = [
  #'list_sqlendpoints',
  #'list_tables',
  "list_warehouses",
- #'list_workspace_role_assignments',
+ "list_workspace_role_assignments",
  "create_warehouse",
  "update_item",
  "create_abfss_path",
@@ -129,20 +146,20 @@ __all__ = [
  "resolve_report_id",
  "resolve_report_name",
  #'language_validate',
- #"model_auto_build",
+ # "model_auto_build",
  "model_bpa_rules",
  "run_model_bpa",
  "measure_dependency_tree",
  "get_measure_dependencies",
  "get_model_calc_dependencies",
  "export_model_to_onelake",
- 'qso_sync',
- 'qso_sync_status',
- 'set_qso',
- 'list_qso_settings',
- 'disable_qso',
- 'set_semantic_model_storage_format',
- 'set_workspace_default_storage_format',
+ "qso_sync",
+ "qso_sync_status",
+ "set_qso",
+ "list_qso_settings",
+ "disable_qso",
+ "set_semantic_model_storage_format",
+ "set_workspace_default_storage_format",
  "refresh_semantic_model",
  "cancel_dataset_refresh",
  "translate_semantic_model",
@@ -150,5 +167,18 @@ __all__ = [
  #'visualize_vertipaq',
  "import_vertipaq_analyzer",
  "list_semantic_model_objects",
- "list_shortcuts"
+ "list_shortcuts",
+ "list_custom_pools",
+ "create_custom_pool",
+ "update_custom_pool",
+ "assign_workspace_to_capacity",
+ "unassign_workspace_from_capacity",
+ "get_spark_settings",
+ "update_spark_settings",
+ "add_user_to_workspace",
+ "delete_user_from_workspace",
+ "update_workspace_user",
+ "list_workspace_users",
+ "assign_workspace_to_dataflow_storage",
+ "list_capacities",
  ]
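
Compared with 0.4.2, the package root now re-exports deploy_semantic_model plus a set of workspace, capacity, user, and Spark-settings helpers from _list_functions. A hedged sketch of importing them; the keyword arguments and return types are assumptions, since the signatures themselves are not part of this diff:

import sempy_labs as labs

# Newly re-exported in 0.6.0 (previously commented out or absent in __init__).
pools = labs.list_custom_pools(workspace="Sales Workspace")      # workspace kwarg is assumed
users = labs.list_workspace_users(workspace="Sales Workspace")   # workspace kwarg is assumed
capacities = labs.list_capacities()
print(pools.shape, users.shape, capacities.shape)                # each helper is assumed to return a DataFrame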
sempy_labs/_ai.py CHANGED
@@ -40,7 +40,8 @@ def optimize_semantic_model(dataset: str, workspace: Optional[str] = None):

  if len(fallback_filt) > 0:
  print(
- f"{icons.yellow_dot} The '{dataset}' semantic model is a Direct Lake semantic model which contains views. Since views always fall back to DirectQuery, it is recommended to only use lakehouse tables and not views."
+ f"{icons.yellow_dot} The '{dataset}' semantic model is a Direct Lake semantic model which contains views. "
+ "Since views always fall back to DirectQuery, it is recommended to only use lakehouse tables and not views."
  )

  # Potential model reduction estimate
@@ -78,10 +79,9 @@ def generate_measure_descriptions(

  validModels = ["gpt-35-turbo", "gpt-35-turbo-16k", "gpt-4"]
  if gpt_model not in validModels:
- print(
+ raise ValueError(
  f"{icons.red_dot} The '{gpt_model}' model is not a valid model. Enter a gpt_model from this list: {validModels}."
  )
- return

  dfM = fabric.list_measures(dataset=dataset, workspace=workspace)

@@ -116,8 +116,7 @@ def generate_measure_descriptions(
  )

  # Update the model to use the new descriptions
- tom_server = fabric.create_tom_server(readonly=False, workspace=workspace)
- m = tom_server.Databases.GetByName(dataset).Model
+ # with connect_semantic_model(dataset=dataset, workspace=workspace, readonly=False) as tom:

  # for t in m.Tables:
  # tName = t.Name
@@ -148,10 +147,10 @@ def generate_aggs(
  import System

  # columns = {
- #'SalesAmount': 'Sum',
- #'ProductKey': 'GroupBy',
- #'OrderDateKey': 'GroupBy'
- # }
+ # 'SalesAmount': 'Sum',
+ # 'ProductKey': 'GroupBy',
+ # 'OrderDateKey': 'GroupBy'
+ # }

  if workspace is None:
  workspace_id = fabric.get_workspace_id()
@@ -173,48 +172,44 @@ def generate_aggs(
  numericTypes = ["Int64", "Double", "Decimal"]

  if any(value not in aggTypes for value in columns.values()):
- print(
+ raise ValueError(
  f"{icons.red_dot} Invalid aggregation type(s) have been specified in the 'columns' parameter. Valid aggregation types: {aggTypes}."
  )
- return

  dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
  dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
  dfM = fabric.list_measures(dataset=dataset, workspace=workspace)
  dfR = fabric.list_relationships(dataset=dataset, workspace=workspace)
  if not any(r["Mode"] == "DirectLake" for i, r in dfP.iterrows()):
- print(
+ raise ValueError(
  f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode. This function is only relevant for Direct Lake semantic models."
  )
- return

  dfC_filtT = dfC[dfC["Table Name"] == table_name]

  if len(dfC_filtT) == 0:
- print(
+ raise ValueError(
  f"{icons.red_dot} The '{table_name}' table does not exist in the '{dataset}' semantic model within the '{workspace}' workspace."
  )
- return

  dfC_filt = dfC[
  (dfC["Table Name"] == table_name) & (dfC["Column Name"].isin(columnValues))
  ]

  if len(columns) != len(dfC_filt):
- print(
+ raise ValueError(
  f"{icons.red_dot} Columns listed in '{columnValues}' do not exist in the '{table_name}' table in the '{dataset}' semantic model within the '{workspace}' workspace."
  )
- return

  # Check if doing sum/count/min/max etc. on a non-number column
- for col, agg in columns.items():
- dfC_col = dfC_filt[dfC_filt["Column Name"] == col]
+ for cm, agg in columns.items():
+ dfC_col = dfC_filt[dfC_filt["Column Name"] == cm]
  dataType = dfC_col["Data Type"].iloc[0]
  if agg in aggTypesAggregate and dataType not in numericTypes:
- print(
- f"{icons.red_dot} The '{col}' column in the '{table_name}' table is of '{dataType}' data type. Only columns of '{numericTypes}' data types can be aggregated as '{aggTypesAggregate}' aggregation types."
+ raise ValueError(
+ f"{icons.red_dot} The '{cm}' column in the '{table_name}' table is of '{dataType}' data type. Only columns of '{numericTypes}' data types"
+ f" can be aggregated as '{aggTypesAggregate}' aggregation types."
  )
- return

  # Create/update lakehouse delta agg table
  aggSuffix = "_agg"
@@ -230,10 +225,10 @@ def generate_aggs(
  dfI_filt = dfI[(dfI["Id"] == sqlEndpointId)]

  if len(dfI_filt) == 0:
- print(
- f"{icons.red_dot} The lakehouse (SQL Endpoint) used by the '{dataset}' semantic model does not reside in the '{lakehouse_workspace}' workspace. Please update the lakehouse_workspace parameter."
+ raise ValueError(
+ f"{icons.red_dot} The lakehouse (SQL Endpoint) used by the '{dataset}' semantic model does not reside in"
+ f" the '{lakehouse_workspace}' workspace. Please update the lakehouse_workspace parameter."
  )
- return

  lakehouseName = dfI_filt["Display Name"].iloc[0]
  lakehouse_id = resolve_lakehouse_id(
@@ -243,8 +238,8 @@ def generate_aggs(
  # Generate SQL query
  query = "SELECT"
  groupBy = "\nGROUP BY"
- for col, agg in columns.items():
- colFilt = dfC_filt[dfC_filt["Column Name"] == col]
+ for cm, agg in columns.items():
+ colFilt = dfC_filt[dfC_filt["Column Name"] == cm]
  sourceCol = colFilt["Source"].iloc[0]

  if agg == "GroupBy":
@@ -284,7 +279,7 @@ def generate_aggs(
  # Create/update semantic model agg table
  tom_server = fabric.create_tom_server(readonly=False, workspace=workspace)
  m = tom_server.Databases.GetByName(dataset).Model
- f"\n{icons.in_progress} Updating the '{dataset}' semantic model..."
+ print(f"\n{icons.in_progress} Updating the '{dataset}' semantic model...")
  dfC_agg = dfC[dfC["Table Name"] == aggTableName]

  if len(dfC_agg) == 0:
@@ -348,7 +343,9 @@ def generate_aggs(
  col.DataType = System.Enum.Parse(TOM.DataType, dType)

  m.Tables[aggTableName].Columns.Add(col)
- print(f"{icons.green_dot} The '{aggTableName}'[{cName}] column has been added.")
+ print(
+ f"{icons.green_dot} The '{aggTableName}'[{cName}] column has been added."
+ )

  # Create relationships
  relMap = {"m": "Many", "1": "One", "0": "None"}
@@ -387,10 +384,11 @@ def generate_aggs(
  print(
  f"{icons.green_dot} '{aggTableName}'[{fromColumn}] -> '{toTable}'[{toColumn}] relationship has been added."
  )
- except:
+ except Exception as e:
  print(
  f"{icons.red_dot} '{aggTableName}'[{fromColumn}] -> '{toTable}'[{toColumn}] relationship has not been created."
  )
+ print(f"Exception occured: {e}")
  elif toTable == table_name:
  try:
  rel.ToColumn = m.Tables[aggTableName].Columns[toColumn]
@@ -398,11 +396,12 @@ def generate_aggs(
  print(
  f"{icons.green_dot} '{fromTable}'[{fromColumn}] -> '{aggTableName}'[{toColumn}] relationship has been added."
  )
- except:
+ except Exception as e:
  print(
  f"{icons.red_dot} '{fromTable}'[{fromColumn}] -> '{aggTableName}'[{toColumn}] relationship has not been created."
  )
- f"Relationship creation is complete."
+ print(f"Exception occured: {e}")
+ "Relationship creation is complete."

  # Create IF measure
  f"\n{icons.in_progress} Creating measure to check if the agg table can be used..."
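
The recurring change in this file is that validation failures now raise ValueError instead of printing a red-dot message and returning None, so notebook code that previously continued on bad input will now stop. A hedged caller sketch (dataset, table, and column names are illustrative):

from sempy_labs._ai import generate_aggs

try:
    generate_aggs(
        dataset="AdventureWorks",           # illustrative
        table_name="FactInternetSales",     # illustrative
        columns={"SalesAmount": "Sum", "ProductKey": "GroupBy"},
    )
except ValueError as e:
    # As of 0.6.0, invalid aggregation types, missing tables/columns, or a
    # non-Direct Lake model surface here instead of being printed.
    print(f"generate_aggs rejected the request: {e}")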
sempy_labs/_clear_cache.py CHANGED
@@ -1,7 +1,6 @@
- import sempy
  import sempy.fabric as fabric
  from ._helper_functions import resolve_dataset_id
- from typing import List, Optional, Union
+ from typing import Optional
  import sempy_labs._icons as icons


@@ -20,17 +19,15 @@ def clear_cache(dataset: str, workspace: Optional[str] = None):
  or if no lakehouse attached, resolves to the workspace of the notebook.
  """

- if workspace is None:
- workspace_id = fabric.get_workspace_id()
- workspace = fabric.resolve_workspace_name(workspace_id)
+ workspace = fabric.resolve_workspace_name(workspace)

  datasetID = resolve_dataset_id(dataset=dataset, workspace=workspace)

  xmla = f"""
- <ClearCache xmlns="http://schemas.microsoft.com/analysisservices/2003/engine">
+ <ClearCache xmlns="http://schemas.microsoft.com/analysisservices/2003/engine">
  <Object>
- <DatabaseID>{datasetID}</DatabaseID>
- </Object>
+ <DatabaseID>{datasetID}</DatabaseID>
+ </Object>
  </ClearCache>
  """
  fabric.execute_xmla(dataset=dataset, xmla_command=xmla, workspace=workspace)
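
clear_cache now hands workspace resolution to fabric.resolve_workspace_name instead of doing the None check itself; the XMLA ClearCache payload is otherwise unchanged. Usage stays the same; a small sketch with illustrative names:

from sempy_labs import clear_cache

# When workspace is omitted it resolves to the attached lakehouse's workspace
# (or the notebook's workspace), per the function's docstring.
clear_cache(dataset="AdventureWorks")
clear_cache(dataset="AdventureWorks", workspace="Sales Workspace")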
sempy_labs/_connections.py CHANGED
@@ -1,7 +1,6 @@
- import sempy
  import sempy.fabric as fabric
  import pandas as pd
- from typing import List, Optional, Union
+ from sempy.fabric.exceptions import FabricHTTPException


  def create_connection_cloud(
@@ -11,7 +10,7 @@ def create_connection_cloud(
  user_name: str,
  password: str,
  privacy_level: str,
- ):
+ ) -> pd.DataFrame:

  # https://review.learn.microsoft.com/en-us/rest/api/fabric/core/connections/create-connection?branch=features%2Fdmts&tabs=HTTP

@@ -55,29 +54,32 @@ def create_connection_cloud(
  },
  }

- response = client.post(f"/v1/connections", json=request_body)
-
- if response.status_code == 200:
- o = response.json()
- new_data = {
- "Connection Id": o["id"],
- "Connection Name": o["name"],
- "Connectivity Type": o["connectivityType"],
- "Connection Type": o["connectionDetails"]["type"],
- "Connection Path": o["connectionDetails"]["path"],
- "Privacy Level": o["privacyLevel"],
- "Credential Type": o["credentialDetails"]["credentialType"],
- "Single Sign On Type": o["credentialDetails"]["singleSignOnType"],
- "Connection Encryption": o["credentialDetails"]["connectionEncryption"],
- "Skip Test Connection": o["credentialDetails"]["skipTestConnection"],
- }
- df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+ response = client.post("/v1/connections", json=request_body)
+
+ if response.status_code != 200:
+ raise FabricHTTPException(response)
+ o = response.json()
+ new_data = {
+ "Connection Id": o.get("id"),
+ "Connection Name": o.get("name"),
+ "Connectivity Type": o.get("connectivityType"),
+ "Connection Type": o.get("connectionDetails", {}).get("type"),
+ "Connection Path": o.get("connectionDetails", {}).get("path"),
+ "Privacy Level": o.get("privacyLevel"),
+ "Credential Type": o.get("credentialDetails", {}).get("credentialType"),
+ "Single Sign On Type": o.get("credentialDetails", {}).get("singleSignOnType"),
+ "Connection Encryption": o.get("credentialDetails", {}).get(
+ "connectionEncryption"
+ ),
+ "Skip Test Connection": o.get("credentialDetails", {}).get(
+ "skipTestConnection"
+ ),
+ }
+ df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

- df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool)
+ df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool)

- return df
- else:
- print(response.status_code)
+ return df


  def create_connection_on_prem(
@@ -87,7 +89,7 @@ def create_connection_on_prem(
  database_name: str,
  credentials: str,
  privacy_level: str,
- ):
+ ) -> pd.DataFrame:

  df = pd.DataFrame(
  columns=[
@@ -130,30 +132,33 @@ def create_connection_on_prem(
  },
  }

- response = client.post(f"/v1/connections", json=request_body)
-
- if response.status_code == 200:
- o = response.json()
- new_data = {
- "Connection Id": o["id"],
- "Connection Name": o["name"],
- "Gateway ID": o["gatewayId"],
- "Connectivity Type": o["connectivityType"],
- "Connection Type": o["connectionDetails"]["type"],
- "Connection Path": o["connectionDetails"]["path"],
- "Privacy Level": o["privacyLevel"],
- "Credential Type": o["credentialDetails"]["credentialType"],
- "Single Sign On Type": o["credentialDetails"]["singleSignOnType"],
- "Connection Encryption": o["credentialDetails"]["connectionEncryption"],
- "Skip Test Connection": o["credentialDetails"]["skipTestConnection"],
- }
- df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+ response = client.post("/v1/connections", json=request_body)
+
+ if response.status_code != 200:
+ raise FabricHTTPException(response)
+ o = response.json()
+ new_data = {
+ "Connection Id": o.get("id"),
+ "Connection Name": o.get("name"),
+ "Gateway ID": o.get("gatewayId"),
+ "Connectivity Type": o.get("connectivityType"),
+ "Connection Type": o.get("connectionDetails", {}).get("type"),
+ "Connection Path": o.get("connectionDetails", {}).get("path"),
+ "Privacy Level": o.get("privacyLevel"),
+ "Credential Type": o.get("credentialDetails", {}).get("credentialType"),
+ "Single Sign On Type": o.get("credentialDetails", {}).get("singleSignOnType"),
+ "Connection Encryption": o.get("credentialDetails", {}).get(
+ "connectionEncryption"
+ ),
+ "Skip Test Connection": o.get("credentialDetails", {}).get(
+ "skipTestConnection"
+ ),
+ }
+ df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

- df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool)
+ df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool)

- return df
- else:
- print(response.status_code)
+ return df


  def create_connection_vnet(
@@ -164,7 +169,7 @@ def create_connection_vnet(
  user_name: str,
  password: str,
  privacy_level: str,
- ):
+ ) -> pd.DataFrame:

  df = pd.DataFrame(
  columns=[
@@ -208,27 +213,30 @@ def create_connection_vnet(
  },
  }

- response = client.post(f"/v1/connections", json=request_body)
-
- if response.status_code == 200:
- o = response.json()
- new_data = {
- "Connection Id": o["id"],
- "Connection Name": o["name"],
- "Gateway ID": o["gatewayId"],
- "Connectivity Type": o["connectivityType"],
- "Connection Type": o["connectionDetails"]["type"],
- "Connection Path": o["connectionDetails"]["path"],
- "Privacy Level": o["privacyLevel"],
- "Credential Type": o["credentialDetails"]["credentialType"],
- "Single Sign On Type": o["credentialDetails"]["singleSignOnType"],
- "Connection Encryption": o["credentialDetails"]["connectionEncryption"],
- "Skip Test Connection": o["credentialDetails"]["skipTestConnection"],
- }
- df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
-
- df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool)
-
- return df
- else:
- print(response.status_code)
+ response = client.post("/v1/connections", json=request_body)
+
+ if response.status_code != 200:
+ raise FabricHTTPException(response)
+ o = response.json()
+ new_data = {
+ "Connection Id": o.get("id"),
+ "Connection Name": o.get("name"),
+ "Gateway ID": o.get("gatewayId"),
+ "Connectivity Type": o.get("connectivityType"),
+ "Connection Type": o.get("connectionDetails", {}).get("type"),
+ "Connection Path": o.get("connectionDetails", {}).get("path"),
+ "Privacy Level": o.get("privacyLevel"),
+ "Credential Type": o.get("credentialDetails", {}).get("credentialType"),
+ "Single Sign On Type": o.get("credentialDetails", {}).get("singleSignOnType"),
+ "Connection Encryption": o.get("credentialDetails", {}).get(
+ "connectionEncryption"
+ ),
+ "Skip Test Connection": o.get("credentialDetails", {}).get(
+ "skipTestConnection"
+ ),
+ }
+ df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+ df["Skip Test Connection"] = df["Skip Test Connection"].astype(bool)
+
+ return df
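
All three connection helpers now return an explicitly typed pandas.DataFrame and raise FabricHTTPException on any non-200 response, where 0.4.2 printed the status code and returned None. A hedged caller sketch; the keyword names ahead of user_name are assumptions, since this diff only shows the tail of the signature, and the argument values are illustrative:

from sempy.fabric.exceptions import FabricHTTPException
from sempy_labs._connections import create_connection_cloud

try:
    df = create_connection_cloud(
        name="MyCloudConnection",                     # assumed parameter name
        server_name="myserver.database.windows.net",  # assumed parameter name
        database_name="SalesDB",                      # assumed parameter name
        user_name="svc_user",
        password="********",
        privacy_level="Organizational",
    )
    print(df[["Connection Id", "Connection Name"]])
except FabricHTTPException as exc:
    # In 0.6.0 a failed REST call raises instead of printing response.status_code.
    print(f"Connection creation failed: {exc}")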
sempy_labs/_dax.py CHANGED
@@ -1,8 +1,10 @@
- import sempy
  import sempy.fabric as fabric
  import pandas as pd
- from sempy_labs._helper_functions import resolve_dataset_id
- from typing import List, Optional, Union
+ from sempy_labs._helper_functions import (
+ resolve_dataset_id,
+ resolve_workspace_name_and_id,
+ )
+ from typing import Optional
  from sempy._utils._log import log


@@ -40,17 +42,13 @@ def evaluate_dax_impersonation(

  # https://learn.microsoft.com/en-us/rest/api/power-bi/datasets/execute-queries-in-group

- if workspace is None:
- workspace_id = fabric.get_workspace_id()
- workspace = fabric.resolve_workspace_name(workspace_id)
- else:
- workspace_id = fabric.resolve_workspace_id(workspace)
+ (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

  dataset_id = resolve_dataset_id(dataset=dataset, workspace=workspace)

  request_body = {
  "queries": [{"query": dax_query}],
- "impersonatedUserName": user_name
+ "impersonatedUserName": user_name,
  }

  client = fabric.PowerBIRestClient()
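
evaluate_dax_impersonation now resolves the workspace name and id through the shared resolve_workspace_name_and_id helper; its public behaviour (running a DAX query through the Power BI executeQueries endpoint under an impersonated user) is unchanged. A usage sketch with illustrative names:

from sempy_labs import evaluate_dax_impersonation

# Executes the query via the executeQueries REST API, impersonating the given user.
df = evaluate_dax_impersonation(
    dataset="AdventureWorks",                              # illustrative
    dax_query="EVALUATE SUMMARIZECOLUMNS('Date'[Year])",
    user_name="user@contoso.com",                          # illustrative
    workspace="Sales Workspace",                           # illustrative
)
print(df.head())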