semantic-link-labs 0.4.2__py3-none-any.whl → 0.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of semantic-link-labs might be problematic.

Files changed (44)
  1. {semantic_link_labs-0.4.2.dist-info → semantic_link_labs-0.5.0.dist-info}/METADATA +1 -1
  2. semantic_link_labs-0.5.0.dist-info/RECORD +53 -0
  3. {semantic_link_labs-0.4.2.dist-info → semantic_link_labs-0.5.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +27 -3
  5. sempy_labs/_ai.py +12 -32
  6. sempy_labs/_clear_cache.py +1 -3
  7. sempy_labs/_connections.py +39 -38
  8. sempy_labs/_generate_semantic_model.py +9 -14
  9. sempy_labs/_helper_functions.py +3 -12
  10. sempy_labs/_icons.py +1 -0
  11. sempy_labs/_list_functions.py +915 -391
  12. sempy_labs/_model_auto_build.py +2 -4
  13. sempy_labs/_model_bpa.py +26 -30
  14. sempy_labs/_model_dependencies.py +7 -13
  15. sempy_labs/_one_lake_integration.py +2 -5
  16. sempy_labs/_query_scale_out.py +12 -30
  17. sempy_labs/_refresh_semantic_model.py +5 -15
  18. sempy_labs/_translations.py +1 -1
  19. sempy_labs/_vertipaq.py +3 -10
  20. sempy_labs/directlake/_directlake_schema_compare.py +3 -9
  21. sempy_labs/directlake/_directlake_schema_sync.py +2 -6
  22. sempy_labs/directlake/_fallback.py +2 -6
  23. sempy_labs/directlake/_get_shared_expression.py +3 -9
  24. sempy_labs/directlake/_guardrails.py +3 -5
  25. sempy_labs/directlake/_list_directlake_model_calc_tables.py +3 -4
  26. sempy_labs/directlake/_show_unsupported_directlake_objects.py +1 -2
  27. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +3 -7
  28. sempy_labs/directlake/_update_directlake_partition_entity.py +2 -8
  29. sempy_labs/directlake/_warm_cache.py +5 -8
  30. sempy_labs/lakehouse/_get_lakehouse_columns.py +1 -1
  31. sempy_labs/lakehouse/_get_lakehouse_tables.py +3 -5
  32. sempy_labs/lakehouse/_lakehouse.py +1 -3
  33. sempy_labs/lakehouse/_shortcuts.py +2 -5
  34. sempy_labs/migration/_create_pqt_file.py +4 -13
  35. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +2 -6
  36. sempy_labs/migration/_migration_validation.py +4 -0
  37. sempy_labs/migration/_refresh_calc_tables.py +2 -0
  38. sempy_labs/report/_generate_report.py +2 -6
  39. sempy_labs/report/_report_functions.py +30 -73
  40. sempy_labs/report/_report_rebind.py +39 -39
  41. sempy_labs/tom/_model.py +141 -183
  42. semantic_link_labs-0.4.2.dist-info/RECORD +0 -53
  43. {semantic_link_labs-0.4.2.dist-info → semantic_link_labs-0.5.0.dist-info}/LICENSE +0 -0
  44. {semantic_link_labs-0.4.2.dist-info → semantic_link_labs-0.5.0.dist-info}/top_level.txt +0 -0
{semantic_link_labs-0.4.2.dist-info → semantic_link_labs-0.5.0.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: semantic-link-labs
-Version: 0.4.2
+Version: 0.5.0
 Summary: Semantic Link Labs project
 Author: Microsoft Corporation
 License: MIT License
semantic_link_labs-0.5.0.dist-info/RECORD ADDED
@@ -0,0 +1,53 @@
+sempy_labs/__init__.py,sha256=Xpq66PKo8KK7QIP0QYme_X_FyfZnpOBsli2V4bnMNYo,4843
+sempy_labs/_ai.py,sha256=WNCLh8wzZ7brXJWe2CNC79D9NHeS-2KpAWCIAlvZD7U,17784
+sempy_labs/_clear_cache.py,sha256=ELHcD4smBS3EiHEO1Ux97yV_Q7j_zB8TJJ4-kS1ylCU,1394
+sempy_labs/_connections.py,sha256=ghYtHBLTaTmYwgllh6I3zfFjG5lcxM2BAQR52B3z-t0,7795
+sempy_labs/_dax.py,sha256=u4qVxsu2dVaOJmso-ErScNZ5yI4lGQTlon_jmrAzvGs,2148
+sempy_labs/_generate_semantic_model.py,sha256=rBi1jmLQJNq9NOt56AqqYzAXBJ2DX2bpG7FYrHXyiEA,9180
+sempy_labs/_helper_functions.py,sha256=RlLtpEmBpoX285sSFKBBjkPfXqp8_FSIwYcoR4rZG58,14251
+sempy_labs/_icons.py,sha256=WkmhtLcQPO1PlcwFklb253dBLpwGUyXLwxn9_-nu3s0,215
+sempy_labs/_list_functions.py,sha256=Gc49_Q5e9CmdsVntvq0wFv1qdGInhhejxcvn-Vokyds,80791
+sempy_labs/_model_auto_build.py,sha256=SGz3ASfKJBpYdgFRPoofiU7kpsjPDBkeB3qCntKb6qs,5083
+sempy_labs/_model_bpa.py,sha256=xEp5LdqoMLBsUDzGWnfNrFCwr9tz9-2QDSi0NwnvXFI,63191
+sempy_labs/_model_dependencies.py,sha256=S8u0f7AAVL6Zk1Jm35EqbLsIEGD8KwBGc_kH1AlBI1A,12948
+sempy_labs/_one_lake_integration.py,sha256=XlGKghnYtXIprUdUI5fQj0ddshxE_AvUowb9YIrL2CE,6184
+sempy_labs/_query_scale_out.py,sha256=xkyCFz7vchxB6c6cMIhZXUWifOduanSbv1KGQbkmVls,15214
+sempy_labs/_refresh_semantic_model.py,sha256=bTVUNEdQiJDxQh1T0g4aYU3-VHaN9__qFAOYbx85-O0,6622
+sempy_labs/_translations.py,sha256=sH_-W8vdQ632lY68RxM5a2lCmy4MRDFXLxjvMuw4xQg,18054
+sempy_labs/_vertipaq.py,sha256=04hQ-A4wuW6gi4zIccmVoDDByk34Rp-QTkRXSo6XLfI,33266
+sempy_labs/directlake/__init__.py,sha256=HbfHvDvGE4H-xSbV6JO7TUb4HoLGJf2AeuqeQxIuuJ4,1689
+sempy_labs/directlake/_directlake_schema_compare.py,sha256=axqSYQHMs3zAhFAF3DMNdm2SK1k-95j_Zh-pZZXv8SQ,4636
+sempy_labs/directlake/_directlake_schema_sync.py,sha256=EMDPAVn53EN1PM769AxKspb_cVDcCazz4kHMKvWqJMQ,5119
+sempy_labs/directlake/_fallback.py,sha256=2dX5MRU2d04_jA799TaPezSQJWW-NrflDtdURge6Ceo,1995
+sempy_labs/directlake/_get_directlake_lakehouse.py,sha256=vEY1QBU7gAdoVQIGl59T_1TTYuXbHgk3pZA41EkVkl8,2358
+sempy_labs/directlake/_get_shared_expression.py,sha256=ngZCnoOjj278n6Yql7TVZ36z89HDej9JSRuoRWALxDs,1926
+sempy_labs/directlake/_guardrails.py,sha256=iReOycR6caBuBWpEOIpicZS_7kkSGJLTUDf2Ch0QUCE,2280
+sempy_labs/directlake/_list_directlake_model_calc_tables.py,sha256=C6C8WycGV4NQOQifh3XexBZx9hm30-Ac56sCo4MfqgI,2082
+sempy_labs/directlake/_show_unsupported_directlake_objects.py,sha256=oumzh7mXghkFFqFDDK7bBmNMRWnOZeE1DxuMsEPSBzo,3365
+sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py,sha256=lY4JlbKwow664sXK4cZ22PFTy9Gw79R-6TYx36uz8SY,3183
+sempy_labs/directlake/_update_directlake_partition_entity.py,sha256=KzUvP4r0-UOylXgM46cVKoUlbIFEpxO3ThMQl9JU-Gw,3140
+sempy_labs/directlake/_warm_cache.py,sha256=codsJhifbn8yO8Cjo40syRtFnIArIvpWQ726WZ88ZsQ,8211
+sempy_labs/lakehouse/__init__.py,sha256=i6VRx4dR1SIN-1GxioiNwhC4FxbozRCIz5TfXjb9rKc,587
+sempy_labs/lakehouse/_get_lakehouse_columns.py,sha256=QGmuH3uFuxv_mIjm4HTXaX-s1UFb9BbHnaCx9kTHgy8,2594
+sempy_labs/lakehouse/_get_lakehouse_tables.py,sha256=g_kKtntm5uPbIxNrunOVhzuKvJfC-9g5awxul2KSmUI,9085
+sempy_labs/lakehouse/_lakehouse.py,sha256=ovm5S4hk3aLChFCzUGWIJmL3wJ47wRycoh0mbBd8pRA,2774
+sempy_labs/lakehouse/_shortcuts.py,sha256=voKile93krzWK7ccmKVk2_fv1lioyAq5rk8YrJ6qy6k,6895
+sempy_labs/migration/__init__.py,sha256=l5v8pC2INdNwbAKVmvWpuVxs6lpb6omim_4BPOmNo4E,1042
+sempy_labs/migration/_create_pqt_file.py,sha256=PGC21nhIsE9TKyz110tYaf4Nle6-eV1xqvBOqUSEDQY,8986
+sempy_labs/migration/_migrate_calctables_to_lakehouse.py,sha256=ajFvHauFdEsbgxvn9JXj2kiXaRtJLEjwX4hWQG7FQy0,20609
+sempy_labs/migration/_migrate_calctables_to_semantic_model.py,sha256=Co2f579vSwkWZ95SlBStS-XJ73YwgcdfAMlJbUv_pkk,6343
+sempy_labs/migration/_migrate_model_objects_to_semantic_model.py,sha256=q4oYBDHXvcTExeufmcOOeK3jv_9A2Xef5ksyEwG0PfA,23801
+sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py,sha256=qohJC6ARjM8NiMH7nZEKqUEXMrh-IqhdeUzgrBNZ1DQ,7028
+sempy_labs/migration/_migration_validation.py,sha256=R7xz_OFmYRO4fxFWxvdl_ORZQKzqflsSBnGapmS067c,2508
+sempy_labs/migration/_refresh_calc_tables.py,sha256=VumJaTiA3bTfm8jWwyIl7gxW4-a7W_3auGjWRcvd65g,6043
+sempy_labs/report/__init__.py,sha256=fkjbkAXZuH7VnAn-k3iB4dngWZKaX-k0bxS6mBa9iAs,846
+sempy_labs/report/_generate_report.py,sha256=uFbTO_7GSItt5wM_yZrKybEQ3kQoqXKbY1bGzlOW5c0,8502
+sempy_labs/report/_report_functions.py,sha256=5rzYaS_8Gj9FEyCgysWIsLxiE-DaCgPNArI9YIe4Jvo,29515
+sempy_labs/report/_report_rebind.py,sha256=hsXXOl6C06hH-SU_TaKBInjIXHf-uRZru7uCHpbRJYA,4756
+sempy_labs/tom/__init__.py,sha256=hFwkmWk5AZ7GK1LWqoqaK1g4gDmu9mZMkfLQvLsR_eE,130
+sempy_labs/tom/_model.py,sha256=a3EXOj_yXxl0aQQfWa5bolC2q-KdCgQMF8CrrQ6aPrU,136859
+semantic_link_labs-0.5.0.dist-info/LICENSE,sha256=ws_MuBL-SCEBqPBFl9_FqZkaaydIJmxHrJG2parhU4M,1141
+semantic_link_labs-0.5.0.dist-info/METADATA,sha256=TINv_gW59FHx2uvyUwYSH9MxKls6eO7XYji03ab-kYk,764
+semantic_link_labs-0.5.0.dist-info/WHEEL,sha256=y4mX-SOX4fYIkonsAGA5N0Oy-8_gI4FXw5HNI1xqvWg,91
+semantic_link_labs-0.5.0.dist-info/top_level.txt,sha256=kiQX1y42Dbein1l3Q8jMUYyRulDjdlc2tMepvtrvixQ,11
+semantic_link_labs-0.5.0.dist-info/RECORD,,
{semantic_link_labs-0.4.2.dist-info → semantic_link_labs-0.5.0.dist-info}/WHEEL CHANGED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: bdist_wheel (0.43.0)
+Generator: setuptools (70.2.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 
sempy_labs/__init__.py CHANGED
@@ -34,9 +34,21 @@ from sempy_labs._list_functions import (
     # list_sqlendpoints,
     # list_tables,
     list_warehouses,
-    # list_workspace_role_assignments,
+    list_workspace_role_assignments,
     create_warehouse,
     update_item,
+    list_custom_pools,
+    create_custom_pool,
+    update_custom_pool,
+    assign_workspace_to_capacity,
+    unassign_workspace_from_capacity,
+    get_spark_settings,
+    update_spark_settings,
+    add_user_to_workspace,
+    delete_user_from_workspace,
+    update_workspace_user,
+    list_workspace_users,
+    assign_workspace_to_dataflow_storage,
 )
 
 from sempy_labs._helper_functions import (
@@ -113,7 +125,7 @@ __all__ = [
     #'list_sqlendpoints',
     #'list_tables',
     "list_warehouses",
-    #'list_workspace_role_assignments',
+    'list_workspace_role_assignments',
     "create_warehouse",
     "update_item",
     "create_abfss_path",
@@ -150,5 +162,17 @@ __all__ = [
     #'visualize_vertipaq',
     "import_vertipaq_analyzer",
     "list_semantic_model_objects",
-    "list_shortcuts"
+    "list_shortcuts",
+    "list_custom_pools",
+    "create_custom_pool",
+    "update_custom_pool",
+    "assign_workspace_to_capacity",
+    "unassign_workspace_from_capacity",
+    "get_spark_settings",
+    "update_spark_settings",
+    "add_user_to_workspace",
+    "delete_user_from_workspace",
+    "update_workspace_user",
+    "list_workspace_users",
+    "assign_workspace_to_dataflow_storage"
 ]
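The headline change in 0.5.0: a dozen Fabric workspace-management helpers graduate from commented-out stubs to public exports. A minimal usage sketch; the workspace and capacity names are placeholders and the keyword arguments are assumptions, so check the 0.5.0 signatures in sempy_labs/_list_functions.py before relying on them:

import sempy_labs as labs

# Newly exported in 0.5.0 (names taken from the diff above).
pools = labs.list_custom_pools(workspace="MyWorkspace")
users = labs.list_workspace_users(workspace="MyWorkspace")
labs.assign_workspace_to_capacity(capacity_name="MyCapacity", workspace="MyWorkspace")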
sempy_labs/_ai.py CHANGED
@@ -14,6 +14,7 @@ def optimize_semantic_model(dataset: str, workspace: Optional[str] = None):
     from ._model_bpa import run_model_bpa
     from .directlake._fallback import check_fallback_reason
     from ._helper_functions import format_dax_object_name
+    from sempy_labs.tom import connect_semantic_model
 
     modelBPA = run_model_bpa(
         dataset=dataset, workspace=workspace, return_dataframe=True
@@ -78,10 +79,7 @@ def generate_measure_descriptions(
 
     validModels = ["gpt-35-turbo", "gpt-35-turbo-16k", "gpt-4"]
     if gpt_model not in validModels:
-        print(
-            f"{icons.red_dot} The '{gpt_model}' model is not a valid model. Enter a gpt_model from this list: {validModels}."
-        )
-        return
+        raise ValueError(f"{icons.red_dot} The '{gpt_model}' model is not a valid model. Enter a gpt_model from this list: {validModels}.")
 
     dfM = fabric.list_measures(dataset=dataset, workspace=workspace)
 
@@ -116,8 +114,8 @@ def generate_measure_descriptions(
     )
 
     # Update the model to use the new descriptions
-    tom_server = fabric.create_tom_server(readonly=False, workspace=workspace)
-    m = tom_server.Databases.GetByName(dataset).Model
+    #with connect_semantic_model(dataset=dataset, workspace=workspace, readonly=False) as tom:
+
 
     # for t in m.Tables:
     #     tName = t.Name
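The removed create_tom_server/GetByName pair and the commented-out with block point at the new sempy_labs.tom context manager imported at the top of this file. A hedged sketch of that pattern; the keyword arguments match the commented line above, but the tom.model attribute is an assumption based on the 0.5.0 sources:

from sempy_labs.tom import connect_semantic_model

# Open the model read-write; the wrapper handles connecting and saving
# changes when the block exits.
with connect_semantic_model(dataset="MyModel", workspace="MyWorkspace", readonly=False) as tom:
    for t in tom.model.Tables:
        print(t.Name)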
@@ -173,48 +171,33 @@ def generate_aggs(
     numericTypes = ["Int64", "Double", "Decimal"]
 
     if any(value not in aggTypes for value in columns.values()):
-        print(
-            f"{icons.red_dot} Invalid aggregation type(s) have been specified in the 'columns' parameter. Valid aggregation types: {aggTypes}."
-        )
-        return
+        raise ValueError(f"{icons.red_dot} Invalid aggregation type(s) have been specified in the 'columns' parameter. Valid aggregation types: {aggTypes}.")
 
     dfC = fabric.list_columns(dataset=dataset, workspace=workspace)
     dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)
     dfM = fabric.list_measures(dataset=dataset, workspace=workspace)
     dfR = fabric.list_relationships(dataset=dataset, workspace=workspace)
     if not any(r["Mode"] == "DirectLake" for i, r in dfP.iterrows()):
-        print(
-            f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode. This function is only relevant for Direct Lake semantic models."
-        )
-        return
-
+        raise ValueError(f"{icons.red_dot} The '{dataset}' semantic model within the '{workspace}' workspace is not in Direct Lake mode. This function is only relevant for Direct Lake semantic models.")
+
     dfC_filtT = dfC[dfC["Table Name"] == table_name]
 
     if len(dfC_filtT) == 0:
-        print(
-            f"{icons.red_dot} The '{table_name}' table does not exist in the '{dataset}' semantic model within the '{workspace}' workspace."
-        )
-        return
+        raise ValueError(f"{icons.red_dot} The '{table_name}' table does not exist in the '{dataset}' semantic model within the '{workspace}' workspace.")
 
     dfC_filt = dfC[
         (dfC["Table Name"] == table_name) & (dfC["Column Name"].isin(columnValues))
     ]
 
     if len(columns) != len(dfC_filt):
-        print(
-            f"{icons.red_dot} Columns listed in '{columnValues}' do not exist in the '{table_name}' table in the '{dataset}' semantic model within the '{workspace}' workspace."
-        )
-        return
+        raise ValueError(f"{icons.red_dot} Columns listed in '{columnValues}' do not exist in the '{table_name}' table in the '{dataset}' semantic model within the '{workspace}' workspace.")
 
     # Check if doing sum/count/min/max etc. on a non-number column
     for col, agg in columns.items():
         dfC_col = dfC_filt[dfC_filt["Column Name"] == col]
         dataType = dfC_col["Data Type"].iloc[0]
         if agg in aggTypesAggregate and dataType not in numericTypes:
-            print(
-                f"{icons.red_dot} The '{col}' column in the '{table_name}' table is of '{dataType}' data type. Only columns of '{numericTypes}' data types can be aggregated as '{aggTypesAggregate}' aggregation types."
-            )
-            return
+            raise ValueError(f"{icons.red_dot} The '{col}' column in the '{table_name}' table is of '{dataType}' data type. Only columns of '{numericTypes}' data types can be aggregated as '{aggTypesAggregate}' aggregation types.")
 
     # Create/update lakehouse delta agg table
     aggSuffix = "_agg"
@@ -230,10 +213,7 @@ def generate_aggs(
     dfI_filt = dfI[(dfI["Id"] == sqlEndpointId)]
 
     if len(dfI_filt) == 0:
-        print(
-            f"{icons.red_dot} The lakehouse (SQL Endpoint) used by the '{dataset}' semantic model does not reside in the '{lakehouse_workspace}' workspace. Please update the lakehouse_workspace parameter."
-        )
-        return
+        raise ValueError(f"{icons.red_dot} The lakehouse (SQL Endpoint) used by the '{dataset}' semantic model does not reside in the '{lakehouse_workspace}' workspace. Please update the lakehouse_workspace parameter.")
 
     lakehouseName = dfI_filt["Display Name"].iloc[0]
     lakehouse_id = resolve_lakehouse_id(
@@ -284,7 +264,7 @@ def generate_aggs(
     # Create/update semantic model agg table
     tom_server = fabric.create_tom_server(readonly=False, workspace=workspace)
     m = tom_server.Databases.GetByName(dataset).Model
-    f"\n{icons.in_progress} Updating the '{dataset}' semantic model..."
+    print(f"\n{icons.in_progress} Updating the '{dataset}' semantic model...")
     dfC_agg = dfC[dfC["Table Name"] == aggTableName]
 
     if len(dfC_agg) == 0:
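Note the fix above: in 0.4.2 the in-progress message was a bare f-string expression that evaluated to nothing; 0.5.0 wraps it in print(). More broadly, every print-and-return validation block in this file becomes a raise ValueError(...), so bad input now fails loudly instead of returning None. A self-contained illustration of the before/after, using a made-up helper rather than library code:

def validate_gpt_model(gpt_model: str) -> None:
    valid_models = ["gpt-35-turbo", "gpt-35-turbo-16k", "gpt-4"]
    # 0.4.2 style: print(...) then return, letting callers continue silently.
    # 0.5.0 style: raise, so the failure propagates unless explicitly handled.
    if gpt_model not in valid_models:
        raise ValueError(f"'{gpt_model}' is not a valid model. Choose from: {valid_models}.")

try:
    validate_gpt_model("gpt-5")
except ValueError as e:
    print(f"Caught: {e}")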
sempy_labs/_clear_cache.py CHANGED
@@ -20,9 +20,7 @@ def clear_cache(dataset: str, workspace: Optional[str] = None):
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    if workspace is None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
+    workspace = fabric.resolve_workspace_name(workspace)
 
     datasetID = resolve_dataset_id(dataset=dataset, workspace=workspace)
 
sempy_labs/_connections.py CHANGED
@@ -2,6 +2,7 @@ import sempy
 import sempy.fabric as fabric
 import pandas as pd
 from typing import List, Optional, Union
+import sempy_labs._icons as icons
 
 
 def create_connection_cloud(
@@ -11,7 +12,7 @@ def create_connection_cloud(
     user_name: str,
     password: str,
     privacy_level: str,
-):
+) -> pd.DataFrame:
 
     # https://review.learn.microsoft.com/en-us/rest/api/fabric/core/connections/create-connection?branch=features%2Fdmts&tabs=HTTP
 
@@ -60,16 +61,16 @@ def create_connection_cloud(
     if response.status_code == 200:
         o = response.json()
         new_data = {
-            "Connection Id": o["id"],
-            "Connection Name": o["name"],
-            "Connectivity Type": o["connectivityType"],
-            "Connection Type": o["connectionDetails"]["type"],
-            "Connection Path": o["connectionDetails"]["path"],
-            "Privacy Level": o["privacyLevel"],
-            "Credential Type": o["credentialDetails"]["credentialType"],
-            "Single Sign On Type": o["credentialDetails"]["singleSignOnType"],
-            "Connection Encryption": o["credentialDetails"]["connectionEncryption"],
-            "Skip Test Connection": o["credentialDetails"]["skipTestConnection"],
+            "Connection Id": o.get("id"),
+            "Connection Name": o.get("name"),
+            "Connectivity Type": o.get("connectivityType"),
+            "Connection Type": o.get("connectionDetails",{}).get("type"),
+            "Connection Path": o.get("connectionDetails",{}).get("path"),
+            "Privacy Level": o.get("privacyLevel"),
+            "Credential Type": o.get("credentialDetails",{}).get("credentialType"),
+            "Single Sign On Type": o.get("credentialDetails",{}).get("singleSignOnType"),
+            "Connection Encryption": o.get("credentialDetails",{}).get("connectionEncryption"),
+            "Skip Test Connection": o.get("credentialDetails",{}).get("skipTestConnection"),
         }
         df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
 
@@ -77,7 +78,7 @@ def create_connection_cloud(
 
         return df
     else:
-        print(response.status_code)
+        print(f"{icons.red_dot} {response.status_code}")
 
 
 def create_connection_on_prem(
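The dict-access change in create_connection_cloud above (repeated in the on-prem and VNet variants below) is worth noting: chained .get() calls with a {} default degrade gracefully when the REST payload omits a key, where the 0.4.2 subscripting raised KeyError. A quick standalone demonstration with a sample payload:

# Sample payload missing 'credentialDetails', as a partial API response might be.
o = {"id": "abc123", "connectionDetails": {"type": "SQL"}}

# 0.4.2 style: o["credentialDetails"]["credentialType"]  -> KeyError
# 0.5.0 style: missing keys fall through to None
credential_type = o.get("credentialDetails", {}).get("credentialType")
print(credential_type)  # None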
@@ -87,7 +88,7 @@ def create_connection_on_prem(
     database_name: str,
     credentials: str,
     privacy_level: str,
-):
+) -> pd.DataFrame:
 
     df = pd.DataFrame(
         columns=[
@@ -135,17 +136,17 @@ def create_connection_on_prem(
     if response.status_code == 200:
         o = response.json()
         new_data = {
-            "Connection Id": o["id"],
-            "Connection Name": o["name"],
-            "Gateway ID": o["gatewayId"],
-            "Connectivity Type": o["connectivityType"],
-            "Connection Type": o["connectionDetails"]["type"],
-            "Connection Path": o["connectionDetails"]["path"],
-            "Privacy Level": o["privacyLevel"],
-            "Credential Type": o["credentialDetails"]["credentialType"],
-            "Single Sign On Type": o["credentialDetails"]["singleSignOnType"],
-            "Connection Encryption": o["credentialDetails"]["connectionEncryption"],
-            "Skip Test Connection": o["credentialDetails"]["skipTestConnection"],
+            "Connection Id": o.get("id"),
+            "Connection Name": o.get("name"),
+            "Gateway ID": o.get("gatewayId"),
+            "Connectivity Type": o.get("connectivityType"),
+            "Connection Type": o.get("connectionDetails",{}).get("type"),
+            "Connection Path": o.get("connectionDetails",{}).get("path"),
+            "Privacy Level": o.get("privacyLevel"),
+            "Credential Type": o.get("credentialDetails",{}).get("credentialType"),
+            "Single Sign On Type": o.get("credentialDetails",{}).get("singleSignOnType"),
+            "Connection Encryption": o.get("credentialDetails",{}).get("connectionEncryption"),
+            "Skip Test Connection": o.get("credentialDetails",{}).get("skipTestConnection"),
         }
         df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
 
@@ -153,7 +154,7 @@ def create_connection_on_prem(
 
         return df
     else:
-        print(response.status_code)
+        print(f"{icons.red_dot} {response.status_code}")
 
 
 def create_connection_vnet(
@@ -164,7 +165,7 @@ def create_connection_vnet(
     user_name: str,
     password: str,
     privacy_level: str,
-):
+) -> pd.DataFrame:
 
     df = pd.DataFrame(
         columns=[
@@ -213,17 +214,17 @@ def create_connection_vnet(
     if response.status_code == 200:
         o = response.json()
         new_data = {
-            "Connection Id": o["id"],
-            "Connection Name": o["name"],
-            "Gateway ID": o["gatewayId"],
-            "Connectivity Type": o["connectivityType"],
-            "Connection Type": o["connectionDetails"]["type"],
-            "Connection Path": o["connectionDetails"]["path"],
-            "Privacy Level": o["privacyLevel"],
-            "Credential Type": o["credentialDetails"]["credentialType"],
-            "Single Sign On Type": o["credentialDetails"]["singleSignOnType"],
-            "Connection Encryption": o["credentialDetails"]["connectionEncryption"],
-            "Skip Test Connection": o["credentialDetails"]["skipTestConnection"],
+            "Connection Id": o.get("id"),
+            "Connection Name": o.get("name"),
+            "Gateway ID": o.get("gatewayId"),
+            "Connectivity Type": o.get("connectivityType"),
+            "Connection Type": o.get("connectionDetails",{}).get("type"),
+            "Connection Path": o.get("connectionDetails",{}).get("path"),
+            "Privacy Level": o.get("privacyLevel"),
+            "Credential Type": o.get("credentialDetails",{}).get("credentialType"),
+            "Single Sign On Type": o.get("credentialDetails",{}).get("singleSignOnType"),
+            "Connection Encryption": o.get("credentialDetails",{}).get("connectionEncryption"),
+            "Skip Test Connection": o.get("credentialDetails",{}).get("skipTestConnection"),
         }
         df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
 
@@ -231,4 +232,4 @@ def create_connection_vnet(
 
         return df
     else:
-        print(response.status_code)
+        print(f"{icons.red_dot} {response.status_code}")
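All three connection helpers now declare -> pd.DataFrame and build their result with the same single-row append idiom seen in the hunks above. A condensed, runnable version of that idiom:

import pandas as pd

df = pd.DataFrame(columns=["Connection Id", "Connection Name"])
new_data = {"Connection Id": "abc123", "Connection Name": "my-conn"}  # sample values

# Append one row per API response, as the connection helpers do.
df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
print(df)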
sempy_labs/_generate_semantic_model.py CHANGED
@@ -32,12 +32,12 @@ def create_blank_semantic_model(
     """
 
     if workspace is None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
+        workspace = fabric.resolve_workspace_name()
 
-    if compatibility_level < 1500:
-        print(f"{icons.red_dot} Compatiblity level must be at least 1500.")
-        return
+    min_compat = 1500
+
+    if compatibility_level < min_compat:
+        raise ValueError(f"{icons.red_dot} Compatiblity level must be at least {min_compat}.")
 
     tmsl = f"""
     {{
         "createOrReplace": {{
@@ -90,10 +90,7 @@ def create_semantic_model_from_bim(
     dfI_filt = dfI[(dfI["Display Name"] == dataset)]
 
     if len(dfI_filt) > 0:
-        print(
-            f"WARNING: '{dataset}' already exists as a semantic model in the '{workspace}' workspace."
-        )
-        return
+        raise ValueError(f"{icons.red_dot} '{dataset}' already exists as a semantic model in the '{workspace}' workspace.")
 
     client = fabric.FabricRestClient()
     defPBIDataset = {"version": "1.0", "settings": {}}
@@ -131,7 +128,7 @@ def create_semantic_model_from_bim(
 
     if response.status_code == 201:
         print(
-            f"The '{dataset}' semantic model has been created within the '{workspace}' workspace."
+            f"{icons.green_dot} The '{dataset}' semantic model has been created within the '{workspace}' workspace."
         )
         print(response.json())
     elif response.status_code == 202:
@@ -144,7 +141,7 @@ def create_semantic_model_from_bim(
     response_body = json.loads(response.content)
     response = client.get(f"/v1/operations/{operationId}/result")
     print(
-        f"The '{dataset}' semantic model has been created within the '{workspace}' workspace."
+        f"{icons.green_dot} The '{dataset}' semantic model has been created within the '{workspace}' workspace."
     )
     print(response.json())
 
@@ -178,9 +175,7 @@ def deploy_semantic_model(
 
     """
 
-    if workspace is None:
-        workspace_id = fabric.get_workspace_id()
-        workspace = fabric.resolve_workspace_name(workspace_id)
+    workspace = fabric.resolve_workspace_name(workspace)
 
     if new_dataset_workspace is None:
         new_dataset_workspace = workspace
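clear_cache and deploy_semantic_model both collapse the two-step workspace lookup into a single call. A stand-in sketch of the semantics; the real resolver lives in sempy.fabric, and this placeholder only illustrates the None fallback:

def resolve_workspace_name(workspace=None):
    # Stand-in for sempy.fabric.resolve_workspace_name: None falls back to
    # the workspace of the running notebook; a given name is passed through.
    if workspace is None:
        return "<current workspace>"  # placeholder for the real lookup
    return workspace

print(resolve_workspace_name())          # <current workspace>
print(resolve_workspace_name("Sales"))   # Sales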
sempy_labs/_helper_functions.py CHANGED
@@ -420,16 +420,10 @@ def save_as_delta_table(
     write_mode = write_mode.lower()
 
     if write_mode not in writeModes:
-        print(
-            f"{icons.red_dot} Invalid 'write_type' parameter. Choose from one of the following values: {writeModes}."
-        )
-        return
+        raise ValueError(f"{icons.red_dot} Invalid 'write_type' parameter. Choose from one of the following values: {writeModes}.")
 
     if " " in delta_table_name:
-        print(
-            f"{icons.red_dot} Invalid 'delta_table_name'. Delta tables in the lakehouse cannot have spaces in their names."
-        )
-        return
+        raise ValueError(f"{icons.red_dot} Invalid 'delta_table_name'. Delta tables in the lakehouse cannot have spaces in their names.")
 
     dataframe.columns = dataframe.columns.str.replace(" ", "_")
 
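The second guard in save_as_delta_table encodes a lakehouse rule worth remembering: delta table names cannot contain spaces. A tiny standalone version of the check:

def validate_delta_table_name(name: str) -> None:
    # Mirrors the 0.5.0 guard: no spaces in lakehouse delta table names.
    if " " in name:
        raise ValueError(f"Invalid 'delta_table_name': '{name}' contains spaces.")

validate_delta_table_name("sales_fact")    # passes silently
# validate_delta_table_name("sales fact")  # would raise ValueError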
@@ -476,10 +470,7 @@ def language_validate(language: str):
     elif len(df_filt2) == 1:
         lang = df_filt2["Language"].iloc[0]
     else:
-        print(
-            f"The '{language}' language is not a valid language code. Please refer to this link for a list of valid language codes: {url}."
-        )
-        return
+        raise ValueError(f"{icons.red_dot} The '{language}' language is not a valid language code. Please refer to this link for a list of valid language codes: {url}.")
 
     return lang
 
sempy_labs/_icons.py CHANGED
@@ -7,3 +7,4 @@ unchecked = "\u2610"
 start_bold = "\033[1m"
 end_bold = "\033[0m"
 bullet = "\u2022"
+warning = "⚠️"
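_icons.py collects the glyphs used across the library's console messages; 0.5.0 adds a warning symbol. Usage is plain string interpolation, for example:

# Constants as defined in sempy_labs/_icons.py (warning is new in 0.5.0).
bullet = "\u2022"
warning = "⚠️"

print(f"{warning} This operation may take several minutes {bullet} please wait")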