semantic-link-labs 0.9.1__py3-none-any.whl → 0.9.3__py3-none-any.whl

This diff shows the content of publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.

Potentially problematic release: this version of semantic-link-labs might be problematic.

Files changed (87)
  1. {semantic_link_labs-0.9.1.dist-info → semantic_link_labs-0.9.3.dist-info}/METADATA +67 -8
  2. {semantic_link_labs-0.9.1.dist-info → semantic_link_labs-0.9.3.dist-info}/RECORD +87 -80
  3. sempy_labs/__init__.py +14 -12
  4. sempy_labs/_ai.py +8 -5
  5. sempy_labs/_capacities.py +120 -142
  6. sempy_labs/_capacity_migration.py +61 -94
  7. sempy_labs/_clear_cache.py +9 -8
  8. sempy_labs/_connections.py +107 -104
  9. sempy_labs/_data_pipelines.py +47 -49
  10. sempy_labs/_dataflows.py +45 -51
  11. sempy_labs/_dax.py +228 -6
  12. sempy_labs/_delta_analyzer.py +321 -0
  13. sempy_labs/_deployment_pipelines.py +72 -66
  14. sempy_labs/_environments.py +39 -36
  15. sempy_labs/_eventhouses.py +35 -35
  16. sempy_labs/_eventstreams.py +38 -39
  17. sempy_labs/_external_data_shares.py +29 -42
  18. sempy_labs/_gateways.py +103 -99
  19. sempy_labs/_generate_semantic_model.py +22 -30
  20. sempy_labs/_git.py +46 -66
  21. sempy_labs/_graphQL.py +95 -0
  22. sempy_labs/_helper_functions.py +227 -36
  23. sempy_labs/_job_scheduler.py +47 -59
  24. sempy_labs/_kql_databases.py +27 -34
  25. sempy_labs/_kql_querysets.py +23 -30
  26. sempy_labs/_list_functions.py +264 -167
  27. sempy_labs/_managed_private_endpoints.py +52 -47
  28. sempy_labs/_mirrored_databases.py +110 -134
  29. sempy_labs/_mirrored_warehouses.py +13 -13
  30. sempy_labs/_ml_experiments.py +36 -36
  31. sempy_labs/_ml_models.py +37 -38
  32. sempy_labs/_model_bpa.py +2 -2
  33. sempy_labs/_model_bpa_rules.py +8 -6
  34. sempy_labs/_model_dependencies.py +2 -0
  35. sempy_labs/_notebooks.py +28 -29
  36. sempy_labs/_one_lake_integration.py +2 -0
  37. sempy_labs/_query_scale_out.py +63 -81
  38. sempy_labs/_refresh_semantic_model.py +12 -14
  39. sempy_labs/_spark.py +54 -79
  40. sempy_labs/_sql.py +7 -11
  41. sempy_labs/_translations.py +2 -2
  42. sempy_labs/_vertipaq.py +11 -6
  43. sempy_labs/_warehouses.py +30 -33
  44. sempy_labs/_workloads.py +15 -20
  45. sempy_labs/_workspace_identity.py +13 -17
  46. sempy_labs/_workspaces.py +49 -48
  47. sempy_labs/admin/__init__.py +2 -0
  48. sempy_labs/admin/_basic_functions.py +244 -281
  49. sempy_labs/admin/_domains.py +186 -103
  50. sempy_labs/admin/_external_data_share.py +26 -31
  51. sempy_labs/admin/_git.py +17 -22
  52. sempy_labs/admin/_items.py +34 -48
  53. sempy_labs/admin/_scanner.py +61 -49
  54. sempy_labs/directlake/_directlake_schema_compare.py +2 -0
  55. sempy_labs/directlake/_dl_helper.py +10 -11
  56. sempy_labs/directlake/_generate_shared_expression.py +4 -5
  57. sempy_labs/directlake/_get_directlake_lakehouse.py +1 -0
  58. sempy_labs/directlake/_list_directlake_model_calc_tables.py +1 -0
  59. sempy_labs/directlake/_show_unsupported_directlake_objects.py +2 -0
  60. sempy_labs/directlake/_warm_cache.py +2 -0
  61. sempy_labs/graph/__init__.py +33 -0
  62. sempy_labs/graph/_groups.py +402 -0
  63. sempy_labs/graph/_teams.py +113 -0
  64. sempy_labs/graph/_users.py +191 -0
  65. sempy_labs/lakehouse/__init__.py +4 -0
  66. sempy_labs/lakehouse/_get_lakehouse_columns.py +12 -12
  67. sempy_labs/lakehouse/_get_lakehouse_tables.py +16 -22
  68. sempy_labs/lakehouse/_lakehouse.py +104 -7
  69. sempy_labs/lakehouse/_shortcuts.py +42 -20
  70. sempy_labs/migration/__init__.py +4 -0
  71. sempy_labs/migration/_direct_lake_to_import.py +66 -0
  72. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +3 -2
  73. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +1 -0
  74. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +1 -0
  75. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +2 -0
  76. sempy_labs/migration/_refresh_calc_tables.py +2 -2
  77. sempy_labs/report/_download_report.py +8 -13
  78. sempy_labs/report/_generate_report.py +49 -46
  79. sempy_labs/report/_paginated.py +20 -26
  80. sempy_labs/report/_report_functions.py +52 -47
  81. sempy_labs/report/_report_list_functions.py +2 -0
  82. sempy_labs/report/_report_rebind.py +6 -10
  83. sempy_labs/report/_reportwrapper.py +187 -220
  84. sempy_labs/tom/_model.py +12 -6
  85. {semantic_link_labs-0.9.1.dist-info → semantic_link_labs-0.9.3.dist-info}/LICENSE +0 -0
  86. {semantic_link_labs-0.9.1.dist-info → semantic_link_labs-0.9.3.dist-info}/WHEEL +0 -0
  87. {semantic_link_labs-0.9.1.dist-info → semantic_link_labs-0.9.3.dist-info}/top_level.txt +0 -0
sempy_labs/_gateways.py CHANGED
@@ -1,14 +1,14 @@
-import sempy.fabric as fabric
 from sempy._utils._log import log
 import pandas as pd
 from typing import Optional
-from sempy.fabric.exceptions import FabricHTTPException
 from sempy_labs._helper_functions import (
-    pagination,
     _is_valid_uuid,
     resolve_capacity_id,
     resolve_workspace_name_and_id,
     resolve_dataset_name_and_id,
+    _update_dataframe_datatypes,
+    _base_api,
+    _create_dataframe,
 )
 from uuid import UUID
 import sempy_labs._icons as icons
@@ -21,33 +21,30 @@ def list_gateways() -> pd.DataFrame:

     This is a wrapper function for the following API: `Gateways - List Gateways <https://learn.microsoft.com/rest/api/fabric/core/gateways/list-gateways>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Returns
     -------
     pandas.DataFrame
         A pandas dataframe showing a list of all gateways the user has permission for, including on-premises, on-premises (personal mode), and virtual network gateways.
     """

-    client = fabric.FabricRestClient()
-    response = client.get("/v1/gateways")
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    responses = pagination(client, response)
-
-    df = pd.DataFrame(
-        columns=[
-            "Gateway Name",
-            "Gateway Id",
-            "Type",
-            "Public Key Exponent",
-            "Public Key Modulus",
-            "Version",
-            "Number Of Member Gateways",
-            "Load Balancing Setting",
-            "Allow Cloud Connection Refresh",
-            "Allow Custom Connectors",
-        ]
+    columns = {
+        "Gateway Name": "string",
+        "Gateway Id": "string",
+        "Type": "string",
+        "Public Key Exponent": "string",
+        "Public Key Modulus": "string",
+        "Version": "string",
+        "Number Of Member Gateways": "int",
+        "Load Balancing Setting": "string",
+        "Allow Cloud Connection Refresh": "bool",
+        "Allow Custom Connectors": "bool",
+    }
+    df = _create_dataframe(columns=columns)
+
+    responses = _base_api(
+        request="/v1/gateways", client="fabric_sp", uses_pagination=True
     )

     for r in responses:
@@ -67,10 +64,7 @@ def list_gateways() -> pd.DataFrame:

         df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

-    int_cols = ["Number Of Member Gateways"]
-    bool_cols = ["Allow Cloud Connection Refresh", "Allow Custom Connectors"]
-    df[bool_cols] = df[bool_cols].astype(bool)
-    df[int_cols] = df[int_cols].astype(int)
+    _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df

@@ -95,6 +89,8 @@ def delete_gateway(gateway: str | UUID):

     This is a wrapper function for the following API: `Gateways - Delete Gateway <https://learn.microsoft.com/rest/api/fabric/core/gateways/delete-gateway>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     gateway : str | uuid.UUID
@@ -102,12 +98,7 @@ def delete_gateway(gateway: str | UUID):
     """

     gateway_id = _resolve_gateway_id(gateway)
-    client = fabric.FabricRestClient()
-    response = client.delete(f"/v1/gateways/{gateway_id}")
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
+    _base_api(request=f"/v1/gateways/{gateway_id}", client="fabric_sp", method="delete")
     print(f"{icons.green_dot} The '{gateway}' gateway has been deleted.")


@@ -117,6 +108,8 @@ def list_gateway_role_assigments(gateway: str | UUID) -> pd.DataFrame:

     This is a wrapper function for the following API: `Gateways - List Gateway Role Assignments <https://learn.microsoft.com/rest/api/fabric/core/gateways/list-gateway-role-assignments>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     gateway : str | uuid.UUID
@@ -128,16 +121,19 @@ def list_gateway_role_assigments(gateway: str | UUID) -> pd.DataFrame:
         A pandas dataframe showing a list of gateway role assignments.
     """

+    columns = {
+        "Gateway Role Assignment Id": "string",
+        "Principal Id": "string",
+        "Principal Type": "string",
+        "Role": "string",
+    }
+    df = _create_dataframe(columns=columns)
     gateway_id = _resolve_gateway_id(gateway)
-    client = fabric.FabricRestClient()
-    response = client.get(f"/v1/gateways/{gateway_id}/roleAssignments")
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    df = pd.DataFrame(columns=[])
-
-    responses = pagination(client, response)
+    responses = _base_api(
+        request=f"/v1/gateways/{gateway_id}/roleAssignments",
+        client="fabric_sp",
+        uses_pagination=True,
+    )

     for r in responses:
         for v in r.get("value", []):
@@ -153,31 +149,31 @@ def list_gateway_role_assigments(gateway: str | UUID) -> pd.DataFrame:
     return df


-def delete_gateway_role_assignment(gateway: str | UUID, role_assignement_id: UUID):
+def delete_gateway_role_assignment(gateway: str | UUID, role_assignment_id: UUID):
     """
     Delete the specified role assignment for the gateway.

     This is a wrapper function for the following API: `Gateways - Delete Gateway Role Assignment <https://learn.microsoft.com/rest/api/fabric/core/gateways/delete-gateway-role-assignment>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     gateway : str | uuid.UUID
         The gateway name or ID.
-    role_assignement_id : uuid.UUID
+    role_assignment_id : uuid.UUID
         The role assignment ID.
     """

     gateway_id = _resolve_gateway_id(gateway)
-    client = fabric.FabricRestClient()
-    response = client.delete(
-        f"/v1/gateways/{gateway_id}/roleAssignments/{role_assignement_id}"
+    _base_api(
+        request=f"/v1/gateways/{gateway_id}/roleAssignments/{role_assignment_id}",
+        client="fabric_sp",
+        method="delete",
     )

-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
     print(
-        f"{icons.green_dot} The '{role_assignement_id}' role assignment for the '{gateway}' gateway has been deleted."
+        f"{icons.green_dot} The '{role_assignment_id}' role assignment for the '{gateway}' gateway has been deleted."
     )


@@ -204,6 +200,8 @@ def delete_gateway_member(gateway: str | UUID, gateway_member: str | UUID):

     This is a wrapper function for the following API: `Gateways - Delete Gateway Member <https://learn.microsoft.com/rest/api/fabric/core/gateways/delete-gateway-member>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     gateway : str | uuid.UUID
@@ -217,12 +215,11 @@ def delete_gateway_member(gateway: str | UUID, gateway_member: str | UUID):
         gateway=gateway_id, gateway_member=gateway_member
     )

-    client = fabric.FabricRestClient()
-    response = client.delete(f"/v1/gateways/{gateway_id}/members/{member_id}")
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
+    _base_api(
+        request=f"/v1/gateways/{gateway_id}/members/{member_id}",
+        client="fabric_sp",
+        method="delete",
+    )
     print(
         f"{icons.green_dot} The '{member_id}' member for the '{gateway}' gateway has been deleted."
     )
@@ -234,6 +231,8 @@ def list_gateway_members(gateway: str | UUID) -> pd.DataFrame:

     This is a wrapper function for the following API: `Gateways - List Gateway Members <https://learn.microsoft.com/rest/api/fabric/core/gateways/list-gateway-members>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     gateway : str | uuid.UUID
@@ -246,21 +245,19 @@ def list_gateway_members(gateway: str | UUID) -> pd.DataFrame:
     """

     gateway_id = _resolve_gateway_id(gateway)
-    client = fabric.FabricRestClient()
-    response = client.get(f"/v1/gateways/{gateway_id}/members")
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    df = pd.DataFrame(
-        columns=[
-            "Member Id",
-            "Member Name",
-            "Public Key Exponent",
-            "Public Key Modulus",
-            "Version",
-            "Enabled",
-        ]
+
+    columns = {
+        "Member Id": "string",
+        "Member Name": "string",
+        "Public Key Exponent": "string",
+        "Public Key Modulus": "string",
+        "Version": "string",
+        "Enabled": "bool",
+    }
+    df = _create_dataframe(columns=columns)
+
+    response = _base_api(
+        request=f"/v1/gateways/{gateway_id}/members", client="fabric_sp"
     )

     for v in response.json().get("value", []):
@@ -275,8 +272,7 @@ def list_gateway_members(gateway: str | UUID) -> pd.DataFrame:

         df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

-    bool_cols = ["Enabled"]
-    df[bool_cols] = df[bool_cols].astype(bool)
+    _update_dataframe_datatypes(dataframe=df, column_map=columns)

     return df

@@ -296,6 +292,8 @@ def create_vnet_gateway(

     This is a wrapper function for the following API: `Gateways - Create Gateway <https://learn.microsoft.com/rest/api/fabric/core/gateways/create-gateway>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     name : str
@@ -316,8 +314,6 @@
         The name of the subnet.
     """

-    client = fabric.FabricRestClient()
-
     capacity_id = resolve_capacity_id(capacity)
     payload = {
         "type": "VirtualNetwork",
@@ -332,10 +328,14 @@
         "inactivityMinutesBeforeSleep": inactivity_minutes_before_sleep,
         "numberOfMemberGateways": number_of_member_gateways,
     }
-    response = client.post("/v1/gateways", json=payload)

-    if response.status_code != 201:
-        raise FabricHTTPException(response)
+    _base_api(
+        request="/v1/gateways",
+        client="fabric_sp",
+        method="post",
+        payload=payload,
+        status_codes=201,
+    )

     print(
         f"{icons.green_dot} The '{name}' gateway was created within the '{capacity}' capacity."
@@ -353,6 +353,8 @@ def update_on_premises_gateway(

     This is a wrapper function for the following API: `Gateways - Update Gateway <https://learn.microsoft.com/rest/api/fabric/core/gateways/update-gateway>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     gateway : str | uuid.UUID
@@ -383,11 +385,12 @@

     payload["type"] = "OnPremises"

-    client = fabric.FabricRestClient()
-    response = client.patch(f"/v1/gateways/{gateway_id}", json=payload)
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
+    _base_api(
+        request=f"/v1/gateways/{gateway_id}",
+        client="fabric_sp",
+        method="patch",
+        payload=payload,
+    )

     print(f"{icons.green_dot} The '{gateway}' has been updated accordingly.")

@@ -403,6 +406,8 @@ def update_vnet_gateway(

     This is a wrapper function for the following API: `Gateways - Update Gateway <https://learn.microsoft.com/rest/api/fabric/core/gateways/update-gateway>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     gateway : str | uuid.UUID
@@ -434,12 +439,12 @@

     payload["type"] = "VirtualNetwork"

-    client = fabric.FabricRestClient()
-    response = client.patch(f"/v1/gateways/{gateway_id}", json=payload)
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
+    _base_api(
+        request=f"/v1/gateways/{gateway_id}",
+        client="fabric_sp",
+        method="patch",
+        payload=payload,
+    )
     print(f"{icons.green_dot} The '{gateway}' has been updated accordingly.")


@@ -451,6 +456,8 @@ def bind_semantic_model_to_gateway(

     This is a wrapper function for the following API: `Datasets - Bind To Gateway In Group <https://learn.microsoft.com/rest/api/power-bi/datasets/bind-to-gateway-in-group>`_.

+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     dataset : str | uuid.UUID
@@ -473,15 +480,12 @@
         "gatewayObjectId": gateway_id,
     }

-    client = fabric.FabricRestClient()
-    response = client.post(
-        f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/Default.BindToGateway",
-        json=payload,
+    _base_api(
+        request=f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/Default.BindToGateway",
+        client="fabric_sp",
+        method="post",
+        payload=payload,
     )
-
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
     print(
         f"{icons.green_dot} The '{dataset_name}' semantic model within the '{workspace_name}' workspace has been binded to the '{gateway_id}' gateway."
     )
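
Every hunk above follows the same refactoring pattern: the per-function fabric.FabricRestClient() calls, manual status-code checks, and pagination() loops are consolidated into the internal _base_api helper, and hard-coded DataFrame column lists become a columns dict that feeds both _create_dataframe and _update_dataframe_datatypes. The sketch below shows a list-style wrapper written against that pattern; the function name list_widgets and the /v1/widgets endpoint are hypothetical, while the helper names and keyword arguments are taken from the diff itself.

import pandas as pd

from sempy_labs._helper_functions import (
    _base_api,
    _create_dataframe,
    _update_dataframe_datatypes,
)


def list_widgets() -> pd.DataFrame:
    # Hypothetical wrapper illustrating the 0.9.3 pattern seen in list_gateways().
    # The schema is declared once; the same dict builds the empty frame and
    # later drives the dtype conversion.
    columns = {
        "Widget Name": "string",
        "Widget Id": "string",
        "Enabled": "bool",
    }
    df = _create_dataframe(columns=columns)

    # _base_api replaces FabricRestClient plus the manual status-code check;
    # with uses_pagination=True it returns one JSON payload per page.
    responses = _base_api(
        request="/v1/widgets",  # hypothetical endpoint
        client="fabric_sp",
        uses_pagination=True,
    )

    for r in responses:
        for v in r.get("value", []):
            new_data = {
                "Widget Name": v.get("displayName"),
                "Widget Id": v.get("id"),
                "Enabled": v.get("enabled"),
            }
            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

    _update_dataframe_datatypes(dataframe=df, column_map=columns)
    return df

The client="fabric_sp" argument appears alongside every docstring note added in this release, which suggests it is the switch behind the new "Service Principal Authentication is supported" statements.
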
sempy_labs/_generate_semantic_model.py CHANGED
@@ -10,7 +10,7 @@ from sempy_labs._helper_functions import (
     resolve_dataset_name_and_id,
     _conv_b64,
     _decode_b64,
-    lro,
+    _base_api,
 )
 from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 import sempy_labs._icons as icons
@@ -145,18 +145,16 @@ def create_semantic_model_from_bim(
     dfI = fabric.list_datasets(workspace=workspace_id, mode="rest")
     dfI_filt = dfI[(dfI["Dataset Name"] == dataset)]

-    if len(dfI_filt) > 0:
+    if not dfI_filt.empty:
         raise ValueError(
             f"{icons.red_dot} The '{dataset}' semantic model already exists as a semantic model in the '{workspace_name}' workspace."
         )

-    client = fabric.FabricRestClient()
     defPBIDataset = {"version": "1.0", "settings": {}}
-
     payloadPBIDefinition = _conv_b64(defPBIDataset)
     payloadBim = _conv_b64(bim_file)

-    request_body = {
+    payload = {
         "displayName": dataset,
         "definition": {
             "parts": [
@@ -174,13 +172,14 @@ def create_semantic_model_from_bim(
         },
     }

-    response = client.post(
-        f"/v1/workspaces/{workspace_id}/semanticModels",
-        json=request_body,
+    _base_api(
+        request=f"v1/workspaces/{workspace_id}/semanticModels",
+        payload=payload,
+        method="post",
+        lro_return_status_code=True,
+        status_codes=[201, 202],
     )

-    lro(client, response, status_codes=[201, 202])
-
     print(
         f"{icons.green_dot} The '{dataset}' semantic model has been created within the '{workspace_name}' workspace."
     )
@@ -210,13 +209,11 @@ def update_semantic_model_from_bim(
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

-    client = fabric.FabricRestClient()
     defPBIDataset = {"version": "1.0", "settings": {}}
-
     payloadPBIDefinition = _conv_b64(defPBIDataset)
     payloadBim = _conv_b64(bim_file)

-    request_body = {
+    payload = {
         "displayName": dataset_name,
         "definition": {
             "parts": [
@@ -234,13 +231,14 @@ def update_semantic_model_from_bim(
         },
     }

-    response = client.post(
-        f"/v1/workspaces/{workspace_id}/semanticModels/{dataset_id}/updateDefinition",
-        json=request_body,
+    _base_api(
+        request=f"v1/workspaces/{workspace_id}/semanticModels/{dataset_id}/updateDefinition",
+        payload=payload,
+        method="post",
+        lro_return_status_code=True,
+        status_codes=None,
     )

-    lro(client, response, status_codes=[200, 202], return_status_code=True)
-
     print(
         f"{icons.green_dot} The '{dataset_name}' semantic model has been updated within the '{workspace_name}' workspace."
     )
@@ -333,7 +331,6 @@ def get_semantic_model_bim(
     dataset: str | UUID,
     workspace: Optional[str | UUID] = None,
     save_to_file_name: Optional[str] = None,
-    lakehouse_workspace: Optional[str] = None,
 ) -> dict:
     """
     Extracts the Model.bim file for a given semantic model.
@@ -348,10 +345,6 @@ def get_semantic_model_bim(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     save_to_file_name : str, default=None
         If specified, saves the Model.bim as a file in the lakehouse attached to the notebook.
-    lakehouse_workspace : str, default=None
-        The Fabric workspace name in which the lakehouse attached to the workspace resides.
-        Defaults to None which resolves to the workspace of the attached lakehouse
-        or if no lakehouse attached, resolves to the workspace of the notebook.

     Returns
     -------
@@ -375,9 +368,7 @@
                 f"{icons.red_dot} In order to save the model.bim file, a lakehouse must be attached to the notebook. Please attach a lakehouse to this notebook."
             )

-        lakehouse_id = fabric.get_lakehouse_id()
-        lake_workspace = fabric.resolve_workspace_name()
-        lakehouse = resolve_lakehouse_name(lakehouse_id, lake_workspace)
+        lakehouse = resolve_lakehouse_name()
         folderPath = "/lakehouse/default/Files"
         fileExt = ".bim"
         if not save_to_file_name.endswith(fileExt):
@@ -437,11 +428,12 @@ def get_semantic_model_definition(
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)

-    client = fabric.FabricRestClient()
-    response = client.post(
-        f"/v1/workspaces/{workspace_id}/semanticModels/{dataset_id}/getDefinition?format={format}",
+    result = _base_api(
+        request=f"v1/workspaces/{workspace_id}/semanticModels/{dataset_id}/getDefinition?format={format}",
+        method="post",
+        lro_return_json=True,
+        status_codes=None,
     )
-    result = lro(client, response).json()

     files = result["definition"]["parts"]
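
The _generate_semantic_model.py hunks apply the same consolidation to long-running operations: the explicit lro(client, response) calls disappear, and _base_api is asked to resolve the LRO itself via lro_return_status_code=True or lro_return_json=True. A rough sketch of the resulting calling pattern for a definition fetch follows; the helper name _get_definition_parts is hypothetical, while the keyword arguments are those shown in the diff.

from sempy_labs._helper_functions import _base_api


def _get_definition_parts(workspace_id: str, item_id: str) -> list:
    # Hypothetical helper: _base_api posts the request, waits on the
    # long-running operation internally, and returns the final JSON payload
    # because lro_return_json=True (previously this took a client.post(...)
    # followed by lro(client, response).json()).
    result = _base_api(
        request=f"v1/workspaces/{workspace_id}/semanticModels/{item_id}/getDefinition",
        method="post",
        lro_return_json=True,
        status_codes=None,
    )
    return result["definition"]["parts"]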