semantic-link-labs 0.12.2__py3-none-any.whl → 0.12.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic. Click here for more details.

Files changed (40) hide show
  1. {semantic_link_labs-0.12.2.dist-info → semantic_link_labs-0.12.4.dist-info}/METADATA +5 -3
  2. {semantic_link_labs-0.12.2.dist-info → semantic_link_labs-0.12.4.dist-info}/RECORD +39 -31
  3. sempy_labs/__init__.py +18 -10
  4. sempy_labs/_a_lib_info.py +1 -1
  5. sempy_labs/_authentication.py +1 -1
  6. sempy_labs/_capacities.py +1 -1
  7. sempy_labs/_generate_semantic_model.py +2 -2
  8. sempy_labs/_get_connection_string.py +84 -0
  9. sempy_labs/_git.py +1 -1
  10. sempy_labs/_helper_functions.py +28 -4
  11. sempy_labs/_list_functions.py +55 -5
  12. sempy_labs/_managed_private_endpoints.py +1 -1
  13. sempy_labs/_notebooks.py +4 -2
  14. sempy_labs/_semantic_models.py +118 -0
  15. sempy_labs/_sql_audit_settings.py +208 -0
  16. sempy_labs/_sql_endpoints.py +27 -24
  17. sempy_labs/_utils.py +2 -0
  18. sempy_labs/_warehouses.py +1 -56
  19. sempy_labs/admin/__init__.py +6 -0
  20. sempy_labs/admin/_items.py +3 -3
  21. sempy_labs/admin/_labels.py +211 -0
  22. sempy_labs/directlake/_warm_cache.py +3 -1
  23. sempy_labs/eventstream/__init__.py +37 -0
  24. sempy_labs/eventstream/_items.py +263 -0
  25. sempy_labs/eventstream/_topology.py +652 -0
  26. sempy_labs/graph/__init__.py +12 -0
  27. sempy_labs/graph/_groups.py +60 -53
  28. sempy_labs/graph/_sensitivity_labels.py +120 -0
  29. sempy_labs/graph/_teams.py +19 -18
  30. sempy_labs/graph/_user_licenses.py +96 -0
  31. sempy_labs/graph/_users.py +23 -16
  32. sempy_labs/lakehouse/_get_lakehouse_tables.py +33 -1
  33. sempy_labs/lakehouse/_lakehouse.py +6 -2
  34. sempy_labs/lakehouse/_partitioning.py +165 -0
  35. sempy_labs/report/_reportwrapper.py +15 -5
  36. sempy_labs/tom/_model.py +111 -16
  37. sempy_labs/_eventstreams.py +0 -123
  38. {semantic_link_labs-0.12.2.dist-info → semantic_link_labs-0.12.4.dist-info}/WHEEL +0 -0
  39. {semantic_link_labs-0.12.2.dist-info → semantic_link_labs-0.12.4.dist-info}/licenses/LICENSE +0 -0
  40. {semantic_link_labs-0.12.2.dist-info → semantic_link_labs-0.12.4.dist-info}/top_level.txt +0 -0
@@ -348,3 +348,121 @@ def list_semantic_model_datasources(
348
348
  df = pd.DataFrame(rows, columns=list(columns.keys()))
349
349
 
350
350
  return df
351
+
352
+
353
@log
def bind_semantic_model_connection(
    dataset: str | UUID,
    connection_id: UUID,
    connectivity_type: str,
    connection_type: str,
    connection_path: str,
    workspace: Optional[str | UUID] = None,
):
    """
    Binds a semantic model data source reference to a data connection.
    This API can also be used to unbind data source references.

    This is a wrapper function for the following API: `Items - Bind Semantic Model Connection <https://learn.microsoft.com/rest/api/fabric/semanticmodel/items/bind-semantic-model-connection>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    dataset : str | uuid.UUID
        Name or ID of the semantic model.
    connection_id : uuid.UUID
        The object ID of the connection.
    connectivity_type : str
        The connectivity type of the connection. Additional connectivity types may be added over time.
    connection_type : str
        The `type <https://learn.microsoft.com/rest/api/fabric/semanticmodel/items/bind-semantic-model-connection?tabs=HTTP#connectivitytype>`_ of the connection.
    connection_path : str
        The path of the connection.
    workspace : str | uuid.UUID, default=None
        The workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    # Resolve both names and IDs up front: IDs go into the URL, names into the printout.
    workspace_name, workspace_id = resolve_workspace_name_and_id(workspace)
    dataset_name, dataset_id = resolve_dataset_name_and_id(
        dataset=dataset, workspace=workspace_id
    )

    # Assemble the request body piecewise for readability.
    connection_details = {
        "type": connection_type,
        "path": connection_path,
    }
    binding = {
        "id": str(connection_id),
        "connectivityType": connectivity_type,
        "connectionDetails": connection_details,
    }

    _base_api(
        request=f"/v1/workspaces/{workspace_id}/semanticModels/{dataset_id}/bindConnection",
        method="post",
        client="fabric_sp",
        payload={"connectionBinding": binding},
    )

    print(
        f"{icons.green_dot} Connection '{connection_id}' has been bound to the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
    )
414
+
415
+
416
@log
def unbind_semantic_model_connection(
    dataset: str | UUID,
    connection_type: str,
    connection_path: str,
    workspace: Optional[str | UUID] = None,
):
    """
    Unbinds a semantic model data source reference from a data connection.

    This is a wrapper function for the following API: `Items - Bind Semantic Model Connection <https://learn.microsoft.com/rest/api/fabric/semanticmodel/items/bind-semantic-model-connection>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    dataset : str | uuid.UUID
        Name or ID of the semantic model.
    connection_type : str
        The `type <https://learn.microsoft.com/rest/api/fabric/semanticmodel/items/bind-semantic-model-connection?tabs=HTTP#connectivitytype>`_ of the connection.
    connection_path : str
        The path of the connection.
    workspace : str | uuid.UUID, default=None
        The workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    workspace_name, workspace_id = resolve_workspace_name_and_id(workspace)
    dataset_name, dataset_id = resolve_dataset_name_and_id(
        dataset=dataset, workspace=workspace_id
    )

    # Unbinding is expressed via the same bindConnection endpoint with
    # connectivityType set to the literal string "None".
    binding = {
        "connectivityType": "None",
        "connectionDetails": {
            "type": connection_type,
            "path": connection_path,
        },
    }

    _base_api(
        request=f"/v1/workspaces/{workspace_id}/semanticModels/{dataset_id}/bindConnection",
        method="post",
        client="fabric_sp",
        payload={"connectionBinding": binding},
    )

    print(
        f"{icons.green_dot} The '{dataset_name}' semantic model within the '{workspace_name}' workspace has been unbound from its connection."
    )
@@ -0,0 +1,208 @@
1
+ from sempy_labs._helper_functions import (
2
+ resolve_item_id,
3
+ resolve_workspace_name_and_id,
4
+ _base_api,
5
+ _create_dataframe,
6
+ _update_dataframe_datatypes,
7
+ resolve_workspace_id,
8
+ resolve_item_name_and_id,
9
+ )
10
+ import pandas as pd
11
+ from typing import Optional, List, Literal
12
+ import sempy_labs._icons as icons
13
+ from uuid import UUID
14
+ from sempy._utils._log import log
15
+
16
+
17
def _get_base_url(item, type, workspace):
    """Build the REST base URL for a Warehouse or SQLEndpoint item.

    Raises ValueError when *type* is anything other than 'Warehouse' or
    'SQLEndpoint'. Resolution of workspace/item happens first, matching the
    original call order.
    """

    workspace_id = resolve_workspace_id(workspace)
    item_id = resolve_item_id(item=item, type=type, workspace=workspace)

    # Map the item type to its URL path segment.
    endpoint_segments = {
        "Warehouse": "warehouses",
        "SQLEndpoint": "sqlEndpoints",
    }

    if type not in endpoint_segments:
        raise ValueError(
            f"{icons.red_dot} The type must be 'Warehouse' or 'SQLEndpoint'."
        )

    return f"/v1/workspaces/{workspace_id}/{endpoint_segments[type]}/{item_id}"
36
+
37
+
38
@log
def get_sql_audit_settings(
    item: str | UUID,
    type: Literal["Warehouse", "SQLEndpoint"],
    workspace: Optional[str | UUID] = None,
) -> pd.DataFrame:
    """
    Shows the SQL audit settings of a Fabric item.

    This is a wrapper function for the following API: `SQL Audit Settings - Get SQL Audit Settings <https://learn.microsoft.com/rest/api/fabric/warehouse/sql-audit-settings/get-sql-audit-settings>`_.

    Service Principal Authentication is supported (see `here <https://learn.microsoft.com/rest/api/fabric/warehouse/sql-audit-settings/get-sql-audit-settings#service-principal-authentication>`_).

    Parameters
    ----------
    item : str | uuid.UUID
        The name or ID of the item (Warehouse or SQLEndpoint).
    type : Literal['Warehouse', 'SQLEndpoint']
        The type of the item. Must be 'Warehouse' or 'SQLEndpoint'.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe containing the SQL audit settings of the specified warehouse.
    """

    columns = {
        "State": "string",
        "Retention Days": "int",
        "Audit Actions And Group": "list",
    }

    url = _get_base_url(item=item, type=type, workspace=workspace)
    response = _base_api(
        request=f"{url}/settings/sqlAudit",
        client="fabric_sp",
    ).json()

    # The API returns a single settings object, so the frame always has exactly
    # one row; the previous empty-frame pre-allocation and 'if rows:' guard were
    # dead code (the guard could never be false) and have been removed.
    df = pd.DataFrame(
        [
            {
                "State": response.get("state"),
                "Retention Days": response.get("retentionDays"),
                "Audit Actions And Group": response.get("auditActionsAndGroups"),
            }
        ],
        columns=list(columns.keys()),
    )
    _update_dataframe_datatypes(dataframe=df, column_map=columns)

    return df
96
+
97
+
98
@log
def update_sql_audit_settings(
    item: str | UUID,
    type: Literal["Warehouse", "SQLEndpoint"],
    workspace: Optional[str | UUID] = None,
    retention_days: Optional[int] = None,
    state: Optional[str] = None,
):
    """
    Update settings associated with the item.

    This is a wrapper function for the following API: `SQL Audit Settings - Update SQL Audit Settings <https://learn.microsoft.com/rest/api/fabric/warehouse/sql-audit-settings/update-sql-audit-settings>`_.

    Service Principal Authentication is supported (see `here <https://learn.microsoft.com/rest/api/fabric/warehouse/sql-audit-settings/get-sql-audit-settings#service-principal-authentication>`_).

    Parameters
    ----------
    item : str | uuid.UUID
        The name or ID of the item (Warehouse or SQLEndpoint).
    type : Literal['Warehouse', 'SQLEndpoint']
        The type of the item. Must be 'Warehouse' or 'SQLEndpoint'.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    retention_days : int, default=None
        Number of days to retain audit logs; must be a non-negative integer.
        If None, the current value is left unchanged.
    state : str, default=None
        The audit state, either 'Enabled' or 'Disabled' (case-insensitive).
        If None, the current value is left unchanged.
    """

    (workspace_name, _) = resolve_workspace_name_and_id(workspace)

    payload = {}
    if retention_days is not None:
        # Reject bool explicitly: isinstance(True, int) is True in Python, so
        # without this check retention_days=True would silently pass validation.
        if (
            isinstance(retention_days, bool)
            or not isinstance(retention_days, int)
            or retention_days < 0
        ):
            raise ValueError(
                f"{icons.red_dot} retention_days must be a non-negative integer."
            )
        payload["retentionDays"] = retention_days
    if state is not None:
        # Normalize casing so 'enabled' / 'DISABLED' etc. are accepted.
        state = state.capitalize()
        if state not in ["Enabled", "Disabled"]:
            raise ValueError(
                f"{icons.red_dot} state must be either 'Enabled' or 'Disabled'."
            )
        payload["state"] = state

    if not payload:
        print(
            f"{icons.info} No updates were made as neither retention_days nor state were provided."
        )
        return

    url = _get_base_url(item=item, type=type, workspace=workspace)
    _base_api(
        request=f"{url}/settings/sqlAudit",
        client="fabric_sp",
        method="patch",
        payload=payload,
    )

    print(
        f"{icons.green_dot} The SQL audit settings for the '{item}' {type.lower()} within the '{workspace_name}' workspace have been updated accordingly."
    )
159
+
160
+
161
@log
def set_audit_actions_and_group(
    item: str | UUID,
    type: Literal["Warehouse", "SQLEndpoint"],
    sql_audit_groups: List[str],
    workspace: Optional[str | UUID] = None,
):
    """
    Update the audit actions and groups for this item.

    This is a wrapper function for the following API: `SQL Audit Settings - Set Audit Actions And Groups <https://learn.microsoft.com/rest/api/fabric/warehouse/sql-audit-settings/set-audit-actions-and-groups>`_.

    Service Principal Authentication is supported (see `here <https://learn.microsoft.com/rest/api/fabric/warehouse/sql-audit-settings/get-sql-audit-settings#service-principal-authentication>`_).

    Parameters
    ----------
    item : str | uuid.UUID
        The name or ID of the item (Warehouse or SQLEndpoint).
    type : Literal['Warehouse', 'SQLEndpoint']
        The type of the item. Must be 'Warehouse' or 'SQLEndpoint'.
    sql_audit_groups : List[str]
        The audit actions and/or audit action groups to set on the item.
        Must be a non-empty list of strings.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace_name, _) = resolve_workspace_name_and_id(workspace)

    # Use a distinct generator variable: the original reused 'item', shadowing
    # the function parameter inside the comprehension (confusing, though harmless).
    if (
        not sql_audit_groups
        or not isinstance(sql_audit_groups, list)
        or not all(isinstance(group, str) for group in sql_audit_groups)
    ):
        raise ValueError(
            f"{icons.red_dot} sql_audit_groups must be a non-empty list of strings."
        )

    url = _get_base_url(item=item, type=type, workspace=workspace)
    _base_api(
        request=f"{url}/settings/sqlAudit/setAuditActionsAndGroups",
        client="fabric_sp",
        method="post",
        payload=sql_audit_groups,
    )

    print(
        f"{icons.green_dot} The SQL audit actions and groups for the '{item}' {type.lower()} within the '{workspace_name}' workspace have been updated accordingly."
    )
@@ -66,7 +66,8 @@ def refresh_sql_endpoint_metadata(
66
66
  item: str | UUID,
67
67
  type: Literal["Lakehouse", "MirroredDatabase"],
68
68
  workspace: Optional[str | UUID] = None,
69
- tables: dict[str, list[str]] = None,
69
+ timeout_unit: Literal["Seconds", "Minutes", "Hours", "Days"] = "Minutes",
70
+ timeout_value: int = 15,
70
71
  ) -> pd.DataFrame:
71
72
  """
72
73
  Refreshes the metadata of a SQL endpoint.
@@ -85,15 +86,10 @@ def refresh_sql_endpoint_metadata(
85
86
  The Fabric workspace name or ID.
86
87
  Defaults to None which resolves to the workspace of the attached lakehouse
87
88
  or if no lakehouse attached, resolves to the workspace of the notebook.
88
- tables : dict[str, list[str]], default=None
89
- A dictionary where the keys are schema names and the values are lists of table names.
90
- If empty, all table metadata will be refreshed.
91
-
92
- Example:
93
- {
94
- "dbo": ["DimDate", "DimGeography"],
95
- "sls": ["FactSales", "FactBudget"],
96
- }
89
+ timeout_unit : Literal['Seconds', 'Minutes', 'Hours', 'Days'], default='Minutes'
90
+ The unit of time for the request duration before timing out. Additional duration types may be added over time.
91
+ timeout_value : int, default=15
92
+ The number of time units in the request duration.
97
93
 
98
94
  Returns
99
95
  -------
@@ -132,14 +128,25 @@ def refresh_sql_endpoint_metadata(
132
128
  else:
133
129
  raise ValueError("Invalid type. Must be 'Lakehouse' or 'MirroredDatabase'.")
134
130
 
135
- payload = {}
136
- if tables:
137
- payload = {
138
- "tableDefinitions": [
139
- {"schema": schema, "tableNames": tables}
140
- for schema, tables in tables.items()
141
- ]
142
- }
131
+ timeout_unit = timeout_unit.capitalize()
132
+ if timeout_unit not in ["Seconds", "Minutes", "Hours", "Days"]:
133
+ raise ValueError(
134
+ "Invalid timeout_unit. Must be 'Seconds', 'Minutes', 'Hours', or 'Days'."
135
+ )
136
+ if timeout_unit == "Hours" and timeout_value > 24:
137
+ raise ValueError("timeout_value cannot exceed 24 when timeout_unit is 'Hours'.")
138
+ if timeout_unit == "Days" and timeout_value > 1:
139
+ raise ValueError("timeout_value cannot exceed 1 when timeout_unit is 'Days'.")
140
+ if timeout_unit == "Minutes" and timeout_value > 1440:
141
+ raise ValueError(
142
+ "timeout_value cannot exceed 1440 when timeout_unit is 'Minutes'."
143
+ )
144
+ if timeout_unit == "Seconds" and timeout_value > 86400:
145
+ raise ValueError(
146
+ "timeout_value cannot exceed 86400 when timeout_unit is 'Seconds'."
147
+ )
148
+
149
+ payload = {"timeout": {"timeUnit": timeout_unit, "value": timeout_value}}
143
150
 
144
151
  result = _base_api(
145
152
  request=f"v1/workspaces/{workspace_id}/sqlEndpoints/{sql_endpoint_id}/refreshMetadata",
@@ -195,10 +202,8 @@ def refresh_sql_endpoint_metadata(
195
202
  df = df[column_order]
196
203
 
197
204
  printout = f"{icons.green_dot} The metadata of the SQL endpoint for the '{item_name}' {type.lower()} within the '{workspace_name}' workspace has been refreshed"
198
- if tables:
199
- print(f"{printout} for the following tables: {tables}.")
200
- else:
201
- print(f"{printout} for all tables.")
205
+ print(f"{printout} for all tables.")
206
+ _update_dataframe_datatypes(df, columns)
202
207
  else:
203
208
  # If the target item has no tables to refresh the metadata for
204
209
  df = pd.DataFrame(columns=columns.keys())
@@ -206,6 +211,4 @@ def refresh_sql_endpoint_metadata(
206
211
  f"{icons.yellow_dot} The SQL endpoint '{item_name}' {type.lower()} within the '{workspace_name}' workspace has no tables to refresh..."
207
212
  )
208
213
 
209
- _update_dataframe_datatypes(df, columns)
210
-
211
214
  return df
sempy_labs/_utils.py CHANGED
@@ -66,4 +66,6 @@ items = {
66
66
  "DigitalTwinBuilder": "digitaltwinbuilders",
67
67
  "DigitalTwinBuilderFlow": "DigitalTwinBuilderFlows",
68
68
  "MirroredAzureDatabricksCatalog": "mirroredAzureDatabricksCatalogs",
69
+ "Map": "Maps",
70
+ "AnomalyDetector": "anomalydetectors",
69
71
  }
sempy_labs/_warehouses.py CHANGED
@@ -1,5 +1,4 @@
1
- from ._helper_functions import (
2
- resolve_item_id,
1
+ from sempy_labs._helper_functions import (
3
2
  resolve_workspace_name_and_id,
4
3
  _base_api,
5
4
  _create_dataframe,
@@ -233,57 +232,3 @@ def get_warehouse_columns(
233
232
  )
234
233
 
235
234
  return df
236
-
237
-
238
- @log
239
- def get_warehouse_connection_string(
240
- warehouse: str | UUID,
241
- workspace: Optional[str | UUID] = None,
242
- guest_tenant_id: Optional[UUID] = None,
243
- private_link_type: Optional[str] = None,
244
- ) -> str:
245
- """
246
- Returns the SQL connection string of the specified warehouse.
247
-
248
- Parameters
249
- ----------
250
- warehouse : str | uuid.UUID
251
- Name or ID of the Fabric warehouse.
252
- workspace : str | uuid.UUID, default=None
253
- The Fabric workspace name or ID.
254
- Defaults to None which resolves to the workspace of the attached lakehouse
255
- or if no lakehouse attached, resolves to the workspace of the notebook.
256
- guest_tenant_id : uuid.UUID, default=None
257
- The guest tenant ID if the end user's tenant is different from the warehouse tenant.
258
- private_link_type : str, default=None
259
- Indicates the type of private link this connection string uses. Must be either 'Workspace' or 'None' or left as None.
260
-
261
- Returns
262
- -------
263
- str
264
- Returns the SQL connection string of the specified warehouse.
265
- """
266
- workspace_id = resolve_workspace_id(workspace)
267
- warehouse_id = resolve_item_id(
268
- item=warehouse, type="Warehouse", workspace=workspace
269
- )
270
-
271
- url = f"/v1/workspaces/{workspace_id}/warehouses/{warehouse_id}/connectionString"
272
-
273
- if private_link_type is not None and private_link_type not in ["Workspace", "None"]:
274
- raise ValueError(
275
- f"{icons.red_dot} private_link_type must be 'Workspace' or 'None' or left as None."
276
- )
277
-
278
- if guest_tenant_id or private_link_type:
279
- params = []
280
- if guest_tenant_id:
281
- params.append(f"guestTenantId={guest_tenant_id}")
282
- if private_link_type:
283
- params.append(f"privateLinkType={private_link_type}")
284
- param_str = "?" + "&".join(params)
285
- url += param_str
286
-
287
- response = _base_api(request=url, client="fabric_sp")
288
-
289
- return response.json().get("connectionString")
@@ -98,6 +98,10 @@ from ._sharing_links import (
98
98
  remove_all_sharing_links,
99
99
  remove_sharing_links,
100
100
  )
101
+ from ._labels import (
102
+ bulk_set_labels,
103
+ bulk_remove_labels,
104
+ )
101
105
 
102
106
  __all__ = [
103
107
  "list_items",
@@ -161,4 +165,6 @@ __all__ = [
161
165
  "rotate_tenant_key",
162
166
  "remove_all_sharing_links",
163
167
  "remove_sharing_links",
168
+ "bulk_set_labels",
169
+ "bulk_remove_labels",
164
170
  ]
@@ -2,13 +2,13 @@ import pandas as pd
2
2
  from typing import Optional, Tuple
3
3
  from uuid import UUID
4
4
  import sempy_labs._icons as icons
5
- from ._basic_functions import (
5
+ from sempy_labs.admin._basic_functions import (
6
6
  _resolve_workspace_name_and_id,
7
7
  )
8
- from ._capacities import (
8
+ from sempy_labs.admin._capacities import (
9
9
  _resolve_capacity_name_and_id,
10
10
  )
11
- from .._helper_functions import (
11
+ from sempy_labs._helper_functions import (
12
12
  _is_valid_uuid,
13
13
  _build_url,
14
14
  _base_api,