semantic-link-labs 0.11.3__py3-none-any.whl → 0.12.1__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.

This release of semantic-link-labs has been flagged as potentially problematic.

Files changed (75)
  1. {semantic_link_labs-0.11.3.dist-info → semantic_link_labs-0.12.1.dist-info}/METADATA +6 -4
  2. {semantic_link_labs-0.11.3.dist-info → semantic_link_labs-0.12.1.dist-info}/RECORD +75 -73
  3. sempy_labs/__init__.py +6 -0
  4. sempy_labs/_a_lib_info.py +1 -1
  5. sempy_labs/_authentication.py +85 -32
  6. sempy_labs/_capacities.py +2 -2
  7. sempy_labs/_capacity_migration.py +4 -4
  8. sempy_labs/_clear_cache.py +1 -1
  9. sempy_labs/_connections.py +107 -70
  10. sempy_labs/_dashboards.py +6 -2
  11. sempy_labs/_data_pipelines.py +1 -1
  12. sempy_labs/_dataflows.py +1 -1
  13. sempy_labs/_dax.py +3 -3
  14. sempy_labs/_delta_analyzer.py +4 -4
  15. sempy_labs/_delta_analyzer_history.py +1 -1
  16. sempy_labs/_deployment_pipelines.py +1 -1
  17. sempy_labs/_environments.py +1 -1
  18. sempy_labs/_eventhouses.py +9 -3
  19. sempy_labs/_eventstreams.py +1 -1
  20. sempy_labs/_external_data_shares.py +1 -1
  21. sempy_labs/_gateways.py +14 -7
  22. sempy_labs/_generate_semantic_model.py +7 -12
  23. sempy_labs/_git.py +1 -1
  24. sempy_labs/_graphQL.py +1 -1
  25. sempy_labs/_helper_functions.py +161 -54
  26. sempy_labs/_job_scheduler.py +12 -1
  27. sempy_labs/_kql_databases.py +1 -1
  28. sempy_labs/_kql_querysets.py +10 -2
  29. sempy_labs/_kusto.py +2 -2
  30. sempy_labs/_list_functions.py +1 -1
  31. sempy_labs/_managed_private_endpoints.py +1 -1
  32. sempy_labs/_mirrored_databases.py +40 -16
  33. sempy_labs/_mirrored_warehouses.py +1 -1
  34. sempy_labs/_ml_experiments.py +1 -1
  35. sempy_labs/_model_bpa.py +6 -6
  36. sempy_labs/_model_bpa_bulk.py +3 -3
  37. sempy_labs/_model_dependencies.py +1 -1
  38. sempy_labs/_mounted_data_factories.py +3 -3
  39. sempy_labs/_notebooks.py +2 -1
  40. sempy_labs/_query_scale_out.py +2 -2
  41. sempy_labs/_refresh_semantic_model.py +1 -1
  42. sempy_labs/_semantic_models.py +15 -3
  43. sempy_labs/_spark.py +1 -1
  44. sempy_labs/_sql.py +3 -3
  45. sempy_labs/_sql_endpoints.py +5 -3
  46. sempy_labs/_sqldatabase.py +5 -1
  47. sempy_labs/_tags.py +3 -1
  48. sempy_labs/_translations.py +7 -360
  49. sempy_labs/_user_delegation_key.py +2 -2
  50. sempy_labs/_utils.py +27 -0
  51. sempy_labs/_vertipaq.py +3 -3
  52. sempy_labs/_vpax.py +1 -1
  53. sempy_labs/_warehouses.py +5 -0
  54. sempy_labs/_workloads.py +1 -1
  55. sempy_labs/_workspace_identity.py +1 -1
  56. sempy_labs/_workspaces.py +145 -11
  57. sempy_labs/admin/__init__.py +6 -0
  58. sempy_labs/admin/_capacities.py +37 -14
  59. sempy_labs/admin/_items.py +2 -2
  60. sempy_labs/admin/_tenant_keys.py +89 -0
  61. sempy_labs/directlake/_dl_helper.py +1 -1
  62. sempy_labs/lakehouse/__init__.py +4 -0
  63. sempy_labs/lakehouse/_materialized_lake_views.py +76 -0
  64. sempy_labs/lakehouse/_shortcuts.py +8 -2
  65. sempy_labs/report/_download_report.py +4 -1
  66. sempy_labs/report/_export_report.py +12 -5
  67. sempy_labs/report/_generate_report.py +11 -3
  68. sempy_labs/report/_paginated.py +21 -15
  69. sempy_labs/report/_report_functions.py +19 -11
  70. sempy_labs/report/_report_rebind.py +56 -33
  71. sempy_labs/theme/_org_themes.py +5 -6
  72. sempy_labs/tom/_model.py +5 -16
  73. {semantic_link_labs-0.11.3.dist-info → semantic_link_labs-0.12.1.dist-info}/WHEEL +0 -0
  74. {semantic_link_labs-0.11.3.dist-info → semantic_link_labs-0.12.1.dist-info}/licenses/LICENSE +0 -0
  75. {semantic_link_labs-0.11.3.dist-info → semantic_link_labs-0.12.1.dist-info}/top_level.txt +0 -0
sempy_labs/_connections.py CHANGED
@@ -1,8 +1,8 @@
 import pandas as pd
 from typing import Optional
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     _is_valid_uuid,
-    resolve_workspace_name_and_id,
+    resolve_workspace_id,
     _update_dataframe_datatypes,
     _base_api,
     _create_dataframe,
@@ -10,8 +10,9 @@ from ._helper_functions import (
 )
 from uuid import UUID
 import sempy_labs._icons as icons
-from ._gateways import _resolve_gateway_id
+from sempy_labs._gateways import _resolve_gateway_id
 from sempy._utils._log import log
+import warnings
 
 
 @log
@@ -68,13 +69,13 @@ def delete_connection_role_assignment(connection: str | UUID, role_assignment_id
 @log
 def _resolve_connection_id(connection: str | UUID) -> UUID:
 
-    dfC = list_connections()
     if _is_valid_uuid(connection):
-        dfC_filt = dfC[dfC["Connection Id"] == connection]
-    else:
-        dfC_filt = dfC[dfC["Connection Name"] == connection]
+        return connection
+
+    dfC = list_connections()
+    dfC_filt = dfC[dfC["Connection Name"] == connection]
 
-    if len(dfC_filt) == 0:
+    if dfC_filt.empty:
         raise ValueError(
             f"{icons.red_dot} The '{connection}' is not a valid connection."
         )
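The rewritten resolver returns as soon as the caller passes a UUID, so the list_connections() REST call is only paid for name lookups. A minimal, self-contained sketch of the same pattern; the helper and function names here are illustrative, not the library's API:

import pandas as pd
from uuid import UUID

def _looks_like_uuid(value) -> bool:
    # Stand-in for sempy_labs' _is_valid_uuid helper.
    try:
        UUID(str(value))
        return True
    except ValueError:
        return False

def resolve_id(name_or_id, list_fn):
    # Fast path: an ID needs no lookup, so the expensive listing call is skipped.
    if _looks_like_uuid(name_or_id):
        return name_or_id
    df = list_fn()  # e.g. list_connections()
    match = df[df["Connection Name"] == name_or_id]
    if match.empty:
        raise ValueError(f"The '{name_or_id}' is not a valid connection.")
    return match["Connection Id"].iloc[0]

def _demo_list():
    # Hypothetical lookup result for the sketch above.
    return pd.DataFrame(
        [{"Connection Name": "Sales", "Connection Id": "11111111-1111-1111-1111-111111111111"}]
    )

assert resolve_id("Sales", _demo_list).endswith("1111")
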
@@ -119,16 +120,20 @@ def list_connection_role_assignments(connection: str | UUID) -> pd.DataFrame:
         uses_pagination=True,
     )
 
+    rows = []
     for r in responses:
         for v in r.get("value", []):
-            new_data = {
-                "Connection Role Assignment Id": v.get("id"),
-                "Principal Id": v.get("principal", {}).get("id"),
-                "Principal Type": v.get("principal", {}).get("type"),
-                "Role": v.get("role"),
-            }
+            rows.append(
+                {
+                    "Connection Role Assignment Id": v.get("id"),
+                    "Principal Id": v.get("principal", {}).get("id"),
+                    "Principal Type": v.get("principal", {}).get("type"),
+                    "Role": v.get("role"),
+                }
+            )
 
-            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
 
     return df
 
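This rows-then-DataFrame rewrite recurs throughout the release: instead of one pd.concat per record, which copies the entire frame on every iteration (quadratic in the number of rows), the loop accumulates plain dicts and builds the frame once. Both patterns side by side, as a sketch:

import pandas as pd

values = [{"id": "a", "role": "Admin"}, {"id": "b", "role": "User"}]
columns = {"Connection Role Assignment Id": "string", "Role": "string"}

# Old pattern: one concat per record, re-copying the frame each time.
df_old = pd.DataFrame(columns=list(columns.keys()))
for v in values:
    new_data = {"Connection Role Assignment Id": v["id"], "Role": v["role"]}
    df_old = pd.concat([df_old, pd.DataFrame(new_data, index=[0])], ignore_index=True)

# New pattern: accumulate rows, then build the frame once (linear).
rows = [{"Connection Role Assignment Id": v["id"], "Role": v["role"]} for v in values]
df_new = pd.DataFrame(rows, columns=list(columns.keys()))

assert df_old.to_dict() == df_new.to_dict()

The if rows: guard also means an empty result keeps the typed empty frame built by _create_dataframe rather than being rebuilt without its schema.
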
@@ -165,51 +170,57 @@ def list_connections() -> pd.DataFrame:
         request="/v1/connections", client="fabric_sp", uses_pagination=True
     )
 
+    rows = []
     for r in responses:
         for i in r.get("value", []):
             connection_details = i.get("connectionDetails", {})
             credential_details = i.get("credentialDetails", {})
 
-            new_data = {
-                "Connection Id": i.get("id"),
-                "Connection Name": i.get("displayName"),
-                "Gateway Id": i.get("gatewayId"),
-                "Connectivity Type": i.get("connectivityType"),
-                "Connection Path": connection_details.get("path"),
-                "Connection Type": connection_details.get("type"),
-                "Privacy Level": i.get("privacyLevel"),
-                "Credential Type": (
-                    credential_details.get("credentialType")
-                    if credential_details
-                    else None
-                ),
-                "Single Sign On Type": (
-                    credential_details.get("singleSignOnType")
-                    if credential_details
-                    else None
-                ),
-                "Connection Encryption": (
-                    credential_details.get("connectionEncryption")
-                    if credential_details
-                    else None
-                ),
-                "Skip Test Connection": (
-                    credential_details.get("skipTestConnection")
-                    if credential_details
-                    else None
-                ),
-            }
-
-            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
-
-    _update_dataframe_datatypes(dataframe=df, column_map=columns)
+            rows.append(
+                {
+                    "Connection Id": i.get("id"),
+                    "Connection Name": i.get("displayName"),
+                    "Gateway Id": i.get("gatewayId"),
+                    "Connectivity Type": i.get("connectivityType"),
+                    "Connection Path": connection_details.get("path"),
+                    "Connection Type": connection_details.get("type"),
+                    "Privacy Level": i.get("privacyLevel"),
+                    "Credential Type": (
+                        credential_details.get("credentialType")
+                        if credential_details
+                        else None
+                    ),
+                    "Single Sign On Type": (
+                        credential_details.get("singleSignOnType")
+                        if credential_details
+                        else None
+                    ),
+                    "Connection Encryption": (
+                        credential_details.get("connectionEncryption")
+                        if credential_details
+                        else None
+                    ),
+                    "Skip Test Connection": (
+                        credential_details.get("skipTestConnection")
+                        if credential_details
+                        else None
+                    ),
+                }
+            )
+
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
+        _update_dataframe_datatypes(dataframe=df, column_map=columns)
 
     return df
 
 
 @log
 def list_item_connections(
-    item_name: str, item_type: str, workspace: Optional[str | UUID] = None
+    item: Optional[str | UUID] = None,
+    type: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
+    **kwargs,
 ) -> pd.DataFrame:
     """
     Shows the list of connections that the specified item is connected to.
@@ -220,9 +231,9 @@ def list_item_connections(
 
     Parameters
     ----------
-    item_name : str
-        The item name.
-    item_type : str
+    item : str | uuid.UUID
+        The item name or ID.
+    type : str
         The `item type <https://learn.microsoft.com/rest/api/fabric/core/items/update-item?tabs=HTTP#itemtype>`_.
     workspace : str | uuid.UUID, default=None
         The Fabric workspace name or ID.
@@ -235,9 +246,32 @@
     A pandas dataframe showing the list of connections that the specified item is connected to.
     """
 
-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    item_type = item_type[0].upper() + item_type[1:]
-    item_id = resolve_item_id(item=item_name, type=item_type, workspace=workspace_id)
+    if "item_name" in kwargs:
+        if item is not None:
+            raise TypeError("Cannot specify both 'item' and 'item_name'")
+        item = kwargs.pop("item_name")
+        warnings.warn(
+            "'item_name' parameter is deprecated, use 'item' instead.",
+            FutureWarning,
+            stacklevel=2,
+        )
+    if "item_type" in kwargs:
+        if type is not None:
+            raise TypeError("Cannot specify both 'type' and 'item_type'")
+        type = kwargs.pop("item_type")
+        warnings.warn(
+            "'item_type' parameter is deprecated, use 'type' instead.",
+            FutureWarning,
+            stacklevel=2,
+        )
+
+    if item is None or type is None:
+        raise TypeError(
+            "Missing required parameters: 'item' and 'type' must be provided either directly or via 'item_name' and 'item_type'."
+        )
+
+    workspace_id = resolve_workspace_id(workspace)
+    item_id = resolve_item_id(item=item, type=type, workspace=workspace_id)
 
     columns = {
         "Connection Name": "string",
@@ -255,18 +289,22 @@
         uses_pagination=True,
     )
 
+    rows = []
     for r in responses:
         for v in r.get("value", []):
-            new_data = {
-                "Connection Name": v.get("displayName"),
-                "Connection Id": v.get("id"),
-                "Connectivity Type": v.get("connectivityType"),
-                "Connection Type": v.get("connectionDetails", {}).get("type"),
-                "Connection Path": v.get("connectionDetails", {}).get("path"),
-                "Gateway Id": v.get("gatewayId"),
-            }
+            rows.append(
+                {
+                    "Connection Name": v.get("displayName"),
+                    "Connection Id": v.get("id"),
+                    "Connectivity Type": v.get("connectivityType"),
+                    "Connection Type": v.get("connectionDetails", {}).get("type"),
+                    "Connection Path": v.get("connectionDetails", {}).get("path"),
+                    "Gateway Id": v.get("gatewayId"),
+                }
+            )
 
-            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
 
     return df
 
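The item_name/item_type handling a few hunks up is a deprecation shim: the old keywords still work, but they raise a FutureWarning and forward to the new item/type parameters, and mixing old and new spellings is rejected. The pattern in isolation (demo is a hypothetical function, not part of the library):

import warnings

def demo(item=None, **kwargs):
    # Back-compat shim: accept the old keyword, warn, and forward it.
    if "item_name" in kwargs:
        if item is not None:
            raise TypeError("Cannot specify both 'item' and 'item_name'")
        item = kwargs.pop("item_name")
        warnings.warn(
            "'item_name' parameter is deprecated, use 'item' instead.",
            FutureWarning,
            stacklevel=2,
        )
    if item is None:
        raise TypeError("Missing required parameter: 'item'")
    return item

assert demo(item="Sales") == "Sales"       # new spelling, silent
assert demo(item_name="Sales") == "Sales"  # old spelling still works, warns
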
@@ -293,10 +331,10 @@ def _list_supported_connection_types(
     url = url.rstrip("&")
     responses = _base_api(request=url, client="fabric_sp", uses_pagination=True)
 
-    records = []
+    rows = []
     for r in responses:
         for v in r.get("value", []):
-            records.append(
+            rows.append(
                 {
                     "Connection Type": v.get("type"),
                     "Creation Method": v["creationMethods"][0]["name"],
@@ -310,10 +348,9 @@
                 }
             )
 
-    if records:
-        df = pd.DataFrame(records)
-
-        _update_dataframe_datatypes(dataframe=df, column_map=columns)
+    if rows:
+        df = pd.DataFrame(rows, columns=list(columns.keys()))
+        _update_dataframe_datatypes(dataframe=df, column_map=columns)
 
     return df
 
sempy_labs/_dashboards.py CHANGED
@@ -1,7 +1,7 @@
 from typing import Optional
 from uuid import UUID
 import pandas as pd
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     _create_dataframe,
     _base_api,
     resolve_workspace_id,
@@ -15,6 +15,8 @@ def list_dashboards(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     """
     Shows a list of the dashboards within a workspace.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     workspace : str | uuid.UUID, default=None
@@ -42,7 +44,9 @@
 
     workspace_id = resolve_workspace_id(workspace)
 
-    response = _base_api(request=f"/v1.0/myorg/groups/{workspace_id}/dashboards")
+    response = _base_api(
+        request=f"/v1.0/myorg/groups/{workspace_id}/dashboards", client="fabric_sp"
+    )
 
     rows = []
     for v in response.json().get("value", []):
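Alongside the docstring note, the dashboards request now goes through the "fabric_sp" client, so the same call works under Service Principal authentication as well as user auth. A rough sketch of the dispatch that client="fabric_sp" implies; the token names are invented for illustration:

def _base_api_sketch(request: str, client: str = "fabric") -> str:
    # Illustration only: choose a token source based on the requested client.
    tokens = {"fabric": "user-token", "fabric_sp": "service-principal-token"}
    return f"GET {request} (auth: {tokens[client]})"

print(_base_api_sketch("/v1.0/myorg/groups/123/dashboards", client="fabric_sp"))
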
sempy_labs/_data_pipelines.py CHANGED
@@ -1,6 +1,6 @@
 import pandas as pd
 from typing import Optional
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _decode_b64,
     _base_api,
sempy_labs/_dataflows.py CHANGED
@@ -1,5 +1,5 @@
 import pandas as pd
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _is_valid_uuid,
     _update_dataframe_datatypes,
sempy_labs/_dax.py CHANGED
@@ -1,17 +1,17 @@
 import sempy.fabric as fabric
 import pandas as pd
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     format_dax_object_name,
     resolve_dataset_name_and_id,
     _base_api,
     generate_guid,
 )
-from ._model_dependencies import get_model_calc_dependencies
+from sempy_labs._model_dependencies import get_model_calc_dependencies
 from typing import Optional, List, Tuple
 from sempy._utils._log import log
 from uuid import UUID
-from .directlake._warm_cache import _put_columns_into_memory
+from sempy_labs.directlake._warm_cache import _put_columns_into_memory
 import sempy_labs._icons as icons
 import time
 
sempy_labs/_delta_analyzer.py CHANGED
@@ -5,7 +5,7 @@ import os
 from uuid import UUID
 from typing import Dict, Optional
 import pyarrow.parquet as pq
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     create_abfss_path,
     save_as_delta_table,
     _get_column_aggregate,
@@ -21,11 +21,11 @@ from ._helper_functions import (
     _get_delta_table,
 )
 from sempy._utils._log import log
-from .lakehouse._get_lakehouse_tables import get_lakehouse_tables
-from .lakehouse._lakehouse import (
+from sempy_labs.lakehouse._get_lakehouse_tables import get_lakehouse_tables
+from sempy_labs.lakehouse._lakehouse import (
     lakehouse_attached,
 )
-from .lakehouse._helper import (
+from sempy_labs.lakehouse._helper import (
     is_v_ordered,
 )
 import sempy_labs._icons as icons
sempy_labs/_delta_analyzer_history.py CHANGED
@@ -1,7 +1,7 @@
 import pandas as pd
 from typing import Optional
 import pyarrow.parquet as pq
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     create_abfss_path,
     resolve_workspace_id,
     resolve_lakehouse_id,
sempy_labs/_deployment_pipelines.py CHANGED
@@ -1,5 +1,5 @@
 import pandas as pd
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     _is_valid_uuid,
     _base_api,
     _update_dataframe_datatypes,
sempy_labs/_environments.py CHANGED
@@ -1,7 +1,7 @@
 import pandas as pd
 import sempy_labs._icons as icons
 from typing import Optional
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     resolve_workspace_id,
     _base_api,
sempy_labs/_eventhouses.py CHANGED
@@ -1,12 +1,12 @@
 import pandas as pd
 from typing import Optional
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     _base_api,
     _create_dataframe,
     _conv_b64,
     delete_item,
     create_item,
-    get_item_definition,
+    _get_item_definition,
     resolve_workspace_id,
 )
 from uuid import UUID
@@ -26,6 +26,8 @@ def create_eventhouse(
 
     This is a wrapper function for the following API: `Items - Create Eventhouse <https://learn.microsoft.com/rest/api/fabric/environment/items/create-eventhouse>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     name: str
@@ -127,6 +129,8 @@ def delete_eventhouse(name: str, workspace: Optional[str | UUID] = None):
 
     This is a wrapper function for the following API: `Items - Delete Eventhouse <https://learn.microsoft.com/rest/api/fabric/environment/items/delete-eventhouse>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     name: str
@@ -151,6 +155,8 @@ def get_eventhouse_definition(
 
     This is a wrapper function for the following API: `Items - Get Eventhouse Definition <https://learn.microsoft.com/rest/api/fabric/eventhouse/items/get-eventhouse-definition>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     eventhouse : str
@@ -168,7 +174,7 @@
     The eventhouse definition in .json format or as a pandas dataframe.
     """
 
-    return get_item_definition(
+    return _get_item_definition(
         item=eventhouse,
         type="Eventhouse",
         workspace=workspace,
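The get_item_definition to _get_item_definition rename marks the generic definition fetcher as internal; callers are expected to go through per-item wrappers such as get_eventhouse_definition. A hedged usage sketch, assuming the wrapper is exported at package level like its sibling functions (the names are placeholders):

import sempy_labs as labs

definition = labs.get_eventhouse_definition(
    eventhouse="MyEventhouse",   # placeholder item name
    workspace="My Workspace",    # placeholder workspace
)
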
sempy_labs/_eventstreams.py CHANGED
@@ -1,6 +1,6 @@
 import pandas as pd
 from typing import Optional
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     _base_api,
     delete_item,
     _create_dataframe,
sempy_labs/_external_data_shares.py CHANGED
@@ -2,7 +2,7 @@ from uuid import UUID
 import pandas as pd
 from typing import Optional, List
 import sempy_labs._icons as icons
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _base_api,
     _create_dataframe,
sempy_labs/_gateways.py CHANGED
@@ -77,16 +77,16 @@ def list_gateways() -> pd.DataFrame:
 @log
 def _resolve_gateway_id(gateway: str | UUID) -> UUID:
 
-    dfG = list_gateways()
     if _is_valid_uuid(gateway):
-        dfG_filt = dfG[dfG["Gateway Id"] == gateway]
+        return gateway
     else:
+        dfG = list_gateways()
         dfG_filt = dfG[dfG["Gateway Name"] == gateway]
 
-    if len(dfG_filt) == 0:
-        raise ValueError(f"{icons.red_dot} The '{gateway}' does not exist.")
+        if dfG_filt.empty:
+            raise ValueError(f"{icons.red_dot} The '{gateway}' gateway does not exist.")
 
-    return dfG_filt["Gateway Id"].iloc[0]
+        return dfG_filt["Gateway Id"].iloc[0]
 
 
 @log
@@ -472,7 +472,10 @@ def update_vnet_gateway(
 
 @log
 def bind_semantic_model_to_gateway(
-    dataset: str | UUID, gateway: str | UUID, workspace: Optional[str | UUID] = None
+    dataset: str | UUID,
+    gateway: str | UUID,
+    workspace: Optional[str | UUID] = None,
+    data_source_object_ids: Optional[list[UUID]] = None,
 ):
     """
     Binds the specified dataset from the specified workspace to the specified gateway.
@@ -488,9 +491,11 @@
     gateway : str | uuid.UUID
         The name or ID of the gateway.
     workspace : str | uuid.UUID, default=None
-        The Fabric workspace name.
+        The workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
+    data_source_object_ids : list[uuid.UUID], default=None
+        A list of data source object IDs to bind to the gateway.
     """
 
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
@@ -502,6 +507,8 @@
     payload = {
         "gatewayObjectId": gateway_id,
    }
+    if data_source_object_ids is not None:
+        payload["datasourceObjectIds"] = data_source_object_ids
 
     _base_api(
         request=f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/Default.BindToGateway",
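The new optional data_source_object_ids parameter is passed through as datasourceObjectIds in the BindToGateway payload only when supplied, so existing callers see no change in behavior. A hedged call sketch with placeholder names and IDs:

import sempy_labs as labs

labs.bind_semantic_model_to_gateway(
    dataset="Sales Model",                            # placeholder
    gateway="11111111-1111-1111-1111-111111111111",   # placeholder
    workspace="My Workspace",                         # placeholder
    data_source_object_ids=[
        "22222222-2222-2222-2222-222222222222",       # placeholder
    ],
)
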
sempy_labs/_generate_semantic_model.py CHANGED
@@ -4,7 +4,7 @@ import json
 import os
 from typing import Optional, List
 from sempy._utils._log import log
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     resolve_dataset_name_and_id,
     _conv_b64,
@@ -13,9 +13,9 @@ from ._helper_functions import (
     _mount,
     resolve_workspace_id,
 )
-from .lakehouse._lakehouse import lakehouse_attached
+from sempy_labs.lakehouse._lakehouse import lakehouse_attached
 import sempy_labs._icons as icons
-from ._refresh_semantic_model import refresh_semantic_model
+from sempy_labs._refresh_semantic_model import refresh_semantic_model
 from uuid import UUID
 
 
@@ -284,13 +284,9 @@ def deploy_semantic_model(
         source_workspace
     )
 
-    if target_workspace is None:
-        target_workspace_name = source_workspace_name
-        target_workspace_id = resolve_workspace_id(workspace=target_workspace_name)
-    else:
-        (target_workspace_name, target_workspace_id) = resolve_workspace_name_and_id(
-            target_workspace
-        )
+    (target_workspace_name, target_workspace_id) = resolve_workspace_name_and_id(
+        target_workspace
+    )
 
     if target_dataset is None:
         target_dataset = source_dataset
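Dropping the None special-case leaves the default to resolve_workspace_name_and_id itself. Note the nuance: previously a missing target_workspace fell back to the source workspace, whereas the helper presumably resolves None to the attached lakehouse's workspace or, failing that, the notebook's workspace (per the workspace parameter convention documented above). A toy sketch of that None handling, with hard-coded data for illustration:

from typing import Optional, Tuple

def resolve_ws_sketch(workspace: Optional[str] = None) -> Tuple[str, str]:
    # Illustration only: None falls back to the session's workspace, so
    # call sites no longer need their own None branches.
    current = ("Notebook Workspace", "00000000-0000-0000-0000-000000000000")
    known = {"Prod": "33333333-3333-3333-3333-333333333333"}
    if workspace is None:
        return current
    return workspace, known[workspace]

assert resolve_ws_sketch()[0] == "Notebook Workspace"
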
@@ -306,13 +302,12 @@
 
     dfD = fabric.list_datasets(workspace=target_workspace_id, mode="rest")
     dfD_filt = dfD[dfD["Dataset Name"] == target_dataset]
-    if len(dfD_filt) > 0 and not overwrite:
+    if not dfD_filt.empty and not overwrite:
         raise ValueError(
             f"{icons.warning} The '{target_dataset}' semantic model already exists within the '{target_workspace_name}' workspace. The 'overwrite' parameter is set to False so the source semantic model was not deployed to the target destination."
         )
 
     if perspective is not None:
-
         from sempy_labs.tom import connect_semantic_model
 
         with connect_semantic_model(
sempy_labs/_git.py CHANGED
@@ -1,7 +1,7 @@
 import pandas as pd
 import sempy_labs._icons as icons
 from typing import Optional, List
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _base_api,
     _create_dataframe,
sempy_labs/_graphQL.py CHANGED
@@ -1,7 +1,7 @@
 import pandas as pd
 from uuid import UUID
 from typing import Optional
-from ._helper_functions import (
+from sempy_labs._helper_functions import (
     _base_api,
     _create_dataframe,
     resolve_workspace_id,