semantic-link-labs 0.8.10__py3-none-any.whl → 0.9.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (81)
  1. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/METADATA +6 -5
  2. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/RECORD +81 -80
  3. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +34 -3
  5. sempy_labs/_authentication.py +80 -4
  6. sempy_labs/_capacities.py +770 -200
  7. sempy_labs/_capacity_migration.py +7 -37
  8. sempy_labs/_clear_cache.py +37 -35
  9. sempy_labs/_connections.py +13 -13
  10. sempy_labs/_data_pipelines.py +20 -20
  11. sempy_labs/_dataflows.py +27 -28
  12. sempy_labs/_dax.py +41 -47
  13. sempy_labs/_deployment_pipelines.py +1 -1
  14. sempy_labs/_environments.py +26 -23
  15. sempy_labs/_eventhouses.py +16 -15
  16. sempy_labs/_eventstreams.py +16 -15
  17. sempy_labs/_external_data_shares.py +18 -20
  18. sempy_labs/_gateways.py +16 -14
  19. sempy_labs/_generate_semantic_model.py +107 -62
  20. sempy_labs/_git.py +105 -43
  21. sempy_labs/_helper_functions.py +251 -194
  22. sempy_labs/_job_scheduler.py +227 -0
  23. sempy_labs/_kql_databases.py +16 -15
  24. sempy_labs/_kql_querysets.py +16 -15
  25. sempy_labs/_list_functions.py +150 -126
  26. sempy_labs/_managed_private_endpoints.py +19 -17
  27. sempy_labs/_mirrored_databases.py +51 -48
  28. sempy_labs/_mirrored_warehouses.py +5 -4
  29. sempy_labs/_ml_experiments.py +16 -15
  30. sempy_labs/_ml_models.py +15 -14
  31. sempy_labs/_model_bpa.py +210 -207
  32. sempy_labs/_model_bpa_bulk.py +2 -2
  33. sempy_labs/_model_bpa_rules.py +3 -3
  34. sempy_labs/_model_dependencies.py +55 -29
  35. sempy_labs/_notebooks.py +29 -25
  36. sempy_labs/_one_lake_integration.py +23 -26
  37. sempy_labs/_query_scale_out.py +75 -64
  38. sempy_labs/_refresh_semantic_model.py +25 -26
  39. sempy_labs/_spark.py +33 -32
  40. sempy_labs/_sql.py +19 -12
  41. sempy_labs/_translations.py +10 -7
  42. sempy_labs/_vertipaq.py +38 -33
  43. sempy_labs/_warehouses.py +26 -25
  44. sempy_labs/_workspace_identity.py +11 -10
  45. sempy_labs/_workspaces.py +40 -33
  46. sempy_labs/admin/_basic_functions.py +166 -115
  47. sempy_labs/admin/_domains.py +7 -2
  48. sempy_labs/admin/_external_data_share.py +3 -3
  49. sempy_labs/admin/_git.py +4 -1
  50. sempy_labs/admin/_items.py +11 -6
  51. sempy_labs/admin/_scanner.py +10 -5
  52. sempy_labs/directlake/_directlake_schema_compare.py +25 -16
  53. sempy_labs/directlake/_directlake_schema_sync.py +24 -12
  54. sempy_labs/directlake/_dl_helper.py +74 -55
  55. sempy_labs/directlake/_generate_shared_expression.py +10 -9
  56. sempy_labs/directlake/_get_directlake_lakehouse.py +32 -36
  57. sempy_labs/directlake/_get_shared_expression.py +4 -3
  58. sempy_labs/directlake/_guardrails.py +12 -6
  59. sempy_labs/directlake/_list_directlake_model_calc_tables.py +15 -9
  60. sempy_labs/directlake/_show_unsupported_directlake_objects.py +16 -10
  61. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +35 -31
  62. sempy_labs/directlake/_update_directlake_partition_entity.py +39 -31
  63. sempy_labs/directlake/_warm_cache.py +87 -65
  64. sempy_labs/lakehouse/_get_lakehouse_columns.py +23 -26
  65. sempy_labs/lakehouse/_get_lakehouse_tables.py +27 -38
  66. sempy_labs/lakehouse/_lakehouse.py +7 -20
  67. sempy_labs/lakehouse/_shortcuts.py +42 -23
  68. sempy_labs/migration/_create_pqt_file.py +16 -11
  69. sempy_labs/migration/_refresh_calc_tables.py +16 -10
  70. sempy_labs/report/_download_report.py +9 -8
  71. sempy_labs/report/_generate_report.py +85 -44
  72. sempy_labs/report/_paginated.py +9 -9
  73. sempy_labs/report/_report_bpa.py +15 -11
  74. sempy_labs/report/_report_functions.py +80 -91
  75. sempy_labs/report/_report_helper.py +8 -4
  76. sempy_labs/report/_report_list_functions.py +24 -13
  77. sempy_labs/report/_report_rebind.py +17 -16
  78. sempy_labs/report/_reportwrapper.py +41 -33
  79. sempy_labs/tom/_model.py +139 -21
  80. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/LICENSE +0 -0
  81. {semantic_link_labs-0.8.10.dist-info → semantic_link_labs-0.9.0.dist-info}/top_level.txt +0 -0
sempy_labs/_capacity_migration.py

@@ -138,14 +138,11 @@ def migrate_workspaces(
 @log
 def migrate_capacities(
     azure_subscription_id: str,
-    key_vault_uri: str,
-    key_vault_tenant_id: str,
-    key_vault_client_id: str,
-    key_vault_client_secret: str,
     resource_group: str | dict,
     capacities: Optional[str | List[str]] = None,
     use_existing_rg_for_A_sku: bool = True,
     p_sku_only: bool = True,
+    **kwargs,
 ):
     """
     This function creates new Fabric capacities for given A or P sku capacities and reassigns their workspaces to the newly created capacity.
@@ -154,14 +151,6 @@ def migrate_capacities(
     ----------
     azure_subscription_id : str
         The Azure subscription ID.
-    key_vault_uri : str
-        The name of the `Azure key vault <https://azure.microsoft.com/products/key-vault>`_ URI. Example: "https://<Key Vault Name>.vault.azure.net/"
-    key_vault_tenant_id : str
-        The name of the Azure key vault secret storing the Tenant ID.
-    key_vault_client_id : str
-        The name of the Azure key vault secret storing the Client ID.
-    key_vault_client_secret : str
-        The name of the Azure key vault secret storing the Client Secret.
     resource_group : str | dict
         The name of the Azure resource group.
         For A skus, this parameter will be ignored and the resource group used for the F sku will be the same as the A sku's resource group.
@@ -233,10 +222,6 @@ def migrate_capacities(
             create_fabric_capacity(
                 capacity_name=tgt_capacity,
                 azure_subscription_id=azure_subscription_id,
-                key_vault_uri=key_vault_uri,
-                key_vault_tenant_id=key_vault_tenant_id,
-                key_vault_client_id=key_vault_client_id,
-                key_vault_client_secret=key_vault_client_secret,
                 resource_group=rg,
                 region=region,
                 sku=icons.sku_mapping.get(sku_size),
@@ -248,7 +233,7 @@ def migrate_capacities(
             )
 
             # Migrate settings to new capacity
-            migrate_settings(source_capacity=cap_name, target_capacity=tgt_capacity)
+            # migrate_settings(source_capacity=cap_name, target_capacity=tgt_capacity)
 
 
 @log
@@ -624,15 +609,12 @@ def migrate_spark_settings(source_capacity: str, target_capacity: str):
 @log
 def migrate_fabric_trial_capacity(
     azure_subscription_id: str,
-    key_vault_uri: str,
-    key_vault_tenant_id: str,
-    key_vault_client_id: str,
-    key_vault_client_secret: str,
     resource_group: str,
     source_capacity: str,
     target_capacity: str,
     target_capacity_sku: str = "F64",
     target_capacity_admin_members: Optional[str | List[str]] = None,
+    **kwargs,
 ):
     """
     This function migrates a Fabric trial capacity to a Fabric capacity. If the 'target_capacity' does not exist, it is created with the relevant target capacity parameters (sku, region, admin members).
@@ -641,14 +623,6 @@ def migrate_fabric_trial_capacity(
     ----------
     azure_subscription_id : str
         The Azure subscription ID.
-    key_vault_uri : str
-        The name of the `Azure key vault <https://azure.microsoft.com/products/key-vault>`_ URI. Example: "https://<Key Vault Name>.vault.azure.net/"
-    key_vault_tenant_id : str
-        The name of the Azure key vault secret storing the Tenant ID.
-    key_vault_client_id : str
-        The name of the Azure key vault secret storing the Client ID.
-    key_vault_client_secret : str
-        The name of the Azure key vault secret storing the Client Secret.
     resource_group : str
         The name of the Azure resource group.
     source_capacity : str
@@ -701,10 +675,6 @@ def migrate_fabric_trial_capacity(
         create_fabric_capacity(
             capacity_name=target_capacity,
             azure_subscription_id=azure_subscription_id,
-            key_vault_uri=key_vault_uri,
-            key_vault_tenant_id=key_vault_tenant_id,
-            key_vault_client_id=key_vault_client_id,
-            key_vault_client_secret=key_vault_client_secret,
             resource_group=resource_group,
             region=target_capacity_region,
             admin_members=target_capacity_admin_members,
@@ -718,7 +688,7 @@ def migrate_fabric_trial_capacity(
         )
 
     # This migrates all the capacity settings
-    migrate_settings(
-        source_capacity=source_capacity,
-        target_capacity=target_capacity,
-    )
+    # migrate_settings(
+    #     source_capacity=source_capacity,
+    #     target_capacity=target_capacity,
+    # )
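In 0.9.0 the key_vault_uri, key_vault_tenant_id, key_vault_client_id and key_vault_client_secret parameters are dropped from migrate_capacities and migrate_fabric_trial_capacity; a trailing **kwargs absorbs them, so older calls that still pass them do not fail with a TypeError, but the values are no longer used. A minimal sketch of a 0.9.0-style call, assuming authentication is configured through the library's own authentication helpers rather than per-call Key Vault secret names (this excerpt does not show the new auth path); the subscription ID and names below are placeholders:

from sempy_labs._capacity_migration import migrate_capacities

migrate_capacities(
    azure_subscription_id="00000000-0000-0000-0000-000000000000",  # placeholder
    resource_group="my-resource-group",                            # placeholder
    capacities=["Capacity A", "Capacity B"],                       # placeholder
    p_sku_only=True,
)
# The old key_vault_* keyword arguments are swallowed by **kwargs and ignored.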
sempy_labs/_clear_cache.py

@@ -1,39 +1,42 @@
 import sempy.fabric as fabric
 from sempy_labs._helper_functions import (
-    resolve_dataset_id,
     is_default_semantic_model,
     _get_adls_client,
+    resolve_workspace_name_and_id,
+    resolve_dataset_name_and_id,
 )
 from typing import Optional
 import sempy_labs._icons as icons
 from sempy._utils._log import log
 import pandas as pd
 from sempy.fabric.exceptions import FabricHTTPException
+from uuid import UUID
 
 
-def clear_cache(dataset: str, workspace: Optional[str] = None):
+@log
+def clear_cache(dataset: str | UUID, workspace: Optional[str | UUID] = None):
     """
     Clears the cache of a semantic model.
     See `here <https://learn.microsoft.com/analysis-services/instances/clear-the-analysis-services-caches?view=asallproducts-allversions>`_ for documentation.
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
-    workspace : str, default=None
-        The Fabric workspace name.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     if is_default_semantic_model(dataset=dataset, workspace=workspace):
         raise ValueError(
             f"{icons.red_dot} Cannot run XMLA operations against a default semantic model. Please choose a different semantic model. "
             "See here for more information: https://learn.microsoft.com/fabric/data-warehouse/semantic-models"
         )
 
-    dataset_id = resolve_dataset_id(dataset=dataset, workspace=workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
     xmla = f"""
     <ClearCache xmlns="http://schemas.microsoft.com/analysisservices/2003/engine">
@@ -42,27 +45,27 @@ def clear_cache(dataset: str, workspace: Optional[str] = None):
         </Object>
     </ClearCache>
     """
-    fabric.execute_xmla(dataset=dataset, xmla_command=xmla, workspace=workspace)
+    fabric.execute_xmla(dataset=dataset_id, xmla_command=xmla, workspace=workspace_id)
     print(
-        f"{icons.green_dot} Cache cleared for the '{dataset}' semantic model within the '{workspace}' workspace."
+        f"{icons.green_dot} Cache cleared for the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
     )
 
 
 @log
 def backup_semantic_model(
-    dataset: str,
+    dataset: str | UUID,
     file_path: str,
     allow_overwrite: bool = True,
     apply_compression: bool = True,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ):
     """
     `Backs up <https://learn.microsoft.com/azure/analysis-services/analysis-services-backup>`_ a semantic model to the ADLS Gen2 storage account connected to the workspace.
 
     Parameters
     ----------
-    dataset : str
-        Name of the semantic model.
+    dataset : str | uuid.UUID
+        Name or ID of the semantic model.
     file_path : str
         The ADLS Gen2 storage account location in which to backup the semantic model. Always saves within the 'power-bi-backup/<workspace name>' folder.
         Must end in '.abf'.
@@ -72,8 +75,8 @@ def backup_semantic_model(
         If True, overwrites backup files of the same name. If False, the file you are saving cannot have the same name as a file that already exists in the same location.
     apply_compression : bool, default=True
         If True, compresses the backup file. Compressed backup files save disk space, but require slightly higher CPU utilization.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
@@ -83,20 +86,21 @@ def backup_semantic_model(
             f"{icons.red_dot} The backup file for restoring must be in the .abf format."
         )
 
-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
 
     tmsl = {
         "backup": {
-            "database": dataset,
+            "database": dataset_name,
             "file": file_path,
             "allowOverwrite": allow_overwrite,
             "applyCompression": apply_compression,
         }
     }
 
-    fabric.execute_tmsl(script=tmsl, workspace=workspace)
+    fabric.execute_tmsl(script=tmsl, workspace=workspace_id)
     print(
-        f"{icons.green_dot} The '{dataset}' semantic model within the '{workspace}' workspace has been backed up to the '{file_path}' location."
+        f"{icons.green_dot} The '{dataset_name}' semantic model within the '{workspace_name}' workspace has been backed up to the '{file_path}' location."
     )
 
 
@@ -107,7 +111,7 @@ def restore_semantic_model(
     allow_overwrite: bool = True,
     ignore_incompatibilities: bool = True,
     force_restore: bool = False,
-    workspace: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
 ):
     """
     `Restores <https://learn.microsoft.com/power-bi/enterprise/service-premium-backup-restore-dataset>`_ a semantic model based on a backup (.abf) file
@@ -127,19 +131,18 @@ def restore_semantic_model(
         If True, ignores incompatibilities between Azure Analysis Services and Power BI Premium.
     force_restore: bool, default=False
         If True, restores the semantic model with the existing semantic model unloaded and offline.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
-    # https://learn.microsoft.com/en-us/power-bi/enterprise/service-premium-backup-restore-dataset
 
     if not file_path.endswith(".abf"):
         raise ValueError(
             f"{icons.red_dot} The backup file for restoring must be in the .abf format."
         )
 
-    workspace = fabric.resolve_workspace_name(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     tmsl = {
         "restore": {
@@ -154,10 +157,10 @@ def restore_semantic_model(
     if force_restore:
         tmsl["restore"]["forceRestore"] = force_restore
 
-    fabric.execute_tmsl(script=tmsl, workspace=workspace)
+    fabric.execute_tmsl(script=tmsl, workspace=workspace_id)
 
     print(
-        f"{icons.green_dot} The '{dataset}' semantic model has been restored to the '{workspace}' workspace based on teh '{file_path}' backup file."
+        f"{icons.green_dot} The '{dataset}' semantic model has been restored to the '{workspace_name}' workspace based on the '{file_path}' backup file."
     )
 
 
@@ -243,15 +246,15 @@ def copy_semantic_model_backup_file(
 
 
 @log
-def list_backups(workspace: Optional[str] = None) -> pd.DataFrame:
+def list_backups(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     """
     Shows a list of backup files contained within a workspace's ADLS Gen2 storage account.
     Requirement: An ADLS Gen2 storage account must be `connected to the workspace <https://learn.microsoft.com/power-bi/transform-model/dataflows/dataflows-azure-data-lake-storage-integration#connect-to-an-azure-data-lake-gen-2-at-a-workspace-level>`_.
 
     Parameters
     ----------
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -262,8 +265,7 @@ def list_backups(workspace: Optional[str] = None) -> pd.DataFrame:
     """
 
     client = fabric.PowerBIRestClient()
-    workspace = fabric.resolve_workspace_name(workspace)
-    workspace_id = fabric.resolve_workspace_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     response = client.get(
         f"/v1.0/myorg/resources?resourceType=StorageAccount&folderObjectId={workspace_id}"
     )
@@ -274,7 +276,7 @@ def list_backups(workspace: Optional[str] = None) -> pd.DataFrame:
     v = response.json().get("value", [])
     if not v:
         raise ValueError(
-            f"{icons.red_dot} A storage account is not associated with the '{workspace}' workspace."
+            f"{icons.red_dot} A storage account is not associated with the '{workspace_name}' workspace."
         )
     storage_account = v[0]["resourceName"]
 
@@ -316,8 +318,8 @@ def list_storage_account_files(
         ]
     )
 
-    onelake = _get_adls_client(storage_account)
-    fs = onelake.get_file_system_client(container)
+    client = _get_adls_client(storage_account)
+    fs = client.get_file_system_client(container)
 
     for x in list(fs.get_paths()):
         if not x.is_directory:
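clear_cache, backup_semantic_model, restore_semantic_model and list_backups now resolve the workspace and dataset through resolve_workspace_name_and_id / resolve_dataset_name_and_id, so each accepts either a name or a UUID. A minimal sketch, with placeholder names and IDs:

from uuid import UUID
from sempy_labs._clear_cache import clear_cache, backup_semantic_model

# By name; workspace defaults to the attached lakehouse's (or notebook's) workspace.
clear_cache(dataset="Sales Model")

# By ID; both parameters now also accept UUIDs.
clear_cache(
    dataset=UUID("11111111-1111-1111-1111-111111111111"),
    workspace=UUID("22222222-2222-2222-2222-222222222222"),
)

# The backup file path must end in '.abf'.
backup_semantic_model(dataset="Sales Model", file_path="SalesModel.abf")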
sempy_labs/_connections.py

@@ -5,6 +5,7 @@ from typing import Optional
 from sempy_labs._helper_functions import (
     pagination,
     _is_valid_uuid,
+    resolve_workspace_name_and_id,
 )
 from uuid import UUID
 import sempy_labs._icons as icons
@@ -19,7 +20,7 @@ def delete_connection(connection: str | UUID):
 
     Parameters
     ----------
-    connection : str | UUID
+    connection : str | uuid.UUID
         The connection name or ID.
     """
 
@@ -42,9 +43,9 @@ def delete_connection_role_assignment(connection: str | UUID, role_assignment_id
 
     Parameters
     ----------
-    connection : str | UUID
+    connection : str | uuid.UUID
         The connection name or ID.
-    role_assignment_id : UUID
+    role_assignment_id : uuid.UUID
         The role assignment ID.
     """
 
@@ -87,7 +88,7 @@ def list_connection_role_assignments(connection: str | UUID) -> pd.DataFrame:
 
     Parameters
     ----------
-    connection : str | UUID
+    connection : str | uuid.UUID
         The connection name or ID.
 
     Returns
@@ -205,7 +206,7 @@ def list_connections() -> pd.DataFrame:
 
 
 def list_item_connections(
-    item_name: str, item_type: str, workspace: Optional[str] = None
+    item_name: str, item_type: str, workspace: Optional[str | UUID] = None
 ) -> pd.DataFrame:
     """
     Shows the list of connections that the specified item is connected to.
@@ -218,8 +219,8 @@ def list_item_connections(
         The item name.
     item_type : str
         The `item type <https://learn.microsoft.com/rest/api/fabric/core/items/update-item?tabs=HTTP#itemtype>`_.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -229,11 +230,10 @@ def list_item_connections(
         A pandas dataframe showing the list of connections that the specified item is connected to.
     """
 
-    workspace = fabric.resolve_workspace_name(workspace)
-    workspace_id = fabric.resolve_workspace_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     item_type = item_type[0].upper() + item_type[1:]
     item_id = fabric.resolve_item_id(
-        item_name=item_name, type=item_type, workspace=workspace
+        item_name=item_name, type=item_type, workspace=workspace_id
     )
 
     client = fabric.FabricRestClient()
@@ -416,7 +416,7 @@ def create_on_prem_connection(
     ----------
     name : str
         The name of the connection.
-    gateway : str | UUID
+    gateway : str | uuid.UUID
         The name or Id of the gateway.
     server_name : str
         The name of the server.
@@ -485,7 +485,7 @@ def create_vnet_connection(
     user_name: str,
     password: str,
     privacy_level: str,
-    connection_encryption: Optional[str] = "NotEncrypted",
+    connection_encryption: str = "NotEncrypted",
     skip_test_connection: bool = False,
 ):
     """
@@ -497,7 +497,7 @@ def create_vnet_connection(
     ----------
     name : str
         The name of the connection.
-    gateway : str | UUID
+    gateway : str | uuid.UUID
         The name or Id of the gateway.
     server_name : str
         The name of the server.
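Beyond the docstring type fixes (UUID rendered as uuid.UUID, connection_encryption no longer marked Optional), list_item_connections now takes the workspace as a name or UUID. A minimal sketch with placeholder values:

from sempy_labs._connections import list_item_connections

df = list_item_connections(
    item_name="Sales Model",   # placeholder item
    item_type="SemanticModel",
    workspace="My Workspace",  # a workspace UUID is accepted here as well
)
print(df)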
sempy_labs/_data_pipelines.py

@@ -9,9 +9,10 @@ from sempy_labs._helper_functions import (
     _decode_b64,
 )
 from sempy.fabric.exceptions import FabricHTTPException
+from uuid import UUID
 
 
-def list_data_pipelines(workspace: Optional[str] = None) -> pd.DataFrame:
+def list_data_pipelines(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     """
     Shows the data pipelines within a workspace.
 
@@ -19,8 +20,8 @@ def list_data_pipelines(workspace: Optional[str] = None) -> pd.DataFrame:
 
     Parameters
     ----------
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
 
@@ -32,7 +33,7 @@ def list_data_pipelines(workspace: Optional[str] = None) -> pd.DataFrame:
 
     df = pd.DataFrame(columns=["Data Pipeline Name", "Data Pipeline ID", "Description"])
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     client = fabric.FabricRestClient()
     response = client.get(f"/v1/workspaces/{workspace_id}/dataPipelines")
@@ -54,7 +55,7 @@ def list_data_pipelines(workspace: Optional[str] = None) -> pd.DataFrame:
 
 
 def create_data_pipeline(
-    name: str, description: Optional[str] = None, workspace: Optional[str] = None
+    name: str, description: Optional[str] = None, workspace: Optional[str | UUID] = None
 ):
     """
     Creates a Fabric data pipeline.
@@ -67,13 +68,13 @@ def create_data_pipeline(
         Name of the data pipeline.
     description : str, default=None
         A description of the environment.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     request_body = {"displayName": name}
 
@@ -88,11 +89,11 @@ def create_data_pipeline(
     lro(client, response, status_codes=[201, 202])
 
     print(
-        f"{icons.green_dot} The '{name}' data pipeline has been created within the '{workspace}' workspace."
+        f"{icons.green_dot} The '{name}' data pipeline has been created within the '{workspace_name}' workspace."
     )
 
 
-def delete_data_pipeline(name: str, workspace: Optional[str] = None):
+def delete_data_pipeline(name: str, workspace: Optional[str | UUID] = None):
     """
     Deletes a Fabric data pipeline.
 
@@ -102,16 +103,16 @@ def delete_data_pipeline(name: str, workspace: Optional[str] = None):
     ----------
     name: str
         Name of the data pipeline.
-    workspace : str, default=None
+    workspace : str | uuid.UUID, default=None
         The Fabric workspace name.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     item_id = fabric.resolve_item_id(
-        item_name=name, type="DataPipeline", workspace=workspace
+        item_name=name, type="DataPipeline", workspace=workspace_id
     )
 
     client = fabric.FabricRestClient()
@@ -121,12 +122,12 @@ def delete_data_pipeline(name: str, workspace: Optional[str] = None):
         raise FabricHTTPException(response)
 
     print(
-        f"{icons.green_dot} The '{name}' data pipeline within the '{workspace}' workspace has been deleted."
+        f"{icons.green_dot} The '{name}' data pipeline within the '{workspace_name}' workspace has been deleted."
    )
 
 
 def get_data_pipeline_definition(
-    name: str, workspace: Optional[str] = None, decode: bool = True
+    name: str, workspace: Optional[str | UUID] = None, decode: bool = True
 ) -> dict | pd.DataFrame:
     """
     Obtains the definition of a data pipeline.
@@ -135,8 +136,8 @@ def get_data_pipeline_definition(
     ----------
     name : str
         The name of the data pipeline.
-    workspace : str, default=None
-        The Fabric workspace name.
+    workspace : str | uuid.UUID, default=None
+        The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     decode : bool, default=True
@@ -150,10 +151,9 @@ def get_data_pipeline_definition(
         A pandas dataframe showing the data pipelines within a workspace.
     """
 
-    workspace = fabric.resolve_workspace_name(workspace)
-    workspace_id = fabric.resolve_workspace_id(workspace)
+    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     item_id = fabric.resolve_item_id(
-        item_name=name, type="DataPipeline", workspace=workspace
+        item_name=name, type="DataPipeline", workspace=workspace_id
     )
 
     client = fabric.FabricRestClient()
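The data pipeline helpers follow the same pattern: the workspace parameter accepts a name or a UUID and is resolved through resolve_workspace_name_and_id. A minimal sketch with placeholder values:

from uuid import UUID
from sempy_labs._data_pipelines import (
    create_data_pipeline,
    list_data_pipelines,
    get_data_pipeline_definition,
)

workspace = UUID("33333333-3333-3333-3333-333333333333")  # placeholder; a workspace name also works

create_data_pipeline(name="My Pipeline", workspace=workspace)
print(list_data_pipelines(workspace=workspace))
definition = get_data_pipeline_definition(name="My Pipeline", workspace=workspace)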