semantic-link-labs 0.8.2__py3-none-any.whl → 0.8.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (35)
  1. {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.3.dist-info}/METADATA +7 -3
  2. {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.3.dist-info}/RECORD +35 -34
  3. sempy_labs/__init__.py +14 -0
  4. sempy_labs/_capacities.py +89 -11
  5. sempy_labs/_capacity_migration.py +167 -60
  6. sempy_labs/_clear_cache.py +3 -3
  7. sempy_labs/_data_pipelines.py +48 -0
  8. sempy_labs/_external_data_shares.py +188 -0
  9. sempy_labs/_generate_semantic_model.py +0 -1
  10. sempy_labs/_git.py +1 -1
  11. sempy_labs/_helper_functions.py +14 -11
  12. sempy_labs/_list_functions.py +6 -3
  13. sempy_labs/_model_bpa.py +5 -5
  14. sempy_labs/_model_bpa_bulk.py +3 -5
  15. sempy_labs/_notebooks.py +4 -3
  16. sempy_labs/_sql.py +2 -2
  17. sempy_labs/_translations.py +14 -14
  18. sempy_labs/_vertipaq.py +121 -101
  19. sempy_labs/_warehouses.py +11 -1
  20. sempy_labs/admin/__init__.py +2 -0
  21. sempy_labs/admin/_basic_functions.py +124 -21
  22. sempy_labs/directlake/_directlake_schema_sync.py +0 -5
  23. sempy_labs/directlake/_generate_shared_expression.py +1 -1
  24. sempy_labs/directlake/_guardrails.py +1 -1
  25. sempy_labs/directlake/_show_unsupported_directlake_objects.py +1 -1
  26. sempy_labs/migration/_create_pqt_file.py +2 -2
  27. sempy_labs/report/_generate_report.py +10 -14
  28. sempy_labs/report/_report_bpa.py +8 -10
  29. sempy_labs/report/_report_functions.py +13 -19
  30. sempy_labs/report/_report_rebind.py +4 -1
  31. sempy_labs/report/_reportwrapper.py +3 -3
  32. sempy_labs/tom/_model.py +109 -34
  33. {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.3.dist-info}/LICENSE +0 -0
  34. {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.3.dist-info}/WHEEL +0 -0
  35. {semantic_link_labs-0.8.2.dist-info → semantic_link_labs-0.8.3.dist-info}/top_level.txt +0 -0
@@ -7,6 +7,7 @@ from sempy_labs._workspaces import assign_workspace_to_capacity
 from sempy_labs.admin._basic_functions import (
     assign_workspaces_to_capacity,
     _list_capacities_meta,
+    list_capacities,
 )
 from sempy_labs._helper_functions import (
     resolve_capacity_id,
@@ -15,6 +16,34 @@ from sempy_labs._helper_functions import (
 from sempy_labs._capacities import create_fabric_capacity
 
 
+def migrate_settings(source_capacity: str, target_capacity: str):
+
+    migrate_capacity_settings(
+        source_capacity=source_capacity,
+        target_capacity=target_capacity,
+    )
+    migrate_access_settings(
+        source_capacity=source_capacity,
+        target_capacity=target_capacity,
+    )
+    migrate_notification_settings(
+        source_capacity=source_capacity,
+        target_capacity=target_capacity,
+    )
+    migrate_spark_settings(
+        source_capacity=source_capacity,
+        target_capacity=target_capacity,
+    )
+    migrate_delegated_tenant_settings(
+        source_capacity=source_capacity,
+        target_capacity=target_capacity,
+    )
+    migrate_disaster_recovery_settings(
+        source_capacity=source_capacity,
+        target_capacity=target_capacity,
+    )
+
+
 @log
 def migrate_workspaces(
     source_capacity: str,
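
A minimal usage sketch of the new consolidated helper; the import path is assumed (the hunk sits in the capacity-migration module), and both capacities are assumed to already exist:

from sempy_labs._capacity_migration import migrate_settings  # assumed import path

# Copies all six groups of capacity settings (capacity, access, notification,
# spark, delegated tenant, disaster recovery) in a single call.
migrate_settings(
    source_capacity="ProdCapacity",      # placeholder source capacity name
    target_capacity="prodcapacityfsku",  # placeholder target capacity name
)
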
@@ -146,8 +175,6 @@ def migrate_capacities(
         If set to True, only migrates P skus. If set to False, migrates both P and A skus.
     """
 
-    from sempy_labs._list_functions import list_capacities
-
     if isinstance(capacities, str):
         capacities = [capacities]
 
@@ -158,7 +185,7 @@ def migrate_capacities(
     if capacities is None:
         dfC_filt = dfC.copy()
     else:
-        dfC_filt = dfC[dfC["Display Name"].isin(capacities)]
+        dfC_filt = dfC[dfC["Capacity Name"].isin(capacities)]
 
     if p_sku_only:
         dfC_filt = dfC_filt[dfC_filt["Sku"].str.startswith("P")]
@@ -177,14 +204,14 @@ def migrate_capacities(
         return
 
     for _, r in dfC_filt.iterrows():
-        cap_name = r["Display Name"]
+        cap_name = r["Capacity Name"]
        region = r["Region"]
         sku_size = r["Sku"]
         admins = r["Admins"]
         tgt_capacity = f"{convert_to_alphanumeric_lowercase(cap_name)}{icons.migrate_capacity_suffix}"
 
         # Check if target capacity exists
-        dfC_tgt = dfC[dfC["Display Name"] == tgt_capacity]
+        dfC_tgt = dfC[dfC["Capacity Name"] == tgt_capacity]
 
         if sku_size[:1] == "A" and use_existing_rg_for_A_sku:
             rg = None
@@ -221,24 +248,7 @@ def migrate_capacities(
         )
 
         # Migrate settings to new capacity
-        migrate_capacity_settings(
-            source_capacity=cap_name, target_capacity=tgt_capacity
-        )
-        migrate_access_settings(
-            source_capacity=cap_name, target_capacity=tgt_capacity
-        )
-        migrate_notification_settings(
-            source_capacity=cap_name, target_capacity=tgt_capacity
-        )
-        migrate_delegated_tenant_settings(
-            source_capacity=cap_name, target_capacity=tgt_capacity
-        )
-        migrate_disaster_recovery_settings(
-            source_capacity=cap_name, target_capacity=tgt_capacity
-        )
-        migrate_spark_settings(
-            source_capacity=cap_name, target_capacity=tgt_capacity
-        )
+        migrate_settings(source_capacity=cap_name, target_capacity=tgt_capacity)
 
 
 @log
@@ -252,24 +262,21 @@ def migrate_capacity_settings(source_capacity: str, target_capacity: str):
         Name of the source capacity.
     target_capacity : str
         Name of the target capacity.
-
-    Returns
-    -------
     """
 
-    dfC = fabric.list_capacities()
-    dfC_filt = dfC[dfC["Display Name"] == source_capacity]
+    dfC = list_capacities()
+    dfC_filt = dfC[dfC["Capacity Name"] == source_capacity]
     if len(dfC_filt) == 0:
         raise ValueError(
             f"{icons.red_dot} The '{source_capacity}' capacity does not exist."
         )
-    source_capacity_id = dfC_filt["Id"].iloc[0].upper()
-    dfC_filt = dfC[dfC["Display Name"] == target_capacity]
+    source_capacity_id = dfC_filt["Capacity Id"].iloc[0].upper()
+    dfC_filt = dfC[dfC["Capacity Name"] == target_capacity]
     if len(dfC_filt) == 0:
         raise ValueError(
             f"{icons.red_dot} The '{target_capacity}' capacity does not exist."
         )
-    target_capacity_id = dfC_filt["Id"].iloc[0].upper()
+    target_capacity_id = dfC_filt["Capacity Id"].iloc[0].upper()
 
     workloads_params = "capacityCustomParameters?workloadIds=ADM&workloadIds=CDSA&workloadIds=DMS&workloadIds=RsRdlEngine&workloadIds=ScreenshotEngine&workloadIds=AS&workloadIds=QES&workloadIds=DMR&workloadIds=ESGLake&workloadIds=NLS&workloadIds=lake&workloadIds=TIPS&workloadIds=Kusto&workloadIds=Lakehouse&workloadIds=SparkCore&workloadIds=DI&workloadIds=Notebook&workloadIds=ML&workloadIds=ES&workloadIds=Reflex&workloadIds=Must&workloadIds=dmh&workloadIds=PowerBI&workloadIds=HLS"
 
@@ -350,19 +357,19 @@ def migrate_disaster_recovery_settings(source_capacity: str, target_capacity: str):
         Name of the target capacity.
     """
 
-    dfC = fabric.list_capacities()
-    dfC_filt = dfC[dfC["Display Name"] == source_capacity]
+    dfC = list_capacities()
+    dfC_filt = dfC[dfC["Capacity Name"] == source_capacity]
     if len(dfC_filt) == 0:
         raise ValueError(
             f"{icons.red_dot} The '{source_capacity}' capacity does not exist."
         )
-    source_capacity_id = dfC_filt["Id"].iloc[0].upper()
-    dfC_filt = dfC[dfC["Display Name"] == target_capacity]
+    source_capacity_id = dfC_filt["Capacity Id"].iloc[0].upper()
+    dfC_filt = dfC[dfC["Capacity Name"] == target_capacity]
     if len(dfC_filt) == 0:
         raise ValueError(
             f"{icons.red_dot} The '{target_capacity}' capacity does not exist."
         )
-    target_capacity_id = dfC_filt["Id"].iloc[0].upper()
+    target_capacity_id = dfC_filt["Capacity Id"].iloc[0].upper()
 
     client = fabric.PowerBIRestClient()
     response_get_source = client.get(f"capacities/{source_capacity_id}/config")
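
Several of the migrate_* functions in these hunks share the same name-to-ID lookup. A minimal sketch of that pattern, assuming list_capacities is importable from sempy_labs.admin and returns the "Capacity Name" and "Capacity Id" columns shown above:

from sempy_labs.admin import list_capacities  # assumed import path


def resolve_capacity_id_by_name(capacity_name: str) -> str:
    # Hypothetical helper mirroring the lookup in the hunks above:
    # filter the capacities frame on "Capacity Name", then read "Capacity Id".
    dfC = list_capacities()
    dfC_filt = dfC[dfC["Capacity Name"] == capacity_name]
    if len(dfC_filt) == 0:
        raise ValueError(f"The '{capacity_name}' capacity does not exist.")
    return dfC_filt["Capacity Id"].iloc[0].upper()
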
@@ -395,24 +402,21 @@ def migrate_access_settings(source_capacity: str, target_capacity: str):
         Name of the source capacity.
     target_capacity : str
         Name of the target capacity.
-
-    Returns
-    -------
     """
 
-    dfC = fabric.list_capacities()
-    dfC_filt = dfC[dfC["Display Name"] == source_capacity]
+    dfC = list_capacities()
+    dfC_filt = dfC[dfC["Capacity Name"] == source_capacity]
     if len(dfC_filt) == 0:
         raise ValueError(
             f"{icons.red_dot} The '{source_capacity}' capacity does not exist."
         )
-    source_capacity_id = dfC_filt["Id"].iloc[0].upper()
-    dfC_filt = dfC[dfC["Display Name"] == target_capacity]
+    source_capacity_id = dfC_filt["Capacity Id"].iloc[0].upper()
+    dfC_filt = dfC[dfC["Capacity Name"] == target_capacity]
     if len(dfC_filt) == 0:
         raise ValueError(
             f"{icons.red_dot} The '{target_capacity}' capacity does not exist."
         )
-    target_capacity_id = dfC_filt["Id"].iloc[0].upper()
+    target_capacity_id = dfC_filt["Capacity Id"].iloc[0].upper()
 
     client = fabric.PowerBIRestClient()
     response_get_source = client.get(f"capacities/{source_capacity_id}")
@@ -444,24 +448,21 @@ def migrate_notification_settings(source_capacity: str, target_capacity: str):
         Name of the source capacity.
     target_capacity : str
         Name of the target capacity.
-
-    Returns
-    -------
     """
 
-    dfC = fabric.list_capacities()
-    dfC_filt = dfC[dfC["Display Name"] == source_capacity]
+    dfC = list_capacities()
+    dfC_filt = dfC[dfC["Capacity Name"] == source_capacity]
     if len(dfC_filt) == 0:
         raise ValueError(
             f"{icons.red_dot} The '{source_capacity}' capacity does not exist."
         )
-    source_capacity_id = dfC_filt["Id"].iloc[0].upper()
-    dfC_filt = dfC[dfC["Display Name"] == target_capacity]
+    source_capacity_id = dfC_filt["Capacity Id"].iloc[0].upper()
+    dfC_filt = dfC[dfC["Capacity Name"] == target_capacity]
     if len(dfC_filt) == 0:
         raise ValueError(
             f"{icons.red_dot} The '{target_capacity}' capacity does not exist."
         )
-    target_capacity_id = dfC_filt["Id"].iloc[0].upper()
+    target_capacity_id = dfC_filt["Capacity Id"].iloc[0].upper()
 
     client = fabric.PowerBIRestClient()
     response_get_source = client.get(f"capacities/{source_capacity_id}")
@@ -495,26 +496,23 @@ def migrate_delegated_tenant_settings(source_capacity: str, target_capacity: str):
         Name of the source capacity.
     target_capacity : str
         Name of the target capacity.
-
-    Returns
-    -------
     """
 
-    dfC = fabric.list_capacities()
+    dfC = list_capacities()
 
-    dfC_filt = dfC[dfC["Display Name"] == source_capacity]
+    dfC_filt = dfC[dfC["Capacity Name"] == source_capacity]
     if len(dfC_filt) == 0:
         raise ValueError(
             f"{icons.red_dot} The '{source_capacity}' capacity does not exist."
         )
-    source_capacity_id = dfC_filt["Id"].iloc[0].upper()
+    source_capacity_id = dfC_filt["Capacity Id"].iloc[0].upper()
 
-    dfC_filt = dfC[dfC["Display Name"] == target_capacity]
+    dfC_filt = dfC[dfC["Capacity Name"] == target_capacity]
     if len(dfC_filt) == 0:
         raise ValueError(
             f"{icons.red_dot} The '{target_capacity}' capacity does not exist."
         )
-    target_capacity_id = dfC_filt["Id"].iloc[0].upper()
+    target_capacity_id = dfC_filt["Capacity Id"].iloc[0].upper()
 
     client = fabric.FabricRestClient()
     response_get = client.get("v1/admin/capacities/delegatedTenantSettingOverrides")
@@ -621,3 +619,112 @@ def migrate_spark_settings(source_capacity: str, target_capacity: str):
     print(
         f"{icons.green_dot} The spark settings have been migrated from the '{source_capacity}' capacity to the '{target_capacity}' capacity."
     )
+
+
+@log
+def migrate_fabric_trial_capacity(
+    azure_subscription_id: str,
+    key_vault_uri: str,
+    key_vault_tenant_id: str,
+    key_vault_client_id: str,
+    key_vault_client_secret: str,
+    resource_group: str,
+    source_capacity: str,
+    target_capacity: str,
+    target_capacity_sku: str = "F64",
+    target_capacity_region: Optional[str] = None,
+    target_capacity_admin_members: Optional[str | List[str]] = None,
+):
+    """
+    This function migrates a Fabric trial capacity to a Fabric capacity. If the 'target_capacity' does not exist, it is created with the relevant target capacity parameters (sku, region, admin members).
+
+    Parameters
+    ----------
+    azure_subscription_id : str
+        The Azure subscription ID.
+    key_vault_uri : str
+        The name of the `Azure key vault <https://azure.microsoft.com/products/key-vault>`_ URI. Example: "https://<Key Vault Name>.vault.azure.net/"
+    key_vault_tenant_id : str
+        The name of the Azure key vault secret storing the Tenant ID.
+    key_vault_client_id : str
+        The name of the Azure key vault secret storing the Client ID.
+    key_vault_client_secret : str
+        The name of the Azure key vault secret storing the Client Secret.
+    resource_group : str
+        The name of the Azure resource group.
+    source_capacity : str
+        The name of the Fabric trial capacity.
+    target_capacity : str
+        The name of the new Fabric capacity (F SKU). If this capacity does not exist, it will be created.
+    target_capacity_sku : str, default="F64"
+        If the target capacity does not exist, this property sets the SKU size for the target capacity.
+    target_capacity_region : str, default=None
+        If the target capacity does not exist, this property sets the region for the target capacity.
+        Defaults to None which resolves to the region in which the Trial SKU exists.
+    target_capacity_admin_members : str | List[str], default=None
+        If the target capacity does not exist, this property sets the admin members for the target capacity.
+        Defaults to None which resolves to the admin members on the Trial SKU.
+    """
+
+    notebook_workspace_id = fabric.get_notebook_workspace_id()
+    dfW = fabric.list_workspaces(filter=f"id eq '{notebook_workspace_id}'")
+    notebook_capacity_id = dfW["Capacity Id"].iloc[0].lower()
+
+    dfC = list_capacities()
+    dfC_filt = dfC[dfC["Capacity Name"] == source_capacity]
+
+    if len(dfC_filt) == 0:
+        raise ValueError(
+            f"{icons.red_dot} The '{source_capacity}' capacity does not exist."
+        )
+
+    source_capacity_sku = dfC_filt["Sku"].iloc[0]
+    if not source_capacity_sku.startswith("FT"):
+        raise ValueError(
+            f"{icons.red_dot} This function is for migrating Fabric trial capacities to Fabric capacities."
+        )
+
+    source_capacity_id = dfC_filt["Capacity Id"].iloc[0].lower()
+    if source_capacity_id == notebook_capacity_id:
+        print(
+            f"{icons.warning} The '{source_capacity}' capacity cannot be both the source capacity as well as the capacity in which the notebook is running."
+        )
+        return
+
+    # Use same region as source capacity if no region is specified
+    if target_capacity_region is None:
+        target_capacity_region = dfC_filt["Region"].iloc[0]
+
+    # Use same admins as source capacity
+    if isinstance(target_capacity_admin_members, str):
+        target_capacity_admin_members = [target_capacity_admin_members]
+
+    if target_capacity_admin_members is None:
+        target_capacity_admin_members = dfC_filt["Admins"].iloc[0]
+
+    dfC_filt = dfC[dfC["Capacity Name"] == target_capacity]
+    if len(dfC_filt) == 0:
+        create_fabric_capacity(
+            capacity_name=target_capacity,
+            azure_subscription_id=azure_subscription_id,
+            key_vault_uri=key_vault_uri,
+            key_vault_tenant_id=key_vault_tenant_id,
+            key_vault_client_id=key_vault_client_id,
+            key_vault_client_secret=key_vault_client_secret,
+            resource_group=resource_group,
+            region=target_capacity_region,
+            admin_members=target_capacity_admin_members,
+            sku=target_capacity_sku,
+        )
+
+    assign_workspaces_to_capacity(
+        source_capacity=source_capacity,
+        target_capacity=target_capacity,
+        workspace=None,
+    )
+
+    # This migrates all the capacity settings
+    migrate_settings(
+        source_capacity=source_capacity,
+        target_capacity=target_capacity,
+    )
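
A hedged usage sketch for the new trial-capacity migration; the Key Vault secret names and resource names below are placeholders, and the import path is assumed:

from sempy_labs._capacity_migration import migrate_fabric_trial_capacity  # assumed import path

migrate_fabric_trial_capacity(
    azure_subscription_id="00000000-0000-0000-0000-000000000000",  # placeholder
    key_vault_uri="https://mykeyvault.vault.azure.net/",           # placeholder
    key_vault_tenant_id="kv-secret-tenant-id",        # secret *name*, not the value
    key_vault_client_id="kv-secret-client-id",
    key_vault_client_secret="kv-secret-client-secret",
    resource_group="my-resource-group",
    source_capacity="Trial-20240101T000000Z",         # FT SKU capacity name
    target_capacity="myfabriccapacity",               # created if it does not exist
    target_capacity_sku="F64",
)
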
@@ -2,7 +2,7 @@ import sempy.fabric as fabric
 from sempy_labs._helper_functions import (
     resolve_dataset_id,
     is_default_semantic_model,
-    get_adls_client,
+    _get_adls_client,
 )
 from typing import Optional
 import sempy_labs._icons as icons
@@ -213,7 +213,7 @@ def copy_semantic_model_backup_file(
     source_path = f"/{source_workspace}/{source_file_name}"
     target_path = f"/{target_workspace}/{target_file_name}"
 
-    client = get_adls_client(account_name=storage_account)
+    client = _get_adls_client(account_name=storage_account)
 
     source_file_system_client = client.get_file_system_client(
         file_system=source_file_system
@@ -316,7 +316,7 @@ def list_storage_account_files(
         ]
     )
 
-    onelake = get_adls_client(storage_account)
+    onelake = _get_adls_client(storage_account)
     fs = onelake.get_file_system_client(container)
 
     for x in list(fs.get_paths()):
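
For context on the renamed (now private) helper, a short sketch of the ADLS client pattern both call sites above use; it assumes _get_adls_client stays importable from sempy_labs._helper_functions and that credentials are resolved inside the helper:

from sempy_labs._helper_functions import _get_adls_client  # private helper, per this diff

# List the paths in one container of a storage account, mirroring
# the list_storage_account_files call site above.
client = _get_adls_client(account_name="mystorageaccount")      # placeholder account
fs = client.get_file_system_client(file_system="mycontainer")   # placeholder container
for p in fs.get_paths():
    print(p.name)
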
@@ -6,6 +6,7 @@ from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     lro,
     pagination,
+    _decode_b64,
 )
 from sempy.fabric.exceptions import FabricHTTPException
 
@@ -116,3 +117,50 @@ def delete_data_pipeline(name: str, workspace: Optional[str] = None):
     print(
         f"{icons.green_dot} The '{name}' data pipeline within the '{workspace}' workspace has been deleted."
     )
+
+
+def get_data_pipeline_definition(
+    name: str, workspace: Optional[str] = None, decode: bool = True
+) -> dict | pd.DataFrame:
+    """
+    Obtains the definition of a data pipeline.
+
+    Parameters
+    ----------
+    name : str
+        The name of the data pipeline.
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    decode : bool, default=True
+        If True, decodes the data pipeline definition file into .json format.
+        If False, obtains the data pipeline definition file in a pandas DataFrame format.
+
+    Returns
+    -------
+    dict | pandas.DataFrame
+        The decoded data pipeline definition (dict), or a pandas dataframe of the raw definition parts when decode=False.
+    """
+
+    workspace = fabric.resolve_workspace_name(workspace)
+    workspace_id = fabric.resolve_workspace_id(workspace)
+    item_id = fabric.resolve_item_id(
+        item_name=name, type="DataPipeline", workspace=workspace
+    )
+
+    client = fabric.FabricRestClient()
+    response = client.post(
+        f"/v1/workspaces/{workspace_id}/dataPipelines/{item_id}/getDefinition"
+    )
+    result = lro(client, response).json()
+
+    df = pd.json_normalize(result["definition"]["parts"])
+
+    if not decode:
+        return df
+    content = df[df["path"] == "pipeline-content.json"]
+    payload = content["payload"].iloc[0]
+
+    return _decode_b64(payload)
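
A minimal usage sketch for the new definition getter, assuming the module import path below and a pipeline named "My Pipeline" in the current workspace:

from sempy_labs._data_pipelines import get_data_pipeline_definition  # assumed import path

# Decoded pipeline-content.json (default behaviour).
definition = get_data_pipeline_definition(name="My Pipeline")

# Raw definition parts as a pandas DataFrame instead.
parts_df = get_data_pipeline_definition(name="My Pipeline", decode=False)
print(parts_df[["path"]])
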
@@ -0,0 +1,188 @@
+import sempy.fabric as fabric
+from uuid import UUID
+import pandas as pd
+from typing import Optional, List
+import sempy_labs._icons as icons
+from sempy_labs._helper_functions import (
+    resolve_workspace_name_and_id,
+    pagination,
+)
+from sempy.fabric.exceptions import FabricHTTPException
+
+
+def create_external_data_share(
+    item_name: str,
+    item_type: str,
+    paths: str | List[str],
+    recipient: str,
+    workspace: Optional[str] = None,
+):
+    """
+    Creates an external data share for a given path or list of paths in the specified item.
+
+    Parameters
+    ----------
+    item_name : str
+        The item name.
+    item_type : str
+        The `item type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_.
+    paths : str | List[str]
+        The path or list of paths that are to be externally shared. Currently, only a single path is supported.
+    recipient : str
+        The email address of the recipient.
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    # https://learn.microsoft.com/en-us/rest/api/fabric/core/external-data-shares/create-external-data-share?tabs=HTTP
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    item_id = fabric.resolve_item_id(
+        item_name=item_name, type=item_type, workspace=workspace
+    )
+
+    if isinstance(paths, str):
+        paths = [paths]
+
+    payload = {"paths": paths, "recipient": {"userPrincipalName": recipient}}
+
+    client = fabric.FabricRestClient()
+    response = client.post(
+        f"/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares",
+        json=payload,
+    )
+
+    if response.status_code != 201:
+        raise FabricHTTPException(response)
+
+    print(
+        f"{icons.green_dot} An external data share was created for the '{item_name}' {item_type} within the '{workspace}' workspace for the {paths} paths."
+    )
+
+
+def revoke_external_data_share(
+    external_data_share_id: UUID,
+    item_name: str,
+    item_type: str,
+    workspace: Optional[str] = None,
+):
+    """
+    Revokes the specified external data share. Note: This action cannot be undone.
+
+    Parameters
+    ----------
+    external_data_share_id : UUID
+        The external data share ID.
+    item_name : str
+        The item name.
+    item_type : str
+        The `item type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_.
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    # https://learn.microsoft.com/en-us/rest/api/fabric/core/external-data-shares/revoke-external-data-share?tabs=HTTP
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    item_id = fabric.resolve_item_id(
+        item_name=item_name, type=item_type, workspace=workspace
+    )
+
+    client = fabric.FabricRestClient()
+    response = client.post(
+        f"/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares/{external_data_share_id}/revoke"
+    )
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    print(
+        f"{icons.green_dot} The '{external_data_share_id}' external data share for the '{item_name}' {item_type} within the '{workspace}' workspace has been revoked."
+    )
+
+
+def list_external_data_shares_in_item(
+    item_name: str, item_type: str, workspace: Optional[str] = None
+) -> pd.DataFrame:
+    """
+    Returns a list of the external data shares that exist for the specified item.
+
+    Parameters
+    ----------
+    item_name : str
+        The item name.
+    item_type : str
+        The `item type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_.
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing a list of the external data shares that exist for the specified item.
+    """
+
+    # https://learn.microsoft.com/en-us/rest/api/fabric/core/external-data-shares/list-external-data-shares-in-item?tabs=HTTP
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    item_id = fabric.resolve_item_id(
+        item_name=item_name, type=item_type, workspace=workspace
+    )
+
+    client = fabric.FabricRestClient()
+    response = client.get(
+        f"/v1/workspaces/{workspace_id}/items/{item_id}/externalDataShares"
+    )
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    df = pd.DataFrame(
+        columns=[
+            "External Data Share Id",
+            "Paths",
+            "Creator Principal Id",
+            "Creator Principal Type",
+            "Recipient User Principal Name",
+            "Status",
+            "Expiration Time UTC",
+            "Workspace Id",
+            "Item Id",
+            "Item Name",
+            "Item Type",
+            "Invitation URL",
+        ]
+    )
+
+    responses = pagination(client, response)
+    dfs = []
+
+    for r in responses:
+        for i in r.get("value", []):
+            item_id = i.get("itemId")
+            new_data = {
+                "External Data Share Id": i.get("id"),
+                "Paths": [i.get("paths")],
+                "Creator Principal Id": i.get("creatorPrincipal", {}).get("id"),
+                "Creator Principal Type": i.get("creatorPrincipal", {}).get("type"),
+                "Recipient User Principal Name": i.get("recipient", {}).get(
+                    "userPrincipalName"
+                ),
+                "Status": i.get("status"),
+                "Expiration Time UTC": i.get("expirationTimeUtc"),
+                "Workspace Id": i.get("workspaceId"),
+                "Item Id": item_id,
+                "Item Name": item_name,
+                "Item Type": item_type,
+                "Invitation URL": i.get("invitationUrl"),
+            }
+            dfs.append(pd.DataFrame(new_data, index=[0]))
+    df = pd.concat(dfs, ignore_index=True)
+
+    return df
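
To tie the three new functions together, a hedged end-to-end sketch; item names and the recipient are placeholders, and the import path is assumed from the new module:

from sempy_labs._external_data_shares import (  # assumed import path
    create_external_data_share,
    list_external_data_shares_in_item,
    revoke_external_data_share,
)

# Share a lakehouse path with an external recipient.
create_external_data_share(
    item_name="MyLakehouse",          # placeholder item
    item_type="Lakehouse",
    paths="Tables/sales",             # single path; only one path is currently supported
    recipient="partner@example.com",  # placeholder email
)

# Inspect existing shares on the item, then revoke one by its ID.
df = list_external_data_shares_in_item(item_name="MyLakehouse", item_type="Lakehouse")
share_id = df["External Data Share Id"].iloc[0]
revoke_external_data_share(
    external_data_share_id=share_id,
    item_name="MyLakehouse",
    item_type="Lakehouse",
)
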
@@ -267,7 +267,6 @@ def deploy_semantic_model(
             dataset=target_dataset,
             bim_file=bim,
             workspace=target_workspace,
-            overwrite=overwrite,
         )
     # Update the semantic model if the model exists
     else:
sempy_labs/_git.py CHANGED
@@ -372,7 +372,7 @@ def update_from_git(
     if response.status_code not in [200, 202]:
         raise FabricHTTPException(response)
 
-    lro(client, response)
+    lro(client, response, return_status_code=True)
 
     print(
         f"{icons.green_dot} The '{workspace}' workspace has been updated with commits pushed to the connected branch."