semantic-link-labs 0.7.4__py3-none-any.whl → 0.8.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic. Click here for more details.

Files changed (59) hide show
  1. {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.1.dist-info}/METADATA +43 -7
  2. {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.1.dist-info}/RECORD +59 -40
  3. {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.1.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +116 -58
  5. sempy_labs/_ai.py +0 -2
  6. sempy_labs/_capacities.py +39 -3
  7. sempy_labs/_capacity_migration.py +623 -0
  8. sempy_labs/_clear_cache.py +8 -8
  9. sempy_labs/_connections.py +15 -13
  10. sempy_labs/_data_pipelines.py +118 -0
  11. sempy_labs/_documentation.py +144 -0
  12. sempy_labs/_eventhouses.py +118 -0
  13. sempy_labs/_eventstreams.py +118 -0
  14. sempy_labs/_generate_semantic_model.py +3 -3
  15. sempy_labs/_git.py +23 -24
  16. sempy_labs/_helper_functions.py +140 -47
  17. sempy_labs/_icons.py +40 -0
  18. sempy_labs/_kql_databases.py +134 -0
  19. sempy_labs/_kql_querysets.py +124 -0
  20. sempy_labs/_list_functions.py +218 -421
  21. sempy_labs/_mirrored_warehouses.py +50 -0
  22. sempy_labs/_ml_experiments.py +122 -0
  23. sempy_labs/_ml_models.py +120 -0
  24. sempy_labs/_model_auto_build.py +0 -4
  25. sempy_labs/_model_bpa.py +10 -12
  26. sempy_labs/_model_bpa_bulk.py +8 -7
  27. sempy_labs/_model_dependencies.py +26 -18
  28. sempy_labs/_notebooks.py +5 -16
  29. sempy_labs/_query_scale_out.py +6 -5
  30. sempy_labs/_refresh_semantic_model.py +7 -19
  31. sempy_labs/_spark.py +40 -45
  32. sempy_labs/_sql.py +60 -15
  33. sempy_labs/_vertipaq.py +25 -25
  34. sempy_labs/_warehouses.py +132 -0
  35. sempy_labs/_workspaces.py +0 -3
  36. sempy_labs/admin/__init__.py +53 -0
  37. sempy_labs/admin/_basic_functions.py +888 -0
  38. sempy_labs/admin/_domains.py +411 -0
  39. sempy_labs/directlake/_directlake_schema_sync.py +1 -1
  40. sempy_labs/directlake/_dl_helper.py +32 -16
  41. sempy_labs/directlake/_generate_shared_expression.py +11 -14
  42. sempy_labs/directlake/_guardrails.py +7 -7
  43. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +14 -24
  44. sempy_labs/directlake/_update_directlake_partition_entity.py +1 -1
  45. sempy_labs/directlake/_warm_cache.py +1 -1
  46. sempy_labs/lakehouse/_get_lakehouse_tables.py +3 -3
  47. sempy_labs/lakehouse/_lakehouse.py +3 -2
  48. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +5 -0
  49. sempy_labs/report/__init__.py +9 -6
  50. sempy_labs/report/_generate_report.py +1 -1
  51. sempy_labs/report/_report_bpa.py +369 -0
  52. sempy_labs/report/_report_bpa_rules.py +113 -0
  53. sempy_labs/report/_report_helper.py +254 -0
  54. sempy_labs/report/_report_list_functions.py +95 -0
  55. sempy_labs/report/_report_rebind.py +0 -4
  56. sempy_labs/report/_reportwrapper.py +2037 -0
  57. sempy_labs/tom/_model.py +333 -22
  58. {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.1.dist-info}/LICENSE +0 -0
  59. {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.1.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,888 @@
1
+ import sempy.fabric as fabric
2
+ from typing import Optional, List, Union
3
+ from uuid import UUID
4
+ import sempy_labs._icons as icons
5
+ from sempy.fabric.exceptions import FabricHTTPException
6
+ from sempy_labs._helper_functions import (
7
+ resolve_workspace_name_and_id,
8
+ pagination,
9
+ )
10
+ import datetime
11
+ import numpy as np
12
+ import pandas as pd
13
+ import time
14
+
15
+
16
def list_workspaces(
    top: Optional[int] = 5000, skip: Optional[int] = None
) -> pd.DataFrame:
    """
    Lists workspaces for the organization. This function is the admin version of list_workspaces.

    Parameters
    ----------
    top : int, default=5000
        Returns only the first n results. This parameter is mandatory and must be in the range of 1-5000.
    skip : int, default=None
        Skips the first n results. Use with top to fetch results beyond the first 5000.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing a list of workspaces for the organization.
    """

    # Bug fix: every row populates "State", but the original column list
    # omitted it, so an empty result was missing the column entirely and
    # populated results had it appended out of order.
    df = pd.DataFrame(
        columns=[
            "Id",
            "Is Read Only",
            "Is On Dedicated Capacity",
            "Type",
            "Name",
            "State",
            "Capacity Id",
            "Default Dataset Storage Format",
            "Pipeline Id",
            "Has Workspace Level Settings",
        ]
    )

    url = f"/v1.0/myorg/admin/groups?$top={top}"
    if skip is not None:
        url = f"{url}&$skip={skip}"

    client = fabric.PowerBIRestClient()
    response = client.get(url)

    if response.status_code != 200:
        raise FabricHTTPException(response)

    for v in response.json().get("value", []):
        capacity_id = v.get("capacityId")
        if capacity_id:
            # Normalize capacity ids to lowercase for consistent joins with
            # the other admin functions in this module.
            capacity_id = capacity_id.lower()
        new_data = {
            "Id": v.get("id"),
            "Is Read Only": v.get("isReadOnly"),
            "Is On Dedicated Capacity": v.get("isOnDedicatedCapacity"),
            "Capacity Id": capacity_id,
            "Default Dataset Storage Format": v.get("defaultDatasetStorageFormat"),
            "Type": v.get("type"),
            "Name": v.get("name"),
            "State": v.get("state"),
            "Pipeline Id": v.get("pipelineId"),
            "Has Workspace Level Settings": v.get("hasWorkspaceLevelSettings"),
        }
        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

    bool_cols = [
        "Is Read Only",
        "Is On Dedicated Capacity",
        "Has Workspace Level Settings",
    ]
    df[bool_cols] = df[bool_cols].astype(bool)

    return df
85
+
86
+
87
def assign_workspaces_to_capacity(
    source_capacity: str,
    target_capacity: str,
    workspace: Optional[str | List[str]] = None,
):
    """
    Assigns a workspace to a capacity. This function is the admin version.

    Parameters
    ----------
    source_capacity : str
        The name of the source capacity.
    target_capacity : str
        The name of the target capacity.
    workspace : str | List[str], default=None
        The name of the workspace(s).
        Defaults to None which resolves to migrating all workspaces within the source capacity to the target capacity.

    Raises
    ------
    ValueError
        If the source or target capacity name does not exist.
    """

    if isinstance(workspace, str):
        workspace = [workspace]

    dfC = fabric.list_capacities()

    def _resolve_capacity_id(capacity_name: str):
        # Robustness fix: the original used iloc[0] directly, which raised an
        # opaque IndexError when the capacity name was not found.
        dfC_filt = dfC[dfC["Display Name"] == capacity_name]
        if len(dfC_filt) == 0:
            raise ValueError(
                f"{icons.red_dot} The '{capacity_name}' capacity does not exist."
            )
        return dfC_filt["Id"].iloc[0]

    source_capacity_id = _resolve_capacity_id(source_capacity)
    target_capacity_id = _resolve_capacity_id(target_capacity)

    if workspace is None:
        # No explicit list: migrate everything currently on the source capacity.
        workspaces = fabric.list_workspaces(
            filter=f"capacityId eq '{source_capacity_id.upper()}'"
        )["Id"].values
    else:
        dfW = fabric.list_workspaces()
        workspaces = dfW[dfW["Name"].isin(workspace)]["Id"].values

    workspaces = np.array(workspaces)
    # The AssignWorkspaces admin API accepts at most 1000 workspaces per call.
    batch_size = 999
    for i in range(0, len(workspaces), batch_size):
        batch = workspaces[i : i + batch_size].tolist()
        batch_length = len(batch)
        start_time = datetime.datetime.now()
        request_body = {
            "capacityMigrationAssignments": [
                {
                    "targetCapacityObjectId": target_capacity_id.upper(),
                    "workspacesToAssign": batch,
                }
            ]
        }

        client = fabric.PowerBIRestClient()
        response = client.post(
            "/v1.0/myorg/admin/capacities/AssignWorkspaces",
            json=request_body,
        )

        if response.status_code != 200:
            raise FabricHTTPException(response)
        end_time = datetime.datetime.now()
        print(
            f"Total time for assigning {str(batch_length)} workspaces is {str((end_time - start_time).total_seconds())}"
        )
    print(
        f"{icons.green_dot} The workspaces have been assigned to the '{target_capacity}' capacity."
    )
154
+
155
+
156
def list_capacities() -> pd.DataFrame:
    """
    Shows a list of capacities and their properties. This function is the admin version.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the capacities and their properties.
    """

    columns = ["Capacity Id", "Capacity Name", "Sku", "Region", "State", "Admins"]
    df = pd.DataFrame(columns=columns)

    client = fabric.PowerBIRestClient()
    response = client.get("/v1.0/myorg/admin/capacities")
    if response.status_code != 200:
        raise FabricHTTPException(response)

    # Follow continuation tokens so every page of capacities is collected.
    responses = pagination(client, response)

    rows = [
        {
            "Capacity Id": cap.get("id").lower(),
            "Capacity Name": cap.get("displayName"),
            "Sku": cap.get("sku"),
            "Region": cap.get("region"),
            "State": cap.get("state"),
            # Wrap the admins list so pandas stores it as a single cell.
            "Admins": [cap.get("admins", [])],
        }
        for page in responses
        for cap in page.get("value", [])
    ]
    if rows:
        df = pd.concat([df, pd.DataFrame(rows)], ignore_index=True)

    return df
193
+
194
+
195
def list_tenant_settings() -> pd.DataFrame:
    """
    Lists all tenant settings.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the tenant settings.
    """

    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/tenants/list-tenant-settings?tabs=HTTP

    response = fabric.FabricRestClient().get("/v1/admin/tenantsettings")

    if response.status_code != 200:
        raise FabricHTTPException(response)

    columns = [
        "Setting Name",
        "Title",
        "Enabled",
        "Can Specify Security Groups",
        "Tenant Setting Group",
        "Enabled Security Groups",
    ]
    df = pd.DataFrame(columns=columns)

    rows = [
        {
            "Setting Name": setting.get("settingName"),
            "Title": setting.get("title"),
            "Enabled": setting.get("enabled"),
            "Can Specify Security Groups": setting.get("canSpecifySecurityGroups"),
            "Tenant Setting Group": setting.get("tenantSettingGroup"),
            # Wrapped in a list so the groups collection lands in one cell.
            "Enabled Security Groups": [setting.get("enabledSecurityGroups", [])],
        }
        for setting in response.json().get("tenantSettings", [])
    ]
    if rows:
        df = pd.concat([df, pd.DataFrame(rows)], ignore_index=True)

    bool_cols = ["Enabled", "Can Specify Security Groups"]
    df[bool_cols] = df[bool_cols].astype(bool)

    return df
239
+
240
+
241
def _list_capacities_meta() -> pd.DataFrame:
    """
    Internal helper: lists capacities via the admin API, falling back to the
    user-scoped capacities endpoint when the caller lacks admin rights (401).

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe of capacities and their properties.
    """

    df = pd.DataFrame(
        columns=["Capacity Id", "Capacity Name", "Sku", "Region", "State", "Admins"]
    )

    client = fabric.PowerBIRestClient()
    try:
        response = client.get("/v1.0/myorg/admin/capacities")
    except Exception as e:
        # Bug fix: the original read `e.status_code` on a generic Exception
        # (not guaranteed to exist) and raised FabricHTTPException(response)
        # with `response` unbound in this path (NameError). Fall back to the
        # non-admin endpoint only on an actual 401; otherwise re-raise.
        if getattr(e, "status_code", None) == 401:
            response = client.get("/v1.0/myorg/capacities")
        else:
            raise

    for i in response.json().get("value", []):
        new_data = {
            "Capacity Id": i.get("id").lower(),
            "Capacity Name": i.get("displayName"),
            "Sku": i.get("sku"),
            "Region": i.get("region"),
            "State": i.get("state"),
            "Admins": [i.get("admins", [])],
        }
        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

    return df
268
+
269
+
270
def unassign_workspaces_from_capacity(workspaces: str | List[str]):
    """
    Unassigns workspace(s) from their capacity. This function is for admins.

    Parameters
    ----------
    workspaces : str | List[str]
        The Fabric workspace name(s).
    """

    # https://learn.microsoft.com/en-us/rest/api/power-bi/admin/capacities-unassign-workspaces-from-capacity

    # NOTE(review): this forwards workspace *names*; the REST API documents
    # workspace IDs in `workspacesToUnassign` — confirm against the API.
    workspace_names = [workspaces] if isinstance(workspaces, str) else workspaces

    response = fabric.PowerBIRestClient().post(
        "/v1.0/myorg/admin/capacities/UnassignWorkspaces",
        json={"workspacesToUnassign": workspace_names},
    )

    if response.status_code != 200:
        raise FabricHTTPException(response)

    print(f"{icons.green_dot} The workspaces have been unassigned.")
297
+
298
+
299
def list_external_data_shares():
    """
    Lists external data shares in the tenant. This function is for admins.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing a list of external data shares in the tenant.
    """

    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/external-data-shares/list-external-data-shares?tabs=HTTP

    df = pd.DataFrame(
        columns=[
            "External Data Share Id",
            "Paths",
            "Creater Principal Id",
            "Creater Principal Name",
            "Creater Principal Type",
            "Creater Principal UPN",
            "Recipient UPN",
            "Status",
            "Expiration Time UTC",
            "Workspace Id",
            "Item Id",
            "Invitation URL",
        ]
    )

    client = fabric.FabricRestClient()
    response = client.get("/v1/admin/items/externalDataShares")

    if response.status_code != 200:
        raise FabricHTTPException(response)

    for i in response.json().get("value", []):
        cp = i.get("creatorPrincipal", {})
        new_data = {
            "External Data Share Id": i.get("id"),
            "Paths": [i.get("paths", [])],
            "Creater Principal Id": cp.get("id"),
            "Creater Principal Name": cp.get("displayName"),
            "Creater Principal Type": cp.get("type"),
            "Creater Principal UPN": cp.get("userDetails", {}).get("userPrincipalName"),
            "Recipient UPN": i.get("recipient", {}).get("userPrincipalName"),
            "Status": i.get("status"),
            "Expiration Time UTC": i.get("expirationTimeUtc"),
            "Workspace Id": i.get("workspaceId"),
            "Item Id": i.get("itemId"),
            "Invitation URL": i.get("invitationUrl"),
        }

        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

    date_time_columns = ["Expiration Time UTC"]
    # Bug fix: pd.to_datetime(df[cols]) on a DataFrame tries to assemble a
    # datetime from year/month/day component columns and raises ValueError;
    # convert each column's values instead.
    df[date_time_columns] = df[date_time_columns].apply(pd.to_datetime)

    return df
357
+
358
+
359
def revoke_external_data_share(
    external_data_share_id: UUID, item_id: UUID, workspace: str
):
    """
    Revokes the specified external data share. Note: This action cannot be undone.

    Parameters
    ----------
    external_data_share_id : UUID
        The external data share ID.
    item_id : UUID
        The item ID.
    workspace : str
        The Fabric workspace name.
    """

    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/external-data-shares/revoke-external-data-share?tabs=HTTP

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    endpoint = (
        f"/v1/admin/workspaces/{workspace_id}/items/{item_id}"
        f"/externalDataShares/{external_data_share_id}/revoke"
    )
    response = fabric.FabricRestClient().post(endpoint)

    if response.status_code != 200:
        raise FabricHTTPException(response)

    print(
        f"{icons.green_dot} The '{external_data_share_id}' external data share for the '{item_id}' item within the '{workspace}' workspace has been revoked."
    )
390
+
391
+
392
def list_capacities_delegated_tenant_settings(
    return_dataframe: bool = True,
) -> Optional[pd.DataFrame | dict]:
    """
    Returns list of tenant setting overrides that override at the capacities.

    Parameters
    ----------
    return_dataframe : bool, default=True
        If True, returns a dataframe. If False, returns a dictionary
        combining the raw API pages (keys: overrides, continuationUri,
        continuationToken).

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing a list of tenant setting overrides that override at the capacities.
    """

    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/tenants/list-capacities-tenant-settings-overrides?tabs=HTTP

    df = pd.DataFrame(
        columns=[
            "Capacity Id",
            "Setting Name",
            "Setting Title",
            "Setting Enabled",
            "Can Specify Security Groups",
            "Enabled Security Groups",
            "Tenant Setting Group",
            "Tenant Setting Properties",
            "Delegate to Workspace",
            "Delegated From",
        ]
    )

    client = fabric.FabricRestClient()
    response = client.get("/v1/admin/capacities/delegatedTenantSettingOverrides")

    if response.status_code != 200:
        raise FabricHTTPException(response)

    # Collect all pages by following continuation tokens.
    responses = pagination(client, response)

    if return_dataframe:
        for r in responses:
            # NOTE(review): this branch reads "Overrides" (capital O) while the
            # dict branch below reads "overrides" (lowercase) from the same
            # pages — one of the two spellings is likely wrong; confirm against
            # the actual REST API payload.
            for i in r.get("Overrides", []):
                tenant_settings = i.get("tenantSettings", [])
                # One output row per (capacity, tenant setting) pair.
                for setting in tenant_settings:
                    new_data = {
                        "Capacity Id": i.get("id"),
                        "Setting Name": setting.get("settingName"),
                        "Setting Title": setting.get("title"),
                        "Setting Enabled": setting.get("enabled"),
                        "Can Specify Security Groups": setting.get(
                            "canSpecifySecurityGroups"
                        ),
                        "Enabled Security Groups": [
                            setting.get("enabledSecurityGroups", [])
                        ],
                        "Tenant Setting Group": setting.get("tenantSettingGroup"),
                        "Tenant Setting Properties": [setting.get("properties", [])],
                        "Delegate to Workspace": setting.get("delegateToWorkspace"),
                        "Delegated From": setting.get("delegatedFrom"),
                    }

                    df = pd.concat(
                        [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
                    )

        bool_cols = [
            "Enabled Security Groups",
            "Can Specify Security Groups",
            "Delegate to Workspace",
        ]
        # NOTE(review): "Enabled Security Groups" holds a wrapped list per row,
        # so astype(bool) turns it into True for any non-empty list — confirm
        # this column is intended to be boolean rather than the list itself.
        df[bool_cols] = df[bool_cols].astype(bool)

        return df
    else:
        # Merge all pages into a single dict; the continuation fields keep
        # only the values from the last page.
        combined_response = {
            "overrides": [],
            "continuationUri": "",
            "continuationToken": "",
        }
        for r in responses:
            combined_response["overrides"].extend(r["overrides"])
            combined_response["continuationUri"] = r["continuationUri"]
            combined_response["continuationToken"] = r["continuationToken"]

        return combined_response
480
+
481
+
482
def scan_workspaces(
    data_source_details: bool = False,
    dataset_schema: bool = False,
    dataset_expressions: bool = False,
    lineage: bool = False,
    artifact_users: bool = False,
    workspace: Optional[str | List[str]] = None,
) -> dict:
    """
    Initiates an admin workspace-info scan, polls until it completes and
    returns the scan result.

    Parameters
    ----------
    data_source_details : bool, default=False
        Whether to return data source details.
    dataset_schema : bool, default=False
        Whether to return dataset schema.
    dataset_expressions : bool, default=False
        Whether to return dataset expressions.
    lineage : bool, default=False
        Whether to return lineage info.
    artifact_users : bool, default=False
        Whether to return artifact user access details.
    workspace : str | List[str], default=None
        The Fabric workspace name(s).
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    dict
        The scan result payload as returned by the scanResult endpoint.
    """

    # Bug fix: the original passed `workspace` straight into
    # fabric.resolve_workspace_name before checking for a list, so passing a
    # List[str] (allowed by the annotation) failed. Normalize to a list first.
    if workspace is None:
        workspace = [fabric.resolve_workspace_name()]
    elif isinstance(workspace, str):
        workspace = [workspace]

    workspace_list = [fabric.resolve_workspace_id(w) for w in workspace]

    client = fabric.PowerBIRestClient()
    request_body = {"workspaces": workspace_list}

    response_clause = f"/v1.0/myorg/admin/workspaces/getInfo?lineage={lineage}&datasourceDetails={data_source_details}&datasetSchema={dataset_schema}&datasetExpressions={dataset_expressions}&getArtifactUsers={artifact_users}"
    response = client.post(response_clause, json=request_body)

    # getInfo is asynchronous: a successful submission returns 202 Accepted.
    if response.status_code != 202:
        raise FabricHTTPException(response)
    scan_id = response.json()["id"]
    scan_status = response.json().get("status")
    # Poll until the scan reaches a terminal state.
    while scan_status not in ["Succeeded", "Failed"]:
        time.sleep(1)
        response = client.get(f"/v1.0/myorg/admin/workspaces/scanStatus/{scan_id}")
        scan_status = response.json().get("status")
    if scan_status == "Failed":
        raise FabricHTTPException(response)
    response = client.get(f"/v1.0/myorg/admin/workspaces/scanResult/{scan_id}")
    if response.status_code != 200:
        raise FabricHTTPException(response)

    return response.json()
522
+
523
+
524
def list_datasets() -> pd.DataFrame:
    """
    Shows a list of datasets for the organization.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing a list of datasets for the organization.
    """

    # https://learn.microsoft.com/en-us/rest/api/power-bi/admin/datasets-get-datasets-as-admin

    df = pd.DataFrame(
        columns=[
            "Dataset Id",
            "Dataset Name",
            "Web URL",
            "Add Rows API Enabled",
            "Configured By",
            "Is Refreshable",
            "Is Effective Identity Required",
            "Is Effective Identity Roles Required",
            "Target Storage Mode",
            "Created Date",
            "Content Provider Type",
            "Create Report Embed URL",
            "QnA Embed URL",
            "Upstream Datasets",
            "Users",
            "Is In Place Sharing Enabled",
            "Workspace Id",
            "Auto Sync Read Only Replicas",
            "Max Read Only Replicas",
        ]
    )

    client = fabric.PowerBIRestClient()
    response = client.get("/v1.0/myorg/admin/datasets")

    if response.status_code != 200:
        raise FabricHTTPException(response)

    for v in response.json().get("value", []):
        qso = v.get("queryScaleOutSettings", {})
        new_data = {
            "Dataset Id": v.get("id"),
            "Dataset Name": v.get("name"),
            "Web URL": v.get("webUrl"),
            "Add Rows API Enabled": v.get("addRowsAPIEnabled"),
            "Configured By": v.get("configuredBy"),
            "Is Refreshable": v.get("isRefreshable"),
            "Is Effective Identity Required": v.get("isEffectiveIdentityRequired"),
            "Is Effective Identity Roles Required": v.get(
                "isEffectiveIdentityRolesRequired"
            ),
            "Target Storage Mode": v.get("targetStorageMode"),
            # Fix: keep the raw value here; the column is converted once
            # below (the original converted per-row AND again per-column).
            "Created Date": v.get("createdDate"),
            "Content Provider Type": v.get("contentProviderType"),
            "Create Report Embed URL": v.get("createReportEmbedURL"),
            "QnA Embed URL": v.get("qnaEmbedURL"),
            "Upstream Datasets": v.get("upstreamDatasets", []),
            "Users": v.get("users", []),
            "Is In Place Sharing Enabled": v.get("isInPlaceSharingEnabled"),
            "Workspace Id": v.get("workspaceId"),
            "Auto Sync Read Only Replicas": qso.get("autoSyncReadOnlyReplicas"),
            "Max Read Only Replicas": qso.get("maxReadOnlyReplicas"),
        }
        df = pd.concat([df, pd.DataFrame([new_data])], ignore_index=True)

    bool_cols = [
        "Add Rows API Enabled",
        "Is Refreshable",
        "Is Effective Identity Required",
        "Is Effective Identity Roles Required",
        "Is In Place Sharing Enabled",
        "Auto Sync Read Only Replicas",
    ]
    df[bool_cols] = df[bool_cols].astype(bool)

    df["Created Date"] = pd.to_datetime(df["Created Date"])
    # Robustness fix: astype(int) raises on missing values; treat a missing
    # queryScaleOutSettings replica count as 0.
    df["Max Read Only Replicas"] = (
        df["Max Read Only Replicas"].fillna(0).astype(int)
    )

    return df
610
+
611
+
612
def list_item_access_details(
    item_name: str, type: str, workspace: Optional[str] = None
) -> pd.DataFrame:
    """
    Returns a list of users (including groups and service principals) and lists their workspace roles.

    Parameters
    ----------
    item_name : str
        Name of the Fabric item.
    type : str
        Type of Fabric item.
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing a list of users (including groups and service principals) and lists their workspace roles.
    """

    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/items/list-item-access-details?tabs=HTTP

    workspace = fabric.resolve_workspace_name(workspace)
    workspace_id = fabric.resolve_workspace_id(workspace)
    item_id = fabric.resolve_item_id(
        item_name=item_name, type=type, workspace=workspace
    )

    columns = [
        "User Id",
        "User Name",
        "User Type",
        "User Principal Name",
        "Item Name",
        "Item Type",
        "Item Id",
        "Permissions",
        "Additional Permissions",
    ]
    df = pd.DataFrame(columns=columns)

    response = fabric.FabricRestClient().get(
        f"/v1/admin/workspaces/{workspace_id}/items/{item_id}/users"
    )

    if response.status_code != 200:
        raise FabricHTTPException(response)

    rows = []
    for entry in response.json().get("accessDetails", []):
        principal = entry.get("principal", {})
        access = entry.get("itemAccessDetails", {})
        rows.append(
            {
                "User Id": principal.get("id"),
                "User Name": principal.get("displayName"),
                "User Type": principal.get("type"),
                "User Principal Name": principal.get("userDetails", {}).get(
                    "userPrincipalName"
                ),
                "Item Type": access.get("type"),
                "Permissions": access.get("permissions"),
                "Additional Permissions": access.get("additionalPermissions"),
                "Item Name": item_name,
                "Item Id": item_id,
            }
        )
    if rows:
        df = pd.concat([df, pd.DataFrame(rows)], ignore_index=True)

    return df
681
+
682
+
683
def list_access_entities(
    user_email_address: str,
) -> pd.DataFrame:
    """
    Shows a list of permission details for Fabric and PowerBI items the specified user can access.

    Parameters
    ----------
    user_email_address : str
        The user's email address.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing a list of permission details for Fabric and PowerBI items the specified user can access.
    """

    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/users/list-access-entities?tabs=HTTP

    columns = [
        "Item Id",
        "Item Name",
        "Item Type",
        "Permissions",
        "Additional Permissions",
    ]
    df = pd.DataFrame(columns=columns)

    client = fabric.FabricRestClient()
    response = client.get(f"/v1/admin/users/{user_email_address}/access")

    if response.status_code != 200:
        raise FabricHTTPException(response)

    # Walk every page returned via continuation tokens.
    rows = []
    for page in pagination(client, response):
        for entity in page.get("accessEntities", []):
            access = entity.get("itemAccessDetails", {})
            rows.append(
                {
                    "Item Id": entity.get("id"),
                    "Item Name": entity.get("displayName"),
                    "Item Type": access.get("type"),
                    "Permissions": access.get("permissions"),
                    "Additional Permissions": access.get("additionalPermissions"),
                }
            )
    if rows:
        df = pd.concat([df, pd.DataFrame(rows)], ignore_index=True)

    return df
733
+
734
+
735
def list_workspace_access_details(
    workspace: Optional[Union[str, UUID]] = None
) -> pd.DataFrame:
    """
    Shows a list of users (including groups and Service Principals) that have access to the specified workspace.

    Parameters
    ----------
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing a list of users (including groups and Service Principals) that have access to the specified workspace.
    """

    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/workspaces/list-workspace-access-details?tabs=HTTP

    workspace_name = fabric.resolve_workspace_name(workspace)
    workspace_id = fabric.resolve_workspace_id(workspace_name)

    columns = [
        "User Id",
        "User Name",
        "User Type",
        "Workspace Name",
        "Workspace Id",
        "Workspace Role",
    ]
    df = pd.DataFrame(columns=columns)

    response = fabric.FabricRestClient().get(
        f"/v1/admin/workspaces/{workspace_id}/users"
    )
    if response.status_code != 200:
        raise FabricHTTPException(response)

    rows = []
    for entry in response.json().get("accessDetails", []):
        principal = entry.get("principal", {})
        rows.append(
            {
                "User Id": principal.get("id"),
                "User Name": principal.get("displayName"),
                "User Type": principal.get("type"),
                "Workspace Name": workspace_name,
                "Workspace Id": workspace_id,
                "Workspace Role": entry.get("workspaceAccessDetails", {}).get(
                    "workspaceRole"
                ),
            }
        )
    if rows:
        df = pd.concat([df, pd.DataFrame(rows)], ignore_index=True)

    return df
786
+
787
+
788
def list_items(
    capacity_name: Optional[str] = None,
    workspace: Optional[str] = None,
    state: Optional[str] = None,
    type: Optional[str] = None,
) -> pd.DataFrame:
    """
    Shows a list of active Fabric and PowerBI items.

    Parameters
    ----------
    capacity_name : str, default=None
        The capacity name.
    workspace : str, default=None
        The Fabric workspace name.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    state : str, default=None
        The item state.
    type : str, default=None
        The item type.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing a list of active Fabric and Power BI items.
    """

    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/items/list-items?tabs=HTTP

    columns = [
        "Item Id",
        "Item Name",
        "Type",
        "Description",
        "State",
        "Last Updated Date",
        "Creator Principal Id",
        "Creator Principal Display Name",
        "Creator Principal Type",
        "Creator User Principal Name",
        "Workspace Id",
        "Capacity Id",
    ]
    df = pd.DataFrame(columns=columns)

    # Build the query string from whichever filters were supplied.
    params = []
    if workspace is not None:
        workspace = fabric.resolve_workspace_name(workspace)
        params.append(f"workspaceId={fabric.resolve_workspace_id(workspace)}")
    if capacity_name is not None:
        dfC = list_capacities()
        dfC_filt = dfC[dfC["Capacity Name"] == capacity_name]
        if len(dfC_filt) == 0:
            raise ValueError(
                f"{icons.red_dot} The '{capacity_name}' capacity does not exist."
            )
        params.append(f"capacityId={dfC_filt['Capacity Id'].iloc[0]}")
    if state is not None:
        params.append(f"state={state}")
    if type is not None:
        params.append(f"type={type}")

    url = "/v1/admin/items"
    if params:
        url = f"{url}?{'&'.join(params)}"

    client = fabric.FabricRestClient()
    response = client.get(url)

    if response.status_code != 200:
        raise FabricHTTPException(response)

    rows = []
    for page in pagination(client, response):
        for item in page.get("itemEntities", []):
            creator = item.get("creatorPrincipal", {})
            rows.append(
                {
                    "Item Id": item.get("id"),
                    "Type": item.get("type"),
                    "Item Name": item.get("name"),
                    "Description": item.get("description"),
                    "State": item.get("state"),
                    "Last Updated Date": item.get("lastUpdatedDate"),
                    "Creator Principal Id": creator.get("id"),
                    "Creator Principal Display Name": creator.get("displayName"),
                    "Creator Principal Type": creator.get("type"),
                    "Creator User Principal Name": creator.get(
                        "userDetails", {}
                    ).get("userPrincipalName"),
                    "Workspace Id": item.get("workspaceId"),
                    "Capacity Id": item.get("capacityId"),
                }
            )
    if rows:
        df = pd.concat([df, pd.DataFrame(rows)], ignore_index=True)

    return df