semantic-link-labs 0.7.4__py3-none-any.whl → 0.8.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic. Click here for more details.

Files changed (32)
  1. {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.0.dist-info}/METADATA +7 -3
  2. {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.0.dist-info}/RECORD +32 -23
  3. {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +57 -18
  5. sempy_labs/_capacities.py +39 -3
  6. sempy_labs/_capacity_migration.py +624 -0
  7. sempy_labs/_clear_cache.py +8 -8
  8. sempy_labs/_connections.py +15 -13
  9. sempy_labs/_git.py +20 -21
  10. sempy_labs/_helper_functions.py +33 -30
  11. sempy_labs/_icons.py +19 -0
  12. sempy_labs/_list_functions.py +210 -0
  13. sempy_labs/_model_bpa.py +1 -1
  14. sempy_labs/_query_scale_out.py +4 -3
  15. sempy_labs/_spark.py +31 -36
  16. sempy_labs/_sql.py +60 -15
  17. sempy_labs/_vertipaq.py +9 -7
  18. sempy_labs/admin/__init__.py +53 -0
  19. sempy_labs/admin/_basic_functions.py +806 -0
  20. sempy_labs/admin/_domains.py +411 -0
  21. sempy_labs/directlake/_generate_shared_expression.py +11 -14
  22. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +14 -24
  23. sempy_labs/report/__init__.py +9 -6
  24. sempy_labs/report/_report_bpa.py +359 -0
  25. sempy_labs/report/_report_bpa_rules.py +113 -0
  26. sempy_labs/report/_report_helper.py +254 -0
  27. sempy_labs/report/_report_list_functions.py +95 -0
  28. sempy_labs/report/_report_rebind.py +0 -4
  29. sempy_labs/report/_reportwrapper.py +2039 -0
  30. sempy_labs/tom/_model.py +78 -4
  31. {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.0.dist-info}/LICENSE +0 -0
  32. {semantic_link_labs-0.7.4.dist-info → semantic_link_labs-0.8.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,806 @@
1
+ import sempy.fabric as fabric
2
+ from typing import Optional, List, Union
3
+ from uuid import UUID
4
+ import sempy_labs._icons as icons
5
+ from sempy.fabric.exceptions import FabricHTTPException
6
+ from sempy_labs._helper_functions import resolve_workspace_name_and_id, pagination
7
+ import datetime
8
+ import numpy as np
9
+ import pandas as pd
10
+ import time
11
+
12
+
13
def list_workspaces(
    top: Optional[int] = 5000, skip: Optional[int] = None
) -> pd.DataFrame:
    """
    Lists workspaces for the organization. This function is the admin version of list_workspaces.

    Parameters
    ----------
    top : int, default=5000
        Returns only the first n results. This parameter is mandatory and must be in the range of 1-5000.
    skip : int, default=None
        Skips the first n results. Use with top to fetch results beyond the first 5000.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing a list of workspaces for the organization.
    """

    # Fixed: "State" was populated per row but missing from the declared
    # columns, so an empty result had a different schema than a populated one.
    # It is declared last to match the column order the populated frame
    # previously produced via pd.concat.
    columns = [
        "Id",
        "Is Read Only",
        "Is On Dedicated Capacity",
        "Type",
        "Name",
        "Capacity Id",
        "Default Dataset Storage Format",
        "Pipeline Id",
        "Has Workspace Level Settings",
        "State",
    ]

    url = f"/v1.0/myorg/admin/groups?$top={top}"
    if skip is not None:
        url = f"{url}&$skip={skip}"

    client = fabric.PowerBIRestClient()
    response = client.get(url)

    if response.status_code != 200:
        raise FabricHTTPException(response)

    # Accumulate plain dicts and build the frame once; per-row pd.concat is
    # quadratic in the number of workspaces.
    rows = []
    for v in response.json().get("value", []):
        capacity_id = v.get("capacityId")
        if capacity_id:
            capacity_id = capacity_id.lower()
        rows.append(
            {
                "Id": v.get("id"),
                "Is Read Only": v.get("isReadOnly"),
                "Is On Dedicated Capacity": v.get("isOnDedicatedCapacity"),
                "Capacity Id": capacity_id,
                "Default Dataset Storage Format": v.get("defaultDatasetStorageFormat"),
                "Type": v.get("type"),
                "Name": v.get("name"),
                "State": v.get("state"),
                "Pipeline Id": v.get("pipelineId"),
                "Has Workspace Level Settings": v.get("hasWorkspaceLevelSettings"),
            }
        )

    df = pd.DataFrame(rows, columns=columns)

    bool_cols = [
        "Is Read Only",
        "Is On Dedicated Capacity",
        "Has Workspace Level Settings",
    ]
    df[bool_cols] = df[bool_cols].astype(bool)

    return df
82
+
83
+
84
def assign_workspaces_to_capacity(
    source_capacity: str,
    target_capacity: str,
    workspace: Optional[str | List[str]] = None,
):
    """
    Assigns a workspace to a capacity. This function is the admin version.

    Parameters
    ----------
    source_capacity : str
        The name of the source capacity.
    target_capacity : str
        The name of the target capacity.
    workspace : str | List[str], default=None
        The name of the workspace(s).
        Defaults to None which resolves to migrating all workspaces within the source capacity to the target capacity.

    Raises
    ------
    ValueError
        If the source or target capacity does not exist.
    """

    if isinstance(workspace, str):
        workspace = [workspace]

    dfC = fabric.list_capacities()

    def _capacity_id(capacity_name: str) -> str:
        # Fail fast with a clear message instead of an IndexError on iloc[0]
        # when the capacity name is not found.
        dfC_filt = dfC[dfC["Display Name"] == capacity_name]
        if len(dfC_filt) == 0:
            raise ValueError(
                f"{icons.red_dot} The '{capacity_name}' capacity does not exist."
            )
        return dfC_filt["Id"].iloc[0]

    source_capacity_id = _capacity_id(source_capacity)
    target_capacity_id = _capacity_id(target_capacity)

    if workspace is None:
        # No explicit workspaces: migrate everything on the source capacity.
        workspaces = fabric.list_workspaces(
            filter=f"capacityId eq '{source_capacity_id.upper()}'"
        )["Id"].values
    else:
        dfW = fabric.list_workspaces()
        workspaces = dfW[dfW["Name"].isin(workspace)]["Id"].values

    workspaces = np.array(workspaces)
    # The AssignWorkspaces endpoint accepts at most 1000 workspaces per call.
    batch_size = 999
    for i in range(0, len(workspaces), batch_size):
        batch = workspaces[i : i + batch_size].tolist()
        batch_length = len(batch)
        start_time = datetime.datetime.now()
        request_body = {
            "capacityMigrationAssignments": [
                {
                    "targetCapacityObjectId": target_capacity_id.upper(),
                    "workspacesToAssign": batch,
                }
            ]
        }

        client = fabric.PowerBIRestClient()
        response = client.post(
            "/v1.0/myorg/admin/capacities/AssignWorkspaces",
            json=request_body,
        )

        if response.status_code != 200:
            raise FabricHTTPException(response)
        end_time = datetime.datetime.now()
        print(
            f"Total time for assigning {str(batch_length)} workspaces is {str((end_time - start_time).total_seconds())}"
        )
    print(
        f"{icons.green_dot} The workspaces have been assigned to the '{target_capacity}' capacity."
    )
151
+
152
+
153
def list_capacities() -> pd.DataFrame:
    """
    Shows a list of capacities and their properties. This function is the admin version.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the capacities and their properties.
    """

    columns = ["Capacity Id", "Capacity Name", "Sku", "Region", "State", "Admins"]
    df = pd.DataFrame(columns=columns)

    client = fabric.PowerBIRestClient()
    response = client.get("/v1.0/myorg/admin/capacities")
    if response.status_code != 200:
        raise FabricHTTPException(response)

    # Follow continuation tokens so every page of capacities is collected.
    for page in pagination(client, response):
        for capacity in page.get("value", []):
            row = {
                "Capacity Id": capacity.get("id").lower(),
                "Capacity Name": capacity.get("displayName"),
                "Sku": capacity.get("sku"),
                "Region": capacity.get("region"),
                "State": capacity.get("state"),
                # Wrapped in a list so the admins collection fits one cell.
                "Admins": [capacity.get("admins", [])],
            }
            df = pd.concat([df, pd.DataFrame(row, index=[0])], ignore_index=True)

    return df
190
+
191
+
192
def list_tenant_settings() -> pd.DataFrame:
    """
    Lists all tenant settings.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the tenant settings.
    """

    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/tenants/list-tenant-settings?tabs=HTTP

    client = fabric.FabricRestClient()
    response = client.get("/v1/admin/tenantsettings")

    if response.status_code != 200:
        raise FabricHTTPException(response)

    columns = [
        "Setting Name",
        "Title",
        "Enabled",
        "Can Specify Security Groups",
        "Tenant Setting Group",
        "Enabled Security Groups",
    ]

    rows = [
        {
            "Setting Name": setting.get("settingName"),
            "Title": setting.get("title"),
            "Enabled": setting.get("enabled"),
            "Can Specify Security Groups": setting.get("canSpecifySecurityGroups"),
            "Tenant Setting Group": setting.get("tenantSettingGroup"),
            # Wrapped in a list so the group collection fits one cell.
            "Enabled Security Groups": [setting.get("enabledSecurityGroups", [])],
        }
        for setting in response.json().get("tenantSettings", [])
    ]

    df = pd.DataFrame(rows, columns=columns)

    bool_cols = ["Enabled", "Can Specify Security Groups"]
    df[bool_cols] = df[bool_cols].astype(bool)

    return df
236
+
237
+
238
def _list_capacities_meta() -> pd.DataFrame:
    """
    Internal helper: lists capacities via the admin endpoint, falling back to
    the user-scoped endpoint when the caller is not an admin (HTTP 401).

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the capacities and their properties.
    """

    df = pd.DataFrame(
        columns=["Capacity Id", "Capacity Name", "Sku", "Region", "State", "Admins"]
    )

    client = fabric.PowerBIRestClient()
    try:
        response = client.get("/v1.0/myorg/admin/capacities")
    except Exception as e:
        # Fixed: the original raised FabricHTTPException(response) with
        # 'response' unbound (the GET itself raised), and assumed every
        # exception carries a status_code. Only a 401 is recoverable here;
        # anything else is re-raised as-is.
        if getattr(e, "status_code", None) == 401:
            response = client.get("/v1.0/myorg/capacities")
        else:
            raise

    for i in response.json().get("value", []):
        new_data = {
            "Capacity Id": i.get("id").lower(),
            "Capacity Name": i.get("displayName"),
            "Sku": i.get("sku"),
            "Region": i.get("region"),
            "State": i.get("state"),
            "Admins": [i.get("admins", [])],
        }
        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

    return df
265
+
266
+
267
def unassign_workspaces_from_capacity(workspaces: str | List[str]):
    """
    Unassigns workspace(s) from their capacity. This function is the admin version of list_workspaces.

    Parameters
    ----------
    workspaces : str | List[str]
        The Fabric workspace name(s).
    """

    # https://learn.microsoft.com/en-us/rest/api/power-bi/admin/capacities-unassign-workspaces-from-capacity

    # Accept a single workspace name as a convenience.
    workspace_list = [workspaces] if isinstance(workspaces, str) else workspaces

    client = fabric.PowerBIRestClient()
    response = client.post(
        "/v1.0/myorg/admin/capacities/UnassignWorkspaces",
        json={"workspacesToUnassign": workspace_list},
    )

    if response.status_code != 200:
        raise FabricHTTPException(response)

    print(f"{icons.green_dot} The workspaces have been unassigned.")
294
+
295
+
296
def list_external_data_shares():
    """
    Lists external data shares in the tenant. This function is for admins.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing a list of external data shares in the tenant.
    """

    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/external-data-shares/list-external-data-shares?tabs=HTTP

    # NOTE(review): 'Creater' is a typo, but it is part of the public output
    # schema, so it is preserved for backward compatibility.
    df = pd.DataFrame(
        columns=[
            "External Data Share Id",
            "Paths",
            "Creater Principal Id",
            "Creater Principal Name",
            "Creater Principal Type",
            "Creater Principal UPN",
            "Recipient UPN",
            "Status",
            "Expiration Time UTC",
            "Workspace Id",
            "Item Id",
            "Invitation URL",
        ]
    )

    client = fabric.FabricRestClient()
    response = client.get("/v1/admin/items/externalDataShares")

    if response.status_code != 200:
        raise FabricHTTPException(response)

    for i in response.json().get("value", []):
        cp = i.get("creatorPrincipal", {})
        new_data = {
            "External Data Share Id": i.get("id"),
            "Paths": [i.get("paths", [])],
            "Creater Principal Id": cp.get("id"),
            "Creater Principal Name": cp.get("displayName"),
            "Creater Principal Type": cp.get("type"),
            "Creater Principal UPN": cp.get("userDetails", {}).get("userPrincipalName"),
            "Recipient UPN": i.get("recipient", {}).get("userPrincipalName"),
            "Status": i.get("status"),
            "Expiration Time UTC": i.get("expirationTimeUtc"),
            "Workspace Id": i.get("workspaceId"),
            "Item Id": i.get("itemId"),
            "Invitation URL": i.get("invitationUrl"),
        }

        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

    # Fixed: pd.to_datetime(df[["Expiration Time UTC"]]) passed a DataFrame,
    # which to_datetime only accepts for year/month/day assembly and therefore
    # raised. Convert the Series instead.
    df["Expiration Time UTC"] = pd.to_datetime(df["Expiration Time UTC"])

    return df
354
+
355
+
356
def revoke_external_data_share(
    external_data_share_id: UUID, item_id: UUID, workspace: str
):
    """
    Revokes the specified external data share. Note: This action cannot be undone.

    Parameters
    ----------
    external_data_share_id : UUID
        The external data share ID.
    item_id : UUID
        The Item ID.
    workspace : str
        The Fabric workspace name.
    """

    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/external-data-shares/revoke-external-data-share?tabs=HTTP

    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

    endpoint = (
        f"/v1/admin/workspaces/{workspace_id}/items/{item_id}"
        f"/externalDataShares/{external_data_share_id}/revoke"
    )
    client = fabric.FabricRestClient()
    response = client.post(endpoint)

    if response.status_code != 200:
        raise FabricHTTPException(response)

    print(
        f"{icons.green_dot} The '{external_data_share_id}' external data share for the '{item_id}' item within the '{workspace}' workspace has been revoked."
    )
387
+
388
+
389
def list_capacities_delegated_tenant_settings(
    return_dataframe: Optional[bool] = True,
) -> Optional[pd.DataFrame | dict]:
    """
    Returns a list of tenant setting overrides that override at the capacities.

    Parameters
    ----------
    return_dataframe : bool, default=True
        If True, returns a dataframe. If False, returns a dictionary.

    Returns
    -------
    pandas.DataFrame | dict
        A pandas dataframe (or raw dictionary) showing a list of tenant
        setting overrides that override at the capacities.
    """

    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/tenants/list-capacities-tenant-settings-overrides?tabs=HTTP

    df = pd.DataFrame(
        columns=[
            "Capacity Id",
            "Setting Name",
            "Setting Title",
            "Setting Enabled",
            "Can Specify Security Groups",
            "Enabled Security Groups",
            "Tenant Setting Group",
            "Tenant Setting Properties",
            "Delegate to Workspace",
            "Delegated From",
        ]
    )

    client = fabric.FabricRestClient()
    response = client.get("/v1/admin/capacities/delegatedTenantSettingOverrides")

    if response.status_code != 200:
        raise FabricHTTPException(response)

    responses = pagination(client, response)

    # Fixed: the two branches disagreed on the response key casing
    # ('Overrides' vs 'overrides'); accept both so neither branch silently
    # returns nothing. The REST docs use lowercase 'overrides'.
    def _overrides(page: dict) -> list:
        return page.get("overrides", page.get("Overrides", []))

    if return_dataframe:
        for r in responses:
            for i in _overrides(r):
                for setting in i.get("tenantSettings", []):
                    new_data = {
                        "Capacity Id": i.get("id"),
                        "Setting Name": setting.get("settingName"),
                        "Setting Title": setting.get("title"),
                        "Setting Enabled": setting.get("enabled"),
                        "Can Specify Security Groups": setting.get(
                            "canSpecifySecurityGroups"
                        ),
                        "Enabled Security Groups": [
                            setting.get("enabledSecurityGroups", [])
                        ],
                        "Tenant Setting Group": setting.get("tenantSettingGroup"),
                        "Tenant Setting Properties": [setting.get("properties", [])],
                        "Delegate to Workspace": setting.get("delegateToWorkspace"),
                        "Delegated From": setting.get("delegatedFrom"),
                    }

                    df = pd.concat(
                        [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
                    )

        # Fixed: the original cast 'Enabled Security Groups' (a list-valued
        # column) to bool, destroying the group payload, while the actual
        # boolean flag 'Setting Enabled' was left unconverted.
        bool_cols = [
            "Setting Enabled",
            "Can Specify Security Groups",
            "Delegate to Workspace",
        ]
        df[bool_cols] = df[bool_cols].astype(bool)

        return df
    else:
        combined_response = {
            "overrides": [],
            "continuationUri": "",
            "continuationToken": "",
        }
        for r in responses:
            # .get with defaults: the final page may omit continuation fields.
            combined_response["overrides"].extend(_overrides(r))
            combined_response["continuationUri"] = r.get("continuationUri", "")
            combined_response["continuationToken"] = r.get("continuationToken", "")

        return combined_response
477
+
478
+
479
def scan_workspaces(
    data_source_details: Optional[bool] = False,
    dataset_schema: Optional[bool] = False,
    dataset_expressions: Optional[bool] = False,
    lineage: Optional[bool] = False,
    artifact_users: Optional[bool] = False,
    workspace: Optional[str | List[str]] = None,
) -> dict:
    """
    Initiates an admin metadata scan of the given workspace(s), polls until the
    scan completes and returns the scan result.

    Parameters
    ----------
    data_source_details : bool, default=False
        Whether to return data source details.
    dataset_schema : bool, default=False
        Whether to return dataset schema.
    dataset_expressions : bool, default=False
        Whether to return dataset expressions (DAX/M).
    lineage : bool, default=False
        Whether to return lineage info.
    artifact_users : bool, default=False
        Whether to return artifact user details.
    workspace : str | List[str], default=None
        The workspace name(s). Defaults to None which resolves to the
        workspace of the attached lakehouse or the notebook's workspace.

    Returns
    -------
    dict
        The scan result returned by the service.
    """

    # Fixed: the original unconditionally called resolve_workspace_name on the
    # argument, which breaks when a list of workspace names (allowed by the
    # signature) is passed.
    if workspace is None:
        workspace = [fabric.resolve_workspace_name()]
    elif isinstance(workspace, str):
        workspace = [fabric.resolve_workspace_name(workspace)]

    workspace_list = [fabric.resolve_workspace_id(w) for w in workspace]

    client = fabric.PowerBIRestClient()
    request_body = {"workspaces": workspace_list}

    response_clause = f"/v1.0/myorg/admin/workspaces/getInfo?lineage={lineage}&datasourceDetails={data_source_details}&datasetSchema={dataset_schema}&datasetExpressions={dataset_expressions}&getArtifactUsers={artifact_users}"
    response = client.post(response_clause, json=request_body)

    # getInfo is asynchronous; the service acknowledges with 202 Accepted.
    if response.status_code != 202:
        raise FabricHTTPException(response)
    scan_id = response.json()["id"]
    scan_status = response.json().get("status")
    # Poll until the scan reaches a terminal state.
    while scan_status not in ["Succeeded", "Failed"]:
        time.sleep(1)
        response = client.get(f"/v1.0/myorg/admin/workspaces/scanStatus/{scan_id}")
        scan_status = response.json().get("status")
    if scan_status == "Failed":
        raise FabricHTTPException(response)
    response = client.get(f"/v1.0/myorg/admin/workspaces/scanResult/{scan_id}")
    if response.status_code != 200:
        raise FabricHTTPException(response)

    return response.json()
519
+
520
+
521
def list_datasets() -> pd.DataFrame:
    """
    Lists datasets for the organization via the admin API.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the datasets and their properties.
    """

    columns = [
        "Dataset Id",
        "Dataset Name",
        "Web URL",
        "Add Rows API Enabled",
        "Configured By",
        "Is Refreshable",
        "Is Effective Identity Required",
        "Is Effective Identity Roles Required",
        "Target Storage Mode",
        "Created Date",
        "Content Provider Type",
        "Create Report Embed URL",
        "QnA Embed URL",
        "Upstream Datasets",
        "Users",
        "Is In Place Sharing Enabled",
        "Workspace Id",
        "Auto Sync Read Only Replicas",
        "Max Read Only Replicas",
    ]

    client = fabric.FabricRestClient()

    response = client.get("/v1.0/myorg/admin/datasets")

    if response.status_code != 200:
        raise FabricHTTPException(response)

    # Accumulate rows and build the frame once; per-row pd.concat is quadratic.
    rows = []
    for v in response.json().get("value", []):
        qso = v.get("queryScaleOutSettings", {})
        rows.append(
            {
                "Dataset Id": v.get("id"),
                "Dataset Name": v.get("name"),
                "Web URL": v.get("webUrl"),
                "Add Rows API Enabled": v.get("addRowsAPIEnabled"),
                "Configured By": v.get("configuredBy"),
                "Is Refreshable": v.get("isRefreshable"),
                "Is Effective Identity Required": v.get("isEffectiveIdentityRequired"),
                "Is Effective Identity Roles Required": v.get(
                    "isEffectiveIdentityRolesRequired"
                ),
                "Target Storage Mode": v.get("targetStorageMode"),
                # Raw value; the whole column is converted once below (the
                # original converted per row and then again per column).
                "Created Date": v.get("createdDate"),
                "Content Provider Type": v.get("contentProviderType"),
                "Create Report Embed URL": v.get("createReportEmbedURL"),
                "QnA Embed URL": v.get("qnaEmbedURL"),
                "Upstream Datasets": v.get("upstreamDatasets", []),
                "Users": v.get("users", []),
                "Is In Place Sharing Enabled": v.get("isInPlaceSharingEnabled"),
                "Workspace Id": v.get("workspaceId"),
                "Auto Sync Read Only Replicas": qso.get("autoSyncReadOnlyReplicas"),
                "Max Read Only Replicas": qso.get("maxReadOnlyReplicas"),
            }
        )

    df = pd.DataFrame(rows, columns=columns)

    bool_cols = [
        "Add Rows API Enabled",
        "Is Refreshable",
        "Is Effective Identity Required",
        "Is Effective Identity Roles Required",
        "Is In Place Sharing Enabled",
        "Auto Sync Read Only Replicas",
    ]
    df[bool_cols] = df[bool_cols].astype(bool)

    df["Created Date"] = pd.to_datetime(df["Created Date"])
    # Fixed: datasets without queryScaleOutSettings yield NaN, which
    # .astype(int) rejects; treat a missing value as 0 replicas.
    df["Max Read Only Replicas"] = df["Max Read Only Replicas"].fillna(0).astype(int)

    return df
598
+
599
+
600
def list_item_access_details(
    item_name: str, type: str, workspace: Optional[str] = None
) -> pd.DataFrame:
    """
    Lists the access details for a given item via the admin API.

    Parameters
    ----------
    item_name : str
        The item name.
    type : str
        The item type.
    workspace : str, default=None
        The Fabric workspace name. Defaults to None which resolves to the
        workspace of the attached lakehouse or the notebook's workspace.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the users and their access to the item.
    """

    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/items/list-item-access-details?tabs=HTTP

    workspace = fabric.resolve_workspace_name(workspace)
    workspace_id = fabric.resolve_workspace_id(workspace)
    item_id = fabric.resolve_item_id(
        item_name=item_name, type=type, workspace=workspace
    )

    columns = [
        "User Id",
        "User Name",
        "User Type",
        "User Principal Name",
        "Item Name",
        "Item Type",
        "Item Id",
        "Permissions",
        "Additional Permissions",
    ]

    client = fabric.FabricRestClient()
    response = client.get(f"/v1/admin/workspaces/{workspace_id}/items/{item_id}/users")

    if response.status_code != 200:
        raise FabricHTTPException(response)

    rows = []
    for entry in response.json().get("accessDetails", []):
        principal = entry.get("principal", {})
        access = entry.get("itemAccessDetails", {})
        rows.append(
            {
                "User Id": principal.get("id"),
                "User Name": principal.get("displayName"),
                "User Type": principal.get("type"),
                "User Principal Name": principal.get("userDetails", {}).get(
                    "userPrincipalName"
                ),
                "Item Type": access.get("type"),
                "Permissions": access.get("permissions"),
                "Additional Permissions": access.get("additionalPermissions"),
                "Item Name": item_name,
                "Item Id": item_id,
            }
        )

    return pd.DataFrame(rows, columns=columns)
650
+
651
+
652
def list_access_entities(
    user_email_address: str,
) -> pd.DataFrame:
    """
    Lists the items a given user has access to via the admin API.

    Parameters
    ----------
    user_email_address : str
        The user's email address.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the items the user can access.
    """

    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/users/list-access-entities?tabs=HTTP

    columns = [
        "Item Id",
        "Item Name",
        "Item Type",
        "Permissions",
        "Additional Permissions",
    ]

    client = fabric.FabricRestClient()
    response = client.get(f"/v1/admin/users/{user_email_address}/access")

    if response.status_code != 200:
        raise FabricHTTPException(response)

    rows = []
    # Follow continuation tokens so every page of access entities is collected.
    for page in pagination(client, response):
        for entity in page.get("accessEntities", []):
            access = entity.get("itemAccessDetails", {})
            rows.append(
                {
                    "Item Id": entity.get("id"),
                    "Item Name": entity.get("displayName"),
                    "Item Type": access.get("type"),
                    "Permissions": access.get("permissions"),
                    "Additional Permissions": access.get("additionalPermissions"),
                }
            )

    return pd.DataFrame(rows, columns=columns)
689
+
690
+
691
def list_workspace_access_details(
    workspace: Optional[Union[str, UUID]] = None
) -> pd.DataFrame:
    """
    Lists the users and their roles for a workspace via the admin API.

    Parameters
    ----------
    workspace : str | UUID, default=None
        The Fabric workspace name or id. Defaults to None which resolves to
        the workspace of the attached lakehouse or the notebook's workspace.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the users and their workspace roles.
    """

    # https://learn.microsoft.com/en-us/rest/api/fabric/admin/items/list-items?tabs=HTTP

    workspace_name = fabric.resolve_workspace_name(workspace)
    workspace_id = fabric.resolve_workspace_id(workspace_name)

    columns = [
        "User Id",
        "User Name",
        "User Type",
        "Workspace Name",
        "Workspace Id",
        "Workspace Role",
    ]

    client = fabric.FabricRestClient()
    response = client.get(f"/v1/admin/workspaces/{workspace_id}/users")
    if response.status_code != 200:
        raise FabricHTTPException(response)

    rows = []
    for detail in response.json().get("accessDetails", []):
        principal = detail.get("principal", {})
        rows.append(
            {
                "User Id": principal.get("id"),
                "User Name": principal.get("displayName"),
                "User Type": principal.get("type"),
                "Workspace Name": workspace_name,
                "Workspace Id": workspace_id,
                "Workspace Role": detail.get("workspaceAccessDetails", {}).get(
                    "workspaceRole"
                ),
            }
        )

    return pd.DataFrame(rows, columns=columns)
727
+
728
+
729
def list_items(
    capacity_name: Optional[str] = None,
    workspace: Optional[str] = None,
    state: Optional[str] = None,
    type: Optional[str] = None,
) -> pd.DataFrame:
    """
    Lists items for the organization via the admin API, optionally filtered by
    capacity, workspace, state and/or type.

    Parameters
    ----------
    capacity_name : str, default=None
        Filter by capacity name.
    workspace : str, default=None
        Filter by workspace name.
    state : str, default=None
        Filter by item state.
    type : str, default=None
        Filter by item type.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the items and their properties.
    """

    columns = [
        "Item Id",
        "Item Name",
        "Type",
        "Description",
        "State",
        "Last Updated Date",
        "Creator Principal Id",
        "Creator Principal Display Name",
        "Creator Principal Type",
        "Creator User Principal Name",
        "Workspace Id",
        "Capacity Id",
    ]
    df = pd.DataFrame(columns=columns)

    # Collect the query-string filters, then assemble the URL once.
    filters = []
    if workspace is not None:
        workspace = fabric.resolve_workspace_name(workspace)
        filters.append(f"workspaceId={fabric.resolve_workspace_id(workspace)}")
    if capacity_name is not None:
        dfC = list_capacities()
        dfC_filt = dfC[dfC["Capacity Name"] == capacity_name]
        if len(dfC_filt) == 0:
            raise ValueError(
                f"{icons.red_dot} The '{capacity_name}' capacity does not exist."
            )
        filters.append(f"capacityId={dfC_filt['Capacity Id'].iloc[0]}")
    if state is not None:
        filters.append(f"state={state}")
    if type is not None:
        filters.append(f"type={type}")

    url = "/v1/admin/items"
    if filters:
        url = f"{url}?" + "&".join(filters)

    client = fabric.FabricRestClient()
    response = client.get(url)

    if response.status_code != 200:
        raise FabricHTTPException(response)

    # Follow continuation tokens so every page of items is collected.
    for page in pagination(client, response):
        for item in page.get("itemEntities", []):
            creator = item.get("creatorPrincipal", {})
            new_data = {
                "Item Id": item.get("id"),
                "Type": item.get("type"),
                "Item Name": item.get("name"),
                "Description": item.get("description"),
                "State": item.get("state"),
                "Last Updated Date": item.get("lastUpdatedDate"),
                "Creator Principal Id": creator.get("id"),
                "Creator Principal Display Name": creator.get("displayName"),
                "Creator Principal Type": creator.get("type"),
                "Creator User Principal Name": creator.get("userDetails", {}).get(
                    "userPrincipalName"
                ),
                "Workspace Id": item.get("workspaceId"),
                "Capacity Id": item.get("capacityId"),
            }
            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

    return df