semantic-link-labs 0.9.0__py3-none-any.whl → 0.9.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs has been flagged as potentially problematic; review the file changes listed below for details.

Files changed (83)
  1. {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/METADATA +68 -7
  2. {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/RECORD +83 -76
  3. sempy_labs/__init__.py +14 -12
  4. sempy_labs/_authentication.py +0 -2
  5. sempy_labs/_capacities.py +120 -142
  6. sempy_labs/_capacity_migration.py +61 -94
  7. sempy_labs/_clear_cache.py +9 -8
  8. sempy_labs/_connections.py +72 -105
  9. sempy_labs/_data_pipelines.py +47 -49
  10. sempy_labs/_dataflows.py +45 -51
  11. sempy_labs/_dax.py +228 -6
  12. sempy_labs/_delta_analyzer.py +303 -0
  13. sempy_labs/_deployment_pipelines.py +72 -66
  14. sempy_labs/_environments.py +39 -36
  15. sempy_labs/_eventhouses.py +35 -35
  16. sempy_labs/_eventstreams.py +38 -39
  17. sempy_labs/_external_data_shares.py +29 -42
  18. sempy_labs/_gateways.py +57 -101
  19. sempy_labs/_generate_semantic_model.py +22 -30
  20. sempy_labs/_git.py +46 -66
  21. sempy_labs/_graphQL.py +95 -0
  22. sempy_labs/_helper_functions.py +175 -30
  23. sempy_labs/_job_scheduler.py +47 -59
  24. sempy_labs/_kql_databases.py +27 -34
  25. sempy_labs/_kql_querysets.py +23 -30
  26. sempy_labs/_list_functions.py +262 -164
  27. sempy_labs/_managed_private_endpoints.py +52 -47
  28. sempy_labs/_mirrored_databases.py +110 -134
  29. sempy_labs/_mirrored_warehouses.py +13 -13
  30. sempy_labs/_ml_experiments.py +36 -36
  31. sempy_labs/_ml_models.py +37 -38
  32. sempy_labs/_model_dependencies.py +2 -0
  33. sempy_labs/_notebooks.py +28 -29
  34. sempy_labs/_one_lake_integration.py +2 -0
  35. sempy_labs/_query_scale_out.py +63 -81
  36. sempy_labs/_refresh_semantic_model.py +12 -14
  37. sempy_labs/_spark.py +54 -79
  38. sempy_labs/_sql.py +7 -11
  39. sempy_labs/_vertipaq.py +8 -3
  40. sempy_labs/_warehouses.py +30 -33
  41. sempy_labs/_workloads.py +15 -20
  42. sempy_labs/_workspace_identity.py +13 -17
  43. sempy_labs/_workspaces.py +49 -48
  44. sempy_labs/admin/__init__.py +2 -0
  45. sempy_labs/admin/_basic_functions.py +244 -281
  46. sempy_labs/admin/_domains.py +188 -103
  47. sempy_labs/admin/_external_data_share.py +26 -31
  48. sempy_labs/admin/_git.py +17 -22
  49. sempy_labs/admin/_items.py +34 -48
  50. sempy_labs/admin/_scanner.py +20 -13
  51. sempy_labs/directlake/_directlake_schema_compare.py +2 -0
  52. sempy_labs/directlake/_dl_helper.py +10 -11
  53. sempy_labs/directlake/_generate_shared_expression.py +4 -5
  54. sempy_labs/directlake/_get_directlake_lakehouse.py +1 -0
  55. sempy_labs/directlake/_list_directlake_model_calc_tables.py +1 -0
  56. sempy_labs/directlake/_show_unsupported_directlake_objects.py +2 -0
  57. sempy_labs/directlake/_warm_cache.py +2 -0
  58. sempy_labs/graph/__init__.py +33 -0
  59. sempy_labs/graph/_groups.py +402 -0
  60. sempy_labs/graph/_teams.py +113 -0
  61. sempy_labs/graph/_users.py +191 -0
  62. sempy_labs/lakehouse/__init__.py +4 -0
  63. sempy_labs/lakehouse/_get_lakehouse_columns.py +10 -10
  64. sempy_labs/lakehouse/_get_lakehouse_tables.py +14 -20
  65. sempy_labs/lakehouse/_lakehouse.py +101 -4
  66. sempy_labs/lakehouse/_shortcuts.py +42 -20
  67. sempy_labs/migration/__init__.py +4 -0
  68. sempy_labs/migration/_direct_lake_to_import.py +66 -0
  69. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +1 -0
  70. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +1 -0
  71. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +1 -0
  72. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +2 -0
  73. sempy_labs/report/_download_report.py +8 -13
  74. sempy_labs/report/_generate_report.py +49 -46
  75. sempy_labs/report/_paginated.py +20 -26
  76. sempy_labs/report/_report_functions.py +50 -45
  77. sempy_labs/report/_report_list_functions.py +2 -0
  78. sempy_labs/report/_report_rebind.py +6 -10
  79. sempy_labs/report/_reportwrapper.py +187 -220
  80. sempy_labs/tom/_model.py +8 -5
  81. {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/LICENSE +0 -0
  82. {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/WHEEL +0 -0
  83. {semantic_link_labs-0.9.0.dist-info → semantic_link_labs-0.9.2.dist-info}/top_level.txt +0 -0
@@ -2,17 +2,18 @@ import sempy.fabric as fabric
2
2
  from typing import Optional, List, Union, Tuple
3
3
  from uuid import UUID
4
4
  import sempy_labs._icons as icons
5
- from sempy.fabric.exceptions import FabricHTTPException
6
5
  from sempy_labs._helper_functions import (
7
- pagination,
8
6
  _is_valid_uuid,
9
7
  _build_url,
8
+ _update_dataframe_datatypes,
9
+ _base_api,
10
+ _create_dataframe,
11
+ get_capacity_id,
10
12
  )
11
13
  from sempy._utils._log import log
12
14
  import numpy as np
13
15
  import pandas as pd
14
16
  from dateutil.parser import parse as dtparser
15
- import sempy_labs._authentication as auth
16
17
 
17
18
 
18
19
  @log
@@ -64,17 +65,14 @@ def list_workspaces(
64
65
  )
65
66
  del kwargs["skip"]
66
67
 
67
- client = fabric.FabricRestClient(token_provider=auth.token_provider.get())
68
-
69
- df = pd.DataFrame(
70
- columns=[
71
- "Id",
72
- "Name",
73
- "State",
74
- "Type",
75
- "Capacity Id",
76
- ]
77
- )
68
+ columns = {
69
+ "Id": "string",
70
+ "Name": "string",
71
+ "State": "string",
72
+ "Type": "string",
73
+ "Capacity Id": "string",
74
+ }
75
+ df = _create_dataframe(columns=columns)
78
76
 
79
77
  url = "/v1/admin/workspaces"
80
78
  params = {}
@@ -93,16 +91,10 @@ def list_workspaces(
93
91
 
94
92
  url = _build_url(url, params)
95
93
 
96
- response = client.get(path_or_url=url)
97
-
98
- if response.status_code != 200:
99
- raise FabricHTTPException(response)
100
-
101
- responsePaginated = pagination(client, response)
102
-
94
+ responses = _base_api(request=url, client="fabric_sp", uses_pagination=True)
103
95
  workspaces = []
104
96
 
105
- for r in responsePaginated:
97
+ for r in responses:
106
98
  workspaces = workspaces + r.get("workspaces", [])
107
99
 
108
100
  if len(workspaces) > 0:
@@ -147,18 +139,20 @@ def list_capacities(
147
139
  pandas.DataFrame
148
140
  A pandas dataframe showing the capacities and their properties.
149
141
  """
150
- client = fabric.FabricRestClient(token_provider=auth.token_provider.get())
151
-
152
- df = pd.DataFrame(
153
- columns=["Capacity Id", "Capacity Name", "Sku", "Region", "State", "Admins"]
154
- )
155
-
156
- response = client.get("/v1.0/myorg/admin/capacities")
157
142
 
158
- if response.status_code != 200:
159
- raise FabricHTTPException(response)
143
+ columns = {
144
+ "Capacity Id": "string",
145
+ "Capacity Name": "string",
146
+ "Sku": "string",
147
+ "Region": "string",
148
+ "State": "string",
149
+ "Admins": "string",
150
+ }
151
+ df = _create_dataframe(columns=columns)
160
152
 
161
- responses = pagination(client, response)
153
+ responses = _base_api(
154
+ request="/v1.0/myorg/admin/capacities", client="fabric_sp", uses_pagination=True
155
+ )
162
156
 
163
157
  for r in responses:
164
158
  for i in r.get("value", []):
@@ -250,7 +244,7 @@ def assign_workspaces_to_capacity(
250
244
  batch_size = 999
251
245
  for i in range(0, len(workspaces), batch_size):
252
246
  batch = workspaces[i : i + batch_size].tolist()
253
- request_body = {
247
+ payload = {
254
248
  "capacityMigrationAssignments": [
255
249
  {
256
250
  "targetCapacityObjectId": target_capacity_id.upper(),
@@ -259,15 +253,12 @@ def assign_workspaces_to_capacity(
259
253
  ]
260
254
  }
261
255
 
262
- client = fabric.FabricRestClient()
263
-
264
- response = client.post(
265
- "/v1.0/myorg/admin/capacities/AssignWorkspaces",
266
- json=request_body,
256
+ _base_api(
257
+ request="/v1.0/myorg/admin/capacities/AssignWorkspaces",
258
+ method="post",
259
+ payload=payload,
267
260
  )
268
261
 
269
- if response.status_code != 200:
270
- raise FabricHTTPException(response)
271
262
  print(
272
263
  f"{icons.green_dot} The workspaces have been assigned to the '{target_capacity}' capacity. A total of {len(workspaces)} were moved."
273
264
  )
@@ -300,16 +291,12 @@ def unassign_workspaces_from_capacity(
300
291
  )
301
292
 
302
293
  payload = {"workspacesToUnassign": workspacesIds}
303
-
304
- client = fabric.PowerBIRestClient()
305
- response = client.post(
306
- "/v1.0/myorg/admin/capacities/UnassignWorkspaces",
307
- json=payload,
294
+ _base_api(
295
+ request="/v1.0/myorg/admin/capacities/UnassignWorkspaces",
296
+ method="post",
297
+ payload=payload,
308
298
  )
309
299
 
310
- if response.status_code != 200:
311
- raise FabricHTTPException(response)
312
-
313
300
  print(
314
301
  f"{icons.green_dot} A total of {len(workspacesIds)} workspaces have been unassigned."
315
302
  )
@@ -329,25 +316,20 @@ def list_tenant_settings() -> pd.DataFrame:
329
316
  pandas.DataFrame
330
317
  A pandas dataframe showing the tenant settings.
331
318
  """
332
- client = fabric.FabricRestClient(token_provider=auth.token_provider.get())
333
-
334
- response = client.get("/v1/admin/tenantsettings")
335
319
 
336
- if response.status_code != 200:
337
- raise FabricHTTPException(response)
320
+ columns = {
321
+ "Setting Name": "string",
322
+ "Title": "string",
323
+ "Enabled": "bool",
324
+ "Can Specify Security Groups": "bool",
325
+ "Tenant Setting Group": "string",
326
+ "Enabled Security Groups": "string",
327
+ }
328
+ df = _create_dataframe(columns=columns)
338
329
 
339
- df = pd.DataFrame(
340
- columns=[
341
- "Setting Name",
342
- "Title",
343
- "Enabled",
344
- "Can Specify Security Groups",
345
- "Tenant Setting Group",
346
- "Enabled Security Groups",
347
- ]
348
- )
330
+ response = _base_api(request="/v1/admin/tenantsettings", client="fabric_sp")
349
331
 
350
- for i in response.json().get("tenantSettings", []):
332
+ for i in response.json().get("value", []):
351
333
  new_data = {
352
334
  "Setting Name": i.get("settingName"),
353
335
  "Title": i.get("title"),
@@ -358,8 +340,7 @@ def list_tenant_settings() -> pd.DataFrame:
358
340
  }
359
341
  df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
360
342
 
361
- bool_cols = ["Enabled", "Can Specify Security Groups"]
362
- df[bool_cols] = df[bool_cols].astype(bool)
343
+ _update_dataframe_datatypes(dataframe=df, column_map=columns)
363
344
 
364
345
  return df
365
346
 
@@ -384,28 +365,26 @@ def list_capacities_delegated_tenant_settings(
384
365
  pandas.DataFrame | dict
385
366
  A pandas dataframe showing a list of tenant setting overrides that override at the capacities.
386
367
  """
387
- df = pd.DataFrame(
388
- columns=[
389
- "Capacity Id",
390
- "Setting Name",
391
- "Setting Title",
392
- "Setting Enabled",
393
- "Can Specify Security Groups",
394
- "Enabled Security Groups",
395
- "Tenant Setting Group",
396
- "Tenant Setting Properties",
397
- "Delegate to Workspace",
398
- "Delegated From",
399
- ]
400
- )
401
368
 
402
- client = fabric.FabricRestClient(token_provider=auth.token_provider.get())
403
- response = client.get("/v1/admin/capacities/delegatedTenantSettingOverrides")
404
-
405
- if response.status_code != 200:
406
- raise FabricHTTPException(response)
369
+ columns = {
370
+ "Capacity Id": "string",
371
+ "Setting Name": "string",
372
+ "Setting Title": "string",
373
+ "Setting Enabled": "bool",
374
+ "Can Specify Security Groups": "bool",
375
+ "Enabled Security Groups": "string",
376
+ "Tenant Setting Group": "string",
377
+ "Tenant Setting Properties": "string",
378
+ "Delegate to Workspace": "bool",
379
+ "Delegated From": "string",
380
+ }
381
+ df = _create_dataframe(columns=columns)
407
382
 
408
- responses = pagination(client, response)
383
+ responses = _base_api(
384
+ request="/v1/admin/capacities/delegatedTenantSettingOverrides",
385
+ client="fabric_sp",
386
+ uses_pagination=True,
387
+ )
409
388
 
410
389
  if return_dataframe:
411
390
  for r in responses:
@@ -433,12 +412,7 @@ def list_capacities_delegated_tenant_settings(
433
412
  [df, pd.DataFrame(new_data, index=[0])], ignore_index=True
434
413
  )
435
414
 
436
- bool_cols = [
437
- "Enabled Security Groups",
438
- "Can Specify Security Groups",
439
- "Delegate to Workspace",
440
- ]
441
- df[bool_cols] = df[bool_cols].astype(bool)
415
+ _update_dataframe_datatypes(dataframe=df, column_map=columns)
442
416
 
443
417
  return df
444
418
  else:
@@ -481,10 +455,7 @@ def list_modified_workspaces(
481
455
  pandas.DataFrame
482
456
  A pandas dataframe showing a list of workspace IDs in the organization.
483
457
  """
484
- client = fabric.FabricRestClient(token_provider=auth.token_provider.get())
485
-
486
458
  params = {}
487
-
488
459
  url = "/v1.0/myorg/admin/workspaces/modified"
489
460
 
490
461
  if modified_since is not None:
@@ -500,11 +471,7 @@ def list_modified_workspaces(
500
471
  params["excludePersonalWorkspaces"] = exclude_personal_workspaces
501
472
 
502
473
  url = _build_url(url, params)
503
-
504
- response = client.get(url)
505
-
506
- if response.status_code != 200:
507
- raise FabricHTTPException(response)
474
+ response = _base_api(request=url, client="fabric_sp")
508
475
 
509
476
  df = pd.DataFrame(response.json()).rename(columns={"id": "Workspace Id"})
510
477
 
@@ -538,31 +505,29 @@ def list_datasets(
538
505
  A pandas dataframe showing a list of datasets for the organization.
539
506
  """
540
507
 
541
- columns = [
542
- "Dataset Id",
543
- "Dataset Name",
544
- "Web URL",
545
- "Add Rows API Enabled",
546
- "Configured By",
547
- "Is Refreshable",
548
- "Is Effective Identity Required",
549
- "Is Effective Identity Roles Required",
550
- "Target Storage Mode",
551
- "Created Date",
552
- "Content Provider Type",
553
- "Create Report Embed URL",
554
- "QnA Embed URL",
555
- "Upstream Datasets",
556
- "Users",
557
- "Is In Place Sharing Enabled",
558
- "Workspace Id",
559
- "Auto Sync Read Only Replicas",
560
- "Max Read Only Replicas",
561
- ]
562
-
563
- df = pd.DataFrame(columns=columns)
564
-
565
- client = fabric.FabricRestClient(token_provider=auth.token_provider.get())
508
+ columns = {
509
+ "Dataset Id": "string",
510
+ "Dataset Name": "string",
511
+ "Web URL": "string",
512
+ "Add Rows API Enabled": "bool",
513
+ "Configured By": "string",
514
+ "Is Refreshable": "bool",
515
+ "Is Effective Identity Required": "bool",
516
+ "Is Effective Identity Roles Required": "bool",
517
+ "Target Storage Mode": "string",
518
+ "Created Date": "datetime",
519
+ "Content Provider Type": "string",
520
+ "Create Report Embed URL": "string",
521
+ "QnA Embed URL": "string",
522
+ "Upstream Datasets": "string",
523
+ "Users": "string",
524
+ "Is In Place Sharing Enabled": "bool",
525
+ "Workspace Id": "string",
526
+ "Auto Sync Read Only Replicas": "bool",
527
+ "Max Read Only Replicas": "int",
528
+ }
529
+
530
+ df = _create_dataframe(columns=columns)
566
531
 
567
532
  params = {}
568
533
  url = "/v1.0/myorg/admin/datasets"
@@ -577,11 +542,7 @@ def list_datasets(
577
542
  params["$skip"] = skip
578
543
 
579
544
  url = _build_url(url, params)
580
-
581
- response = client.get(url)
582
-
583
- if response.status_code != 200:
584
- raise FabricHTTPException(response)
545
+ response = _base_api(request=url, client="fabric_sp")
585
546
 
586
547
  rows = []
587
548
  for v in response.json().get("value", []):
@@ -616,20 +577,9 @@ def list_datasets(
616
577
  )
617
578
 
618
579
  if rows:
619
- df = pd.DataFrame(rows, columns=columns)
620
-
621
- bool_cols = [
622
- "Add Rows API Enabled",
623
- "Is Refreshable",
624
- "Is Effective Identity Required",
625
- "Is Effective Identity Roles Required",
626
- "Is In Place Sharing Enabled",
627
- "Auto Sync Read Only Replicas",
628
- ]
629
- df[bool_cols] = df[bool_cols].astype(bool)
580
+ df = pd.DataFrame(rows, columns=list(columns.keys()))
630
581
 
631
- df["Created Date"] = pd.to_datetime(df["Created Date"])
632
- df["Max Read Only Replicas"] = df["Max Read Only Replicas"].astype(int)
582
+ _update_dataframe_datatypes(dataframe=df, column_map=columns)
633
583
 
634
584
  return df
635
585
 
@@ -654,23 +604,21 @@ def list_access_entities(
654
604
  pandas.DataFrame
655
605
  A pandas dataframe showing a list of permission details for Fabric and Power BI items the specified user can access.
656
606
  """
657
- df = pd.DataFrame(
658
- columns=[
659
- "Item Id",
660
- "Item Name",
661
- "Item Type",
662
- "Permissions",
663
- "Additional Permissions",
664
- ]
665
- )
666
- client = fabric.FabricRestClient(token_provider=auth.token_provider.get())
667
-
668
- response = client.get(f"/v1/admin/users/{user_email_address}/access")
669
607
 
670
- if response.status_code != 200:
671
- raise FabricHTTPException(response)
608
+ columns = {
609
+ "Item Id": "string",
610
+ "Item Name": "string",
611
+ "Item Type": "string",
612
+ "Permissions": "string",
613
+ "Additional Permissions": "string",
614
+ }
615
+ df = _create_dataframe(columns=columns)
672
616
 
673
- responses = pagination(client, response)
617
+ responses = _base_api(
618
+ request=f"/v1/admin/users/{user_email_address}/access",
619
+ client="fabric_sp",
620
+ uses_pagination=True,
621
+ )
674
622
 
675
623
  for r in responses:
676
624
  for v in r.get("accessEntities", []):
@@ -712,22 +660,19 @@ def list_workspace_access_details(
712
660
  """
713
661
  (workspace_name, workspace_id) = _resolve_workspace_name_and_id(workspace)
714
662
 
715
- df = pd.DataFrame(
716
- columns=[
717
- "User Id",
718
- "User Name",
719
- "User Type",
720
- "Workspace Name",
721
- "Workspace Id",
722
- "Workspace Role",
723
- ]
724
- )
725
-
726
- client = fabric.FabricRestClient(token_provider=auth.token_provider.get())
663
+ columns = {
664
+ "User Id": "string",
665
+ "User Name": "string",
666
+ "User Type": "string",
667
+ "Workspace Name": "string",
668
+ "Workspace Id": "string",
669
+ "Workspace Role": "string",
670
+ }
671
+ df = _create_dataframe(columns=columns)
727
672
 
728
- response = client.get(f"/v1/admin/workspaces/{workspace_id}/users")
729
- if response.status_code != 200:
730
- raise FabricHTTPException(response)
673
+ response = _base_api(
674
+ request=f"/v1/admin/workspaces/{workspace_id}/users", client="fabric_sp"
675
+ )
731
676
 
732
677
  for v in response.json().get("accessDetails", []):
733
678
  new_data = {
@@ -783,51 +728,49 @@ def list_activity_events(
783
728
  f"{icons.red_dot} Start and End Times must be within the same UTC day. Please refer to the documentation here: https://learn.microsoft.com/rest/api/power-bi/admin/get-activity-events#get-audit-activity-events-within-a-time-window-and-for-a-specific-activity-type-and-user-id-example"
784
729
  )
785
730
 
786
- df = pd.DataFrame(
787
- columns=[
788
- "Id",
789
- "Record Type",
790
- "Creation Time",
791
- "Operation",
792
- "Organization Id",
793
- "User Type",
794
- "User Key",
795
- "Workload",
796
- "Result Status",
797
- "User Id",
798
- "Client IP",
799
- "User Agent",
800
- "Activity",
801
- "Workspace Name",
802
- "Workspace Id",
803
- "Object Id",
804
- "Request Id",
805
- "Object Type",
806
- "Object Display Name",
807
- "Experience",
808
- "Refresh Enforcement Policy",
809
- "Is Success",
810
- "Activity Id",
811
- "Item Name",
812
- "Dataset Name",
813
- "Report Name",
814
- "Capacity Id",
815
- "Capacity Name",
816
- "App Name",
817
- "Dataset Id",
818
- "Report Id",
819
- "Artifact Id",
820
- "Artifact Name",
821
- "Report Type",
822
- "App Report Id",
823
- "Distribution Method",
824
- "Consumption Method",
825
- "Artifact Kind",
826
- ]
827
- )
731
+ columns = {
732
+ "Id": "string",
733
+ "Record Type": "string",
734
+ "Creation Time": "datetime",
735
+ "Operation": "string",
736
+ "Organization Id": "string",
737
+ "User Type": "string",
738
+ "User Key": "string",
739
+ "Workload": "string",
740
+ "Result Status": "string",
741
+ "User Id": "string",
742
+ "Client IP": "string",
743
+ "User Agent": "string",
744
+ "Activity": "string",
745
+ "Workspace Name": "string",
746
+ "Workspace Id": "string",
747
+ "Object Id": "string",
748
+ "Request Id": "string",
749
+ "Object Type": "string",
750
+ "Object Display Name": "string",
751
+ "Experience": "string",
752
+ "Refresh Enforcement Policy": "string",
753
+ "Is Success": "bool",
754
+ "Activity Id": "string",
755
+ "Item Name": "string",
756
+ "Dataset Name": "string",
757
+ "Report Name": "string",
758
+ "Capacity Id": "string",
759
+ "Capacity Name": "string",
760
+ "App Name": "string",
761
+ "Dataset Id": "string",
762
+ "Report Id": "string",
763
+ "Artifact Id": "string",
764
+ "Artifact Name": "string",
765
+ "Report Type": "string",
766
+ "App Report Id": "string",
767
+ "Distribution Method": "string",
768
+ "Consumption Method": "string",
769
+ "Artifact Kind": "string",
770
+ }
771
+ df = _create_dataframe(columns=columns)
828
772
 
829
773
  response_json = {"activityEventEntities": []}
830
- client = fabric.FabricRestClient(token_provider=auth.token_provider.get())
831
774
  url = f"/v1.0/myorg/admin/activityevents?startDateTime='{start_time}'&endDateTime='{end_time}'"
832
775
 
833
776
  conditions = []
@@ -839,12 +782,7 @@ def list_activity_events(
839
782
  if conditions:
840
783
  url += f"&$filter={f' and '.join(conditions)}"
841
784
 
842
- response = client.get(url)
843
-
844
- if response.status_code != 200:
845
- raise FabricHTTPException(response)
846
-
847
- responses = pagination(client, response)
785
+ responses = _base_api(request=url, client="fabric_sp", uses_pagination=True)
848
786
 
849
787
  for r in responses:
850
788
  if return_dataframe:
@@ -899,7 +837,7 @@ def list_activity_events(
899
837
  )
900
838
 
901
839
  if return_dataframe:
902
- df["Creation Time"] = pd.to_datetime(df["Creation Time"])
840
+ _update_dataframe_datatypes(dataframe=df, column_map=columns)
903
841
  return df
904
842
  else:
905
843
  return response_json
@@ -931,19 +869,20 @@ def _list_capacities_meta() -> pd.DataFrame:
931
869
  A pandas dataframe showing the capacities and their properties
932
870
  """
933
871
 
934
- client = fabric.FabricRestClient()
872
+ columns = {
873
+ "Capacity Id": "string",
874
+ "Capacity Name": "string",
875
+ "Sku": "string",
876
+ "Region": "string",
877
+ "State": "string",
878
+ "Admins": "string",
879
+ }
880
+ df = _create_dataframe(columns=columns)
935
881
 
936
- df = pd.DataFrame(
937
- columns=["Capacity Id", "Capacity Name", "Sku", "Region", "State", "Admins"]
882
+ responses = _base_api(
883
+ request="/v1.0/myorg/admin/capacities", client="fabric_sp", uses_pagination=True
938
884
  )
939
885
 
940
- response = client.get("/v1.0/myorg/admin/capacities")
941
-
942
- if response.status_code != 200:
943
- raise FabricHTTPException(response)
944
-
945
- responses = pagination(client, response)
946
-
947
886
  for r in responses:
948
887
  for i in r.get("value", []):
949
888
  new_data = {
@@ -1006,25 +945,25 @@ def list_reports(
1006
945
  A pandas dataframe showing a list of reports for the organization.
1007
946
  """
1008
947
 
1009
- columns = [
1010
- "Report Id",
1011
- "Report Name",
1012
- "Type",
1013
- "Web URL",
1014
- "Embed URL",
1015
- "Dataset Id",
1016
- "Created Date",
1017
- "Modified Date",
1018
- "Created By",
1019
- "Modified By",
1020
- "Sensitivity Label Id",
1021
- "Users",
1022
- "Subscriptions",
1023
- "Workspace Id",
1024
- "Report Flags",
1025
- ]
1026
-
1027
- df = pd.DataFrame(columns=columns)
948
+ columns = {
949
+ "Report Id": "string",
950
+ "Report Name": "string",
951
+ "Type": "string",
952
+ "Web URL": "string",
953
+ "Embed URL": "string",
954
+ "Dataset Id": "string",
955
+ "Created Date": "datetime_coerce",
956
+ "Modified Date": "datetime_coerce",
957
+ "Created By": "string",
958
+ "Modified By": "string",
959
+ "Sensitivity Label Id": "string",
960
+ "Users": "string",
961
+ "Subscriptions": "string",
962
+ "Workspace Id": "string",
963
+ "Report Flags": "int",
964
+ }
965
+
966
+ df = _create_dataframe(columns=columns)
1028
967
 
1029
968
  url = "/v1.0/myorg/admin/reports?"
1030
969
  if top is not None:
@@ -1035,14 +974,9 @@ def list_reports(
1035
974
  url += f"$filter={filter}&"
1036
975
 
1037
976
  url.rstrip("$").rstrip("?")
1038
-
1039
- client = fabric.FabricRestClient(token_provider=auth.token_provider.get())
1040
- response = client.get(url)
1041
-
1042
- if response.status_code != 200:
1043
- raise FabricHTTPException(response)
1044
-
977
+ response = _base_api(request=url, client="fabric_sp")
1045
978
  rows = []
979
+
1046
980
  for v in response.json().get("value", []):
1047
981
  rows.append(
1048
982
  {
@@ -1065,13 +999,9 @@ def list_reports(
1065
999
  )
1066
1000
 
1067
1001
  if rows:
1068
- df = pd.DataFrame(rows, columns=columns)
1002
+ df = pd.DataFrame(rows, columns=list(columns.keys()))
1069
1003
 
1070
- int_cols = ["Report Flags"]
1071
- df[int_cols] = df[int_cols].astype(int)
1072
-
1073
- df["Created Date"] = pd.to_datetime(df["Created Date"], errors="coerce")
1074
- df["Modified Date"] = pd.to_datetime(df["Modified Date"], errors="coerce")
1004
+ _update_dataframe_datatypes(dataframe=df, column_map=columns)
1075
1005
 
1076
1006
  return df
1077
1007
 
@@ -1101,23 +1031,20 @@ def get_capacity_assignment_status(
1101
1031
 
1102
1032
  (workspace_name, workspace_id) = _resolve_workspace_name_and_id(workspace)
1103
1033
 
1104
- df = pd.DataFrame(
1105
- columns=[
1106
- "Status",
1107
- "Activity Id",
1108
- "Start Time",
1109
- "End Time",
1110
- "Capacity Id",
1111
- "Capacity Name",
1112
- ]
1113
- )
1114
-
1115
- client = fabric.FabricRestClient(token_provider=auth.token_provider.get())
1116
- response = client.get(f"/v1.0/myorg/groups/{workspace_id}/CapacityAssignmentStatus")
1117
-
1118
- if response.status_code != 200:
1119
- raise FabricHTTPException(response)
1034
+ columns = {
1035
+ "Status": "string",
1036
+ "Activity Id": "string",
1037
+ "Start Time": "datetime",
1038
+ "End Time": "datetime",
1039
+ "Capacity Id": "string",
1040
+ "Capacity Name": "string",
1041
+ }
1042
+ df = _create_dataframe(columns=columns)
1120
1043
 
1044
+ response = _base_api(
1045
+ request=f"/v1.0/myorg/groups/{workspace_id}/CapacityAssignmentStatus",
1046
+ client="fabric_sp",
1047
+ )
1121
1048
  v = response.json()
1122
1049
  capacity_id = v.get("capacityId")
1123
1050
 
@@ -1134,4 +1061,40 @@ def get_capacity_assignment_status(
1134
1061
 
1135
1062
  df = pd.concat([df, pd.DataFrame([new_data])], ignore_index=True)
1136
1063
 
1064
+ _update_dataframe_datatypes(dataframe=df, column_map=columns)
1065
+
1137
1066
  return df
1067
+
1068
+
1069
+ def get_capacity_state(capacity: Optional[str | UUID] = None):
1070
+ """
1071
+ Gets the state of a capacity.
1072
+
1073
+ Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
1074
+
1075
+ Parameters
1076
+ ----------
1077
+ capacity : str | uuid.UUID, default=None
1078
+ The capacity name or ID.
1079
+ Defaults to None which resolves to the capacity of the attached lakehouse
1080
+ or if no lakehouse is attached, resolves to the workspace of the notebook.
1081
+
1082
+ Returns
1083
+ -------
1084
+ str
1085
+ The capacity state.
1086
+ """
1087
+
1088
+ df = list_capacities()
1089
+
1090
+ if capacity is None:
1091
+ capacity = get_capacity_id()
1092
+ if _is_valid_uuid(capacity):
1093
+ df_filt = df[df["Capacity Id"] == capacity]
1094
+ else:
1095
+ df_filt = df[df["Capacity Name"] == capacity]
1096
+
1097
+ if df_filt.empty:
1098
+ raise ValueError(f"{icons.red_dot} The capacity '{capacity}' was not found.")
1099
+
1100
+ return df_filt["State"].iloc[0]