semantic-link-labs 0.6.0-py3-none-any.whl → 0.7.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of semantic-link-labs might be problematic.

Files changed (104)
  1. semantic_link_labs-0.7.1.dist-info/METADATA +148 -0
  2. semantic_link_labs-0.7.1.dist-info/RECORD +111 -0
  3. {semantic_link_labs-0.6.0.dist-info → semantic_link_labs-0.7.1.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +26 -2
  5. sempy_labs/_ai.py +3 -65
  6. sempy_labs/_bpa_translation/_translations_am-ET.po +828 -0
  7. sempy_labs/_bpa_translation/_translations_ar-AE.po +860 -0
  8. sempy_labs/_bpa_translation/_translations_cs-CZ.po +894 -0
  9. sempy_labs/_bpa_translation/_translations_da-DK.po +894 -0
  10. sempy_labs/_bpa_translation/_translations_de-DE.po +933 -0
  11. sempy_labs/_bpa_translation/_translations_el-GR.po +936 -0
  12. sempy_labs/_bpa_translation/_translations_es-ES.po +915 -0
  13. sempy_labs/_bpa_translation/_translations_fa-IR.po +883 -0
  14. sempy_labs/_bpa_translation/_translations_fr-FR.po +938 -0
  15. sempy_labs/_bpa_translation/_translations_ga-IE.po +912 -0
  16. sempy_labs/_bpa_translation/_translations_he-IL.po +855 -0
  17. sempy_labs/_bpa_translation/_translations_hi-IN.po +892 -0
  18. sempy_labs/_bpa_translation/_translations_hu-HU.po +910 -0
  19. sempy_labs/_bpa_translation/_translations_is-IS.po +887 -0
  20. sempy_labs/_bpa_translation/_translations_it-IT.po +931 -0
  21. sempy_labs/_bpa_translation/_translations_ja-JP.po +805 -0
  22. sempy_labs/_bpa_translation/_translations_nl-NL.po +924 -0
  23. sempy_labs/_bpa_translation/_translations_pl-PL.po +913 -0
  24. sempy_labs/_bpa_translation/_translations_pt-BR.po +909 -0
  25. sempy_labs/_bpa_translation/_translations_pt-PT.po +904 -0
  26. sempy_labs/_bpa_translation/_translations_ru-RU.po +909 -0
  27. sempy_labs/_bpa_translation/_translations_ta-IN.po +922 -0
  28. sempy_labs/_bpa_translation/_translations_te-IN.po +896 -0
  29. sempy_labs/_bpa_translation/_translations_th-TH.po +873 -0
  30. sempy_labs/_bpa_translation/_translations_zh-CN.po +767 -0
  31. sempy_labs/_bpa_translation/_translations_zu-ZA.po +916 -0
  32. sempy_labs/_clear_cache.py +9 -4
  33. sempy_labs/_generate_semantic_model.py +30 -56
  34. sempy_labs/_helper_functions.py +361 -14
  35. sempy_labs/_icons.py +10 -1
  36. sempy_labs/_list_functions.py +539 -260
  37. sempy_labs/_model_bpa.py +194 -18
  38. sempy_labs/_model_bpa_bulk.py +367 -0
  39. sempy_labs/_model_bpa_rules.py +19 -8
  40. sempy_labs/_model_dependencies.py +12 -10
  41. sempy_labs/_one_lake_integration.py +7 -7
  42. sempy_labs/_query_scale_out.py +61 -96
  43. sempy_labs/_refresh_semantic_model.py +7 -0
  44. sempy_labs/_translations.py +154 -1
  45. sempy_labs/_vertipaq.py +103 -90
  46. sempy_labs/directlake/__init__.py +5 -1
  47. sempy_labs/directlake/_directlake_schema_compare.py +27 -31
  48. sempy_labs/directlake/_directlake_schema_sync.py +55 -66
  49. sempy_labs/directlake/_dl_helper.py +233 -0
  50. sempy_labs/directlake/_get_directlake_lakehouse.py +6 -7
  51. sempy_labs/directlake/_get_shared_expression.py +1 -1
  52. sempy_labs/directlake/_guardrails.py +17 -13
  53. sempy_labs/directlake/_update_directlake_partition_entity.py +54 -30
  54. sempy_labs/directlake/_warm_cache.py +1 -1
  55. sempy_labs/lakehouse/__init__.py +2 -0
  56. sempy_labs/lakehouse/_get_lakehouse_tables.py +61 -69
  57. sempy_labs/lakehouse/_lakehouse.py +66 -9
  58. sempy_labs/lakehouse/_shortcuts.py +1 -1
  59. sempy_labs/migration/_create_pqt_file.py +174 -182
  60. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +236 -268
  61. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +75 -73
  62. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +442 -426
  63. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +91 -97
  64. sempy_labs/migration/_refresh_calc_tables.py +92 -101
  65. sempy_labs/report/_BPAReportTemplate.json +232 -0
  66. sempy_labs/report/__init__.py +6 -2
  67. sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
  68. sempy_labs/report/_bpareporttemplate/.platform +11 -0
  69. sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
  70. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
  71. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
  72. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
  73. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
  74. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
  75. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
  76. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
  77. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
  78. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
  79. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
  80. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
  81. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
  82. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
  83. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
  84. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
  85. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
  86. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
  87. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
  88. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
  89. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
  90. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
  91. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
  92. sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
  93. sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
  94. sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
  95. sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
  96. sempy_labs/report/_generate_report.py +255 -139
  97. sempy_labs/report/_report_functions.py +26 -33
  98. sempy_labs/report/_report_rebind.py +31 -26
  99. sempy_labs/tom/_model.py +75 -58
  100. semantic_link_labs-0.6.0.dist-info/METADATA +0 -22
  101. semantic_link_labs-0.6.0.dist-info/RECORD +0 -54
  102. sempy_labs/directlake/_fallback.py +0 -60
  103. {semantic_link_labs-0.6.0.dist-info → semantic_link_labs-0.7.1.dist-info}/LICENSE +0 -0
  104. {semantic_link_labs-0.6.0.dist-info → semantic_link_labs-0.7.1.dist-info}/top_level.txt +0 -0
sempy_labs/_list_functions.py
@@ -4,10 +4,15 @@ from sempy_labs._helper_functions import (
  resolve_lakehouse_name,
  create_relationship_name,
  resolve_lakehouse_id,
+ resolve_dataset_id,
+ _decode_b64,
+ pagination,
+ lro,
+ resolve_item_type,
  )
  import pandas as pd
- import json
- import time
+ import base64
+ import requests
  from pyspark.sql import SparkSession
  from typing import Optional
  import sempy_labs._icons as icons
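
Note on the new imports: resolve_dataset_id, _decode_b64, pagination, lro and resolve_item_type all come from sempy_labs/_helper_functions.py, which also changed in this release (+361 −14) but is not part of this excerpt. As a hedged illustration only, _decode_b64 presumably wraps base64 decoding of an InlineBase64 item-definition payload, roughly like this sketch (the real helper may differ):

    import base64

    def decode_b64_sketch(payload: str, encoding: str = "utf-8") -> str:
        # Illustrative stand-in for sempy_labs._helper_functions._decode_b64:
        # turn an InlineBase64 payload back into readable text.
        return base64.b64decode(payload).decode(encoding)
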
@@ -37,8 +42,7 @@ def get_object_level_security(

  from sempy_labs.tom import connect_semantic_model

- if workspace is None:
- workspace = fabric.resolve_workspace_name()
+ workspace = fabric.resolve_workspace_name(workspace)

  df = pd.DataFrame(columns=["Role Name", "Object Type", "Table Name", "Object Name"])

@@ -101,7 +105,7 @@ def list_tables(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
  A pandas dataframe showing the semantic model's tables and their properties.
  """

- workspace = fabric.resolve_workspace_name()
+ workspace = fabric.resolve_workspace_name(workspace)

  df = fabric.list_tables(
  dataset=dataset,
@@ -138,7 +142,7 @@ def list_annotations(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:

  from sempy_labs.tom import connect_semantic_model

- workspace = fabric.resolve_workspace_name()
+ workspace = fabric.resolve_workspace_name(workspace)

  df = pd.DataFrame(
  columns=[
@@ -381,8 +385,7 @@ def list_columns(
  get_direct_lake_lakehouse,
  )

- if workspace is None:
- workspace = fabric.resolve_workspace_name()
+ workspace = fabric.resolve_workspace_name(workspace)

  dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)

@@ -422,10 +425,10 @@ def list_columns(
  ].iloc[0]

  # Build the query to be executed dynamically
- query = query + f"COUNT(DISTINCT({scName})) AS {scName}, "
+ query = f"{query}COUNT(DISTINCT({scName})) AS {scName}, "
  query = query[:-2]
- query = query + f" FROM {lakehouse}.{lakeTName}"
+ query = f"{query} FROM {lakehouse}.{lakeTName}"

  sql_statements.append((table_name, query))

  spark = SparkSession.builder.getOrCreate()
@@ -487,8 +490,10 @@ def list_dashboards(workspace: Optional[str] = None) -> pd.DataFrame:

  client = fabric.PowerBIRestClient()
  response = client.get(f"/v1.0/myorg/groups/{workspace_id}/dashboards")
+ if response.status_code != 200:
+ raise FabricHTTPException(response)

- for v in response.json()["value"]:
+ for v in response.json().get("value", []):
  new_data = {
  "Dashboard ID": v.get("id"),
  "Dashboard Name": v.get("displayName"),
@@ -539,23 +544,29 @@ def list_lakehouses(workspace: Optional[str] = None) -> pd.DataFrame:
  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

  client = fabric.FabricRestClient()
- response = client.get(f"/v1/workspaces/{workspace_id}/lakehouses/")
+ response = client.get(f"/v1/workspaces/{workspace_id}/lakehouses")

- for v in response.json()["value"]:
- prop = v.get("properties", {})
- sqlEPProp = prop.get("sqlEndpointProperties", {})
+ if response.status_code != 200:
+ raise FabricHTTPException(response)

- new_data = {
- "Lakehouse Name": v.get("displayName"),
- "Lakehouse ID": v.get("id"),
- "Description": v.get("description"),
- "OneLake Tables Path": prop.get("oneLakeTablesPath"),
- "OneLake Files Path": prop.get("oneLakeFilesPath"),
- "SQL Endpoint Connection String": sqlEPProp.get("connectionString"),
- "SQL Endpoint ID": sqlEPProp.get("id"),
- "SQL Endpoint Provisioning Status": sqlEPProp.get("provisioningStatus"),
- }
- df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+ responses = pagination(client, response)
+
+ for r in responses:
+ for v in r.get("value", []):
+ prop = v.get("properties", {})
+ sqlEPProp = prop.get("sqlEndpointProperties", {})
+
+ new_data = {
+ "Lakehouse Name": v.get("displayName"),
+ "Lakehouse ID": v.get("id"),
+ "Description": v.get("description"),
+ "OneLake Tables Path": prop.get("oneLakeTablesPath"),
+ "OneLake Files Path": prop.get("oneLakeFilesPath"),
+ "SQL Endpoint Connection String": sqlEPProp.get("connectionString"),
+ "SQL Endpoint ID": sqlEPProp.get("id"),
+ "SQL Endpoint Provisioning Status": sqlEPProp.get("provisioningStatus"),
+ }
+ df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

  return df

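
Two patterns recur in the rewritten listing functions below: a non-200 response now raises FabricHTTPException immediately, and results are gathered through the new pagination helper rather than a single response.json()["value"] pass. The helper's implementation is in sempy_labs/_helper_functions.py and is not shown here; based on how it is consumed, it presumably follows the Fabric REST API's continuation-token convention, roughly like this hedged sketch:

    def pagination_sketch(client, response):
        # Illustrative stand-in for sempy_labs._helper_functions.pagination:
        # collect the first page plus any continuation pages as parsed JSON dicts.
        responses = []
        response_json = response.json()
        responses.append(response_json)

        # Fabric list APIs signal further pages via continuationToken / continuationUri.
        while response_json.get("continuationToken") is not None:
            response = client.get(response_json.get("continuationUri"))
            response_json = response.json()
            responses.append(response_json)

        return responses

Callers can treat the raised error as a normal exception; for example (assuming FabricHTTPException is the class exposed by sempy.fabric.exceptions and that list_lakehouses is re-exported from the package root):

    from sempy.fabric.exceptions import FabricHTTPException
    import sempy_labs as labs

    try:
        lakehouses = labs.list_lakehouses(workspace="Sales Analytics")  # placeholder workspace name
    except FabricHTTPException as exc:
        print(f"Listing failed: {exc}")
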
@@ -591,20 +602,25 @@ def list_warehouses(workspace: Optional[str] = None) -> pd.DataFrame:
  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

  client = fabric.FabricRestClient()
- response = client.get(f"/v1/workspaces/{workspace_id}/warehouses/")
+ response = client.get(f"/v1/workspaces/{workspace_id}/warehouses")
+ if response.status_code != 200:
+ raise FabricHTTPException(response)

- for v in response.json()["value"]:
- prop = v.get("properties", {})
+ responses = pagination(client, response)

- new_data = {
- "Warehouse Name": v.get("displayName"),
- "Warehouse ID": v.get("id"),
- "Description": v.get("description"),
- "Connection Info": prop.get("connectionInfo"),
- "Created Date": prop.get("createdDate"),
- "Last Updated Time": prop.get("lastUpdatedTime"),
- }
- df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+ for r in responses:
+ for v in r.get("value", []):
+ prop = v.get("properties", {})
+
+ new_data = {
+ "Warehouse Name": v.get("displayName"),
+ "Warehouse ID": v.get("id"),
+ "Description": v.get("description"),
+ "Connection Info": prop.get("connectionInfo"),
+ "Created Date": prop.get("createdDate"),
+ "Last Updated Time": prop.get("lastUpdatedTime"),
+ }
+ df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

  return df

@@ -631,16 +647,21 @@ def list_sqlendpoints(workspace: Optional[str] = None) -> pd.DataFrame:
  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

  client = fabric.FabricRestClient()
- response = client.get(f"/v1/workspaces/{workspace_id}/sqlEndpoints/")
+ response = client.get(f"/v1/workspaces/{workspace_id}/sqlEndpoints")
+ if response.status_code != 200:
+ raise FabricHTTPException(response)

- for v in response.json()["value"]:
+ responses = pagination(client, response)

- new_data = {
- "SQL Endpoint ID": v.get("id"),
- "SQL Endpoint Name": v.get("displayName"),
- "Description": v.get("description"),
- }
- df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+ for r in responses:
+ for v in r.get("value", []):
+
+ new_data = {
+ "SQL Endpoint ID": v.get("id"),
+ "SQL Endpoint Name": v.get("displayName"),
+ "Description": v.get("description"),
+ }
+ df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

  return df

@@ -669,16 +690,21 @@ def list_mirroredwarehouses(workspace: Optional[str] = None) -> pd.DataFrame:
  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

  client = fabric.FabricRestClient()
- response = client.get(f"/v1/workspaces/{workspace_id}/mirroredWarehouses/")
+ response = client.get(f"/v1/workspaces/{workspace_id}/mirroredWarehouses")
+ if response.status_code != 200:
+ raise FabricHTTPException(response)

- for v in response.json()["value"]:
+ responses = pagination(client, response)

- new_data = {
- "Mirrored Warehouse": v.get("displayName"),
- "Mirrored Warehouse ID": v.get("id"),
- "Description": v.get("description"),
- }
- df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+ for r in responses:
+ for v in r.get("value", []):
+
+ new_data = {
+ "Mirrored Warehouse": v.get("displayName"),
+ "Mirrored Warehouse ID": v.get("id"),
+ "Description": v.get("description"),
+ }
+ df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

  return df

@@ -715,21 +741,26 @@ def list_kqldatabases(workspace: Optional[str] = None) -> pd.DataFrame:
  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

  client = fabric.FabricRestClient()
- response = client.get(f"/v1/workspaces/{workspace_id}/kqlDatabases/")
+ response = client.get(f"/v1/workspaces/{workspace_id}/kqlDatabases")
+ if response.status_code != 200:
+ raise FabricHTTPException(response)

- for v in response.json()["value"]:
- prop = v.get("properties", {})
+ responses = pagination(client, response)

- new_data = {
- "KQL Database Name": v.get("displayName"),
- "KQL Database ID": v.get("id"),
- "Description": v.get("description"),
- "Parent Eventhouse Item ID": prop.get("parentEventhouseItemId"),
- "Query Service URI": prop.get("queryServiceUri"),
- "Ingestion Service URI": prop.get("ingestionServiceUri"),
- "Kusto Database Type": prop.get("kustoDatabaseType"),
- }
- df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+ for r in responses:
+ for v in r.get("value", []):
+ prop = v.get("properties", {})
+
+ new_data = {
+ "KQL Database Name": v.get("displayName"),
+ "KQL Database ID": v.get("id"),
+ "Description": v.get("description"),
+ "Parent Eventhouse Item ID": prop.get("parentEventhouseItemId"),
+ "Query Service URI": prop.get("queryServiceUri"),
+ "Ingestion Service URI": prop.get("ingestionServiceUri"),
+ "Kusto Database Type": prop.get("kustoDatabaseType"),
+ }
+ df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

  return df

@@ -756,16 +787,21 @@ def list_kqlquerysets(workspace: Optional[str] = None) -> pd.DataFrame:
  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

  client = fabric.FabricRestClient()
- response = client.get(f"/v1/workspaces/{workspace_id}/kqlQuerysets/")
+ response = client.get(f"/v1/workspaces/{workspace_id}/kqlQuerysets")
+ if response.status_code != 200:
+ raise FabricHTTPException(response)

- for v in response.json()["value"]:
+ responses = pagination(client, response)

- new_data = {
- "KQL Queryset Name": v.get("displayName"),
- "KQL Queryset ID": v.get("id"),
- "Description": v.get("description"),
- }
- df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+ for r in responses:
+ for v in r.get("value", []):
+
+ new_data = {
+ "KQL Queryset Name": v.get("displayName"),
+ "KQL Queryset ID": v.get("id"),
+ "Description": v.get("description"),
+ }
+ df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

  return df

@@ -792,19 +828,24 @@ def list_mlmodels(workspace: Optional[str] = None) -> pd.DataFrame:
  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

  client = fabric.FabricRestClient()
- response = client.get(f"/v1/workspaces/{workspace_id}/mlModels/")
+ response = client.get(f"/v1/workspaces/{workspace_id}/mlModels")
+ if response.status_code != 200:
+ raise FabricHTTPException(response)

- for v in response.json()["value"]:
- model_id = v.get("id")
- modelName = v.get("displayName")
- desc = v.get("description")
+ responses = pagination(client, response)

- new_data = {
- "ML Model Name": modelName,
- "ML Model ID": model_id,
- "Description": desc,
- }
- df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+ for r in responses:
+ for v in r.get("value", []):
+ model_id = v.get("id")
+ modelName = v.get("displayName")
+ desc = v.get("description")
+
+ new_data = {
+ "ML Model Name": modelName,
+ "ML Model ID": model_id,
+ "Description": desc,
+ }
+ df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

  return df

@@ -831,19 +872,24 @@ def list_eventstreams(workspace: Optional[str] = None) -> pd.DataFrame:
  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

  client = fabric.FabricRestClient()
- response = client.get(f"/v1/workspaces/{workspace_id}/eventstreams/")
+ response = client.get(f"/v1/workspaces/{workspace_id}/eventstreams")
+ if response.status_code != 200:
+ raise FabricHTTPException(response)

- for v in response.json()["value"]:
- model_id = v.get("id")
- modelName = v.get("displayName")
- desc = v.get("description")
+ responses = pagination(client, response)

- new_data = {
- "Eventstream Name": modelName,
- "Eventstream ID": model_id,
- "Description": desc,
- }
- df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+ for r in responses:
+ for v in r.get("value", []):
+ model_id = v.get("id")
+ modelName = v.get("displayName")
+ desc = v.get("description")
+
+ new_data = {
+ "Eventstream Name": modelName,
+ "Eventstream ID": model_id,
+ "Description": desc,
+ }
+ df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

  return df

@@ -870,19 +916,20 @@ def list_datapipelines(workspace: Optional[str] = None) -> pd.DataFrame:
  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

  client = fabric.FabricRestClient()
- response = client.get(f"/v1/workspaces/{workspace_id}/dataPipelines/")
+ response = client.get(f"/v1/workspaces/{workspace_id}/dataPipelines")
+ if response.status_code != 200:
+ raise FabricHTTPException(response)

- for v in response.json()["value"]:
- model_id = v.get("id")
- modelName = v.get("displayName")
- desc = v.get("description")
+ responses = pagination(client, response)

- new_data = {
- "Data Pipeline Name": modelName,
- "Data Pipeline ID": model_id,
- "Description": desc,
- }
- df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+ for r in responses:
+ for v in r.get("value", []):
+ new_data = {
+ "Data Pipeline Name": v.get("displayName"),
+ "Data Pipeline ID": v.get("id"),
+ "Description": v.get("description"),
+ }
+ df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

  return df

@@ -909,16 +956,20 @@ def list_mlexperiments(workspace: Optional[str] = None) -> pd.DataFrame:
  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

  client = fabric.FabricRestClient()
- response = client.get(f"/v1/workspaces/{workspace_id}/mlExperiments/")
+ response = client.get(f"/v1/workspaces/{workspace_id}/mlExperiments")
+ if response.status_code != 200:
+ raise FabricHTTPException(response)

- for v in response.json()["value"]:
+ responses = pagination(client, response)

- new_data = {
- "ML Experiment Name": v.get("displayName"),
- "ML Experiment ID": v.get("id"),
- "Description": v.get("description"),
- }
- df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+ for r in responses:
+ for v in r.get("value", []):
+ new_data = {
+ "ML Experiment Name": v.get("displayName"),
+ "ML Experiment ID": v.get("id"),
+ "Description": v.get("description"),
+ }
+ df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

  return df

@@ -945,16 +996,20 @@ def list_datamarts(workspace: Optional[str] = None) -> pd.DataFrame:
  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

  client = fabric.FabricRestClient()
- response = client.get(f"/v1/workspaces/{workspace_id}/datamarts/")
+ response = client.get(f"/v1/workspaces/{workspace_id}/datamarts")
+ if response.status_code != 200:
+ raise FabricHTTPException(response)

- for v in response.json()["value"]:
+ responses = pagination(client, response)

- new_data = {
- "Datamart Name": v.get("displayName"),
- "Datamart ID": v.get("id"),
- "Description": v.get("description"),
- }
- df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+ for r in responses:
+ for v in response.get("value", []):
+ new_data = {
+ "Datamart Name": v.get("displayName"),
+ "Datamart ID": v.get("id"),
+ "Description": v.get("description"),
+ }
+ df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

  return df

@@ -990,29 +1045,14 @@ def create_warehouse(

  client = fabric.FabricRestClient()
  response = client.post(
- f"/v1/workspaces/{workspace_id}/warehouses/", json=request_body
+ f"/v1/workspaces/{workspace_id}/warehouses/", json=request_body, lro_wait=True
  )

- if response.status_code == 201:
- print(
- f"{icons.green_dot} The '{warehouse}' warehouse has been created within the '{workspace}' workspace."
- )
- elif response.status_code == 202:
- operationId = response.headers["x-ms-operation-id"]
- response = client.get(f"/v1/operations/{operationId}")
- response_body = json.loads(response.content)
- while response_body["status"] != "Succeeded":
- time.sleep(3)
- response = client.get(f"/v1/operations/{operationId}")
- response_body = json.loads(response.content)
- response = client.get(f"/v1/operations/{operationId}/result")
- print(
- f"{icons.green_dot} The '{warehouse}' warehouse has been created within the '{workspace}' workspace."
- )
- else:
- raise ValueError(
- f"{icons.red_dot} Failed to create the '{warehouse}' warehouse within the '{workspace}' workspace."
- )
+ if response.status_code != 200:
+ raise FabricHTTPException(response)
+ print(
+ f"{icons.green_dot} The '{warehouse}' warehouse has been created within the '{workspace}' workspace."
+ )


  def update_item(
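
The lro_wait=True flag passed to client.post above delegates long-running-operation handling to the Fabric REST client, which is why the manual 201/202 branch was deleted. For reference, the removed logic amounted to polling the operations endpoint until it reported Succeeded; a condensed, illustrative sketch of that pattern:

    import json
    import time

    def wait_for_operation_sketch(client, response, poll_seconds: int = 3):
        # Illustrative only: the polling loop this release drops in favor of lro_wait=True.
        if response.status_code != 202:
            return response
        operation_id = response.headers["x-ms-operation-id"]
        body = json.loads(client.get(f"/v1/operations/{operation_id}").content)
        while body["status"] != "Succeeded":
            time.sleep(poll_seconds)
            body = json.loads(client.get(f"/v1/operations/{operation_id}").content)
        return client.get(f"/v1/operations/{operation_id}/result")
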
@@ -1118,8 +1158,7 @@ def list_relationships(
  A pandas dataframe showing the object level security for the semantic model.
  """

- if workspace is None:
- workspace = fabric.resolve_workspace_name()
+ workspace = fabric.resolve_workspace_name(workspace)

  dfR = fabric.list_relationships(dataset=dataset, workspace=workspace)

@@ -1199,9 +1238,10 @@ def list_dataflow_storage_accounts() -> pd.DataFrame:
  )
  client = fabric.PowerBIRestClient()
  response = client.get("/v1.0/myorg/dataflowStorageAccounts")
+ if response.status_code != 200:
+ raise FabricHTTPException(response)

- for v in response.json()["value"]:
-
+ for v in response.json().get("value", []):
  new_data = {
  "Dataflow Storage Account ID": v.get("id"),
  "Dataflow Storage Account Name": v.get("name"),
@@ -1301,22 +1341,21 @@ def list_workspace_role_assignments(workspace: Optional[str] = None) -> pd.DataFrame:

  client = fabric.FabricRestClient()
  response = client.get(f"/v1/workspaces/{workspace_id}/roleAssignments")
+ if response.status_code != 200:
+ raise FabricHTTPException(response)

- for i in response.json()["value"]:
- user_name = i.get("principal", {}).get("displayName")
- role_name = i.get("role")
- user_email = (
- i.get("principal", {}).get("userDetails", {}).get("userPrincipalName")
- )
- user_type = i.get("principal", {}).get("type")
+ responses = pagination(client, response)

- new_data = {
- "User Name": user_name,
- "Role Name": role_name,
- "Type": user_type,
- "User Email": user_email,
- }
- df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+ for r in responses:
+ for i in r.get("value", []):
+ principal = i.get("principal", {})
+ new_data = {
+ "User Name": principal.get("displayName"),
+ "Role Name": i.get("role"),
+ "Type": principal.get("type"),
+ "User Email": principal.get("userDetails", {}).get("userPrincipalName"),
+ }
+ df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

  return df

@@ -1489,7 +1528,7 @@ def list_shortcuts(
  lakehouse: Optional[str] = None, workspace: Optional[str] = None
  ) -> pd.DataFrame:
  """
- Shows all shortcuts which exist in a Fabric lakehouse.
+ Shows all shortcuts which exist in a Fabric lakehouse and their properties.

  Parameters
  ----------
@@ -1511,68 +1550,86 @@ def list_shortcuts(

  if lakehouse is None:
  lakehouse_id = fabric.get_lakehouse_id()
- lakehouse = resolve_lakehouse_name(lakehouse_id, workspace)
  else:
  lakehouse_id = resolve_lakehouse_id(lakehouse, workspace)

+ client = fabric.FabricRestClient()
+
  df = pd.DataFrame(
  columns=[
  "Shortcut Name",
  "Shortcut Path",
- "Source",
- "Source Lakehouse Name",
+ "Source Type",
+ "Source Workspace Id",
  "Source Workspace Name",
- "Source Path",
- "Source Connection ID",
- "Source Location",
- "Source SubPath",
+ "Source Item Id",
+ "Source Item Name",
+ "Source Item Type",
+ "OneLake Path",
+ "Connection Id",
+ "Location",
+ "Bucket",
+ "SubPath",
  ]
  )

- client = fabric.FabricRestClient()
  response = client.get(
  f"/v1/workspaces/{workspace_id}/items/{lakehouse_id}/shortcuts"
  )

  if response.status_code != 200:
  raise FabricHTTPException(response)
- for s in response.json()["value"]:
- shortcutName = s.get("name")
- shortcutPath = s.get("path")
- source = list(s["target"].keys())[0]
- (
- sourceLakehouseName,
- sourceWorkspaceName,
- sourcePath,
- connectionId,
- location,
- subpath,
- ) = (None, None, None, None, None, None)
- if source == "oneLake":
- sourceLakehouseId = s.get("target", {}).get(source, {}).get("itemId")
- sourcePath = s.get("target", {}).get(source, {}).get("path")
- sourceWorkspaceId = s.get("target", {}).get(source, {}).get("workspaceId")
- sourceWorkspaceName = fabric.resolve_workspace_name(sourceWorkspaceId)
- sourceLakehouseName = resolve_lakehouse_name(
- sourceLakehouseId, sourceWorkspaceName
+
+ responses = pagination(client, response)
+
+ for r in responses:
+ for i in r.get("value", []):
+ tgt = i.get("target", {})
+ s3_compat = tgt.get("s3Compatible", {})
+ gcs = tgt.get("googleCloudStorage", {})
+ eds = tgt.get("externalDataShare", {})
+ connection_id = (
+ s3_compat.get("connectionId")
+ or gcs.get("connectionId")
+ or eds.get("connectionId")
+ or None
+ )
+ location = s3_compat.get("location") or gcs.get("location") or None
+ sub_path = s3_compat.get("subpath") or gcs.get("subpath") or None
+ source_workspace_id = tgt.get("oneLake", {}).get("workspaceId")
+ source_item_id = tgt.get("oneLake", {}).get("itemId")
+ source_workspace_name = (
+ fabric.resolve_workspace_name(source_workspace_id)
+ if source_workspace_id is not None
+ else None
  )
- else:
- connectionId = s.get("target", {}).get(source, {}).get("connectionId")
- location = s.get("target", {}).get(source, {}).get("location")
- subpath = s.get("target", {}).get(source, {}).get("subpath")

- new_data = {
- "Shortcut Name": shortcutName,
- "Shortcut Path": shortcutPath,
- "Source": source,
- "Source Lakehouse Name": sourceLakehouseName,
- "Source Workspace Name": sourceWorkspaceName,
- "Source Path": sourcePath,
- "Source Connection ID": connectionId,
- "Source Location": location,
- "Source SubPath": subpath,
- }
- df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+ new_data = {
+ "Shortcut Name": i.get("name"),
+ "Shortcut Path": i.get("path"),
+ "Source Type": tgt.get("type"),
+ "Source Workspace Id": source_workspace_id,
+ "Source Workspace Name": source_workspace_name,
+ "Source Item Id": source_item_id,
+ "Source Item Name": (
+ fabric.resolve_item_name(
+ source_item_id, workspace=source_workspace_name
+ )
+ if source_item_id is not None
+ else None
+ ),
+ "Source Item Type": (
+ resolve_item_type(source_item_id, workspace=source_workspace_name)
+ if source_item_id is not None
+ else None
+ ),
+ "OneLake Path": tgt.get("oneLake", {}).get("path"),
+ "Connection Id": connection_id,
+ "Location": location,
+ "Bucket": s3_compat.get("bucket"),
+ "SubPath": sub_path,
+ }
+ df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

  return df

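
The reworked list_shortcuts reports the shortcut target type directly and resolves OneLake sources to an item name and type. A hedged usage sketch (lakehouse and workspace names are placeholders, and the function is assumed to be re-exported from the package root):

    import sempy_labs as labs

    shortcuts = labs.list_shortcuts(lakehouse="MyLakehouse", workspace="MyWorkspace")
    # Columns added in 0.7.1 include "Source Type", "Source Item Name", "OneLake Path" and "Bucket".
    print(shortcuts[["Shortcut Name", "Shortcut Path", "Source Type", "Source Item Name"]])
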
@@ -1615,6 +1672,8 @@ def list_custom_pools(workspace: Optional[str] = None) -> pd.DataFrame:

  client = fabric.FabricRestClient()
  response = client.get(f"/v1/workspaces/{workspace_id}/spark/pools")
+ if response.status_code != 200:
+ raise FabricHTTPException(response)

  for i in response.json()["value"]:

@@ -1675,9 +1734,9 @@ def create_custom_pool(
  min_node_count : int
  The `minimum node count <https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#autoscaleproperties>`_.
  max_node_count : int
- The `maximum node count <https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#autoscaleproperties>`_.
+ The `maximum node count <https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#autoscaleproperties>`_.
  min_executors : int
- The `minimum executors <https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#dynamicexecutorallocationproperties>`_.
+ The `minimum executors <https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#dynamicexecutorallocationproperties>`_.
  max_executors : int
  The `maximum executors <https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#dynamicexecutorallocationproperties>`_.
  node_family : str, default='MemoryOptimized'
@@ -1716,15 +1775,14 @@ def create_custom_pool(

  client = fabric.FabricRestClient()
  response = client.post(
- f"/v1/workspaces/{workspace_id}/spark/pools", json=request_body
+ f"/v1/workspaces/{workspace_id}/spark/pools", json=request_body, lro_wait=True
  )

- if response.status_code == 201:
- print(
- f"{icons.green_dot} The '{pool_name}' spark pool has been created within the '{workspace}' workspace."
- )
- else:
- raise ValueError(f"{icons.red_dot} {response.status_code}")
+ if response.status_code != 200:
+ raise FabricHTTPException(response)
+ print(
+ f"{icons.green_dot} The '{pool_name}' spark pool has been created within the '{workspace}' workspace."
+ )


  def update_custom_pool(
@@ -1753,10 +1811,10 @@ def update_custom_pool(
  The `minimum node count <https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#autoscaleproperties>`_.
  Defaults to None which keeps the existing property setting.
  max_node_count : int, default=None
- The `maximum node count <https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#autoscaleproperties>`_.
+ The `maximum node count <https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#autoscaleproperties>`_.
  Defaults to None which keeps the existing property setting.
  min_executors : int, default=None
- The `minimum executors <https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#dynamicexecutorallocationproperties>`_.
+ The `minimum executors <https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#dynamicexecutorallocationproperties>`_.
  Defaults to None which keeps the existing property setting.
  max_executors : int, default=None
  The `maximum executors <https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool?tabs=HTTP#dynamicexecutorallocationproperties>`_.
@@ -1837,7 +1895,7 @@ def update_custom_pool(
  )


- def delete_custom_pool(pool_name: str, workspace: Optional[str | None] = None):
+ def delete_custom_pool(pool_name: str, workspace: Optional[str] = None):
  """
  Deletes a `custom pool <https://learn.microsoft.com/fabric/data-engineering/create-custom-spark-pools>`_ within a workspace.

@@ -1902,15 +1960,16 @@ def assign_workspace_to_capacity(capacity_name: str, workspace: Optional[str] = None):

  client = fabric.FabricRestClient()
  response = client.post(
- f"/v1/workspaces/{workspace_id}/assignToCapacity", json=request_body
+ f"/v1/workspaces/{workspace_id}/assignToCapacity",
+ json=request_body,
+ lro_wait=True,
  )

- if response.status_code == 202:
- print(
- f"{icons.green_dot} The '{workspace}' workspace has been assigned to the '{capacity_name}' capacity."
- )
- else:
- raise ValueError(f"{icons.red_dot} {response.status_code}")
+ if response.status_code not in [200, 202]:
+ raise FabricHTTPException(response)
+ print(
+ f"{icons.green_dot} The '{workspace}' workspace has been assigned to the '{capacity_name}' capacity."
+ )


  def unassign_workspace_from_capacity(workspace: Optional[str] = None):
@@ -1932,14 +1991,15 @@ def unassign_workspace_from_capacity(workspace: Optional[str] = None):
  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

  client = fabric.FabricRestClient()
- response = client.post(f"/v1/workspaces/{workspace_id}/unassignFromCapacity")
+ response = client.post(
+ f"/v1/workspaces/{workspace_id}/unassignFromCapacity", lro_wait=True
+ )

- if response.status_code == 202:
- print(
- f"{icons.green_dot} The '{workspace}' workspace has been unassigned from its capacity."
- )
- else:
- raise ValueError(f"{icons.red_dot} {response.status_code}")
+ if response.status_code not in [200, 202]:
+ raise FabricHTTPException(response)
+ print(
+ f"{icons.green_dot} The '{workspace}' workspace has been unassigned from its capacity."
+ )


  def get_spark_settings(workspace: Optional[str] = None) -> pd.DataFrame:
@@ -1978,6 +2038,8 @@ def get_spark_settings(workspace: Optional[str] = None) -> pd.DataFrame:

  client = fabric.FabricRestClient()
  response = client.get(f"/v1/workspaces/{workspace_id}/spark/settings")
+ if response.status_code != 200:
+ raise FabricHTTPException(response)

  i = response.json()
  p = i.get("pool")
@@ -2042,7 +2104,7 @@ def update_spark_settings(
  `Default pool <https://learn.microsoft.com/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP#poolproperties>`_ for workspace.
  Defaults to None which keeps the existing property setting.
  max_node_count : int, default=None
- The `maximum node count <https://learn.microsoft.com/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP#starterpoolproperties>`_.
+ The `maximum node count <https://learn.microsoft.com/en-us/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP#starterpoolproperties>`_.
  Defaults to None which keeps the existing property setting.
  max_executors : int, default=None
  The `maximum executors <https://learn.microsoft.com/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP#starterpoolproperties>`_.
@@ -2111,7 +2173,10 @@ def update_spark_settings(


  def add_user_to_workspace(
- email_address: str, role_name: str, workspace: Optional[str] = None
+ email_address: str,
+ role_name: str,
+ principal_type: Optional[str] = "User",
+ workspace: Optional[str] = None,
  ):
  """
  Adds a user to a workspace.
@@ -2122,13 +2187,12 @@ def add_user_to_workspace(
  The email address of the user.
  role_name : str
  The `role <https://learn.microsoft.com/rest/api/power-bi/groups/add-group-user#groupuseraccessright>`_ of the user within the workspace.
+ principal_type : str, default='User'
+ The `principal type <https://learn.microsoft.com/rest/api/power-bi/groups/add-group-user#principaltype>`_.
  workspace : str, default=None
  The name of the workspace.
  Defaults to None which resolves to the workspace of the attached lakehouse
  or if no lakehouse attached, resolves to the workspace of the notebook.
-
- Returns
- -------
  """

  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
@@ -2140,10 +2204,21 @@ def add_user_to_workspace(
  f"{icons.red_dot} Invalid role. The 'role_name' parameter must be one of the following: {role_names}."
  )
  plural = "n" if role_name == "Admin" else ""
+ principal_types = ["App", "Group", "None", "User"]
+ principal_type = principal_type.capitalize()
+ if principal_type not in principal_types:
+ raise ValueError(
+ f"{icons.red_dot} Invalid princpal type. Valid options: {principal_types}."
+ )

  client = fabric.PowerBIRestClient()

- request_body = {"emailAddress": email_address, "groupUserAccessRight": role_name}
+ request_body = {
+ "emailAddress": email_address,
+ "groupUserAccessRight": role_name,
+ "principalType": principal_type,
+ "identifier": email_address,
+ }

  response = client.post(
  f"/v1.0/myorg/groups/{workspace_id}/users", json=request_body
@@ -2186,7 +2261,10 @@ def delete_user_from_workspace(email_address: str, workspace: Optional[str] = None):


  def update_workspace_user(
- email_address: str, role_name: str, workspace: Optional[str] = None
+ email_address: str,
+ role_name: str,
+ principal_type: Optional[str] = "User",
+ workspace: Optional[str] = None,
  ):
  """
  Updates a user's role within a workspace.
2197
2275
  The email address of the user.
2198
2276
  role_name : str
2199
2277
  The `role <https://learn.microsoft.com/rest/api/power-bi/groups/add-group-user#groupuseraccessright>`_ of the user within the workspace.
2278
+ principal_type : str, default='User'
2279
+ The `principal type <https://learn.microsoft.com/rest/api/power-bi/groups/add-group-user#principaltype>`_.
2200
2280
  workspace : str, default=None
2201
2281
  The name of the workspace.
2202
2282
  Defaults to None which resolves to the workspace of the attached lakehouse
2203
2283
  or if no lakehouse attached, resolves to the workspace of the notebook.
2204
-
2205
- Returns
2206
- -------
2207
2284
  """
2208
2285
 
2209
2286
  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
@@ -2214,8 +2291,19 @@ def update_workspace_user(
  raise ValueError(
  f"{icons.red_dot} Invalid role. The 'role_name' parameter must be one of the following: {role_names}."
  )
+ principal_types = ["App", "Group", "None", "User"]
+ principal_type = principal_type.capitalize()
+ if principal_type not in principal_types:
+ raise ValueError(
+ f"{icons.red_dot} Invalid princpal type. Valid options: {principal_types}."
+ )

- request_body = {"emailAddress": email_address, "groupUserAccessRight": role_name}
+ request_body = {
+ "emailAddress": email_address,
+ "groupUserAccessRight": role_name,
+ "principalType": principal_type,
+ "identifier": email_address,
+ }

  client = fabric.PowerBIRestClient()
  response = client.put(f"/v1.0/myorg/groups/{workspace_id}/users", json=request_body)
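
Both add_user_to_workspace and update_workspace_user now send an explicit principalType (and an identifier) alongside the access right, validated against App, Group, None and User. A hedged usage sketch with placeholder values (assuming the functions are re-exported from the package root):

    import sempy_labs as labs

    # principal_type is normalised with .capitalize(), so "user" and "User" are both accepted.
    labs.add_user_to_workspace(
        email_address="user@contoso.com",   # placeholder identifier
        role_name="Member",
        principal_type="User",
        workspace="Sales Analytics",        # placeholder workspace name
    )

    labs.update_workspace_user(
        email_address="user@contoso.com",
        role_name="Contributor",
        workspace="Sales Analytics",
    )
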
@@ -2249,18 +2337,22 @@ def list_workspace_users(workspace: Optional[str] = None) -> pd.DataFrame:
  df = pd.DataFrame(columns=["User Name", "Email Address", "Role", "Type", "User ID"])
  client = fabric.FabricRestClient()
  response = client.get(f"/v1/workspaces/{workspace_id}/roleAssignments")
+ if response.status_code != 200:
+ raise FabricHTTPException(response)

- for v in response.json()["value"]:
- p = v.get("principal", {})
+ responses = pagination(client, response)

- new_data = {
- "User Name": p.get("displayName"),
- "User ID": p.get("id"),
- "Type": p.get("type"),
- "Role": v.get("role"),
- "Email Address": p.get("userDetails", {}).get("userPrincipalName"),
- }
- df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+ for r in responses:
+ for v in r.get("value", []):
+ p = v.get("principal", {})
+ new_data = {
+ "User Name": p.get("displayName"),
+ "User ID": p.get("id"),
+ "Type": p.get("type"),
+ "Role": v.get("role"),
+ "Email Address": p.get("userDetails", {}).get("userPrincipalName"),
+ }
+ df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

  return df

@@ -2324,16 +2416,203 @@ def list_capacities() -> pd.DataFrame:

  client = fabric.PowerBIRestClient()
  response = client.get("/v1.0/myorg/capacities")
+ if response.status_code != 200:
+ raise FabricHTTPException(response)

- for i in response.json()["value"]:
+ for i in response.json().get("value", []):
  new_data = {
- "Id": i.get("id", {}).lower(),
- "Display Name": i.get("displayName", {}),
- "Sku": i.get("sku", {}),
- "Region": i.get("region", {}),
- "State": i.get("state", {}),
+ "Id": i.get("id").lower(),
+ "Display Name": i.get("displayName"),
+ "Sku": i.get("sku"),
+ "Region": i.get("region"),
+ "State": i.get("state"),
  "Admins": [i.get("admins", [])],
  }
  df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

  return df
+
+
+ def get_notebook_definition(
+ notebook_name: str, workspace: Optional[str] = None, decode: Optional[bool] = True
+ ):
+ """
+ Obtains the notebook definition.
+
+ Parameters
+ ----------
+ notebook_name : str
+ The name of the notebook.
+ workspace : str, default=None
+ The name of the workspace.
+ Defaults to None which resolves to the workspace of the attached lakehouse
+ or if no lakehouse attached, resolves to the workspace of the notebook.
+ decode : bool, default=True
+ If True, decodes the notebook definition file into .ipynb format.
+ If False, obtains the notebook definition file in base64 format.
+
+ Returns
+ -------
+ ipynb
+ The notebook definition.
+ """
+
+ (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+ dfI = fabric.list_items(workspace=workspace, type="Notebook")
+ dfI_filt = dfI[dfI["Display Name"] == notebook_name]
+
+ if len(dfI_filt) == 0:
+ raise ValueError(
+ f"{icons.red_dot} The '{notebook_name}' notebook does not exist within the '{workspace}' workspace."
+ )
+
+ notebook_id = dfI_filt["Id"].iloc[0]
+ client = fabric.FabricRestClient()
+ response = client.post(
+ f"v1/workspaces/{workspace_id}/notebooks/{notebook_id}/getDefinition",
+ )
+
+ result = lro(client, response).json()
+ df_items = pd.json_normalize(result["definition"]["parts"])
+ df_items_filt = df_items[df_items["path"] == "notebook-content.py"]
+ payload = df_items_filt["payload"].iloc[0]
+
+ if decode:
+ result = _decode_b64(payload)
+ else:
+ result = payload
+
+ return result
+
+
+ def import_notebook_from_web(
+ notebook_name: str,
+ url: str,
+ description: Optional[str] = None,
+ workspace: Optional[str] = None,
+ ):
+ """
+ Creates a new notebook within a workspace based on a Jupyter notebook hosted in the web.
+
+ Parameters
+ ----------
+ notebook_name : str
+ The name of the notebook to be created.
+ url : str
+ The url of the Jupyter Notebook (.ipynb)
+ description : str, default=None
+ The description of the notebook.
+ Defaults to None which does not place a description.
+ workspace : str, default=None
+ The name of the workspace.
+ Defaults to None which resolves to the workspace of the attached lakehouse
+ or if no lakehouse attached, resolves to the workspace of the notebook.
+
+ Returns
+ -------
+ """
+
+ (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+ client = fabric.FabricRestClient()
+ dfI = fabric.list_items(workspace=workspace, type="Notebook")
+ dfI_filt = dfI[dfI["Display Name"] == notebook_name]
+ if len(dfI_filt) > 0:
+ raise ValueError(
+ f"{icons.red_dot} The '{notebook_name}' already exists within the '{workspace}' workspace."
+ )
+
+ # Fix links to go to the raw github file
+ starting_text = "https://github.com/"
+ starting_text_len = len(starting_text)
+ if url.startswith(starting_text):
+ url = f"https://raw.githubusercontent.com/{url[starting_text_len:]}".replace(
+ "/blob/", "/"
+ )
+
+ response = requests.get(url)
+ if response.status_code != 200:
+ raise FabricHTTPException(response)
+ file_content = response.content
+ notebook_payload = base64.b64encode(file_content)
+
+ request_body = {
+ "displayName": notebook_name,
+ "definition": {
+ "format": "ipynb",
+ "parts": [
+ {
+ "path": "notebook-content.py",
+ "payload": notebook_payload,
+ "payloadType": "InlineBase64",
+ }
+ ],
+ },
+ }
+ if description is not None:
+ request_body["description"] = description
+
+ response = client.post(f"v1/workspaces/{workspace_id}/notebooks", json=request_body)
+
+ lro(client, response, status_codes=[201, 202])
+
+ print(
+ f"{icons.green_dot} The '{notebook_name}' notebook was created within the '{workspace}' workspace."
+ )
+
+
+ def list_reports_using_semantic_model(
+ dataset: str, workspace: Optional[str] = None
+ ) -> pd.DataFrame:
+ """
+ Shows a list of all the reports (in all workspaces) which use a given semantic model.
+
+ Parameters
+ ----------
+ dataset : str
+ Name of the semantic model.
+ workspace : str, default=None
+ The Fabric workspace name.
+ Defaults to None which resolves to the workspace of the attached lakehouse
+ or if no lakehouse attached, resolves to the workspace of the notebook.
+
+ Returns
+ -------
+ pandas.DataFrame
+ A pandas dataframe showing the reports which use a given semantic model.
+ """
+
+ df = pd.DataFrame(
+ columns=[
+ "Report Name",
+ "Report Id",
+ "Report Workspace Name",
+ "Report Workspace Id",
+ ]
+ )
+
+ workspace = fabric.resolve_workspace_name(workspace)
+ dataset_id = resolve_dataset_id(dataset, workspace)
+ client = fabric.PowerBIRestClient()
+ response = client.get(
+ f"metadata/relations/downstream/dataset/{dataset_id}?apiVersion=3"
+ )
+
+ response_json = response.json()
+
+ for i in response_json.get("artifacts", []):
+ object_workspace_id = i.get("workspace", {}).get("objectId")
+ object_type = i.get("typeName")
+
+ if object_type == "Report":
+ new_data = {
+ "Report Name": i.get("displayName"),
+ "Report Id": i.get("objectId"),
+ "Report Workspace Name": fabric.resolve_workspace_name(
+ object_workspace_id
+ ),
+ "Report Workspace Id": object_workspace_id,
+ }
+ df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+ return df
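
Taken together, the functions added at the end of this file cover importing a notebook from a public URL, reading a notebook definition back, and tracing which reports depend on a semantic model. A hedged end-to-end sketch with placeholder names and URL (assuming the new functions are re-exported from the package root; otherwise import them from sempy_labs._list_functions):

    import sempy_labs as labs

    # GitHub blob URLs are rewritten to raw.githubusercontent.com before download.
    labs.import_notebook_from_web(
        notebook_name="Model BPA",                                      # placeholder name
        url="https://github.com/owner/repo/blob/main/notebook.ipynb",   # placeholder URL
        workspace="Sales Analytics",                                    # placeholder workspace
    )

    # decode=True (the default) returns the definition decoded from base64.
    definition = labs.get_notebook_definition("Model BPA", workspace="Sales Analytics")

    # Lists reports across all workspaces that are bound to the given semantic model.
    reports = labs.list_reports_using_semantic_model("AdventureWorks", workspace="Sales Analytics")
    print(reports)
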