semantic-link-labs 0.6.0__py3-none-any.whl → 0.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of semantic-link-labs might be problematic.
Files changed (103)
  1. semantic_link_labs-0.7.0.dist-info/METADATA +148 -0
  2. semantic_link_labs-0.7.0.dist-info/RECORD +111 -0
  3. {semantic_link_labs-0.6.0.dist-info → semantic_link_labs-0.7.0.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +26 -2
  5. sempy_labs/_ai.py +3 -65
  6. sempy_labs/_bpa_translation/_translations_am-ET.po +828 -0
  7. sempy_labs/_bpa_translation/_translations_ar-AE.po +860 -0
  8. sempy_labs/_bpa_translation/_translations_cs-CZ.po +894 -0
  9. sempy_labs/_bpa_translation/_translations_da-DK.po +894 -0
  10. sempy_labs/_bpa_translation/_translations_de-DE.po +933 -0
  11. sempy_labs/_bpa_translation/_translations_el-GR.po +936 -0
  12. sempy_labs/_bpa_translation/_translations_es-ES.po +915 -0
  13. sempy_labs/_bpa_translation/_translations_fa-IR.po +883 -0
  14. sempy_labs/_bpa_translation/_translations_fr-FR.po +938 -0
  15. sempy_labs/_bpa_translation/_translations_ga-IE.po +912 -0
  16. sempy_labs/_bpa_translation/_translations_he-IL.po +855 -0
  17. sempy_labs/_bpa_translation/_translations_hi-IN.po +892 -0
  18. sempy_labs/_bpa_translation/_translations_hu-HU.po +910 -0
  19. sempy_labs/_bpa_translation/_translations_is-IS.po +887 -0
  20. sempy_labs/_bpa_translation/_translations_it-IT.po +931 -0
  21. sempy_labs/_bpa_translation/_translations_ja-JP.po +805 -0
  22. sempy_labs/_bpa_translation/_translations_nl-NL.po +924 -0
  23. sempy_labs/_bpa_translation/_translations_pl-PL.po +913 -0
  24. sempy_labs/_bpa_translation/_translations_pt-BR.po +909 -0
  25. sempy_labs/_bpa_translation/_translations_pt-PT.po +904 -0
  26. sempy_labs/_bpa_translation/_translations_ru-RU.po +909 -0
  27. sempy_labs/_bpa_translation/_translations_ta-IN.po +922 -0
  28. sempy_labs/_bpa_translation/_translations_te-IN.po +896 -0
  29. sempy_labs/_bpa_translation/_translations_th-TH.po +873 -0
  30. sempy_labs/_bpa_translation/_translations_zh-CN.po +767 -0
  31. sempy_labs/_bpa_translation/_translations_zu-ZA.po +916 -0
  32. sempy_labs/_clear_cache.py +9 -4
  33. sempy_labs/_generate_semantic_model.py +30 -56
  34. sempy_labs/_helper_functions.py +358 -14
  35. sempy_labs/_icons.py +10 -1
  36. sempy_labs/_list_functions.py +478 -237
  37. sempy_labs/_model_bpa.py +194 -18
  38. sempy_labs/_model_bpa_bulk.py +363 -0
  39. sempy_labs/_model_bpa_rules.py +4 -4
  40. sempy_labs/_model_dependencies.py +12 -10
  41. sempy_labs/_one_lake_integration.py +7 -7
  42. sempy_labs/_query_scale_out.py +45 -66
  43. sempy_labs/_refresh_semantic_model.py +7 -0
  44. sempy_labs/_translations.py +154 -1
  45. sempy_labs/_vertipaq.py +103 -90
  46. sempy_labs/directlake/__init__.py +5 -1
  47. sempy_labs/directlake/_directlake_schema_compare.py +27 -31
  48. sempy_labs/directlake/_directlake_schema_sync.py +55 -66
  49. sempy_labs/directlake/_dl_helper.py +233 -0
  50. sempy_labs/directlake/_get_directlake_lakehouse.py +6 -7
  51. sempy_labs/directlake/_get_shared_expression.py +1 -1
  52. sempy_labs/directlake/_guardrails.py +17 -13
  53. sempy_labs/directlake/_update_directlake_partition_entity.py +54 -30
  54. sempy_labs/directlake/_warm_cache.py +1 -1
  55. sempy_labs/lakehouse/_get_lakehouse_tables.py +61 -69
  56. sempy_labs/lakehouse/_lakehouse.py +3 -2
  57. sempy_labs/lakehouse/_shortcuts.py +1 -1
  58. sempy_labs/migration/_create_pqt_file.py +174 -182
  59. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +236 -268
  60. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +75 -73
  61. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +442 -426
  62. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +91 -97
  63. sempy_labs/migration/_refresh_calc_tables.py +92 -101
  64. sempy_labs/report/_BPAReportTemplate.json +232 -0
  65. sempy_labs/report/__init__.py +6 -2
  66. sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
  67. sempy_labs/report/_bpareporttemplate/.platform +11 -0
  68. sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
  69. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
  70. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
  71. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
  72. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
  73. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
  74. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
  75. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
  76. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
  77. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
  78. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
  79. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
  80. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
  81. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
  82. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
  83. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
  84. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
  85. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
  86. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
  87. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
  88. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
  89. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
  90. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
  91. sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
  92. sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
  93. sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
  94. sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
  95. sempy_labs/report/_generate_report.py +255 -139
  96. sempy_labs/report/_report_functions.py +26 -33
  97. sempy_labs/report/_report_rebind.py +31 -26
  98. sempy_labs/tom/_model.py +75 -58
  99. semantic_link_labs-0.6.0.dist-info/METADATA +0 -22
  100. semantic_link_labs-0.6.0.dist-info/RECORD +0 -54
  101. sempy_labs/directlake/_fallback.py +0 -60
  102. {semantic_link_labs-0.6.0.dist-info → semantic_link_labs-0.7.0.dist-info}/LICENSE +0 -0
  103. {semantic_link_labs-0.6.0.dist-info → semantic_link_labs-0.7.0.dist-info}/top_level.txt +0 -0
Unified diff of sempy_labs/_list_functions.py:

@@ -4,10 +4,16 @@ from sempy_labs._helper_functions import (
     resolve_lakehouse_name,
     create_relationship_name,
     resolve_lakehouse_id,
+    resolve_dataset_id,
+    _decode_b64,
+    pagination,
+    lro,
 )
 import pandas as pd
-import json
+import base64
+import requests
 import time
+import json
 from pyspark.sql import SparkSession
 from typing import Optional
 import sempy_labs._icons as icons
@@ -37,8 +43,7 @@ def get_object_level_security(

     from sempy_labs.tom import connect_semantic_model

-    if workspace is None:
-        workspace = fabric.resolve_workspace_name()
+    workspace = fabric.resolve_workspace_name(workspace)

     df = pd.DataFrame(columns=["Role Name", "Object Type", "Table Name", "Object Name"])

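Note: this is the first of many places where 0.7.0 collapses the two-line None check into a single call, pushing the fallback into the resolver itself. A minimal sketch of the idiom, using a hypothetical stand-in for sempy's fabric.resolve_workspace_name (the assumption, inferred from this refactor, is that the resolver returns the current workspace's name when passed None):

    from typing import Optional

    def resolve_workspace_name(workspace: Optional[str] = None) -> str:
        # Hypothetical stand-in; the real resolver lives in sempy.fabric.
        return workspace if workspace is not None else "Current Workspace"

    workspace = None

    # 0.6.0 idiom:
    if workspace is None:
        workspace = resolve_workspace_name()

    # 0.7.0 idiom: the None handling moves into the resolver.
    workspace = resolve_workspace_name(workspace)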
@@ -101,7 +106,7 @@ def list_tables(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:
         A pandas dataframe showing the semantic model's tables and their properties.
     """

-    workspace = fabric.resolve_workspace_name()
+    workspace = fabric.resolve_workspace_name(workspace)

     df = fabric.list_tables(
         dataset=dataset,
@@ -138,7 +143,7 @@ def list_annotations(dataset: str, workspace: Optional[str] = None) -> pd.DataFrame:

     from sempy_labs.tom import connect_semantic_model

-    workspace = fabric.resolve_workspace_name()
+    workspace = fabric.resolve_workspace_name(workspace)

     df = pd.DataFrame(
         columns=[
@@ -381,8 +386,7 @@ def list_columns(
         get_direct_lake_lakehouse,
     )

-    if workspace is None:
-        workspace = fabric.resolve_workspace_name()
+    workspace = fabric.resolve_workspace_name(workspace)

     dfP = fabric.list_partitions(dataset=dataset, workspace=workspace)

@@ -422,10 +426,10 @@ def list_columns(
             ].iloc[0]

             # Build the query to be executed dynamically
-            query = query + f"COUNT(DISTINCT({scName})) AS {scName}, "
+            query = f"{query}COUNT(DISTINCT({scName})) AS {scName}, "

         query = query[:-2]
-        query = query + f" FROM {lakehouse}.{lakeTName}"
+        query = f"{query} FROM {lakehouse}.{lakeTName}"
         sql_statements.append((table_name, query))

     spark = SparkSession.builder.getOrCreate()
@@ -487,8 +491,10 @@ def list_dashboards(workspace: Optional[str] = None) -> pd.DataFrame:

     client = fabric.PowerBIRestClient()
     response = client.get(f"/v1.0/myorg/groups/{workspace_id}/dashboards")
+    if response.status_code != 200:
+        raise FabricHTTPException(response)

-    for v in response.json()["value"]:
+    for v in response.json().get("value", []):
         new_data = {
             "Dashboard ID": v.get("id"),
             "Dashboard Name": v.get("displayName"),
@@ -539,23 +545,29 @@ def list_lakehouses(workspace: Optional[str] = None) -> pd.DataFrame:
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

     client = fabric.FabricRestClient()
-    response = client.get(f"/v1/workspaces/{workspace_id}/lakehouses/")
+    response = client.get(f"/v1/workspaces/{workspace_id}/lakehouses")

-    for v in response.json()["value"]:
-        prop = v.get("properties", {})
-        sqlEPProp = prop.get("sqlEndpointProperties", {})
+    if response.status_code != 200:
+        raise FabricHTTPException(response)

-        new_data = {
-            "Lakehouse Name": v.get("displayName"),
-            "Lakehouse ID": v.get("id"),
-            "Description": v.get("description"),
-            "OneLake Tables Path": prop.get("oneLakeTablesPath"),
-            "OneLake Files Path": prop.get("oneLakeFilesPath"),
-            "SQL Endpoint Connection String": sqlEPProp.get("connectionString"),
-            "SQL Endpoint ID": sqlEPProp.get("id"),
-            "SQL Endpoint Provisioning Status": sqlEPProp.get("provisioningStatus"),
-        }
-        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+    responses = pagination(client, response)
+
+    for r in responses:
+        for v in r.get("value", []):
+            prop = v.get("properties", {})
+            sqlEPProp = prop.get("sqlEndpointProperties", {})
+
+            new_data = {
+                "Lakehouse Name": v.get("displayName"),
+                "Lakehouse ID": v.get("id"),
+                "Description": v.get("description"),
+                "OneLake Tables Path": prop.get("oneLakeTablesPath"),
+                "OneLake Files Path": prop.get("oneLakeFilesPath"),
+                "SQL Endpoint Connection String": sqlEPProp.get("connectionString"),
+                "SQL Endpoint ID": sqlEPProp.get("id"),
+                "SQL Endpoint Provisioning Status": sqlEPProp.get("provisioningStatus"),
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

     return df

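Most of the list_* functions in this release now funnel their first response through a pagination helper imported from sempy_labs._helper_functions; that helper's implementation is not part of this diff. The sketch below is an assumption of how such a helper could follow the Fabric REST API's continuation links, given that each caller iterates over a list of already-parsed JSON dicts:

    def pagination(client, response):
        # Hypothetical reconstruction; the shipped helper lives in
        # sempy_labs/_helper_functions.py and is not shown in this diff.
        responses = []
        response_json = response.json()
        responses.append(response_json)

        # Fabric "list" APIs include a continuationUri while more pages
        # remain and omit it on the final page.
        while response_json.get("continuationUri"):
            response = client.get(response_json["continuationUri"])
            response_json = response.json()
            responses.append(response_json)

        return responses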
@@ -591,20 +603,25 @@ def list_warehouses(workspace: Optional[str] = None) -> pd.DataFrame:
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

     client = fabric.FabricRestClient()
-    response = client.get(f"/v1/workspaces/{workspace_id}/warehouses/")
+    response = client.get(f"/v1/workspaces/{workspace_id}/warehouses")
+    if response.status_code != 200:
+        raise FabricHTTPException(response)

-    for v in response.json()["value"]:
-        prop = v.get("properties", {})
+    responses = pagination(client, response)

-        new_data = {
-            "Warehouse Name": v.get("displayName"),
-            "Warehouse ID": v.get("id"),
-            "Description": v.get("description"),
-            "Connection Info": prop.get("connectionInfo"),
-            "Created Date": prop.get("createdDate"),
-            "Last Updated Time": prop.get("lastUpdatedTime"),
-        }
-        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+    for r in responses:
+        for v in r.get("value", []):
+            prop = v.get("properties", {})
+
+            new_data = {
+                "Warehouse Name": v.get("displayName"),
+                "Warehouse ID": v.get("id"),
+                "Description": v.get("description"),
+                "Connection Info": prop.get("connectionInfo"),
+                "Created Date": prop.get("createdDate"),
+                "Last Updated Time": prop.get("lastUpdatedTime"),
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

     return df

@@ -631,16 +648,21 @@ def list_sqlendpoints(workspace: Optional[str] = None) -> pd.DataFrame:
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

     client = fabric.FabricRestClient()
-    response = client.get(f"/v1/workspaces/{workspace_id}/sqlEndpoints/")
+    response = client.get(f"/v1/workspaces/{workspace_id}/sqlEndpoints")
+    if response.status_code != 200:
+        raise FabricHTTPException(response)

-    for v in response.json()["value"]:
+    responses = pagination(client, response)

-        new_data = {
-            "SQL Endpoint ID": v.get("id"),
-            "SQL Endpoint Name": v.get("displayName"),
-            "Description": v.get("description"),
-        }
-        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+    for r in responses:
+        for v in r.get("value", []):
+
+            new_data = {
+                "SQL Endpoint ID": v.get("id"),
+                "SQL Endpoint Name": v.get("displayName"),
+                "Description": v.get("description"),
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

     return df

@@ -669,16 +691,21 @@ def list_mirroredwarehouses(workspace: Optional[str] = None) -> pd.DataFrame:
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

     client = fabric.FabricRestClient()
-    response = client.get(f"/v1/workspaces/{workspace_id}/mirroredWarehouses/")
+    response = client.get(f"/v1/workspaces/{workspace_id}/mirroredWarehouses")
+    if response.status_code != 200:
+        raise FabricHTTPException(response)

-    for v in response.json()["value"]:
+    responses = pagination(client, response)

-        new_data = {
-            "Mirrored Warehouse": v.get("displayName"),
-            "Mirrored Warehouse ID": v.get("id"),
-            "Description": v.get("description"),
-        }
-        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+    for r in responses:
+        for v in r.get("value", []):
+
+            new_data = {
+                "Mirrored Warehouse": v.get("displayName"),
+                "Mirrored Warehouse ID": v.get("id"),
+                "Description": v.get("description"),
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

     return df

@@ -715,21 +742,26 @@ def list_kqldatabases(workspace: Optional[str] = None) -> pd.DataFrame:
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

     client = fabric.FabricRestClient()
-    response = client.get(f"/v1/workspaces/{workspace_id}/kqlDatabases/")
+    response = client.get(f"/v1/workspaces/{workspace_id}/kqlDatabases")
+    if response.status_code != 200:
+        raise FabricHTTPException(response)

-    for v in response.json()["value"]:
-        prop = v.get("properties", {})
+    responses = pagination(client, response)

-        new_data = {
-            "KQL Database Name": v.get("displayName"),
-            "KQL Database ID": v.get("id"),
-            "Description": v.get("description"),
-            "Parent Eventhouse Item ID": prop.get("parentEventhouseItemId"),
-            "Query Service URI": prop.get("queryServiceUri"),
-            "Ingestion Service URI": prop.get("ingestionServiceUri"),
-            "Kusto Database Type": prop.get("kustoDatabaseType"),
-        }
-        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+    for r in responses:
+        for v in r.get("value", []):
+            prop = v.get("properties", {})
+
+            new_data = {
+                "KQL Database Name": v.get("displayName"),
+                "KQL Database ID": v.get("id"),
+                "Description": v.get("description"),
+                "Parent Eventhouse Item ID": prop.get("parentEventhouseItemId"),
+                "Query Service URI": prop.get("queryServiceUri"),
+                "Ingestion Service URI": prop.get("ingestionServiceUri"),
+                "Kusto Database Type": prop.get("kustoDatabaseType"),
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

     return df

@@ -756,16 +788,21 @@ def list_kqlquerysets(workspace: Optional[str] = None) -> pd.DataFrame:
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

     client = fabric.FabricRestClient()
-    response = client.get(f"/v1/workspaces/{workspace_id}/kqlQuerysets/")
+    response = client.get(f"/v1/workspaces/{workspace_id}/kqlQuerysets")
+    if response.status_code != 200:
+        raise FabricHTTPException(response)

-    for v in response.json()["value"]:
+    responses = pagination(client, response)

-        new_data = {
-            "KQL Queryset Name": v.get("displayName"),
-            "KQL Queryset ID": v.get("id"),
-            "Description": v.get("description"),
-        }
-        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+    for r in responses:
+        for v in r.get("value", []):
+
+            new_data = {
+                "KQL Queryset Name": v.get("displayName"),
+                "KQL Queryset ID": v.get("id"),
+                "Description": v.get("description"),
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

     return df

@@ -792,19 +829,24 @@ def list_mlmodels(workspace: Optional[str] = None) -> pd.DataFrame:
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

     client = fabric.FabricRestClient()
-    response = client.get(f"/v1/workspaces/{workspace_id}/mlModels/")
+    response = client.get(f"/v1/workspaces/{workspace_id}/mlModels")
+    if response.status_code != 200:
+        raise FabricHTTPException(response)

-    for v in response.json()["value"]:
-        model_id = v.get("id")
-        modelName = v.get("displayName")
-        desc = v.get("description")
+    responses = pagination(client, response)

-        new_data = {
-            "ML Model Name": modelName,
-            "ML Model ID": model_id,
-            "Description": desc,
-        }
-        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+    for r in responses:
+        for v in r.get("value", []):
+            model_id = v.get("id")
+            modelName = v.get("displayName")
+            desc = v.get("description")
+
+            new_data = {
+                "ML Model Name": modelName,
+                "ML Model ID": model_id,
+                "Description": desc,
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

     return df

@@ -831,19 +873,24 @@ def list_eventstreams(workspace: Optional[str] = None) -> pd.DataFrame:
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

     client = fabric.FabricRestClient()
-    response = client.get(f"/v1/workspaces/{workspace_id}/eventstreams/")
+    response = client.get(f"/v1/workspaces/{workspace_id}/eventstreams")
+    if response.status_code != 200:
+        raise FabricHTTPException(response)

-    for v in response.json()["value"]:
-        model_id = v.get("id")
-        modelName = v.get("displayName")
-        desc = v.get("description")
+    responses = pagination(client, response)

-        new_data = {
-            "Eventstream Name": modelName,
-            "Eventstream ID": model_id,
-            "Description": desc,
-        }
-        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+    for r in responses:
+        for v in r.get("value", []):
+            model_id = v.get("id")
+            modelName = v.get("displayName")
+            desc = v.get("description")
+
+            new_data = {
+                "Eventstream Name": modelName,
+                "Eventstream ID": model_id,
+                "Description": desc,
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

     return df

@@ -870,19 +917,20 @@ def list_datapipelines(workspace: Optional[str] = None) -> pd.DataFrame:
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

     client = fabric.FabricRestClient()
-    response = client.get(f"/v1/workspaces/{workspace_id}/dataPipelines/")
+    response = client.get(f"/v1/workspaces/{workspace_id}/dataPipelines")
+    if response.status_code != 200:
+        raise FabricHTTPException(response)

-    for v in response.json()["value"]:
-        model_id = v.get("id")
-        modelName = v.get("displayName")
-        desc = v.get("description")
+    responses = pagination(client, response)

-        new_data = {
-            "Data Pipeline Name": modelName,
-            "Data Pipeline ID": model_id,
-            "Description": desc,
-        }
-        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+    for r in responses:
+        for v in r.get("value", []):
+            new_data = {
+                "Data Pipeline Name": v.get("displayName"),
+                "Data Pipeline ID": v.get("id"),
+                "Description": v.get("description"),
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

     return df

@@ -909,16 +957,20 @@ def list_mlexperiments(workspace: Optional[str] = None) -> pd.DataFrame:
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

     client = fabric.FabricRestClient()
-    response = client.get(f"/v1/workspaces/{workspace_id}/mlExperiments/")
+    response = client.get(f"/v1/workspaces/{workspace_id}/mlExperiments")
+    if response.status_code != 200:
+        raise FabricHTTPException(response)

-    for v in response.json()["value"]:
+    responses = pagination(client, response)

-        new_data = {
-            "ML Experiment Name": v.get("displayName"),
-            "ML Experiment ID": v.get("id"),
-            "Description": v.get("description"),
-        }
-        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+    for r in responses:
+        for v in r.get("value", []):
+            new_data = {
+                "ML Experiment Name": v.get("displayName"),
+                "ML Experiment ID": v.get("id"),
+                "Description": v.get("description"),
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

     return df

@@ -945,16 +997,20 @@ def list_datamarts(workspace: Optional[str] = None) -> pd.DataFrame:
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

     client = fabric.FabricRestClient()
-    response = client.get(f"/v1/workspaces/{workspace_id}/datamarts/")
+    response = client.get(f"/v1/workspaces/{workspace_id}/datamarts")
+    if response.status_code != 200:
+        raise FabricHTTPException(response)

-    for v in response.json()["value"]:
+    responses = pagination(client, response)

-        new_data = {
-            "Datamart Name": v.get("displayName"),
-            "Datamart ID": v.get("id"),
-            "Description": v.get("description"),
-        }
-        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+    for r in responses:
+        for v in response.get("value", []):
+            new_data = {
+                "Datamart Name": v.get("displayName"),
+                "Datamart ID": v.get("id"),
+                "Description": v.get("description"),
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

     return df

@@ -990,29 +1046,14 @@ def create_warehouse(

     client = fabric.FabricRestClient()
     response = client.post(
-        f"/v1/workspaces/{workspace_id}/warehouses/", json=request_body
+        f"/v1/workspaces/{workspace_id}/warehouses/", json=request_body, lro_wait=True
     )

-    if response.status_code == 201:
-        print(
-            f"{icons.green_dot} The '{warehouse}' warehouse has been created within the '{workspace}' workspace."
-        )
-    elif response.status_code == 202:
-        operationId = response.headers["x-ms-operation-id"]
-        response = client.get(f"/v1/operations/{operationId}")
-        response_body = json.loads(response.content)
-        while response_body["status"] != "Succeeded":
-            time.sleep(3)
-            response = client.get(f"/v1/operations/{operationId}")
-            response_body = json.loads(response.content)
-        response = client.get(f"/v1/operations/{operationId}/result")
-        print(
-            f"{icons.green_dot} The '{warehouse}' warehouse has been created within the '{workspace}' workspace."
-        )
-    else:
-        raise ValueError(
-            f"{icons.red_dot} Failed to create the '{warehouse}' warehouse within the '{workspace}' workspace."
-        )
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    print(
+        f"{icons.green_dot} The '{warehouse}' warehouse has been created within the '{workspace}' workspace."
+    )


 def update_item(
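create_warehouse is the first of several functions in this release (create_custom_pool, assign_workspace_to_capacity, unassign_workspace_from_capacity follow below) that drop their inline 202 polling in favor of lro_wait=True on the REST client call. For reference, a condensed sketch of the polling pattern the flag replaces, with the operation endpoints and header taken from the removed 0.6.0 code above:

    import json
    import time

    def wait_for_operation(client, response):
        # 202 Accepted marks a long-running operation (LRO); poll
        # /v1/operations/{id} until it reports "Succeeded", as 0.6.0 did inline.
        if response.status_code != 202:
            return response
        operation_id = response.headers["x-ms-operation-id"]
        body = json.loads(client.get(f"/v1/operations/{operation_id}").content)
        while body["status"] != "Succeeded":
            time.sleep(3)
            body = json.loads(client.get(f"/v1/operations/{operation_id}").content)
        return client.get(f"/v1/operations/{operation_id}/result")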
@@ -1118,8 +1159,7 @@ def list_relationships(
         A pandas dataframe showing the object level security for the semantic model.
     """

-    if workspace is None:
-        workspace = fabric.resolve_workspace_name()
+    workspace = fabric.resolve_workspace_name(workspace)

     dfR = fabric.list_relationships(dataset=dataset, workspace=workspace)

@@ -1199,9 +1239,10 @@ def list_dataflow_storage_accounts() -> pd.DataFrame:
     )
     client = fabric.PowerBIRestClient()
     response = client.get("/v1.0/myorg/dataflowStorageAccounts")
+    if response.status_code != 200:
+        raise FabricHTTPException(response)

-    for v in response.json()["value"]:
-
+    for v in response.json().get("value", []):
         new_data = {
             "Dataflow Storage Account ID": v.get("id"),
             "Dataflow Storage Account Name": v.get("name"),
1301
1342
 
1302
1343
  client = fabric.FabricRestClient()
1303
1344
  response = client.get(f"/v1/workspaces/{workspace_id}/roleAssignments")
1345
+ if response.status_code != 200:
1346
+ raise FabricHTTPException(response)
1304
1347
 
1305
- for i in response.json()["value"]:
1306
- user_name = i.get("principal", {}).get("displayName")
1307
- role_name = i.get("role")
1308
- user_email = (
1309
- i.get("principal", {}).get("userDetails", {}).get("userPrincipalName")
1310
- )
1311
- user_type = i.get("principal", {}).get("type")
1348
+ responses = pagination(client, response)
1312
1349
 
1313
- new_data = {
1314
- "User Name": user_name,
1315
- "Role Name": role_name,
1316
- "Type": user_type,
1317
- "User Email": user_email,
1318
- }
1319
- df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
1350
+ for r in responses:
1351
+ for i in r.get("value", []):
1352
+ principal = i.get("principal", {})
1353
+ new_data = {
1354
+ "User Name": principal.get("displayName"),
1355
+ "Role Name": i.get("role"),
1356
+ "Type": principal.get("type"),
1357
+ "User Email": principal.get("userDetails", {}).get("userPrincipalName"),
1358
+ }
1359
+ df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
1320
1360
 
1321
1361
  return df
1322
1362
 
@@ -1533,46 +1573,51 @@ def list_shortcuts(
     response = client.get(
         f"/v1/workspaces/{workspace_id}/items/{lakehouse_id}/shortcuts"
     )
-
     if response.status_code != 200:
         raise FabricHTTPException(response)
-    for s in response.json()["value"]:
-        shortcutName = s.get("name")
-        shortcutPath = s.get("path")
-        source = list(s["target"].keys())[0]
-        (
-            sourceLakehouseName,
-            sourceWorkspaceName,
-            sourcePath,
-            connectionId,
-            location,
-            subpath,
-        ) = (None, None, None, None, None, None)
-        if source == "oneLake":
-            sourceLakehouseId = s.get("target", {}).get(source, {}).get("itemId")
-            sourcePath = s.get("target", {}).get(source, {}).get("path")
-            sourceWorkspaceId = s.get("target", {}).get(source, {}).get("workspaceId")
-            sourceWorkspaceName = fabric.resolve_workspace_name(sourceWorkspaceId)
-            sourceLakehouseName = resolve_lakehouse_name(
-                sourceLakehouseId, sourceWorkspaceName
-            )
-        else:
-            connectionId = s.get("target", {}).get(source, {}).get("connectionId")
-            location = s.get("target", {}).get(source, {}).get("location")
-            subpath = s.get("target", {}).get(source, {}).get("subpath")

-        new_data = {
-            "Shortcut Name": shortcutName,
-            "Shortcut Path": shortcutPath,
-            "Source": source,
-            "Source Lakehouse Name": sourceLakehouseName,
-            "Source Workspace Name": sourceWorkspaceName,
-            "Source Path": sourcePath,
-            "Source Connection ID": connectionId,
-            "Source Location": location,
-            "Source SubPath": subpath,
-        }
-        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+    responses = pagination(client, response)
+
+    for r in responses:
+        for s in r.get("value", []):
+            shortcutName = s.get("name")
+            shortcutPath = s.get("path")
+            source = list(s["target"].keys())[0]
+            (
+                sourceLakehouseName,
+                sourceWorkspaceName,
+                sourcePath,
+                connectionId,
+                location,
+                subpath,
+            ) = (None, None, None, None, None, None)
+            if source == "oneLake":
+                sourceLakehouseId = s.get("target", {}).get(source, {}).get("itemId")
+                sourcePath = s.get("target", {}).get(source, {}).get("path")
+                sourceWorkspaceId = (
+                    s.get("target", {}).get(source, {}).get("workspaceId")
+                )
+                sourceWorkspaceName = fabric.resolve_workspace_name(sourceWorkspaceId)
+                sourceLakehouseName = resolve_lakehouse_name(
+                    sourceLakehouseId, sourceWorkspaceName
+                )
+            else:
+                connectionId = s.get("target", {}).get(source, {}).get("connectionId")
+                location = s.get("target", {}).get(source, {}).get("location")
+                subpath = s.get("target", {}).get(source, {}).get("subpath")
+
+            new_data = {
+                "Shortcut Name": shortcutName,
+                "Shortcut Path": shortcutPath,
+                "Source": source,
+                "Source Lakehouse Name": sourceLakehouseName,
+                "Source Workspace Name": sourceWorkspaceName,
+                "Source Path": sourcePath,
+                "Source Connection ID": connectionId,
+                "Source Location": location,
+                "Source SubPath": subpath,
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

     return df

1623
 
@@ -1615,6 +1660,8 @@ def list_custom_pools(workspace: Optional[str] = None) -> pd.DataFrame:

     client = fabric.FabricRestClient()
     response = client.get(f"/v1/workspaces/{workspace_id}/spark/pools")
+    if response.status_code != 200:
+        raise FabricHTTPException(response)

     for i in response.json()["value"]:

@@ -1716,15 +1763,14 @@ def create_custom_pool(

     client = fabric.FabricRestClient()
     response = client.post(
-        f"/v1/workspaces/{workspace_id}/spark/pools", json=request_body
+        f"/v1/workspaces/{workspace_id}/spark/pools", json=request_body, lro_wait=True
     )

-    if response.status_code == 201:
-        print(
-            f"{icons.green_dot} The '{pool_name}' spark pool has been created within the '{workspace}' workspace."
-        )
-    else:
-        raise ValueError(f"{icons.red_dot} {response.status_code}")
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    print(
+        f"{icons.green_dot} The '{pool_name}' spark pool has been created within the '{workspace}' workspace."
+    )


 def update_custom_pool(
@@ -1837,7 +1883,7 @@ def update_custom_pool(
     )


-def delete_custom_pool(pool_name: str, workspace: Optional[str | None] = None):
+def delete_custom_pool(pool_name: str, workspace: Optional[str] = None):
     """
     Deletes a `custom pool <https://learn.microsoft.com/fabric/data-engineering/create-custom-spark-pools>`_ within a workspace.

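The signature fix above is purely a typing cleanup: Optional[X] already means "X or None", so Optional[str | None] spelled the None-ness twice. A one-line demonstration:

    from typing import Optional, Union

    # Optional[str] is shorthand for Union[str, None], so the extra "| None"
    # in the 0.6.0 annotation was redundant.
    assert Optional[str] == Union[str, None]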
@@ -1902,15 +1948,16 @@ def assign_workspace_to_capacity(capacity_name: str, workspace: Optional[str] = None):

     client = fabric.FabricRestClient()
     response = client.post(
-        f"/v1/workspaces/{workspace_id}/assignToCapacity", json=request_body
+        f"/v1/workspaces/{workspace_id}/assignToCapacity",
+        json=request_body,
+        lro_wait=True,
     )

-    if response.status_code == 202:
-        print(
-            f"{icons.green_dot} The '{workspace}' workspace has been assigned to the '{capacity_name}' capacity."
-        )
-    else:
-        raise ValueError(f"{icons.red_dot} {response.status_code}")
+    if response.status_code not in [200, 202]:
+        raise FabricHTTPException(response)
+    print(
+        f"{icons.green_dot} The '{workspace}' workspace has been assigned to the '{capacity_name}' capacity."
+    )


 def unassign_workspace_from_capacity(workspace: Optional[str] = None):
  def unassign_workspace_from_capacity(workspace: Optional[str] = None):
@@ -1932,14 +1979,15 @@ def unassign_workspace_from_capacity(workspace: Optional[str] = None):
1932
1979
  (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
1933
1980
 
1934
1981
  client = fabric.FabricRestClient()
1935
- response = client.post(f"/v1/workspaces/{workspace_id}/unassignFromCapacity")
1982
+ response = client.post(
1983
+ f"/v1/workspaces/{workspace_id}/unassignFromCapacity", lro_wait=True
1984
+ )
1936
1985
 
1937
- if response.status_code == 202:
1938
- print(
1939
- f"{icons.green_dot} The '{workspace}' workspace has been unassigned from its capacity."
1940
- )
1941
- else:
1942
- raise ValueError(f"{icons.red_dot} {response.status_code}")
1986
+ if response.status_code not in [200, 202]:
1987
+ raise FabricHTTPException(response)
1988
+ print(
1989
+ f"{icons.green_dot} The '{workspace}' workspace has been unassigned from its capacity."
1990
+ )
1943
1991
 
1944
1992
 
1945
1993
  def get_spark_settings(workspace: Optional[str] = None) -> pd.DataFrame:
@@ -1978,6 +2026,8 @@ def get_spark_settings(workspace: Optional[str] = None) -> pd.DataFrame:

     client = fabric.FabricRestClient()
     response = client.get(f"/v1/workspaces/{workspace_id}/spark/settings")
+    if response.status_code != 200:
+        raise FabricHTTPException(response)

     i = response.json()
     p = i.get("pool")
@@ -2249,18 +2299,22 @@ def list_workspace_users(workspace: Optional[str] = None) -> pd.DataFrame:
     df = pd.DataFrame(columns=["User Name", "Email Address", "Role", "Type", "User ID"])
     client = fabric.FabricRestClient()
     response = client.get(f"/v1/workspaces/{workspace_id}/roleAssignments")
+    if response.status_code != 200:
+        raise FabricHTTPException(response)

-    for v in response.json()["value"]:
-        p = v.get("principal", {})
+    responses = pagination(client, response)

-        new_data = {
-            "User Name": p.get("displayName"),
-            "User ID": p.get("id"),
-            "Type": p.get("type"),
-            "Role": v.get("role"),
-            "Email Address": p.get("userDetails", {}).get("userPrincipalName"),
-        }
-        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+    for r in responses:
+        for v in r.get("value", []):
+            p = v.get("principal", {})
+            new_data = {
+                "User Name": p.get("displayName"),
+                "User ID": p.get("id"),
+                "Type": p.get("type"),
+                "Role": v.get("role"),
+                "Email Address": p.get("userDetails", {}).get("userPrincipalName"),
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

     return df

@@ -2324,16 +2378,203 @@ def list_capacities() -> pd.DataFrame:

     client = fabric.PowerBIRestClient()
     response = client.get("/v1.0/myorg/capacities")
+    if response.status_code != 200:
+        raise FabricHTTPException(response)

-    for i in response.json()["value"]:
+    for i in response.json().get("value", []):
         new_data = {
-            "Id": i.get("id", {}).lower(),
-            "Display Name": i.get("displayName", {}),
-            "Sku": i.get("sku", {}),
-            "Region": i.get("region", {}),
-            "State": i.get("state", {}),
+            "Id": i.get("id").lower(),
+            "Display Name": i.get("displayName"),
+            "Sku": i.get("sku"),
+            "Region": i.get("region"),
+            "State": i.get("state"),
             "Admins": [i.get("admins", [])],
         }
         df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)

     return df
+
+
+def get_notebook_definition(
+    notebook_name: str, workspace: Optional[str] = None, decode: Optional[bool] = True
+):
+    """
+    Obtains the notebook definition.
+
+    Parameters
+    ----------
+    notebook_name : str
+        The name of the notebook.
+    workspace : str, default=None
+        The name of the workspace.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    decode : bool, default=True
+        If True, decodes the notebook definition file into .ipynb format.
+        If False, obtains the notebook definition file in base64 format.
+
+    Returns
+    -------
+    ipynb
+        The notebook definition.
+    """
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    dfI = fabric.list_items(workspace=workspace, type="Notebook")
+    dfI_filt = dfI[dfI["Display Name"] == notebook_name]
+
+    if len(dfI_filt) == 0:
+        raise ValueError(
+            f"{icons.red_dot} The '{notebook_name}' notebook does not exist within the '{workspace}' workspace."
+        )
+
+    notebook_id = dfI_filt["Id"].iloc[0]
+    client = fabric.FabricRestClient()
+    response = client.post(
+        f"v1/workspaces/{workspace_id}/notebooks/{notebook_id}/getDefinition",
+    )
+
+    result = lro(client, response).json()
+    df_items = pd.json_normalize(result["definition"]["parts"])
+    df_items_filt = df_items[df_items["path"] == "notebook-content.py"]
+    payload = df_items_filt["payload"].iloc[0]
+
+    if decode:
+        result = _decode_b64(payload)
+    else:
+        result = payload
+
+    return result
+
+
+def import_notebook_from_web(
+    notebook_name: str,
+    url: str,
+    description: Optional[str] = None,
+    workspace: Optional[str] = None,
+):
+    """
+    Creates a new notebook within a workspace based on a Jupyter notebook hosted in the web.
+
+    Parameters
+    ----------
+    notebook_name : str
+        The name of the notebook to be created.
+    url : str
+        The url of the Jupyter Notebook (.ipynb)
+    description : str, default=None
+        The description of the notebook.
+        Defaults to None which does not place a description.
+    workspace : str, default=None
+        The name of the workspace.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    """
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+    client = fabric.FabricRestClient()
+    dfI = fabric.list_items(workspace=workspace, type="Notebook")
+    dfI_filt = dfI[dfI["Display Name"] == notebook_name]
+    if len(dfI_filt) > 0:
+        raise ValueError(
+            f"{icons.red_dot} The '{notebook_name}' already exists within the '{workspace}' workspace."
+        )
+
+    # Fix links to go to the raw github file
+    starting_text = "https://github.com/"
+    starting_text_len = len(starting_text)
+    if url.startswith(starting_text):
+        url = f"https://raw.githubusercontent.com/{url[starting_text_len:]}".replace(
+            "/blob/", "/"
+        )
+
+    response = requests.get(url)
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    file_content = response.content
+    notebook_payload = base64.b64encode(file_content)
+
+    request_body = {
+        "displayName": notebook_name,
+        "definition": {
+            "format": "ipynb",
+            "parts": [
+                {
+                    "path": "notebook-content.py",
+                    "payload": notebook_payload,
+                    "payloadType": "InlineBase64",
+                }
+            ],
+        },
+    }
+    if description is not None:
+        request_body["description"] = description
+
+    response = client.post(f"v1/workspaces/{workspace_id}/notebooks", json=request_body)
+
+    lro(client, response, status_codes=[201, 202])
+
+    print(
+        f"{icons.green_dot} The '{notebook_name}' notebook was created within the '{workspace}' workspace."
+    )
+
+
+def list_reports_using_semantic_model(
+    dataset: str, workspace: Optional[str] = None
+) -> pd.DataFrame:
+    """
+    Shows a list of all the reports (in all workspaces) which use a given semantic model.
+
+    Parameters
+    ----------
+    dataset : str
+        Name of the semantic model.
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the reports which use a given semantic model.
+    """
+
+    df = pd.DataFrame(
+        columns=[
+            "Report Name",
+            "Report Id",
+            "Report Workspace Name",
+            "Report Workspace Id",
+        ]
+    )
+
+    workspace = fabric.resolve_workspace_name(workspace)
+    dataset_id = resolve_dataset_id(dataset, workspace)
+    client = fabric.PowerBIRestClient()
+    response = client.get(
+        f"metadata/relations/downstream/dataset/{dataset_id}?apiVersion=3"
+    )
+
+    response_json = response.json()
+
+    for i in response_json.get("artifacts", []):
+        object_workspace_id = i.get("workspace", {}).get("objectId")
+        object_type = i.get("typeName")
+
+        if object_type == "Report":
+            new_data = {
+                "Report Name": i.get("displayName"),
+                "Report Id": i.get("objectId"),
+                "Report Workspace Name": fabric.resolve_workspace_name(
+                    object_workspace_id
+                ),
+                "Report Workspace Id": object_workspace_id,
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    return df
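The three functions appended at the end of the module are new in 0.7.0. A short usage sketch; the workspace, notebook, and GitHub names are placeholders, and it assumes these functions are re-exported from sempy_labs (the __init__.py changes in this release suggest so, but that file's diff is not shown here):

    import sempy_labs as labs

    # Fetch a notebook definition as decoded .ipynb text (decode=False returns base64):
    nb = labs.get_notebook_definition("My Notebook", workspace="My Workspace")

    # Import a notebook from GitHub. A blob URL such as
    #   https://github.com/user/repo/blob/main/example.ipynb
    # is rewritten internally to
    #   https://raw.githubusercontent.com/user/repo/main/example.ipynb
    # before being downloaded and uploaded as an InlineBase64 definition part.
    labs.import_notebook_from_web(
        notebook_name="Imported Notebook",
        url="https://github.com/user/repo/blob/main/example.ipynb",
        workspace="My Workspace",
    )

    # List every report, across workspaces, bound to a semantic model:
    df = labs.list_reports_using_semantic_model("My Model", workspace="My Workspace")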