semantic-link-labs 0.8.0-py3-none-any.whl → 0.8.2-py3-none-any.whl

This diff shows the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release: this version of semantic-link-labs might be problematic.
Files changed (47)
  1. {semantic_link_labs-0.8.0.dist-info → semantic_link_labs-0.8.2.dist-info}/METADATA +39 -7
  2. {semantic_link_labs-0.8.0.dist-info → semantic_link_labs-0.8.2.dist-info}/RECORD +47 -37
  3. sempy_labs/__init__.py +70 -51
  4. sempy_labs/_ai.py +0 -2
  5. sempy_labs/_capacity_migration.py +1 -2
  6. sempy_labs/_data_pipelines.py +118 -0
  7. sempy_labs/_documentation.py +144 -0
  8. sempy_labs/_eventhouses.py +118 -0
  9. sempy_labs/_eventstreams.py +118 -0
  10. sempy_labs/_generate_semantic_model.py +3 -3
  11. sempy_labs/_git.py +3 -3
  12. sempy_labs/_helper_functions.py +117 -26
  13. sempy_labs/_icons.py +21 -0
  14. sempy_labs/_kql_databases.py +134 -0
  15. sempy_labs/_kql_querysets.py +124 -0
  16. sempy_labs/_list_functions.py +12 -425
  17. sempy_labs/_mirrored_warehouses.py +50 -0
  18. sempy_labs/_ml_experiments.py +122 -0
  19. sempy_labs/_ml_models.py +120 -0
  20. sempy_labs/_model_auto_build.py +0 -4
  21. sempy_labs/_model_bpa.py +11 -11
  22. sempy_labs/_model_bpa_bulk.py +8 -7
  23. sempy_labs/_model_dependencies.py +26 -18
  24. sempy_labs/_notebooks.py +5 -16
  25. sempy_labs/_query_scale_out.py +2 -2
  26. sempy_labs/_refresh_semantic_model.py +7 -19
  27. sempy_labs/_spark.py +10 -10
  28. sempy_labs/_vertipaq.py +16 -18
  29. sempy_labs/_warehouses.py +132 -0
  30. sempy_labs/_workspaces.py +0 -3
  31. sempy_labs/admin/_basic_functions.py +92 -10
  32. sempy_labs/admin/_domains.py +1 -1
  33. sempy_labs/directlake/_directlake_schema_sync.py +1 -1
  34. sempy_labs/directlake/_dl_helper.py +32 -16
  35. sempy_labs/directlake/_guardrails.py +7 -7
  36. sempy_labs/directlake/_update_directlake_partition_entity.py +1 -1
  37. sempy_labs/directlake/_warm_cache.py +1 -1
  38. sempy_labs/lakehouse/_get_lakehouse_tables.py +3 -3
  39. sempy_labs/lakehouse/_lakehouse.py +3 -2
  40. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +5 -0
  41. sempy_labs/report/_generate_report.py +1 -1
  42. sempy_labs/report/_report_bpa.py +13 -3
  43. sempy_labs/report/_reportwrapper.py +14 -16
  44. sempy_labs/tom/_model.py +261 -24
  45. {semantic_link_labs-0.8.0.dist-info → semantic_link_labs-0.8.2.dist-info}/LICENSE +0 -0
  46. {semantic_link_labs-0.8.0.dist-info → semantic_link_labs-0.8.2.dist-info}/WHEEL +0 -0
  47. {semantic_link_labs-0.8.0.dist-info → semantic_link_labs-0.8.2.dist-info}/top_level.txt +0 -0
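
The headline change in 0.8.2 is structural: the catch-all sempy_labs/_list_functions.py (+12 -425) is split into per-item-type modules (_data_pipelines.py, _eventhouses.py, _eventstreams.py, _kql_databases.py, _kql_querysets.py, _mirrored_warehouses.py, _ml_experiments.py, _ml_models.py, _warehouses.py), and the relocated functions are renamed to snake_case. A minimal caller-side sketch, assuming the new functions are re-exported from the package root (the +70 -51 change to sempy_labs/__init__.py suggests this, but the export list is not expanded in this diff; the workspace name is illustrative):

    import sempy_labs as labs

    # 0.8.0 names removed from _list_functions.py: list_warehouses,
    # list_sqlendpoints, list_mirroredwarehouses, list_mlexperiments, ...
    # 0.8.2 snake_case replacements:
    df_endpoints = labs.list_sql_endpoints(workspace="My Workspace")       # renamed
    df_mirrored = labs.list_mirrored_warehouses(workspace="My Workspace")  # moved + renamed
    df_experiments = labs.list_ml_experiments(workspace="My Workspace")    # moved + renamed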
sempy_labs/_list_functions.py:

@@ -5,10 +5,8 @@ from sempy_labs._helper_functions import (
     resolve_lakehouse_id,
     resolve_dataset_id,
     pagination,
-    lro,
     resolve_item_type,
     format_dax_object_name,
-    pagination,
 )
 import pandas as pd
 from typing import Optional
@@ -84,7 +82,7 @@ def get_object_level_security(
 
 
 def list_tables(
-    dataset: str, workspace: Optional[str] = None, extended: Optional[bool] = False
+    dataset: str, workspace: Optional[str] = None, extended: bool = False
 ) -> pd.DataFrame:
     """
     Shows a semantic model's tables and their properties.
@@ -707,9 +705,9 @@ def list_lakehouses(workspace: Optional[str] = None) -> pd.DataFrame:
     return df
 
 
-def list_warehouses(workspace: Optional[str] = None) -> pd.DataFrame:
+def list_sql_endpoints(workspace: Optional[str] = None) -> pd.DataFrame:
     """
-    Shows the warehouses within a workspace.
+    Shows the SQL endpoints within a workspace.
 
     Parameters
     ----------
@@ -721,64 +719,10 @@ def list_warehouses(workspace: Optional[str] = None) -> pd.DataFrame:
     Returns
     -------
     pandas.DataFrame
-        A pandas dataframe showing the warehouses within a workspace.
+        A pandas dataframe showing the SQL endpoints within a workspace.
     """
 
-    df = pd.DataFrame(
-        columns=[
-            "Warehouse Name",
-            "Warehouse ID",
-            "Description",
-            "Connection Info",
-            "Created Date",
-            "Last Updated Time",
-        ]
-    )
-
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    client = fabric.FabricRestClient()
-    response = client.get(f"/v1/workspaces/{workspace_id}/warehouses")
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    responses = pagination(client, response)
-
-    for r in responses:
-        for v in r.get("value", []):
-            prop = v.get("properties", {})
-
-            new_data = {
-                "Warehouse Name": v.get("displayName"),
-                "Warehouse ID": v.get("id"),
-                "Description": v.get("description"),
-                "Connection Info": prop.get("connectionInfo"),
-                "Created Date": prop.get("createdDate"),
-                "Last Updated Time": prop.get("lastUpdatedTime"),
-            }
-            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
-
-    return df
-
-
-def list_sqlendpoints(workspace: Optional[str] = None) -> pd.DataFrame:
-    """
-    Shows the SQL Endpoints within a workspace.
-
-    Parameters
-    ----------
-    workspace : str, default=None
-        The Fabric workspace name.
-        Defaults to None which resolves to the workspace of the attached lakehouse
-        or if no lakehouse attached, resolves to the workspace of the notebook.
-
-    Returns
-    -------
-    pandas.DataFrame
-        A pandas dataframe showing the SQL Endpoints within a workspace.
-    """
-
-    df = pd.DataFrame(columns=["SQL Endpoint ID", "SQL Endpoint Name", "Description"])
+    df = pd.DataFrame(columns=["SQL Endpoint Id", "SQL Endpoint Name", "Description"])
 
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
 
@@ -793,7 +737,7 @@ def list_sqlendpoints(workspace: Optional[str] = None) -> pd.DataFrame:
         for v in r.get("value", []):
 
             new_data = {
-                "SQL Endpoint ID": v.get("id"),
+                "SQL Endpoint Id": v.get("id"),
                 "SQL Endpoint Name": v.get("displayName"),
                 "Description": v.get("description"),
             }
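
Both the function name (list_sqlendpoints → list_sql_endpoints) and the ID column header ("SQL Endpoint ID" → "SQL Endpoint Id") change in this release, so any caller that indexes the returned dataframe by column name will break. A defensive sketch for code that must run against either version; the fallback pattern is illustrative, not part of the library, and assumes the functions are reachable from the package root:

    import sempy_labs as labs

    # Pick whichever function name the installed version exposes.
    list_fn = getattr(labs, "list_sql_endpoints", None) or labs.list_sqlendpoints
    df = list_fn(workspace="My Workspace")

    # Accept either the 0.8.0 or the 0.8.2 column spelling.
    id_col = "SQL Endpoint Id" if "SQL Endpoint Id" in df.columns else "SQL Endpoint ID"
    endpoint_ids = df[id_col].tolist()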
@@ -802,310 +746,6 @@ def list_sqlendpoints(workspace: Optional[str] = None) -> pd.DataFrame:
     return df
 
 
-def list_mirroredwarehouses(workspace: Optional[str] = None) -> pd.DataFrame:
-    """
-    Shows the mirrored warehouses within a workspace.
-
-    Parameters
-    ----------
-    workspace : str, default=None
-        The Fabric workspace name.
-        Defaults to None which resolves to the workspace of the attached lakehouse
-        or if no lakehouse attached, resolves to the workspace of the notebook.
-
-    Returns
-    -------
-    pandas.DataFrame
-        A pandas dataframe showing the mirrored warehouses within a workspace.
-    """
-
-    df = pd.DataFrame(
-        columns=["Mirrored Warehouse", "Mirrored Warehouse ID", "Description"]
-    )
-
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    client = fabric.FabricRestClient()
-    response = client.get(f"/v1/workspaces/{workspace_id}/mirroredWarehouses")
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    responses = pagination(client, response)
-
-    for r in responses:
-        for v in r.get("value", []):
-
-            new_data = {
-                "Mirrored Warehouse": v.get("displayName"),
-                "Mirrored Warehouse ID": v.get("id"),
-                "Description": v.get("description"),
-            }
-            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
-
-    return df
-
-
-def list_kqldatabases(workspace: Optional[str] = None) -> pd.DataFrame:
-    """
-    Shows the KQL databases within a workspace.
-
-    Parameters
-    ----------
-    workspace : str, default=None
-        The Fabric workspace name.
-        Defaults to None which resolves to the workspace of the attached lakehouse
-        or if no lakehouse attached, resolves to the workspace of the notebook.
-
-    Returns
-    -------
-    pandas.DataFrame
-        A pandas dataframe showing the KQL Databases within a workspace.
-    """
-
-    df = pd.DataFrame(
-        columns=[
-            "KQL Database Name",
-            "KQL Database ID",
-            "Description",
-            "Parent Eventhouse Item ID",
-            "Query Service URI",
-            "Ingestion Service URI",
-            "Kusto Database Type",
-        ]
-    )
-
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    client = fabric.FabricRestClient()
-    response = client.get(f"/v1/workspaces/{workspace_id}/kqlDatabases")
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    responses = pagination(client, response)
-
-    for r in responses:
-        for v in r.get("value", []):
-            prop = v.get("properties", {})
-
-            new_data = {
-                "KQL Database Name": v.get("displayName"),
-                "KQL Database ID": v.get("id"),
-                "Description": v.get("description"),
-                "Parent Eventhouse Item ID": prop.get("parentEventhouseItemId"),
-                "Query Service URI": prop.get("queryServiceUri"),
-                "Ingestion Service URI": prop.get("ingestionServiceUri"),
-                "Kusto Database Type": prop.get("kustoDatabaseType"),
-            }
-            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
-
-    return df
-
-
-def list_kqlquerysets(workspace: Optional[str] = None) -> pd.DataFrame:
-    """
-    Shows the KQL Querysets within a workspace.
-
-    Parameters
-    ----------
-    workspace : str, default=None
-        The Fabric workspace name.
-        Defaults to None which resolves to the workspace of the attached lakehouse
-        or if no lakehouse attached, resolves to the workspace of the notebook.
-
-    Returns
-    -------
-    pandas.DataFrame
-        A pandas dataframe showing the KQL Querysets within a workspace.
-    """
-
-    df = pd.DataFrame(columns=["KQL Queryset Name", "KQL Queryset ID", "Description"])
-
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    client = fabric.FabricRestClient()
-    response = client.get(f"/v1/workspaces/{workspace_id}/kqlQuerysets")
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    responses = pagination(client, response)
-
-    for r in responses:
-        for v in r.get("value", []):
-
-            new_data = {
-                "KQL Queryset Name": v.get("displayName"),
-                "KQL Queryset ID": v.get("id"),
-                "Description": v.get("description"),
-            }
-            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
-
-    return df
-
-
-def list_mlmodels(workspace: Optional[str] = None) -> pd.DataFrame:
-    """
-    Shows the ML models within a workspace.
-
-    Parameters
-    ----------
-    workspace : str, default=None
-        The Fabric workspace name.
-        Defaults to None which resolves to the workspace of the attached lakehouse
-        or if no lakehouse attached, resolves to the workspace of the notebook.
-
-    Returns
-    -------
-    pandas.DataFrame
-        A pandas dataframe showing the ML models within a workspace.
-    """
-
-    df = pd.DataFrame(columns=["ML Model Name", "ML Model ID", "Description"])
-
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    client = fabric.FabricRestClient()
-    response = client.get(f"/v1/workspaces/{workspace_id}/mlModels")
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    responses = pagination(client, response)
-
-    for r in responses:
-        for v in r.get("value", []):
-            model_id = v.get("id")
-            modelName = v.get("displayName")
-            desc = v.get("description")
-
-            new_data = {
-                "ML Model Name": modelName,
-                "ML Model ID": model_id,
-                "Description": desc,
-            }
-            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
-
-    return df
-
-
-def list_eventstreams(workspace: Optional[str] = None) -> pd.DataFrame:
-    """
-    Shows the eventstreams within a workspace.
-
-    Parameters
-    ----------
-    workspace : str, default=None
-        The Fabric workspace name.
-        Defaults to None which resolves to the workspace of the attached lakehouse
-        or if no lakehouse attached, resolves to the workspace of the notebook.
-
-    Returns
-    -------
-    pandas.DataFrame
-        A pandas dataframe showing the eventstreams within a workspace.
-    """
-
-    df = pd.DataFrame(columns=["Eventstream Name", "Eventstream ID", "Description"])
-
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    client = fabric.FabricRestClient()
-    response = client.get(f"/v1/workspaces/{workspace_id}/eventstreams")
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    responses = pagination(client, response)
-
-    for r in responses:
-        for v in r.get("value", []):
-            new_data = {
-                "Eventstream Name": v.get("displayName"),
-                "Eventstream ID": v.get("id"),
-                "Description": v.get("description"),
-            }
-            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
-
-    return df
-
-
-def list_datapipelines(workspace: Optional[str] = None) -> pd.DataFrame:
-    """
-    Shows the data pipelines within a workspace.
-
-    Parameters
-    ----------
-    workspace : str, default=None
-        The Fabric workspace name.
-        Defaults to None which resolves to the workspace of the attached lakehouse
-        or if no lakehouse attached, resolves to the workspace of the notebook.
-
-    Returns
-    -------
-    pandas.DataFrame
-        A pandas dataframe showing the data pipelines within a workspace.
-    """
-
-    df = pd.DataFrame(columns=["Data Pipeline Name", "Data Pipeline ID", "Description"])
-
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    client = fabric.FabricRestClient()
-    response = client.get(f"/v1/workspaces/{workspace_id}/dataPipelines")
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    responses = pagination(client, response)
-
-    for r in responses:
-        for v in r.get("value", []):
-            new_data = {
-                "Data Pipeline Name": v.get("displayName"),
-                "Data Pipeline ID": v.get("id"),
-                "Description": v.get("description"),
-            }
-            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
-
-    return df
-
-
-def list_mlexperiments(workspace: Optional[str] = None) -> pd.DataFrame:
-    """
-    Shows the ML experiments within a workspace.
-
-    Parameters
-    ----------
-    workspace : str, default=None
-        The Fabric workspace name.
-        Defaults to None which resolves to the workspace of the attached lakehouse
-        or if no lakehouse attached, resolves to the workspace of the notebook.
-
-    Returns
-    -------
-    pandas.DataFrame
-        A pandas dataframe showing the ML experiments within a workspace.
-    """
-
-    df = pd.DataFrame(columns=["ML Experiment Name", "ML Experiment ID", "Description"])
-
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    client = fabric.FabricRestClient()
-    response = client.get(f"/v1/workspaces/{workspace_id}/mlExperiments")
-    if response.status_code != 200:
-        raise FabricHTTPException(response)
-
-    responses = pagination(client, response)
-
-    for r in responses:
-        for v in r.get("value", []):
-            new_data = {
-                "ML Experiment Name": v.get("displayName"),
-                "ML Experiment ID": v.get("id"),
-                "Description": v.get("description"),
-            }
-            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
-
-    return df
-
-
 def list_datamarts(workspace: Optional[str] = None) -> pd.DataFrame:
     """
     Shows the datamarts within a workspace.
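
Every function removed in this hunk follows the same template: GET a workspace-scoped list endpoint, raise FabricHTTPException on a non-200 status, walk the paginated responses, and flatten each entry of the value array into a dataframe row. The new per-item modules repeat that template rather than sharing a helper. A generic sketch of the pattern, assuming pagination(client, response) returns the parsed JSON pages exactly as it is used throughout this diff; the helper name _list_items_generic is hypothetical and only covers top-level keys, not nested properties:

    import sempy.fabric as fabric
    import pandas as pd
    from sempy.fabric.exceptions import FabricHTTPException
    from sempy_labs._helper_functions import resolve_workspace_name_and_id, pagination

    def _list_items_generic(endpoint: str, columns: dict, workspace=None) -> pd.DataFrame:
        # columns maps dataframe column -> key in each REST "value" entry,
        # e.g. {"Eventstream Name": "displayName", "Eventstream Id": "id"}
        (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
        client = fabric.FabricRestClient()
        response = client.get(f"/v1/workspaces/{workspace_id}/{endpoint}")
        if response.status_code != 200:
            raise FabricHTTPException(response)
        rows = [
            {col: v.get(key) for col, key in columns.items()}
            for r in pagination(client, response)
            for v in r.get("value", [])
        ]
        return pd.DataFrame(rows, columns=list(columns.keys()))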
@@ -1146,43 +786,6 @@ def list_datamarts(workspace: Optional[str] = None) -> pd.DataFrame:
     return df
 
 
-def create_warehouse(
-    warehouse: str, description: Optional[str] = None, workspace: Optional[str] = None
-):
-    """
-    Creates a Fabric warehouse.
-
-    Parameters
-    ----------
-    warehouse: str
-        Name of the warehouse.
-    description : str, default=None
-        A description of the warehouse.
-    workspace : str, default=None
-        The Fabric workspace name.
-        Defaults to None which resolves to the workspace of the attached lakehouse
-        or if no lakehouse attached, resolves to the workspace of the notebook.
-    """
-
-    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    request_body = {"displayName": warehouse}
-
-    if description:
-        request_body["description"] = description
-
-    client = fabric.FabricRestClient()
-    response = client.post(
-        f"/v1/workspaces/{workspace_id}/warehouses/", json=request_body
-    )
-
-    lro(client, response, status_codes=[201, 202])
-
-    print(
-        f"{icons.green_dot} The '{warehouse}' warehouse has been created within the '{workspace}' workspace."
-    )
-
-
 def update_item(
     item_type: str,
     current_name: str,
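
create_warehouse is removed here but not dropped from the package: per the file list it moves to the new sempy_labs/_warehouses.py (+132 lines, not expanded in this diff). Assuming it keeps its 0.8.0 signature in the new module and remains re-exported from the package root, a call would look like this (all names below are illustrative):

    from sempy_labs import create_warehouse

    create_warehouse(
        warehouse="SalesDW",
        description="Warehouse for sales reporting",
        workspace="My Workspace",
    )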
@@ -1210,27 +813,14 @@ def update_item(
     """
 
     (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    itemTypes = {
-        "DataPipeline": "dataPipelines",
-        "Eventstream": "eventstreams",
-        "KQLDatabase": "kqlDatabases",
-        "KQLQueryset": "kqlQuerysets",
-        "Lakehouse": "lakehouses",
-        "MLExperiment": "mlExperiments",
-        "MLModel": "mlModels",
-        "Notebook": "notebooks",
-        "Warehouse": "warehouses",
-    }
-
     item_type = item_type.replace(" ", "").capitalize()
 
-    if item_type not in itemTypes.keys():
+    if item_type not in icons.itemTypes.keys():
         raise ValueError(
             f"{icons.red_dot} The '{item_type}' is not a valid item type. "
         )
 
-    itemType = itemTypes[item_type]
+    itemType = icons.itemTypes[item_type]
 
     dfI = fabric.list_items(workspace=workspace, type=item_type)
     dfI_filt = dfI[(dfI["Display Name"] == current_name)]
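
The hard-coded itemTypes mapping moves out of update_item and into sempy_labs/_icons.py (+21 lines), where every module can share it. Based on the dict deleted above, icons.itemTypes presumably carries the same item-type-to-endpoint mapping; the 0.8.2 version may add entries, since only the removal side is shown here:

    # sempy_labs/_icons.py (reconstructed from the dict removed above)
    itemTypes = {
        "DataPipeline": "dataPipelines",
        "Eventstream": "eventstreams",
        "KQLDatabase": "kqlDatabases",
        "KQLQueryset": "kqlQuerysets",
        "Lakehouse": "lakehouses",
        "MLExperiment": "mlExperiments",
        "MLModel": "mlModels",
        "Notebook": "notebooks",
        "Warehouse": "warehouses",
    }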
@@ -1264,7 +854,7 @@ def update_item(
 
 
 def list_relationships(
-    dataset: str, workspace: Optional[str] = None, extended: Optional[bool] = False
+    dataset: str, workspace: Optional[str] = None, extended: bool = False
 ) -> pd.DataFrame:
     """
     Shows a semantic model's relationships and their properties.
@@ -1688,9 +1278,6 @@ def list_capacities() -> pd.DataFrame:
     """
     Shows the capacities and their properties.
 
-    Parameters
-    ----------
-
     Returns
     -------
     pandas.DataFrame
@@ -1778,7 +1365,7 @@ def list_reports_using_semantic_model(
 
 
 def list_report_semantic_model_objects(
-    dataset: str, workspace: Optional[str] = None, extended: Optional[bool] = False
+    dataset: str, workspace: Optional[str] = None, extended: bool = False
 ) -> pd.DataFrame:
     """
     Shows a list of semantic model objects (i.e. columns, measures, hierarchies) used in all reports which feed data from
@@ -1872,8 +1459,8 @@ def list_report_semantic_model_objects(
 def list_semantic_model_object_report_usage(
     dataset: str,
     workspace: Optional[str] = None,
-    include_dependencies: Optional[bool] = False,
-    extended: Optional[bool] = False,
+    include_dependencies: bool = False,
+    extended: bool = False,
 ) -> pd.DataFrame:
     """
     Shows a list of semantic model objects and how many times they are referenced in all reports which rely on this semantic model.
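
The signature changes in the last three hunks are all the same fix: Optional[bool] = False becomes bool = False. Optional[X] declares that None is an accepted value, and these function bodies never handle None, so the old annotation overstated the contract. A minimal illustration (function names are hypothetical):

    from typing import Optional

    def f_old(extended: Optional[bool] = False): ...  # 0.8.0: claims None is allowed
    def f_new(extended: bool = False): ...            # 0.8.2: the flag is always a bool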
sempy_labs/_mirrored_warehouses.py (new file):

@@ -0,0 +1,50 @@
+import sempy.fabric as fabric
+import pandas as pd
+from typing import Optional
+from sempy_labs._helper_functions import (
+    resolve_workspace_name_and_id,
+    pagination,
+)
+from sempy.fabric.exceptions import FabricHTTPException
+
+
+def list_mirrored_warehouses(workspace: Optional[str] = None) -> pd.DataFrame:
+    """
+    Shows the mirrored warehouses within a workspace.
+
+    Parameters
+    ----------
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the mirrored warehouses within a workspace.
+    """
+
+    df = pd.DataFrame(
+        columns=["Mirrored Warehouse Name", "Mirrored Warehouse Id", "Description"]
+    )
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    client = fabric.FabricRestClient()
+    response = client.get(f"/v1/workspaces/{workspace_id}/mirroredWarehouses")
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+    responses = pagination(client, response)
+
+    for r in responses:
+        for v in r.get("value", []):
+
+            new_data = {
+                "Mirrored Warehouse Name": v.get("displayName"),
+                "Mirrored Warehouse Id": v.get("id"),
+                "Description": v.get("description"),
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    return df
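
A minimal usage sketch for the new module; the workspace name is illustrative, and the root-level re-export is assumed from the __init__.py change:

    from sempy_labs import list_mirrored_warehouses

    df = list_mirrored_warehouses(workspace="My Workspace")
    print(df[["Mirrored Warehouse Name", "Mirrored Warehouse Id"]])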
sempy_labs/_ml_experiments.py (new file):

@@ -0,0 +1,122 @@
+import sempy.fabric as fabric
+import pandas as pd
+import sempy_labs._icons as icons
+from typing import Optional
+from sempy_labs._helper_functions import (
+    resolve_workspace_name_and_id,
+    lro,
+    pagination,
+)
+from sempy.fabric.exceptions import FabricHTTPException
+
+
+def list_ml_experiments(workspace: Optional[str] = None) -> pd.DataFrame:
+    """
+    Shows the ML experiments within a workspace.
+
+    Parameters
+    ----------
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+
+    Returns
+    -------
+    pandas.DataFrame
+        A pandas dataframe showing the ML experiments within a workspace.
+    """
+
+    df = pd.DataFrame(columns=["ML Experiment Name", "ML Experiment Id", "Description"])
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    client = fabric.FabricRestClient()
+    response = client.get(f"/v1/workspaces/{workspace_id}/mlExperiments")
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    responses = pagination(client, response)
+
+    for r in responses:
+        for v in r.get("value", []):
+            model_id = v.get("id")
+            modelName = v.get("displayName")
+            desc = v.get("description")
+
+            new_data = {
+                "ML Experiment Name": modelName,
+                "ML Experiment Id": model_id,
+                "Description": desc,
+            }
+            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
+
+    return df
+
+
+def create_ml_experiment(
+    name: str, description: Optional[str] = None, workspace: Optional[str] = None
+):
+    """
+    Creates a Fabric ML experiment.
+
+    Parameters
+    ----------
+    name: str
+        Name of the ML experiment.
+    description : str, default=None
+        A description of the ML experiment.
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    request_body = {"displayName": name}
+
+    if description:
+        request_body["description"] = description
+
+    client = fabric.FabricRestClient()
+    response = client.post(
+        f"/v1/workspaces/{workspace_id}/mlExperiments", json=request_body
+    )
+
+    lro(client, response, status_codes=[201, 202])
+
+    print(
+        f"{icons.green_dot} The '{name}' ML experiment has been created within the '{workspace}' workspace."
+    )
+
+
+def delete_ml_experiment(name: str, workspace: Optional[str] = None):
+    """
+    Deletes a Fabric ML experiment.
+
+    Parameters
+    ----------
+    name: str
+        Name of the ML experiment.
+    workspace : str, default=None
+        The Fabric workspace name.
+        Defaults to None which resolves to the workspace of the attached lakehouse
+        or if no lakehouse attached, resolves to the workspace of the notebook.
+    """
+
+    (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+    item_id = fabric.resolve_item_id(
+        item_name=name, type="MLExperiment", workspace=workspace
+    )
+
+    client = fabric.FabricRestClient()
+    response = client.delete(f"/v1/workspaces/{workspace_id}/mlExperiments/{item_id}")
+
+    if response.status_code != 200:
+        raise FabricHTTPException(response)
+
+    print(
+        f"{icons.green_dot} The '{name}' ML experiment within the '{workspace}' workspace has been deleted."
+    )