semantic-link-labs 0.9.4__py3-none-any.whl → 0.9.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (71)
  1. {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/METADATA +19 -2
  2. {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/RECORD +71 -64
  3. {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +33 -4
  5. sempy_labs/_capacities.py +59 -128
  6. sempy_labs/_capacity_migration.py +19 -21
  7. sempy_labs/_connections.py +2 -4
  8. sempy_labs/_dashboards.py +60 -0
  9. sempy_labs/_data_pipelines.py +5 -31
  10. sempy_labs/_dataflows.py +2 -2
  11. sempy_labs/_dax_query_view.py +55 -0
  12. sempy_labs/_delta_analyzer.py +16 -14
  13. sempy_labs/_environments.py +28 -49
  14. sempy_labs/_eventhouses.py +27 -53
  15. sempy_labs/_eventstreams.py +16 -34
  16. sempy_labs/_external_data_shares.py +4 -10
  17. sempy_labs/_gateways.py +4 -4
  18. sempy_labs/_generate_semantic_model.py +2 -2
  19. sempy_labs/_git.py +90 -1
  20. sempy_labs/_graphQL.py +8 -21
  21. sempy_labs/_helper_functions.py +440 -91
  22. sempy_labs/_kql_databases.py +24 -35
  23. sempy_labs/_kql_querysets.py +15 -32
  24. sempy_labs/_list_functions.py +17 -192
  25. sempy_labs/_managed_private_endpoints.py +9 -2
  26. sempy_labs/_mirrored_databases.py +17 -49
  27. sempy_labs/_ml_experiments.py +6 -31
  28. sempy_labs/_ml_models.py +4 -28
  29. sempy_labs/_model_bpa.py +4 -11
  30. sempy_labs/_model_bpa_bulk.py +23 -27
  31. sempy_labs/_mounted_data_factories.py +119 -0
  32. sempy_labs/_notebooks.py +16 -26
  33. sempy_labs/_one_lake_integration.py +2 -1
  34. sempy_labs/_semantic_models.py +20 -0
  35. sempy_labs/_sql.py +13 -8
  36. sempy_labs/_sqldatabase.py +61 -100
  37. sempy_labs/_utils.py +42 -0
  38. sempy_labs/_vertipaq.py +25 -13
  39. sempy_labs/_warehouses.py +19 -20
  40. sempy_labs/_workloads.py +23 -9
  41. sempy_labs/_workspace_identity.py +6 -0
  42. sempy_labs/_workspaces.py +55 -7
  43. sempy_labs/admin/__init__.py +21 -1
  44. sempy_labs/admin/_apps.py +1 -1
  45. sempy_labs/admin/_artifacts.py +62 -0
  46. sempy_labs/admin/_basic_functions.py +3 -54
  47. sempy_labs/admin/_capacities.py +61 -0
  48. sempy_labs/admin/_reports.py +74 -0
  49. sempy_labs/admin/_scanner.py +2 -2
  50. sempy_labs/admin/_shared.py +4 -2
  51. sempy_labs/admin/_users.py +133 -0
  52. sempy_labs/admin/_workspaces.py +148 -0
  53. sempy_labs/directlake/_directlake_schema_compare.py +2 -1
  54. sempy_labs/directlake/_directlake_schema_sync.py +65 -19
  55. sempy_labs/directlake/_dl_helper.py +0 -6
  56. sempy_labs/directlake/_generate_shared_expression.py +10 -11
  57. sempy_labs/directlake/_guardrails.py +2 -1
  58. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +12 -25
  59. sempy_labs/directlake/_update_directlake_partition_entity.py +11 -3
  60. sempy_labs/lakehouse/__init__.py +2 -0
  61. sempy_labs/lakehouse/_lakehouse.py +6 -7
  62. sempy_labs/lakehouse/_shortcuts.py +198 -57
  63. sempy_labs/migration/_migration_validation.py +0 -4
  64. sempy_labs/report/_download_report.py +4 -6
  65. sempy_labs/report/_generate_report.py +15 -23
  66. sempy_labs/report/_report_bpa.py +12 -19
  67. sempy_labs/report/_report_functions.py +2 -1
  68. sempy_labs/report/_report_rebind.py +8 -6
  69. sempy_labs/tom/_model.py +34 -16
  70. {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/LICENSE +0 -0
  71. {semantic_link_labs-0.9.4.dist-info → semantic_link_labs-0.9.6.dist-info}/top_level.txt +0 -0
sempy_labs/_kql_databases.py

@@ -1,13 +1,14 @@
-import sempy.fabric as fabric
 import pandas as pd
-import sempy_labs._icons as icons
 from typing import Optional
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _base_api,
     _create_dataframe,
+    delete_item,
+    create_item,
 )
 from uuid import UUID
+import sempy_labs._icons as icons
 
 
 def list_kql_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
@@ -16,6 +17,8 @@ def list_kql_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
 
     This is a wrapper function for the following API: `Items - List KQL Databases <https://learn.microsoft.com/rest/api/fabric/kqldatabase/items/list-kql-databases>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     workspace : str | uuid.UUID, default=None
@@ -43,7 +46,9 @@ def list_kql_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     responses = _base_api(
-        request=f"v1/workspaces/{workspace_id}/kqlDatabases", uses_pagination=True
+        request=f"v1/workspaces/{workspace_id}/kqlDatabases",
+        uses_pagination=True,
+        client="fabric_sp",
     )
 
     for r in responses:
@@ -64,7 +69,7 @@ def list_kql_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     return df
 
 
-def create_kql_database(
+def _create_kql_database(
     name: str, description: Optional[str] = None, workspace: Optional[str | UUID] = None
 ):
     """
@@ -84,27 +89,16 @@ def create_kql_database(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    payload = {"displayName": name}
-
-    if description:
-        payload["description"] = description
-
-    _base_api(
-        request=f"v1/workspaces/{workspace_id}/kqlDatabases",
-        method="post",
-        payload=payload,
-        status_codes=[201, 202],
-        lro_return_status_code=True,
-    )
-
-    print(
-        f"{icons.green_dot} The '{name}' KQL database has been created within the '{workspace_name}' workspace."
+    create_item(
+        name=name, description=description, type="KQLDatabase", workspace=workspace
     )
 
 
-def delete_kql_database(name: str, workspace: Optional[str | UUID] = None):
+def delete_kql_database(
+    kql_database: str | UUID,
+    workspace: Optional[str | UUID] = None,
+    **kwargs,
+):
     """
     Deletes a KQL database.
 
@@ -112,23 +106,18 @@ def delete_kql_database(name: str, workspace: Optional[str | UUID] = None):
 
     Parameters
     ----------
-    name: str
-        Name of the KQL database.
+    kql_database: str | uuid.UUID
+        Name or ID of the KQL database.
     workspace : str | uuid.UUID, default=None
         The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    kql_database_id = fabric.resolve_item_id(
-        item_name=name, type="KQLDatabase", workspace=workspace_id
-    )
+    if "name" in kwargs:
+        kql_database = kwargs["name"]
+        print(
+            f"{icons.warning} The 'name' parameter is deprecated. Please use 'kql_database' instead."
+        )
 
-    _base_api(
-        request=f"/v1/workspaces/{workspace_id}/kqlDatabases/{kql_database_id}",
-        method="delete",
-    )
-    print(
-        f"{icons.green_dot} The '{name}' KQL database within the '{workspace_name}' workspace has been deleted."
-    )
+    delete_item(item=kql_database, type="KQLDatabase", workspace=workspace)
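
The net change for callers of this module: create_kql_database is now private (_create_kql_database), and delete_kql_database takes the database itself (name or UUID) as its first parameter, with the legacy name keyword intercepted by the **kwargs shim above. A minimal usage sketch of the new signature, assuming the function is still re-exported at the package root as in earlier releases (database and workspace names below are placeholders):

    import sempy_labs as labs
    from uuid import UUID

    # 0.9.6: the first parameter is 'kql_database' and accepts a name or a UUID.
    labs.delete_kql_database("SalesTelemetry", workspace="Analytics")
    labs.delete_kql_database(
        UUID("12345678-0000-0000-0000-000000000000"), workspace="Analytics"
    )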
sempy_labs/_kql_querysets.py

@@ -1,4 +1,3 @@
-import sempy.fabric as fabric
 import pandas as pd
 import sempy_labs._icons as icons
 from typing import Optional
@@ -6,6 +5,8 @@ from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     _base_api,
     _create_dataframe,
+    delete_item,
+    create_item,
 )
 from uuid import UUID
 
@@ -74,27 +75,14 @@ def create_kql_queryset(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    payload = {"displayName": name}
-
-    if description:
-        payload["description"] = description
-
-    _base_api(
-        request=f"v1/workspaces/{workspace_id}/kqlQuerysets",
-        method="post",
-        payload=payload,
-        status_codes=[201, 202],
-        lro_return_status_code=True,
-    )
-
-    print(
-        f"{icons.green_dot} The '{name}' KQL queryset has been created within the '{workspace_name}' workspace."
+    create_item(
+        name=name, description=description, type="KQLQueryset", workspace=workspace
     )
 
 
-def delete_kql_queryset(name: str, workspace: Optional[str | UUID] = None):
+def delete_kql_queryset(
+    kql_queryset: str | UUID, workspace: Optional[str | UUID] = None, **kwargs
+):
     """
     Deletes a KQL queryset.
 
@@ -102,23 +90,18 @@ def delete_kql_queryset(name: str, workspace: Optional[str | UUID] = None):
 
     Parameters
     ----------
-    name: str
-        Name of the KQL queryset.
+    kql_queryset: str | uuid.UUID
+        Name or ID of the KQL queryset.
     workspace : str | uuid.UUID, default=None
         The Fabric workspace name or ID.
         Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    kql_database_id = fabric.resolve_item_id(
-        item_name=name, type="KQLQueryset", workspace=workspace_id
-    )
+    if "name" in kwargs:
+        kql_queryset = kwargs["name"]
+        print(
+            f"{icons.warning} The 'name' parameter is deprecated. Please use 'kql_queryset' instead."
+        )
 
-    _base_api(
-        request=f"/v1/workspaces/{workspace_id}/kqlQuerysets/{kql_database_id}",
-        method="delete",
-    )
-    print(
-        f"{icons.green_dot} The '{name}' KQL queryset within the '{workspace_name}' workspace has been deleted."
-    )
+    delete_item(item=kql_queryset, type="KQLQueryset", workspace=workspace)
sempy_labs/_list_functions.py

@@ -2,8 +2,6 @@ import sempy.fabric as fabric
 from sempy_labs._helper_functions import (
     resolve_workspace_name_and_id,
     create_relationship_name,
-    resolve_lakehouse_id,
-    resolve_item_type,
     format_dax_object_name,
     resolve_dataset_name_and_id,
     _update_dataframe_datatypes,
@@ -534,7 +532,6 @@ def list_columns(
     from sempy_labs.directlake._get_directlake_lakehouse import (
         get_direct_lake_lakehouse,
     )
-    from pyspark.sql import SparkSession
 
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
@@ -604,61 +601,12 @@ def list_columns(
     return dfC
 
 
-def list_dashboards(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
-    """
-    Shows a list of the dashboards within a workspace.
-
-    Parameters
-    ----------
-    workspace : str | uuid.UUID, default=None
-        The Fabric workspace name or ID.
-        Defaults to None which resolves to the workspace of the attached lakehouse
-        or if no lakehouse attached, resolves to the workspace of the notebook.
-
-    Returns
-    -------
-    pandas.DataFrame
-        A pandas dataframe showing the dashboards within a workspace.
-    """
-
-    columns = {
-        "Dashboard ID": "string",
-        "Dashboard Name": "string",
-        "Read Only": "bool",
-        "Web URL": "string",
-        "Embed URL": "string",
-        "Data Classification": "string",
-        "Users": "string",
-        "Subscriptions": "string",
-    }
-    df = _create_dataframe(columns=columns)
-
-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    response = _base_api(request=f"/v1.0/myorg/groups/{workspace_id}/dashboards")
-
-    for v in response.json().get("value", []):
-        new_data = {
-            "Dashboard ID": v.get("id"),
-            "Dashboard Name": v.get("displayName"),
-            "Read Only": v.get("isReadOnly"),
-            "Web URL": v.get("webUrl"),
-            "Embed URL": v.get("embedUrl"),
-            "Data Classification": v.get("dataClassification"),
-            "Users": v.get("users"),
-            "Subscriptions": v.get("subscriptions"),
-        }
-        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
-
-    _update_dataframe_datatypes(dataframe=df, column_map=columns)
-
-    return df
-
-
 def list_lakehouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     """
     Shows the lakehouses within a workspace.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     workspace : str | uuid.UUID, default=None
@@ -687,7 +635,9 @@ def list_lakehouses(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
 
     responses = _base_api(
-        request=f"/v1/workspaces/{workspace_id}/lakehouses", uses_pagination=True
+        request=f"/v1/workspaces/{workspace_id}/lakehouses",
+        uses_pagination=True,
+        client="fabric_sp",
     )
 
     for r in responses:
@@ -1189,11 +1139,15 @@ def list_semantic_model_objects(
 
 
 def list_shortcuts(
-    lakehouse: Optional[str] = None, workspace: Optional[str | UUID] = None
+    lakehouse: Optional[str] = None,
+    workspace: Optional[str | UUID] = None,
+    path: Optional[str] = None,
 ) -> pd.DataFrame:
     """
     Shows all shortcuts which exist in a Fabric lakehouse and their properties.
 
+    *** NOTE: This function has been moved to the lakehouse subpackage. Please repoint your code to use that location. ***
+
     Parameters
     ----------
     lakehouse : str, default=None
@@ -1203,6 +1157,9 @@ def list_shortcuts(
         The name or ID of the Fabric workspace in which lakehouse resides.
         Defaults to None which resolves to the workspace of the attached lakehouse
         or if no lakehouse attached, resolves to the workspace of the notebook.
+    path: str, default=None
+        The path within the lakehouse in which to look for shortcuts. If provided, it must start with either "Files" or "Tables". Examples: Tables/FolderName/SubFolderName; Files/FolderName/SubFolderName.
+        Defaults to None, which returns all shortcuts on the given lakehouse.
 
     Returns
     -------
@@ -1210,145 +1167,13 @@ def list_shortcuts(
         A pandas dataframe showing all the shortcuts which exist in the specified lakehouse.
     """
 
-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+    from sempy_labs.lakehouse._shortcuts import list_shortcuts
 
-    if lakehouse is None:
-        lakehouse_id = fabric.get_lakehouse_id()
-    else:
-        lakehouse_id = resolve_lakehouse_id(lakehouse, workspace_id)
-
-    columns = {
-        "Shortcut Name": "string",
-        "Shortcut Path": "string",
-        "Source Type": "string",
-        "Source Workspace Id": "string",
-        "Source Workspace Name": "string",
-        "Source Item Id": "string",
-        "Source Item Name": "string",
-        "Source Item Type": "string",
-        "OneLake Path": "string",
-        "Connection Id": "string",
-        "Location": "string",
-        "Bucket": "string",
-        "SubPath": "string",
-    }
-    df = _create_dataframe(columns=columns)
-
-    responses = _base_api(
-        request=f"/v1/workspaces/{workspace_id}/items/{lakehouse_id}/shortcuts",
-        uses_pagination=True,
+    print(
+        f"{icons.warning} This function has been moved to the lakehouse subpackage. Please repoint your code to use that location."
     )
 
-    sources = [
-        "s3Compatible",
-        "googleCloudStorage",
-        "externalDataShare",
-        "amazonS3",
-        "adlsGen2",
-        "dataverse",
-    ]
-    sources_locpath = ["s3Compatible", "googleCloudStorage", "amazonS3", "adlsGen2"]
-
-    for r in responses:
-        for i in r.get("value", []):
-            tgt = i.get("target", {})
-            one_lake = tgt.get("oneLake", {})
-            connection_id = next(
-                (
-                    tgt.get(source, {}).get("connectionId")
-                    for source in sources
-                    if tgt.get(source)
-                ),
-                None,
-            )
-            location = next(
-                (
-                    tgt.get(source, {}).get("location")
-                    for source in sources_locpath
-                    if tgt.get(source)
-                ),
-                None,
-            )
-            sub_path = next(
-                (
-                    tgt.get(source, {}).get("subpath")
-                    for source in sources_locpath
-                    if tgt.get(source)
-                ),
-                None,
-            )
-            source_workspace_id = one_lake.get("workspaceId")
-            source_item_id = one_lake.get("itemId")
-            source_workspace_name = (
-                fabric.resolve_workspace_name(source_workspace_id)
-                if source_workspace_id is not None
-                else None
-            )
-
-            new_data = {
-                "Shortcut Name": i.get("name"),
-                "Shortcut Path": i.get("path"),
-                "Source Type": tgt.get("type"),
-                "Source Workspace Id": source_workspace_id,
-                "Source Workspace Name": source_workspace_name,
-                "Source Item Id": source_item_id,
-                "Source Item Name": (
-                    fabric.resolve_item_name(
-                        source_item_id, workspace=source_workspace_name
-                    )
-                    if source_item_id is not None
-                    else None
-                ),
-                "Source Item Type": (
-                    resolve_item_type(source_item_id, workspace=source_workspace_name)
-                    if source_item_id is not None
-                    else None
-                ),
-                "OneLake Path": one_lake.get("path"),
-                "Connection Id": connection_id,
-                "Location": location,
-                "Bucket": tgt.get("s3Compatible", {}).get("bucket"),
-                "SubPath": sub_path,
-            }
-            df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
-
-    return df
-
-
-def list_capacities() -> pd.DataFrame:
-    """
-    Shows the capacities and their properties.
-
-    Returns
-    -------
-    pandas.DataFrame
-        A pandas dataframe showing the capacities and their properties
-    """
-
-    columns = {
-        "Id": "string",
-        "Display Name": "string",
-        "Sku": "string",
-        "Region": "string",
-        "State": "string",
-        "Admins": "string",
-    }
-    df = _create_dataframe(columns=columns)
-
-    response = _base_api(request="/v1.0/myorg/capacities")
-
-    for i in response.json().get("value", []):
-        new_data = {
-            "Id": i.get("id").lower(),
-            "Display Name": i.get("displayName"),
-            "Sku": i.get("sku"),
-            "Region": i.get("region"),
-            "State": i.get("state"),
-            "Admins": [i.get("admins", [])],
-        }
-        df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
-
-    return df
+    return list_shortcuts(lakehouse=lakehouse, workspace=workspace, path=path)
 
 
 def list_reports_using_semantic_model(
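
For callers of list_shortcuts, the practical changes are the new home in the lakehouse subpackage and the optional path filter. A short sketch importing from the private module the deprecated wrapper itself delegates to (lakehouse, workspace, and folder names are placeholders; the column layout of the rewritten implementation in sempy_labs/lakehouse/_shortcuts.py may differ from the pre-move schema removed above):

    from sempy_labs.lakehouse._shortcuts import list_shortcuts

    # Only shortcuts under Tables/Staging; omit 'path' to list every shortcut.
    df = list_shortcuts(lakehouse="Sales", workspace="Analytics", path="Tables/Staging")
    print(df)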
sempy_labs/_managed_private_endpoints.py

@@ -23,6 +23,8 @@ def create_managed_private_endpoint(
 
     This is a wrapper function for the following API: `Managed Private Endpoints - Create Workspace Managed Private Endpoint <https://learn.microsoft.com/rest/api/fabric/core/managed-private-endpoints/create-workspace-managed-private-endpoint>`.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     name: str
@@ -60,6 +62,7 @@ def create_managed_private_endpoint(
         status_codes=[201, 202],
         payload=request_body,
         lro_return_status_code=True,
+        client="fabric_sp",
     )
     _print_success(
         item_name=name,
@@ -77,6 +80,8 @@ def list_managed_private_endpoints(
 
     This is a wrapper function for the following API: `Managed Private Endpoints - List Workspace Managed Private Endpoints <https://learn.microsoft.com/rest/api/fabric/core/managed-private-endpoints/list-workspace-managed-private-endpoints>`.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     workspace : str | uuid.UUID, default=None
@@ -106,7 +111,7 @@ def list_managed_private_endpoints(
     responses = _base_api(
         request=f"/v1/workspaces/{workspace_id}/managedPrivateEndpoints",
         uses_pagination=True,
-        status_codes=200,
+        client="fabric_sp",
     )
 
     for r in responses:
@@ -134,6 +139,8 @@ def delete_managed_private_endpoint(
 
     This is a wrapper function for the following API: `Managed Private Endpoints - Delete Workspace Managed Private Endpoint <https://learn.microsoft.com/rest/api/fabric/core/managed-private-endpoints/delete-workspace-managed-private-endpoint>`.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     managed_private_endpoint: str | uuid.UUID
@@ -162,7 +169,7 @@ def delete_managed_private_endpoint(
     _base_api(
         request=f"/v1/workspaces/{workspace_id}/managedPrivateEndpoints/{item_id}",
         method="delete",
-        status_codes=200,
+        client="fabric_sp",
     )
 
     _print_success(
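
The recurring client="fabric_sp" argument is what backs the "Service Principal Authentication is supported" notes added to the docstrings: each wrapper now routes its REST call through a service-principal-capable Fabric client, and the dropped status_codes=200 arguments suggest 200 is simply _base_api's default. A hedged sketch of the resulting call pattern, using only parameters that appear in this diff (the client dispatch inside _base_api is not shown here, and the workspace ID is a placeholder):

    from sempy_labs._helper_functions import _base_api

    workspace_id = "00000000-0000-0000-0000-000000000000"  # placeholder

    # Paginated GET routed through the SP-capable client, as in 0.9.6.
    responses = _base_api(
        request=f"/v1/workspaces/{workspace_id}/managedPrivateEndpoints",
        uses_pagination=True,  # returns a list with one dict per page of results
        client="fabric_sp",
    )
    for r in responses:
        for endpoint in r.get("value", []):
            print(endpoint)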
sempy_labs/_mirrored_databases.py

@@ -1,4 +1,3 @@
-import sempy.fabric as fabric
 import pandas as pd
 from typing import Optional
 from sempy_labs._helper_functions import (
@@ -6,9 +5,11 @@ from sempy_labs._helper_functions import (
     _decode_b64,
     _update_dataframe_datatypes,
     _base_api,
-    _print_success,
     resolve_item_id,
     _create_dataframe,
+    delete_item,
+    create_item,
+    get_item_definition,
 )
 import sempy_labs._icons as icons
 import base64
@@ -21,6 +22,8 @@ def list_mirrored_databases(workspace: Optional[str | UUID] = None) -> pd.DataFr
 
     This is a wrapper function for the following API: `Items - List Mirrored Databases <https://learn.microsoft.com/rest/api/fabric/mirroredwarehouse/items/list-mirrored-databases>`_.
 
+    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
     Parameters
     ----------
     workspace : str | uuid.UUID, default=None
@@ -49,8 +52,8 @@ def list_mirrored_databases(workspace: Optional[str | UUID] = None) -> pd.DataFr
     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
     responses = _base_api(
         request=f"/v1/workspaces/{workspace_id}/mirroredDatabases",
-        status_codes=200,
         uses_pagination=True,
+        client="fabric_sp",
     )
 
     for r in responses:
@@ -92,21 +95,8 @@ def create_mirrored_database(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-
-    payload = {"displayName": name}
-
-    if description:
-        payload["description"] = description
-
-    _base_api(
-        request=f"/v1/workspaces/{workspace_id}/mirroredDatabases",
-        status_codes=201,
-        method="post",
-        payload=payload,
-    )
-    _print_success(
-        item_name=name, item_type="mirrored database", workspace_name=workspace_name
+    create_item(
+        name=name, description=description, type="MirroredDatabase", workspace=workspace
     )
 
 
@@ -128,15 +118,7 @@ def delete_mirrored_database(
         or if no lakehouse attached, resolves to the workspace of the notebook.
     """
 
-    item_id = resolve_item_id(
-        item=mirrored_database, type="MirroredDatabase", workspace=workspace
-    )
-    fabric.delete_item(item_id=item_id, workspace=workspace)
-    _print_success(
-        item_name=mirrored_database,
-        item_type="mirrored database",
-        workspace_name=workspace,
-    )
+    delete_item(item=mirrored_database, type="MirroredDatabase", workspace=workspace)
 
 
 def get_mirroring_status(
@@ -307,7 +289,7 @@ def get_mirrored_database_definition(
     mirrored_database: str | UUID,
     workspace: Optional[str | UUID] = None,
     decode: bool = True,
-) -> str:
+) -> dict:
     """
     Obtains the mirrored database definition.
 
@@ -327,31 +309,17 @@ def get_mirrored_database_definition(
 
     Returns
     -------
-    str
+    dict
         The mirrored database definition.
     """
 
-    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
-    item_id = resolve_item_id(
-        item=mirrored_database, type="MirroredDatabase", workspace=workspace
+    return get_item_definition(
+        item=mirrored_database,
+        type="MirroredDatabase",
+        workspace=workspace,
+        return_dataframe=False,
+        decode=decode,
     )
-    result = _base_api(
-        request=f"/v1/workspaces/{workspace_id}/mirroredDatabases/{item_id}/getDefinition",
-        method="post",
-        status_codes=200,
-        lro_return_json=True,
-    )
-
-    df_items = pd.json_normalize(result["definition"]["parts"])
-    df_items_filt = df_items[df_items["path"] == "mirroredDatabase.json"]
-    payload = df_items_filt["payload"].iloc[0]
-
-    if decode:
-        result = _decode_b64(payload)
-    else:
-        result = payload
-
-    return result
 
 
 def update_mirrored_database_definition(
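
Note the breaking return-type change above: get_mirrored_database_definition is now annotated as returning a dict rather than a str, delegating to the new get_item_definition helper with return_dataframe=False. Callers that previously parsed the returned string themselves will likely need adjusting; a minimal sketch, assuming the function remains re-exported at the package root as in earlier releases (item and workspace names are placeholders):

    import sempy_labs as labs

    # 0.9.4: returned a str (base64-decoded when decode=True).
    # 0.9.6: annotated as dict, so a separate parsing step should no longer apply.
    definition = labs.get_mirrored_database_definition(
        "SalesMirror", workspace="Analytics"
    )
    print(type(definition))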