semantic-link-labs 0.12.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (243)
  1. semantic_link_labs-0.12.8.dist-info/METADATA +354 -0
  2. semantic_link_labs-0.12.8.dist-info/RECORD +243 -0
  3. semantic_link_labs-0.12.8.dist-info/WHEEL +5 -0
  4. semantic_link_labs-0.12.8.dist-info/licenses/LICENSE +21 -0
  5. semantic_link_labs-0.12.8.dist-info/top_level.txt +1 -0
  6. sempy_labs/__init__.py +606 -0
  7. sempy_labs/_a_lib_info.py +2 -0
  8. sempy_labs/_ai.py +437 -0
  9. sempy_labs/_authentication.py +264 -0
  10. sempy_labs/_bpa_translation/_model/_translations_am-ET.po +869 -0
  11. sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +908 -0
  12. sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +968 -0
  13. sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +963 -0
  14. sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +943 -0
  15. sempy_labs/_bpa_translation/_model/_translations_da-DK.po +945 -0
  16. sempy_labs/_bpa_translation/_model/_translations_de-DE.po +988 -0
  17. sempy_labs/_bpa_translation/_model/_translations_el-GR.po +993 -0
  18. sempy_labs/_bpa_translation/_model/_translations_es-ES.po +971 -0
  19. sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +933 -0
  20. sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +942 -0
  21. sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +994 -0
  22. sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +967 -0
  23. sempy_labs/_bpa_translation/_model/_translations_he-IL.po +902 -0
  24. sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +944 -0
  25. sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +963 -0
  26. sempy_labs/_bpa_translation/_model/_translations_id-ID.po +946 -0
  27. sempy_labs/_bpa_translation/_model/_translations_is-IS.po +939 -0
  28. sempy_labs/_bpa_translation/_model/_translations_it-IT.po +986 -0
  29. sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +846 -0
  30. sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +839 -0
  31. sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +967 -0
  32. sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +978 -0
  33. sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +962 -0
  34. sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +962 -0
  35. sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +957 -0
  36. sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +968 -0
  37. sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +964 -0
  38. sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +952 -0
  39. sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +950 -0
  40. sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +942 -0
  41. sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +976 -0
  42. sempy_labs/_bpa_translation/_model/_translations_te-IN.po +947 -0
  43. sempy_labs/_bpa_translation/_model/_translations_th-TH.po +924 -0
  44. sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +953 -0
  45. sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +961 -0
  46. sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +804 -0
  47. sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +969 -0
  48. sempy_labs/_capacities.py +1198 -0
  49. sempy_labs/_capacity_migration.py +660 -0
  50. sempy_labs/_clear_cache.py +351 -0
  51. sempy_labs/_connections.py +610 -0
  52. sempy_labs/_dashboards.py +69 -0
  53. sempy_labs/_data_access_security.py +98 -0
  54. sempy_labs/_data_pipelines.py +162 -0
  55. sempy_labs/_dataflows.py +668 -0
  56. sempy_labs/_dax.py +501 -0
  57. sempy_labs/_daxformatter.py +80 -0
  58. sempy_labs/_delta_analyzer.py +467 -0
  59. sempy_labs/_delta_analyzer_history.py +301 -0
  60. sempy_labs/_dictionary_diffs.py +221 -0
  61. sempy_labs/_documentation.py +147 -0
  62. sempy_labs/_domains.py +51 -0
  63. sempy_labs/_eventhouses.py +182 -0
  64. sempy_labs/_external_data_shares.py +230 -0
  65. sempy_labs/_gateways.py +521 -0
  66. sempy_labs/_generate_semantic_model.py +521 -0
  67. sempy_labs/_get_connection_string.py +84 -0
  68. sempy_labs/_git.py +543 -0
  69. sempy_labs/_graphQL.py +90 -0
  70. sempy_labs/_helper_functions.py +2833 -0
  71. sempy_labs/_icons.py +149 -0
  72. sempy_labs/_job_scheduler.py +609 -0
  73. sempy_labs/_kql_databases.py +149 -0
  74. sempy_labs/_kql_querysets.py +124 -0
  75. sempy_labs/_kusto.py +137 -0
  76. sempy_labs/_labels.py +124 -0
  77. sempy_labs/_list_functions.py +1720 -0
  78. sempy_labs/_managed_private_endpoints.py +253 -0
  79. sempy_labs/_mirrored_databases.py +416 -0
  80. sempy_labs/_mirrored_warehouses.py +60 -0
  81. sempy_labs/_ml_experiments.py +113 -0
  82. sempy_labs/_model_auto_build.py +140 -0
  83. sempy_labs/_model_bpa.py +557 -0
  84. sempy_labs/_model_bpa_bulk.py +378 -0
  85. sempy_labs/_model_bpa_rules.py +859 -0
  86. sempy_labs/_model_dependencies.py +343 -0
  87. sempy_labs/_mounted_data_factories.py +123 -0
  88. sempy_labs/_notebooks.py +441 -0
  89. sempy_labs/_one_lake_integration.py +151 -0
  90. sempy_labs/_onelake.py +131 -0
  91. sempy_labs/_query_scale_out.py +433 -0
  92. sempy_labs/_refresh_semantic_model.py +435 -0
  93. sempy_labs/_semantic_models.py +468 -0
  94. sempy_labs/_spark.py +455 -0
  95. sempy_labs/_sql.py +241 -0
  96. sempy_labs/_sql_audit_settings.py +207 -0
  97. sempy_labs/_sql_endpoints.py +214 -0
  98. sempy_labs/_tags.py +201 -0
  99. sempy_labs/_translations.py +43 -0
  100. sempy_labs/_user_delegation_key.py +44 -0
  101. sempy_labs/_utils.py +79 -0
  102. sempy_labs/_vertipaq.py +1021 -0
  103. sempy_labs/_vpax.py +388 -0
  104. sempy_labs/_warehouses.py +234 -0
  105. sempy_labs/_workloads.py +140 -0
  106. sempy_labs/_workspace_identity.py +72 -0
  107. sempy_labs/_workspaces.py +595 -0
  108. sempy_labs/admin/__init__.py +170 -0
  109. sempy_labs/admin/_activities.py +167 -0
  110. sempy_labs/admin/_apps.py +145 -0
  111. sempy_labs/admin/_artifacts.py +65 -0
  112. sempy_labs/admin/_basic_functions.py +463 -0
  113. sempy_labs/admin/_capacities.py +508 -0
  114. sempy_labs/admin/_dataflows.py +45 -0
  115. sempy_labs/admin/_datasets.py +186 -0
  116. sempy_labs/admin/_domains.py +522 -0
  117. sempy_labs/admin/_external_data_share.py +100 -0
  118. sempy_labs/admin/_git.py +72 -0
  119. sempy_labs/admin/_items.py +265 -0
  120. sempy_labs/admin/_labels.py +211 -0
  121. sempy_labs/admin/_reports.py +241 -0
  122. sempy_labs/admin/_scanner.py +118 -0
  123. sempy_labs/admin/_shared.py +82 -0
  124. sempy_labs/admin/_sharing_links.py +110 -0
  125. sempy_labs/admin/_tags.py +131 -0
  126. sempy_labs/admin/_tenant.py +503 -0
  127. sempy_labs/admin/_tenant_keys.py +89 -0
  128. sempy_labs/admin/_users.py +140 -0
  129. sempy_labs/admin/_workspaces.py +236 -0
  130. sempy_labs/deployment_pipeline/__init__.py +23 -0
  131. sempy_labs/deployment_pipeline/_items.py +580 -0
  132. sempy_labs/directlake/__init__.py +57 -0
  133. sempy_labs/directlake/_autosync.py +58 -0
  134. sempy_labs/directlake/_directlake_schema_compare.py +120 -0
  135. sempy_labs/directlake/_directlake_schema_sync.py +161 -0
  136. sempy_labs/directlake/_dl_helper.py +274 -0
  137. sempy_labs/directlake/_generate_shared_expression.py +94 -0
  138. sempy_labs/directlake/_get_directlake_lakehouse.py +62 -0
  139. sempy_labs/directlake/_get_shared_expression.py +34 -0
  140. sempy_labs/directlake/_guardrails.py +96 -0
  141. sempy_labs/directlake/_list_directlake_model_calc_tables.py +70 -0
  142. sempy_labs/directlake/_show_unsupported_directlake_objects.py +90 -0
  143. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +239 -0
  144. sempy_labs/directlake/_update_directlake_partition_entity.py +259 -0
  145. sempy_labs/directlake/_warm_cache.py +236 -0
  146. sempy_labs/dotnet_lib/dotnet.runtime.config.json +10 -0
  147. sempy_labs/environment/__init__.py +23 -0
  148. sempy_labs/environment/_items.py +212 -0
  149. sempy_labs/environment/_pubstage.py +223 -0
  150. sempy_labs/eventstream/__init__.py +37 -0
  151. sempy_labs/eventstream/_items.py +263 -0
  152. sempy_labs/eventstream/_topology.py +652 -0
  153. sempy_labs/graph/__init__.py +59 -0
  154. sempy_labs/graph/_groups.py +651 -0
  155. sempy_labs/graph/_sensitivity_labels.py +120 -0
  156. sempy_labs/graph/_teams.py +125 -0
  157. sempy_labs/graph/_user_licenses.py +96 -0
  158. sempy_labs/graph/_users.py +516 -0
  159. sempy_labs/graph_model/__init__.py +15 -0
  160. sempy_labs/graph_model/_background_jobs.py +63 -0
  161. sempy_labs/graph_model/_items.py +149 -0
  162. sempy_labs/lakehouse/__init__.py +67 -0
  163. sempy_labs/lakehouse/_blobs.py +247 -0
  164. sempy_labs/lakehouse/_get_lakehouse_columns.py +102 -0
  165. sempy_labs/lakehouse/_get_lakehouse_tables.py +274 -0
  166. sempy_labs/lakehouse/_helper.py +250 -0
  167. sempy_labs/lakehouse/_lakehouse.py +351 -0
  168. sempy_labs/lakehouse/_livy_sessions.py +143 -0
  169. sempy_labs/lakehouse/_materialized_lake_views.py +157 -0
  170. sempy_labs/lakehouse/_partitioning.py +165 -0
  171. sempy_labs/lakehouse/_schemas.py +217 -0
  172. sempy_labs/lakehouse/_shortcuts.py +440 -0
  173. sempy_labs/migration/__init__.py +35 -0
  174. sempy_labs/migration/_create_pqt_file.py +238 -0
  175. sempy_labs/migration/_direct_lake_to_import.py +105 -0
  176. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +398 -0
  177. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +148 -0
  178. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +533 -0
  179. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +172 -0
  180. sempy_labs/migration/_migration_validation.py +71 -0
  181. sempy_labs/migration/_refresh_calc_tables.py +131 -0
  182. sempy_labs/mirrored_azure_databricks_catalog/__init__.py +15 -0
  183. sempy_labs/mirrored_azure_databricks_catalog/_discover.py +213 -0
  184. sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +45 -0
  185. sempy_labs/ml_model/__init__.py +23 -0
  186. sempy_labs/ml_model/_functions.py +427 -0
  187. sempy_labs/report/_BPAReportTemplate.json +232 -0
  188. sempy_labs/report/__init__.py +55 -0
  189. sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
  190. sempy_labs/report/_bpareporttemplate/.platform +11 -0
  191. sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
  192. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
  193. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
  194. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
  195. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
  196. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
  197. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
  198. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
  199. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
  200. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
  201. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
  202. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
  203. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
  204. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
  205. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
  206. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
  207. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
  208. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
  209. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
  210. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
  211. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
  212. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
  213. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
  214. sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
  215. sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
  216. sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
  217. sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
  218. sempy_labs/report/_download_report.py +76 -0
  219. sempy_labs/report/_export_report.py +257 -0
  220. sempy_labs/report/_generate_report.py +427 -0
  221. sempy_labs/report/_paginated.py +76 -0
  222. sempy_labs/report/_report_bpa.py +354 -0
  223. sempy_labs/report/_report_bpa_rules.py +115 -0
  224. sempy_labs/report/_report_functions.py +581 -0
  225. sempy_labs/report/_report_helper.py +227 -0
  226. sempy_labs/report/_report_list_functions.py +110 -0
  227. sempy_labs/report/_report_rebind.py +149 -0
  228. sempy_labs/report/_reportwrapper.py +3100 -0
  229. sempy_labs/report/_save_report.py +147 -0
  230. sempy_labs/snowflake_database/__init__.py +10 -0
  231. sempy_labs/snowflake_database/_items.py +105 -0
  232. sempy_labs/sql_database/__init__.py +21 -0
  233. sempy_labs/sql_database/_items.py +201 -0
  234. sempy_labs/sql_database/_mirroring.py +79 -0
  235. sempy_labs/theme/__init__.py +12 -0
  236. sempy_labs/theme/_org_themes.py +129 -0
  237. sempy_labs/tom/__init__.py +3 -0
  238. sempy_labs/tom/_model.py +5977 -0
  239. sempy_labs/variable_library/__init__.py +19 -0
  240. sempy_labs/variable_library/_functions.py +403 -0
  241. sempy_labs/warehouse/__init__.py +28 -0
  242. sempy_labs/warehouse/_items.py +234 -0
  243. sempy_labs/warehouse/_restore_points.py +309 -0
@@ -0,0 +1,253 @@
1
+ import pandas as pd
2
+ import sempy_labs._icons as icons
3
+ from typing import Optional
4
+ from sempy_labs._helper_functions import (
5
+ resolve_workspace_name_and_id,
6
+ _is_valid_uuid,
7
+ _base_api,
8
+ _print_success,
9
+ _create_dataframe,
10
+ resolve_workspace_id,
11
+ )
12
+ from uuid import UUID
13
+ from sempy._utils._log import log
14
+
15
+
16
@log
def create_managed_private_endpoint(
    name: str,
    target_private_link_resource_id: UUID,
    target_subresource_type: str,
    request_message: Optional[str] = None,
    workspace: Optional[str | UUID] = None,
):
    """
    Creates a managed private endpoint.

    This is a wrapper function for the following API: `Managed Private Endpoints - Create Workspace Managed Private Endpoint <https://learn.microsoft.com/rest/api/fabric/core/managed-private-endpoints/create-workspace-managed-private-endpoint>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    name: str
        Name of the managed private endpoint.
    target_private_link_resource_id: uuid.UUID
        Resource Id of data source for which private endpoint needs to be created.
    target_subresource_type : str
        Sub-resource pointing to Private-link resource.
    request_message : str, default=None
        Message to approve private endpoint request. Should not be more than 140 characters.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

    request_body = {
        "name": name,
        "targetPrivateLinkResourceId": target_private_link_resource_id,
        "targetSubresourceType": target_subresource_type,
    }

    if request_message is not None:
        # Fail fast: the service caps the approval message at 140 characters.
        if len(request_message) > 140:
            raise ValueError(
                f"{icons.red_dot} The request message cannot be more than 140 characters."
            )
        request_body["requestMessage"] = request_message

    # 201 = created synchronously; 202 = accepted as a long-running operation.
    _base_api(
        request=f"/v1/workspaces/{workspace_id}/managedPrivateEndpoints",
        method="post",
        status_codes=[201, 202],
        payload=request_body,
        lro_return_status_code=True,
        client="fabric_sp",
    )
    _print_success(
        item_name=name,
        item_type="managed private endpoint",
        workspace_name=workspace_name,
        action="created",
    )
76
+
77
+
78
@log
def list_managed_private_endpoints(
    workspace: Optional[str | UUID] = None,
) -> pd.DataFrame:
    """
    Shows the managed private endpoints within a workspace.

    This is a wrapper function for the following API: `Managed Private Endpoints - List Workspace Managed Private Endpoints <https://learn.microsoft.com/rest/api/fabric/core/managed-private-endpoints/list-workspace-managed-private-endpoints>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the managed private endpoints within a workspace.
    """

    columns = {
        "Managed Private Endpoint Name": "string",
        "Managed Private Endpoint Id": "string",
        "Target Private Link Resource Id": "string",
        "Provisioning State": "string",
        "Connection Status": "string",
        "Connection Description": "string",
        "Target Subresource Type": "string",
    }
    df = _create_dataframe(columns=columns)

    workspace_id = resolve_workspace_id(workspace)

    responses = _base_api(
        request=f"/v1/workspaces/{workspace_id}/managedPrivateEndpoints",
        uses_pagination=True,
        client="fabric_sp",
    )

    rows = []
    for r in responses:
        for v in r.get("value", []):
            # connectionState is a nested object holding status/description.
            conn = v.get("connectionState", {})
            rows.append(
                {
                    "Managed Private Endpoint Name": v.get("name"),
                    "Managed Private Endpoint Id": v.get("id"),
                    "Target Private Link Resource Id": v.get(
                        "targetPrivateLinkResourceId"
                    ),
                    "Provisioning State": v.get("provisioningState"),
                    "Connection Status": conn.get("status"),
                    "Connection Description": conn.get("description"),
                    "Target Subresource Type": v.get("targetSubresourceType"),
                }
            )

    if rows:
        df = pd.DataFrame(rows, columns=list(columns.keys()))

    return df
143
+
144
+
145
@log
def delete_managed_private_endpoint(
    managed_private_endpoint: str | UUID, workspace: Optional[str | UUID] = None
):
    """
    Deletes a Fabric managed private endpoint.

    This is a wrapper function for the following API: `Managed Private Endpoints - Delete Workspace Managed Private Endpoint <https://learn.microsoft.com/rest/api/fabric/core/managed-private-endpoints/delete-workspace-managed-private-endpoint>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    managed_private_endpoint: str | uuid.UUID
        Name or ID of the managed private endpoint.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

    # Accept either an ID (used as-is) or a name (resolved via the list API).
    if _is_valid_uuid(managed_private_endpoint):
        item_id = managed_private_endpoint
    else:
        df = list_managed_private_endpoints(workspace=workspace)
        df_filt = df[df["Managed Private Endpoint Name"] == managed_private_endpoint]

        if df_filt.empty:
            raise ValueError(
                f"{icons.red_dot} The '{managed_private_endpoint}' managed private endpoint does not exist within the '{workspace_name}' workspace."
            )

        item_id = df_filt["Managed Private Endpoint Id"].iloc[0]

    _base_api(
        request=f"/v1/workspaces/{workspace_id}/managedPrivateEndpoints/{item_id}",
        method="delete",
        client="fabric_sp",
    )

    _print_success(
        item_name=managed_private_endpoint,
        item_type="managed private endpoint",
        workspace_name=workspace_name,
        action="deleted",
    )
193
+
194
+
195
@log
def list_managed_private_endpoint_fqdns(
    managed_private_endpoint: str | UUID, workspace: Optional[str | UUID] = None
) -> pd.DataFrame:
    """
    Shows a list of fully qualified domain names (FQDNs) associated with the specified managed private endpoint.

    This is a wrapper function for the following API: `Managed Private Endpoints - List FQDNs <https://learn.microsoft.com/rest/api/fabric/core/managed-private-endpoints/list-fqd-ns>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    managed_private_endpoint : str | uuid.UUID
        The managed private endpoint name or ID.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing a list of fully qualified domain names (FQDNs) associated with the specified managed private endpoint.
    """

    workspace_id = resolve_workspace_id(workspace)
    # Accept either an ID (used as-is) or a name (resolved via the list API).
    if _is_valid_uuid(managed_private_endpoint):
        item_id = managed_private_endpoint
    else:
        df = list_managed_private_endpoints(workspace=workspace_id)
        df_filt = df[df["Managed Private Endpoint Name"] == managed_private_endpoint]
        if df_filt.empty:
            raise ValueError(
                f"{icons.red_dot} The '{managed_private_endpoint}' managed private endpoint does not exist within the workspace."
            )
        item_id = df_filt["Managed Private Endpoint Id"].iloc[0]

    # "string" matches the dtype convention of the other column maps in this module.
    columns = {"FQDN": "string"}
    df = _create_dataframe(columns=columns)
    responses = _base_api(
        request=f"/v1/workspaces/{workspace_id}/managedPrivateEndpoints/{item_id}/targetFQDNs",
        uses_pagination=True,
        client="fabric_sp",
    )

    rows = []
    for r in responses:
        # Each paginated response carries plain FQDN strings under "value".
        for v in r.get("value", []):
            rows.append(
                {
                    "FQDN": v,
                }
            )

    if rows:
        df = pd.DataFrame(rows, columns=list(columns.keys()))

    return df
@@ -0,0 +1,416 @@
1
+ import pandas as pd
2
+ from typing import Optional
3
+ from sempy_labs._helper_functions import (
4
+ resolve_workspace_name_and_id,
5
+ _update_dataframe_datatypes,
6
+ _base_api,
7
+ resolve_item_id,
8
+ _create_dataframe,
9
+ delete_item,
10
+ create_item,
11
+ _get_item_definition,
12
+ resolve_workspace_id,
13
+ )
14
+ import sempy_labs._icons as icons
15
+ import base64
16
+ from uuid import UUID
17
+ from sempy._utils._log import log
18
+
19
+
20
@log
def list_mirrored_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
    """
    Shows the mirrored databases within a workspace.

    This is a wrapper function for the following API: `Items - List Mirrored Databases <https://learn.microsoft.com/rest/api/fabric/mirroreddatabase/items/list-mirrored-databases>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the mirrored databases within a workspace.
    """

    columns = {
        "Mirrored Database Name": "string",
        "Mirrored Database Id": "string",
        "Description": "string",
        "OneLake Tables Path": "string",
        "SQL Endpoint Connection String": "string",
        "SQL Endpoint Id": "string",
        "Provisioning Status": "string",
        "Default Schema": "string",
    }
    df = _create_dataframe(columns=columns)

    workspace_id = resolve_workspace_id(workspace)
    responses = _base_api(
        request=f"/v1/workspaces/{workspace_id}/mirroredDatabases",
        uses_pagination=True,
        client="fabric_sp",
    )

    rows = []
    for r in responses:
        for v in r.get("value", []):
            # SQL-endpoint details live in a nested properties object.
            prop = v.get("properties", {})
            sql = prop.get("sqlEndpointProperties", {})
            rows.append(
                {
                    "Mirrored Database Name": v.get("displayName"),
                    "Mirrored Database Id": v.get("id"),
                    "Description": v.get("description"),
                    "OneLake Tables Path": prop.get("oneLakeTablesPath"),
                    "SQL Endpoint Connection String": sql.get("connectionString"),
                    "SQL Endpoint Id": sql.get("id"),
                    "Provisioning Status": sql.get("provisioningStatus"),
                    "Default Schema": prop.get("defaultSchema"),
                }
            )

    if rows:
        df = pd.DataFrame(rows, columns=list(columns.keys()))

    return df
83
+
84
+
85
@log
def create_mirrored_database(
    name: str, description: Optional[str] = None, workspace: Optional[str | UUID] = None
):
    """
    Creates a Fabric mirrored database.

    This is a wrapper function for the following API: `Items - Create Mirrored Database <https://learn.microsoft.com/rest/api/fabric/mirroreddatabase/items/create-mirrored-database>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    name: str
        Name of the mirrored database.
    description : str, default=None
        A description of the mirrored database.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    # Delegate the full create flow (workspace resolution, API call, success
    # message) to the shared item-creation helper.
    create_item(
        name=name,
        description=description,
        type="MirroredDatabase",
        workspace=workspace,
    )
111
+
112
+
113
@log
def delete_mirrored_database(
    mirrored_database: str | UUID, workspace: Optional[str | UUID] = None
):
    """
    Deletes a mirrored database.

    This is a wrapper function for the following API: `Items - Delete Mirrored Database <https://learn.microsoft.com/rest/api/fabric/mirroreddatabase/items/delete-mirrored-database>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    mirrored_database: str | uuid.UUID
        Name or ID of the mirrored database.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    # Delegate deletion (name/ID resolution, API call, success message) to the
    # shared item-deletion helper.
    delete_item(item=mirrored_database, type="MirroredDatabase", workspace=workspace)
135
+
136
+
137
@log
def get_mirroring_status(
    mirrored_database: str | UUID, workspace: Optional[str | UUID] = None
) -> str:
    """
    Get the status of the mirrored database.

    This is a wrapper function for the following API: `Mirroring - Get Mirroring Status <https://learn.microsoft.com/rest/api/fabric/mirroreddatabase/mirroring/get-mirroring-status>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    mirrored_database: str | uuid.UUID
        Name or ID of the mirrored database.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    str
        The status of a mirrored database.
    """

    workspace_id = resolve_workspace_id(workspace)
    item_id = resolve_item_id(
        item=mirrored_database, type="MirroredDatabase", workspace=workspace
    )
    # The getMirroringStatus endpoint is a POST action (like the sibling
    # getTablesMirroringStatus call below), not a GET.
    response = _base_api(
        request=f"/v1/workspaces/{workspace_id}/mirroredDatabases/{item_id}/getMirroringStatus",
        method="post",
        status_codes=200,
        client="fabric_sp",
    )

    # Default to an empty string so the declared str return type holds even if
    # the response omits "status".
    return response.json().get("status", "")
174
+
175
+
176
@log
def get_tables_mirroring_status(
    mirrored_database: str | UUID, workspace: Optional[str | UUID] = None
) -> pd.DataFrame:
    """
    Gets the mirroring status of the tables.

    This is a wrapper function for the following API: `Mirroring - Get Tables Mirroring Status <https://learn.microsoft.com/rest/api/fabric/mirroreddatabase/mirroring/get-tables-mirroring-status>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    mirrored_database: str | uuid.UUID
        Name or ID of the mirrored database.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the mirroring status of the tables.
    """

    workspace_id = resolve_workspace_id(workspace)
    item_id = resolve_item_id(
        item=mirrored_database, type="MirroredDatabase", workspace=workspace
    )
    # POST action endpoint; results may span multiple pages.
    responses = _base_api(
        request=f"/v1/workspaces/{workspace_id}/mirroredDatabases/{item_id}/getTablesMirroringStatus",
        method="post",
        status_codes=200,
        uses_pagination=True,
        client="fabric_sp",
    )

    columns = {
        "Source Schema Name": "string",
        "Source Table Name": "string",
        "Status": "string",
        "Processed Bytes": "int",
        "Processed Rows": "int",
        "Last Sync Date": "datetime",
    }
    df = _create_dataframe(columns=columns)

    # Flatten every page's "data" entries; per-table counters sit under "metrics".
    records = [
        {
            "Source Schema Name": table.get("sourceSchemaName"),
            "Source Table Name": table.get("sourceTableName"),
            "Status": table.get("status"),
            "Processed Bytes": table.get("metrics", {}).get("processedBytes"),
            "Processed Rows": table.get("metrics", {}).get("processedRows"),
            "Last Sync Date": table.get("metrics", {}).get("lastSyncDateTime"),
        }
        for page in responses
        for table in page.get("data", [])
    ]

    if records:
        df = pd.DataFrame(records, columns=list(columns.keys()))
        _update_dataframe_datatypes(dataframe=df, column_map=columns)

    return df
244
+
245
+
246
@log
def start_mirroring(
    mirrored_database: str | UUID, workspace: Optional[str | UUID] = None
):
    """
    Starts mirroring for the specified mirrored database.

    This is a wrapper function for the following API: `Mirroring - Start Mirroring <https://learn.microsoft.com/rest/api/fabric/mirroreddatabase/mirroring/start-mirroring>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    mirrored_database: str | uuid.UUID
        Name or ID of the mirrored database.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    database_id = resolve_item_id(
        item=mirrored_database, type="MirroredDatabase", workspace=workspace
    )

    endpoint = (
        f"/v1/workspaces/{workspace_id}/mirroredDatabases/{database_id}/startMirroring"
    )
    _base_api(
        request=endpoint,
        method="post",
        status_codes=200,
        client="fabric_sp",
    )

    print(
        f"{icons.green_dot} Mirroring has started for the '{mirrored_database}' database within the '{workspace_name}' workspace."
    )
281
+
282
+
283
@log
def stop_mirroring(
    mirrored_database: str | UUID, workspace: Optional[str | UUID] = None
):
    """
    Stops mirroring for the specified mirrored database.

    This is a wrapper function for the following API: `Mirroring - Stop Mirroring <https://learn.microsoft.com/rest/api/fabric/mirroreddatabase/mirroring/stop-mirroring>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    mirrored_database: str | uuid.UUID
        Name or ID of the mirrored database.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    database_id = resolve_item_id(
        item=mirrored_database, type="MirroredDatabase", workspace=workspace
    )

    endpoint = (
        f"/v1/workspaces/{workspace_id}/mirroredDatabases/{database_id}/stopMirroring"
    )
    _base_api(
        request=endpoint,
        method="post",
        status_codes=200,
        client="fabric_sp",
    )

    print(
        f"{icons.green_dot} Mirroring has stopped for the '{mirrored_database}' database within the '{workspace_name}' workspace."
    )
318
+
319
+
320
@log
def get_mirrored_database_definition(
    mirrored_database: str | UUID,
    workspace: Optional[str | UUID] = None,
    decode: bool = True,
) -> dict:
    """
    Retrieves the definition of a mirrored database.

    This is a wrapper function for the following API: `Items - Get Mirrored Database Definition <https://learn.microsoft.com/rest/api/fabric/mirroreddatabase/items/get-mirrored-database-definition>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    mirrored_database : str | uuid.UUID
        The name or ID of the mirrored database.
    workspace : str | uuid.UUID, default=None
        The name or ID of the workspace.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    decode : bool, default=True
        If True, decodes the mirrored database definition file into .json format.
        If False, obtains the mirrored database definition file in base64 format.

    Returns
    -------
    dict
        The mirrored database definition.
    """

    # Delegate to the shared item-definition helper; a dict (not a
    # dataframe) is the expected return shape for this wrapper.
    definition = _get_item_definition(
        type="MirroredDatabase",
        item=mirrored_database,
        workspace=workspace,
        decode=decode,
        return_dataframe=False,
    )
    return definition
358
+
359
+
360
@log
def update_mirrored_database_definition(
    mirrored_database: str | UUID,
    mirrored_database_content: dict,
    workspace: Optional[str | UUID] = None,
):
    """
    Updates an existing mirrored database with a new definition.

    This is a wrapper function for the following API: `Items - Update Mirrored Database Definition <https://learn.microsoft.com/rest/api/fabric/mirroreddatabase/items/update-mirrored-database-definition>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    mirrored_database : str | uuid.UUID
        The name or ID of the mirrored database to be updated.
    mirrored_database_content : dict
        The mirrored database definition (not in Base64 format).
    workspace : str | uuid.UUID, default=None
        The name or ID of the workspace.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """
    import json

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    item_id = resolve_item_id(
        item=mirrored_database, type="MirroredDatabase", workspace=workspace
    )

    # Serialize the definition dict to JSON bytes, then base64-encode.
    # (b64encode requires bytes and returns bytes; the previous
    # `b64encode(dict).encode(...).decode(...)` raised TypeError.)
    payload = base64.b64encode(
        json.dumps(mirrored_database_content).encode("utf-8")
    ).decode("utf-8")

    # NOTE(review): if a UUID is passed for `mirrored_database`, the UUID
    # becomes the display name — confirm callers always pass a name here.
    request_body = {
        "displayName": mirrored_database,
        "definition": {
            # No "format" field: "ipynb" is the notebook definition format
            # and does not apply to mirrored database definitions.
            "parts": [
                {
                    "path": "mirroredDatabase.json",
                    "payload": payload,
                    "payloadType": "InlineBase64",
                }
            ],
        },
    }

    # Long-running operation: status_codes=None + lro_return_status_code
    # lets _base_api poll until the update completes.
    _base_api(
        request=f"/v1/workspaces/{workspace_id}/mirroredDatabases/{item_id}/updateDefinition",
        method="post",
        json=request_body,
        status_codes=None,
        lro_return_status_code=True,
        client="fabric_sp",
    )

    print(
        f"{icons.green_dot} The '{mirrored_database}' mirrored database was updated within the '{workspace_name}' workspace."
    )