semantic-link-labs 0.12.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (243)
  1. semantic_link_labs-0.12.8.dist-info/METADATA +354 -0
  2. semantic_link_labs-0.12.8.dist-info/RECORD +243 -0
  3. semantic_link_labs-0.12.8.dist-info/WHEEL +5 -0
  4. semantic_link_labs-0.12.8.dist-info/licenses/LICENSE +21 -0
  5. semantic_link_labs-0.12.8.dist-info/top_level.txt +1 -0
  6. sempy_labs/__init__.py +606 -0
  7. sempy_labs/_a_lib_info.py +2 -0
  8. sempy_labs/_ai.py +437 -0
  9. sempy_labs/_authentication.py +264 -0
  10. sempy_labs/_bpa_translation/_model/_translations_am-ET.po +869 -0
  11. sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +908 -0
  12. sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +968 -0
  13. sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +963 -0
  14. sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +943 -0
  15. sempy_labs/_bpa_translation/_model/_translations_da-DK.po +945 -0
  16. sempy_labs/_bpa_translation/_model/_translations_de-DE.po +988 -0
  17. sempy_labs/_bpa_translation/_model/_translations_el-GR.po +993 -0
  18. sempy_labs/_bpa_translation/_model/_translations_es-ES.po +971 -0
  19. sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +933 -0
  20. sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +942 -0
  21. sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +994 -0
  22. sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +967 -0
  23. sempy_labs/_bpa_translation/_model/_translations_he-IL.po +902 -0
  24. sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +944 -0
  25. sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +963 -0
  26. sempy_labs/_bpa_translation/_model/_translations_id-ID.po +946 -0
  27. sempy_labs/_bpa_translation/_model/_translations_is-IS.po +939 -0
  28. sempy_labs/_bpa_translation/_model/_translations_it-IT.po +986 -0
  29. sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +846 -0
  30. sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +839 -0
  31. sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +967 -0
  32. sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +978 -0
  33. sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +962 -0
  34. sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +962 -0
  35. sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +957 -0
  36. sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +968 -0
  37. sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +964 -0
  38. sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +952 -0
  39. sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +950 -0
  40. sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +942 -0
  41. sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +976 -0
  42. sempy_labs/_bpa_translation/_model/_translations_te-IN.po +947 -0
  43. sempy_labs/_bpa_translation/_model/_translations_th-TH.po +924 -0
  44. sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +953 -0
  45. sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +961 -0
  46. sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +804 -0
  47. sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +969 -0
  48. sempy_labs/_capacities.py +1198 -0
  49. sempy_labs/_capacity_migration.py +660 -0
  50. sempy_labs/_clear_cache.py +351 -0
  51. sempy_labs/_connections.py +610 -0
  52. sempy_labs/_dashboards.py +69 -0
  53. sempy_labs/_data_access_security.py +98 -0
  54. sempy_labs/_data_pipelines.py +162 -0
  55. sempy_labs/_dataflows.py +668 -0
  56. sempy_labs/_dax.py +501 -0
  57. sempy_labs/_daxformatter.py +80 -0
  58. sempy_labs/_delta_analyzer.py +467 -0
  59. sempy_labs/_delta_analyzer_history.py +301 -0
  60. sempy_labs/_dictionary_diffs.py +221 -0
  61. sempy_labs/_documentation.py +147 -0
  62. sempy_labs/_domains.py +51 -0
  63. sempy_labs/_eventhouses.py +182 -0
  64. sempy_labs/_external_data_shares.py +230 -0
  65. sempy_labs/_gateways.py +521 -0
  66. sempy_labs/_generate_semantic_model.py +521 -0
  67. sempy_labs/_get_connection_string.py +84 -0
  68. sempy_labs/_git.py +543 -0
  69. sempy_labs/_graphQL.py +90 -0
  70. sempy_labs/_helper_functions.py +2833 -0
  71. sempy_labs/_icons.py +149 -0
  72. sempy_labs/_job_scheduler.py +609 -0
  73. sempy_labs/_kql_databases.py +149 -0
  74. sempy_labs/_kql_querysets.py +124 -0
  75. sempy_labs/_kusto.py +137 -0
  76. sempy_labs/_labels.py +124 -0
  77. sempy_labs/_list_functions.py +1720 -0
  78. sempy_labs/_managed_private_endpoints.py +253 -0
  79. sempy_labs/_mirrored_databases.py +416 -0
  80. sempy_labs/_mirrored_warehouses.py +60 -0
  81. sempy_labs/_ml_experiments.py +113 -0
  82. sempy_labs/_model_auto_build.py +140 -0
  83. sempy_labs/_model_bpa.py +557 -0
  84. sempy_labs/_model_bpa_bulk.py +378 -0
  85. sempy_labs/_model_bpa_rules.py +859 -0
  86. sempy_labs/_model_dependencies.py +343 -0
  87. sempy_labs/_mounted_data_factories.py +123 -0
  88. sempy_labs/_notebooks.py +441 -0
  89. sempy_labs/_one_lake_integration.py +151 -0
  90. sempy_labs/_onelake.py +131 -0
  91. sempy_labs/_query_scale_out.py +433 -0
  92. sempy_labs/_refresh_semantic_model.py +435 -0
  93. sempy_labs/_semantic_models.py +468 -0
  94. sempy_labs/_spark.py +455 -0
  95. sempy_labs/_sql.py +241 -0
  96. sempy_labs/_sql_audit_settings.py +207 -0
  97. sempy_labs/_sql_endpoints.py +214 -0
  98. sempy_labs/_tags.py +201 -0
  99. sempy_labs/_translations.py +43 -0
  100. sempy_labs/_user_delegation_key.py +44 -0
  101. sempy_labs/_utils.py +79 -0
  102. sempy_labs/_vertipaq.py +1021 -0
  103. sempy_labs/_vpax.py +388 -0
  104. sempy_labs/_warehouses.py +234 -0
  105. sempy_labs/_workloads.py +140 -0
  106. sempy_labs/_workspace_identity.py +72 -0
  107. sempy_labs/_workspaces.py +595 -0
  108. sempy_labs/admin/__init__.py +170 -0
  109. sempy_labs/admin/_activities.py +167 -0
  110. sempy_labs/admin/_apps.py +145 -0
  111. sempy_labs/admin/_artifacts.py +65 -0
  112. sempy_labs/admin/_basic_functions.py +463 -0
  113. sempy_labs/admin/_capacities.py +508 -0
  114. sempy_labs/admin/_dataflows.py +45 -0
  115. sempy_labs/admin/_datasets.py +186 -0
  116. sempy_labs/admin/_domains.py +522 -0
  117. sempy_labs/admin/_external_data_share.py +100 -0
  118. sempy_labs/admin/_git.py +72 -0
  119. sempy_labs/admin/_items.py +265 -0
  120. sempy_labs/admin/_labels.py +211 -0
  121. sempy_labs/admin/_reports.py +241 -0
  122. sempy_labs/admin/_scanner.py +118 -0
  123. sempy_labs/admin/_shared.py +82 -0
  124. sempy_labs/admin/_sharing_links.py +110 -0
  125. sempy_labs/admin/_tags.py +131 -0
  126. sempy_labs/admin/_tenant.py +503 -0
  127. sempy_labs/admin/_tenant_keys.py +89 -0
  128. sempy_labs/admin/_users.py +140 -0
  129. sempy_labs/admin/_workspaces.py +236 -0
  130. sempy_labs/deployment_pipeline/__init__.py +23 -0
  131. sempy_labs/deployment_pipeline/_items.py +580 -0
  132. sempy_labs/directlake/__init__.py +57 -0
  133. sempy_labs/directlake/_autosync.py +58 -0
  134. sempy_labs/directlake/_directlake_schema_compare.py +120 -0
  135. sempy_labs/directlake/_directlake_schema_sync.py +161 -0
  136. sempy_labs/directlake/_dl_helper.py +274 -0
  137. sempy_labs/directlake/_generate_shared_expression.py +94 -0
  138. sempy_labs/directlake/_get_directlake_lakehouse.py +62 -0
  139. sempy_labs/directlake/_get_shared_expression.py +34 -0
  140. sempy_labs/directlake/_guardrails.py +96 -0
  141. sempy_labs/directlake/_list_directlake_model_calc_tables.py +70 -0
  142. sempy_labs/directlake/_show_unsupported_directlake_objects.py +90 -0
  143. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +239 -0
  144. sempy_labs/directlake/_update_directlake_partition_entity.py +259 -0
  145. sempy_labs/directlake/_warm_cache.py +236 -0
  146. sempy_labs/dotnet_lib/dotnet.runtime.config.json +10 -0
  147. sempy_labs/environment/__init__.py +23 -0
  148. sempy_labs/environment/_items.py +212 -0
  149. sempy_labs/environment/_pubstage.py +223 -0
  150. sempy_labs/eventstream/__init__.py +37 -0
  151. sempy_labs/eventstream/_items.py +263 -0
  152. sempy_labs/eventstream/_topology.py +652 -0
  153. sempy_labs/graph/__init__.py +59 -0
  154. sempy_labs/graph/_groups.py +651 -0
  155. sempy_labs/graph/_sensitivity_labels.py +120 -0
  156. sempy_labs/graph/_teams.py +125 -0
  157. sempy_labs/graph/_user_licenses.py +96 -0
  158. sempy_labs/graph/_users.py +516 -0
  159. sempy_labs/graph_model/__init__.py +15 -0
  160. sempy_labs/graph_model/_background_jobs.py +63 -0
  161. sempy_labs/graph_model/_items.py +149 -0
  162. sempy_labs/lakehouse/__init__.py +67 -0
  163. sempy_labs/lakehouse/_blobs.py +247 -0
  164. sempy_labs/lakehouse/_get_lakehouse_columns.py +102 -0
  165. sempy_labs/lakehouse/_get_lakehouse_tables.py +274 -0
  166. sempy_labs/lakehouse/_helper.py +250 -0
  167. sempy_labs/lakehouse/_lakehouse.py +351 -0
  168. sempy_labs/lakehouse/_livy_sessions.py +143 -0
  169. sempy_labs/lakehouse/_materialized_lake_views.py +157 -0
  170. sempy_labs/lakehouse/_partitioning.py +165 -0
  171. sempy_labs/lakehouse/_schemas.py +217 -0
  172. sempy_labs/lakehouse/_shortcuts.py +440 -0
  173. sempy_labs/migration/__init__.py +35 -0
  174. sempy_labs/migration/_create_pqt_file.py +238 -0
  175. sempy_labs/migration/_direct_lake_to_import.py +105 -0
  176. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +398 -0
  177. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +148 -0
  178. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +533 -0
  179. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +172 -0
  180. sempy_labs/migration/_migration_validation.py +71 -0
  181. sempy_labs/migration/_refresh_calc_tables.py +131 -0
  182. sempy_labs/mirrored_azure_databricks_catalog/__init__.py +15 -0
  183. sempy_labs/mirrored_azure_databricks_catalog/_discover.py +213 -0
  184. sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +45 -0
  185. sempy_labs/ml_model/__init__.py +23 -0
  186. sempy_labs/ml_model/_functions.py +427 -0
  187. sempy_labs/report/_BPAReportTemplate.json +232 -0
  188. sempy_labs/report/__init__.py +55 -0
  189. sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
  190. sempy_labs/report/_bpareporttemplate/.platform +11 -0
  191. sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
  192. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
  193. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
  194. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
  195. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
  196. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
  197. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
  198. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
  199. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
  200. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
  201. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
  202. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
  203. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
  204. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
  205. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
  206. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
  207. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
  208. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
  209. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
  210. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
  211. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
  212. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
  213. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
  214. sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
  215. sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
  216. sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
  217. sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
  218. sempy_labs/report/_download_report.py +76 -0
  219. sempy_labs/report/_export_report.py +257 -0
  220. sempy_labs/report/_generate_report.py +427 -0
  221. sempy_labs/report/_paginated.py +76 -0
  222. sempy_labs/report/_report_bpa.py +354 -0
  223. sempy_labs/report/_report_bpa_rules.py +115 -0
  224. sempy_labs/report/_report_functions.py +581 -0
  225. sempy_labs/report/_report_helper.py +227 -0
  226. sempy_labs/report/_report_list_functions.py +110 -0
  227. sempy_labs/report/_report_rebind.py +149 -0
  228. sempy_labs/report/_reportwrapper.py +3100 -0
  229. sempy_labs/report/_save_report.py +147 -0
  230. sempy_labs/snowflake_database/__init__.py +10 -0
  231. sempy_labs/snowflake_database/_items.py +105 -0
  232. sempy_labs/sql_database/__init__.py +21 -0
  233. sempy_labs/sql_database/_items.py +201 -0
  234. sempy_labs/sql_database/_mirroring.py +79 -0
  235. sempy_labs/theme/__init__.py +12 -0
  236. sempy_labs/theme/_org_themes.py +129 -0
  237. sempy_labs/tom/__init__.py +3 -0
  238. sempy_labs/tom/_model.py +5977 -0
  239. sempy_labs/variable_library/__init__.py +19 -0
  240. sempy_labs/variable_library/_functions.py +403 -0
  241. sempy_labs/warehouse/__init__.py +28 -0
  242. sempy_labs/warehouse/_items.py +234 -0
  243. sempy_labs/warehouse/_restore_points.py +309 -0
@@ -0,0 +1,212 @@
1
+ import pandas as pd
2
+ import sempy_labs._icons as icons
3
+ from typing import Optional
4
+ from sempy_labs._helper_functions import (
5
+ resolve_workspace_name_and_id,
6
+ resolve_workspace_id,
7
+ _base_api,
8
+ _create_dataframe,
9
+ resolve_item_id,
10
+ delete_item,
11
+ create_item,
12
+ )
13
+ from uuid import UUID
14
+ from sempy._utils._log import log
15
+
16
+
17
@log
def create_environment(
    environment: str,
    description: Optional[str] = None,
    workspace: Optional[str | UUID] = None,
):
    """
    Creates a Fabric environment.

    This is a wrapper function for the following API: `Items - Create Environment <https://learn.microsoft.com/rest/api/fabric/environment/items/create-environment>`_.

    Parameters
    ----------
    environment: str
        Name of the environment.
    description : str, default=None
        A description of the environment.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    # Delegate item creation to the generic helper; it handles workspace
    # resolution and the underlying REST call.
    create_item(
        name=environment,
        type="Environment",
        description=description,
        workspace=workspace,
    )
46
+
47
+
48
@log
def list_environments(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
    """
    Shows the environments within a workspace.

    This is a wrapper function for the following API: `Items - List Environments <https://learn.microsoft.com/rest/api/fabric/environment/items/list-environments>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the environments within a workspace.
    """

    columns = {
        "Environment Name": "string",
        "Environment Id": "string",
        "Description": "string",
        "Publish State": "string",
        "Publish Target Version": "string",
        "Publish Start Time": "string",
        "Publish End Time": "string",
        "Spark Libraries State": "string",
        "Spark Settings State": "string",
    }
    df = _create_dataframe(columns=columns)

    workspace_id = resolve_workspace_id(workspace)

    # The endpoint is paginated; one response object is returned per page.
    pages = _base_api(
        request=f"/v1/workspaces/{workspace_id}/environments",
        client="fabric_sp",
        uses_pagination=True,
    )

    records = []
    for page in pages:
        for env in page.get("value", []):
            publish = env.get("properties", {}).get("publishDetails", {})
            component = publish.get("componentPublishInfo", {})
            records.append(
                {
                    "Environment Name": env.get("displayName"),
                    "Environment Id": env.get("id"),
                    "Description": env.get("description"),
                    "Publish State": publish.get("state"),
                    "Publish Target Version": publish.get("targetVersion"),
                    "Publish Start Time": publish.get("startTime"),
                    "Publish End Time": publish.get("endTime"),
                    "Spark Libraries State": component.get(
                        "sparkLibraries", {}
                    ).get("state"),
                    "Spark Settings State": component.get(
                        "sparkSettings", {}
                    ).get("state"),
                }
            )

    if records:
        df = pd.DataFrame(records, columns=list(columns.keys()))

    return df
117
+
118
+
119
@log
def delete_environment(environment: str | UUID, workspace: Optional[str | UUID] = None):
    """
    Deletes a Fabric environment.

    This is a wrapper function for the following API: `Items - Delete Environment <https://learn.microsoft.com/rest/api/fabric/environment/items/delete-environment>`_.

    Parameters
    ----------
    environment: str | uuid.UUID
        Name or ID of the environment.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    # The generic helper resolves the environment (by name or ID) and the
    # workspace before issuing the delete request.
    delete_item(
        item=environment,
        type="Environment",
        workspace=workspace,
    )
137
+
138
+
139
@log
def publish_environment(
    environment: str | UUID, workspace: Optional[str | UUID] = None
):
    """
    Publishes a Fabric environment.

    This is a wrapper function for the following API: `Spark Libraries - Publish Environment <https://learn.microsoft.com/rest/api/fabric/environment/spark-libraries/publish-environment>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    environment: str | uuid.UUID
        Name or ID of the environment.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    workspace_name, workspace_id = resolve_workspace_name_and_id(workspace)
    item_id = resolve_item_id(
        item=environment, type="Environment", workspace=workspace_id
    )

    # Publishing is a long-running operation; _base_api handles the LRO
    # polling (lro_return_status_code=True, status_codes=None).
    _base_api(
        request=f"/v1/workspaces/{workspace_id}/environments/{item_id}/staging/publish",
        method="post",
        client="fabric_sp",
        lro_return_status_code=True,
        status_codes=None,
    )

    print(
        f"{icons.green_dot} The '{environment}' environment within the '{workspace_name}' workspace has been published."
    )
176
+
177
+
178
@log
def cancel_publish_environment(
    environment: str | UUID, workspace: Optional[str | UUID] = None
):
    """
    Trigger an environment publish cancellation.

    This is a wrapper function for the following API: `Items - Cancel Publish Environment <https://learn.microsoft.com/rest/api/fabric/environment/items/cancel-publish-environment>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    environment: str | uuid.UUID
        Name or ID of the environment.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    workspace_name, workspace_id = resolve_workspace_name_and_id(workspace)
    item_id = resolve_item_id(
        item=environment, type="Environment", workspace=workspace_id
    )

    # Fire the cancellation request against the staging publish endpoint.
    _base_api(
        request=f"/v1/workspaces/{workspace_id}/environments/{item_id}/staging/cancelPublish",
        method="post",
        client="fabric_sp",
    )

    print(
        f"{icons.green_dot} The publish of the '{environment}' environment within the '{workspace_name}' workspace has been cancelled."
    )
@@ -0,0 +1,223 @@
1
+ import pandas as pd
2
+ from typing import Optional
3
+ from sempy_labs._helper_functions import (
4
+ resolve_workspace_id,
5
+ _base_api,
6
+ _create_dataframe,
7
+ resolve_item_id,
8
+ _update_dataframe_datatypes,
9
+ )
10
+ from uuid import UUID
11
+ from sempy._utils._log import log
12
+
13
+
14
+ def _get_spark_compute(
15
+ environment: str | UUID,
16
+ workspace: Optional[str | UUID] = None,
17
+ staging: bool = False,
18
+ ) -> pd.DataFrame:
19
+
20
+ columns = {
21
+ "Instance Pool Name": "string",
22
+ "Instance Pool Type": "string",
23
+ "Instance Pool Id": "string",
24
+ "Driver Cores": "int",
25
+ "Driver Memory": "string",
26
+ "Executor Cores": "int",
27
+ "Executor Memory": "string",
28
+ "Dynamic Executor Allocation Enabled": "bool",
29
+ "Dynamic Executor Allocation Min Executors": "int",
30
+ "Dynamic Executor Allocation Max Executors": "int",
31
+ "Spark Properties": "string",
32
+ "Runtime Version": "string",
33
+ }
34
+
35
+ df = _create_dataframe(columns=columns)
36
+
37
+ workspace_id = resolve_workspace_id(workspace)
38
+ item_id = resolve_item_id(
39
+ item=environment, type="Environment", workspace=workspace_id
40
+ )
41
+
42
+ url = f"/v1/workspaces/{workspace_id}/environments/{item_id}/sparkCompute"
43
+ if staging:
44
+ url = (
45
+ f"/v1/workspaces/{workspace_id}/environments/{item_id}/staging/sparkCompute"
46
+ )
47
+
48
+ response = _base_api(
49
+ request=url,
50
+ client="fabric_sp",
51
+ )
52
+
53
+ rows = []
54
+ rows.append(
55
+ {
56
+ "Instance Pool Name": response.get("instancePool", {}).get("name"),
57
+ "Instance Pool Type": response.get("instancePool", {}).get("type"),
58
+ "Instance Pool Id": response.get("instancePool", {}).get("id"),
59
+ "Driver Cores": response.get("driverCores"),
60
+ "Driver Memory": response.get("driverMemory"),
61
+ "Executor Cores": response.get("executorCores"),
62
+ "Executor Memory": response.get("executorMemory"),
63
+ "Dynamic Executor Allocation Enabled": response.get(
64
+ "dynamicExecutorAllocation", {}
65
+ ).get("enabled"),
66
+ "Dynamic Executor Allocation Min Executors": response.get(
67
+ "dynamicExecutorAllocation", {}
68
+ ).get("minExecutors"),
69
+ "Dynamic Executor Allocation Max Executors": response.get(
70
+ "dynamicExecutorAllocation", {}
71
+ ).get("maxExecutors"),
72
+ "Spark Properties": response.get("sparkProperties"),
73
+ "Runtime Version": response.get("runtimeVersion"),
74
+ }
75
+ )
76
+
77
+ if rows:
78
+ df = pd.DataFrame(rows, columns=list(columns.keys()))
79
+ _update_dataframe_datatypes(df, columns)
80
+
81
+ return df
82
+
83
+
84
+ @log
85
+ def get_published_spark_compute(
86
+ environment: str | UUID, workspace: Optional[str | UUID] = None
87
+ ) -> pd.DataFrame:
88
+ """
89
+ Gets the Spark compute of a published Fabric environment.
90
+
91
+ This is a wrapper function for the following API: `Published - Get Spark Compute <https://learn.microsoft.com/rest/api/fabric/environment/published/get-spark-compute>`_.
92
+
93
+ Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
94
+
95
+ Parameters
96
+ ----------
97
+ environment: str | uuid.UUID
98
+ Name or ID of the environment.
99
+ workspace : str | uuid.UUID, default=None
100
+ The Fabric workspace name or ID.
101
+ Defaults to None which resolves to the workspace of the attached lakehouse
102
+ or if no lakehouse attached, resolves to the workspace of the notebook.
103
+ """
104
+
105
+ return _get_spark_compute(environment, workspace, staging=False)
106
+
107
+
108
+ @log
109
+ def get_staging_spark_compute(
110
+ environment: str | UUID, workspace: Optional[str | UUID] = None
111
+ ) -> pd.DataFrame:
112
+ """
113
+ Gets the Spark compute of a staging Fabric environment.
114
+
115
+ This is a wrapper function for the following API: `Staging - Get Spark Compute <https://learn.microsoft.com/rest/api/fabric/environment/staging/get-spark-compute>`_.
116
+
117
+ Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
118
+
119
+ Parameters
120
+ ----------
121
+ environment: str | uuid.UUID
122
+ Name or ID of the environment.
123
+ workspace : str | uuid.UUID, default=None
124
+ The Fabric workspace name or ID.
125
+ Defaults to None which resolves to the workspace of the attached lakehouse
126
+ or if no lakehouse attached, resolves to the workspace of the notebook.
127
+ """
128
+
129
+ return _get_spark_compute(environment, workspace, staging=True)
130
+
131
+
132
+ def _list_libraries(
133
+ environment: str | UUID,
134
+ workspace: Optional[str | UUID] = None,
135
+ staging: bool = False,
136
+ ) -> pd.DataFrame:
137
+
138
+ columns = {
139
+ "Library Name": "string",
140
+ "Library Type": "string",
141
+ "Library Version": "string",
142
+ }
143
+
144
+ df = _create_dataframe(columns=columns)
145
+
146
+ workspace_id = resolve_workspace_id(workspace)
147
+ item_id = resolve_item_id(
148
+ item=environment, type="Environment", workspace=workspace_id
149
+ )
150
+
151
+ url = f"/v1/workspaces/{workspace_id}/environments/{item_id}/libraries"
152
+ if staging:
153
+ url = f"/v1/workspaces/{workspace_id}/environments/{item_id}/staging/libraries"
154
+
155
+ responses = _base_api(
156
+ request=url,
157
+ client="fabric_sp",
158
+ uses_pagination=True,
159
+ )
160
+
161
+ rows = []
162
+ for r in responses:
163
+ for lib in r.get("libraries", []):
164
+ rows.append(
165
+ {
166
+ "Library Name": lib.get("name"),
167
+ "Library Type": lib.get("libraryType"),
168
+ "Library Version": lib.get("version"),
169
+ }
170
+ )
171
+
172
+ if rows:
173
+ df = pd.DataFrame(rows, columns=list(columns.keys()))
174
+
175
+ return df
176
+
177
+
178
+ @log
179
+ def list_published_libraries(
180
+ environment: str | UUID, workspace: Optional[str | UUID] = None
181
+ ) -> pd.DataFrame:
182
+ """
183
+ Gets the published libraries of a Fabric environment.
184
+
185
+ This is a wrapper function for the following API: `Published - List Libraries <https://learn.microsoft.com/rest/api/fabric/environment/published/list-libraries>`_.
186
+
187
+ Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
188
+
189
+ Parameters
190
+ ----------
191
+ environment: str | uuid.UUID
192
+ Name or ID of the environment.
193
+ workspace : str | uuid.UUID, default=None
194
+ The Fabric workspace name or ID.
195
+ Defaults to None which resolves to the workspace of the attached lakehouse
196
+ or if no lakehouse attached, resolves to the workspace of the notebook.
197
+ """
198
+
199
+ return _list_libraries(environment, workspace, staging=False)
200
+
201
+
202
+ @log
203
+ def list_staging_libraries(
204
+ environment: str | UUID, workspace: Optional[str | UUID] = None
205
+ ) -> pd.DataFrame:
206
+ """
207
+ Gets the staging libraries of a Fabric environment.
208
+
209
+ This is a wrapper function for the following API: `Staging - List Libraries <https://learn.microsoft.com/rest/api/fabric/environment/staging/list-libraries>`_.
210
+
211
+ Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
212
+
213
+ Parameters
214
+ ----------
215
+ environment: str | uuid.UUID
216
+ Name or ID of the environment.
217
+ workspace : str | uuid.UUID, default=None
218
+ The Fabric workspace name or ID.
219
+ Defaults to None which resolves to the workspace of the attached lakehouse
220
+ or if no lakehouse attached, resolves to the workspace of the notebook.
221
+ """
222
+
223
+ return _list_libraries(environment, workspace, staging=True)
@@ -0,0 +1,37 @@
1
+ from ._items import (
2
+ list_eventstreams,
3
+ create_eventstream,
4
+ delete_eventstream,
5
+ get_eventstream_definition,
6
+ )
7
+ from ._topology import (
8
+ get_eventstream_destination,
9
+ get_eventstream_destination_connection,
10
+ get_eventstream_source,
11
+ get_eventstream_source_connection,
12
+ get_eventstream_topology,
13
+ pause_eventstream,
14
+ pause_eventstream_destination,
15
+ pause_eventstream_source,
16
+ resume_eventstream,
17
+ resume_eventstream_destination,
18
+ resume_eventstream_source,
19
+ )
20
+
21
+ __all__ = [
22
+ "list_eventstreams",
23
+ "create_eventstream",
24
+ "delete_eventstream",
25
+ "get_eventstream_definition",
26
+ "get_eventstream_destination",
27
+ "get_eventstream_destination_connection",
28
+ "get_eventstream_source",
29
+ "get_eventstream_source_connection",
30
+ "get_eventstream_topology",
31
+ "pause_eventstream",
32
+ "pause_eventstream_destination",
33
+ "pause_eventstream_source",
34
+ "resume_eventstream",
35
+ "resume_eventstream_destination",
36
+ "resume_eventstream_source",
37
+ ]