semantic-link-labs 0.12.8 (semantic_link_labs-0.12.8-py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (243)
  1. semantic_link_labs-0.12.8.dist-info/METADATA +354 -0
  2. semantic_link_labs-0.12.8.dist-info/RECORD +243 -0
  3. semantic_link_labs-0.12.8.dist-info/WHEEL +5 -0
  4. semantic_link_labs-0.12.8.dist-info/licenses/LICENSE +21 -0
  5. semantic_link_labs-0.12.8.dist-info/top_level.txt +1 -0
  6. sempy_labs/__init__.py +606 -0
  7. sempy_labs/_a_lib_info.py +2 -0
  8. sempy_labs/_ai.py +437 -0
  9. sempy_labs/_authentication.py +264 -0
  10. sempy_labs/_bpa_translation/_model/_translations_am-ET.po +869 -0
  11. sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +908 -0
  12. sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +968 -0
  13. sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +963 -0
  14. sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +943 -0
  15. sempy_labs/_bpa_translation/_model/_translations_da-DK.po +945 -0
  16. sempy_labs/_bpa_translation/_model/_translations_de-DE.po +988 -0
  17. sempy_labs/_bpa_translation/_model/_translations_el-GR.po +993 -0
  18. sempy_labs/_bpa_translation/_model/_translations_es-ES.po +971 -0
  19. sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +933 -0
  20. sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +942 -0
  21. sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +994 -0
  22. sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +967 -0
  23. sempy_labs/_bpa_translation/_model/_translations_he-IL.po +902 -0
  24. sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +944 -0
  25. sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +963 -0
  26. sempy_labs/_bpa_translation/_model/_translations_id-ID.po +946 -0
  27. sempy_labs/_bpa_translation/_model/_translations_is-IS.po +939 -0
  28. sempy_labs/_bpa_translation/_model/_translations_it-IT.po +986 -0
  29. sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +846 -0
  30. sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +839 -0
  31. sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +967 -0
  32. sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +978 -0
  33. sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +962 -0
  34. sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +962 -0
  35. sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +957 -0
  36. sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +968 -0
  37. sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +964 -0
  38. sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +952 -0
  39. sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +950 -0
  40. sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +942 -0
  41. sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +976 -0
  42. sempy_labs/_bpa_translation/_model/_translations_te-IN.po +947 -0
  43. sempy_labs/_bpa_translation/_model/_translations_th-TH.po +924 -0
  44. sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +953 -0
  45. sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +961 -0
  46. sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +804 -0
  47. sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +969 -0
  48. sempy_labs/_capacities.py +1198 -0
  49. sempy_labs/_capacity_migration.py +660 -0
  50. sempy_labs/_clear_cache.py +351 -0
  51. sempy_labs/_connections.py +610 -0
  52. sempy_labs/_dashboards.py +69 -0
  53. sempy_labs/_data_access_security.py +98 -0
  54. sempy_labs/_data_pipelines.py +162 -0
  55. sempy_labs/_dataflows.py +668 -0
  56. sempy_labs/_dax.py +501 -0
  57. sempy_labs/_daxformatter.py +80 -0
  58. sempy_labs/_delta_analyzer.py +467 -0
  59. sempy_labs/_delta_analyzer_history.py +301 -0
  60. sempy_labs/_dictionary_diffs.py +221 -0
  61. sempy_labs/_documentation.py +147 -0
  62. sempy_labs/_domains.py +51 -0
  63. sempy_labs/_eventhouses.py +182 -0
  64. sempy_labs/_external_data_shares.py +230 -0
  65. sempy_labs/_gateways.py +521 -0
  66. sempy_labs/_generate_semantic_model.py +521 -0
  67. sempy_labs/_get_connection_string.py +84 -0
  68. sempy_labs/_git.py +543 -0
  69. sempy_labs/_graphQL.py +90 -0
  70. sempy_labs/_helper_functions.py +2833 -0
  71. sempy_labs/_icons.py +149 -0
  72. sempy_labs/_job_scheduler.py +609 -0
  73. sempy_labs/_kql_databases.py +149 -0
  74. sempy_labs/_kql_querysets.py +124 -0
  75. sempy_labs/_kusto.py +137 -0
  76. sempy_labs/_labels.py +124 -0
  77. sempy_labs/_list_functions.py +1720 -0
  78. sempy_labs/_managed_private_endpoints.py +253 -0
  79. sempy_labs/_mirrored_databases.py +416 -0
  80. sempy_labs/_mirrored_warehouses.py +60 -0
  81. sempy_labs/_ml_experiments.py +113 -0
  82. sempy_labs/_model_auto_build.py +140 -0
  83. sempy_labs/_model_bpa.py +557 -0
  84. sempy_labs/_model_bpa_bulk.py +378 -0
  85. sempy_labs/_model_bpa_rules.py +859 -0
  86. sempy_labs/_model_dependencies.py +343 -0
  87. sempy_labs/_mounted_data_factories.py +123 -0
  88. sempy_labs/_notebooks.py +441 -0
  89. sempy_labs/_one_lake_integration.py +151 -0
  90. sempy_labs/_onelake.py +131 -0
  91. sempy_labs/_query_scale_out.py +433 -0
  92. sempy_labs/_refresh_semantic_model.py +435 -0
  93. sempy_labs/_semantic_models.py +468 -0
  94. sempy_labs/_spark.py +455 -0
  95. sempy_labs/_sql.py +241 -0
  96. sempy_labs/_sql_audit_settings.py +207 -0
  97. sempy_labs/_sql_endpoints.py +214 -0
  98. sempy_labs/_tags.py +201 -0
  99. sempy_labs/_translations.py +43 -0
  100. sempy_labs/_user_delegation_key.py +44 -0
  101. sempy_labs/_utils.py +79 -0
  102. sempy_labs/_vertipaq.py +1021 -0
  103. sempy_labs/_vpax.py +388 -0
  104. sempy_labs/_warehouses.py +234 -0
  105. sempy_labs/_workloads.py +140 -0
  106. sempy_labs/_workspace_identity.py +72 -0
  107. sempy_labs/_workspaces.py +595 -0
  108. sempy_labs/admin/__init__.py +170 -0
  109. sempy_labs/admin/_activities.py +167 -0
  110. sempy_labs/admin/_apps.py +145 -0
  111. sempy_labs/admin/_artifacts.py +65 -0
  112. sempy_labs/admin/_basic_functions.py +463 -0
  113. sempy_labs/admin/_capacities.py +508 -0
  114. sempy_labs/admin/_dataflows.py +45 -0
  115. sempy_labs/admin/_datasets.py +186 -0
  116. sempy_labs/admin/_domains.py +522 -0
  117. sempy_labs/admin/_external_data_share.py +100 -0
  118. sempy_labs/admin/_git.py +72 -0
  119. sempy_labs/admin/_items.py +265 -0
  120. sempy_labs/admin/_labels.py +211 -0
  121. sempy_labs/admin/_reports.py +241 -0
  122. sempy_labs/admin/_scanner.py +118 -0
  123. sempy_labs/admin/_shared.py +82 -0
  124. sempy_labs/admin/_sharing_links.py +110 -0
  125. sempy_labs/admin/_tags.py +131 -0
  126. sempy_labs/admin/_tenant.py +503 -0
  127. sempy_labs/admin/_tenant_keys.py +89 -0
  128. sempy_labs/admin/_users.py +140 -0
  129. sempy_labs/admin/_workspaces.py +236 -0
  130. sempy_labs/deployment_pipeline/__init__.py +23 -0
  131. sempy_labs/deployment_pipeline/_items.py +580 -0
  132. sempy_labs/directlake/__init__.py +57 -0
  133. sempy_labs/directlake/_autosync.py +58 -0
  134. sempy_labs/directlake/_directlake_schema_compare.py +120 -0
  135. sempy_labs/directlake/_directlake_schema_sync.py +161 -0
  136. sempy_labs/directlake/_dl_helper.py +274 -0
  137. sempy_labs/directlake/_generate_shared_expression.py +94 -0
  138. sempy_labs/directlake/_get_directlake_lakehouse.py +62 -0
  139. sempy_labs/directlake/_get_shared_expression.py +34 -0
  140. sempy_labs/directlake/_guardrails.py +96 -0
  141. sempy_labs/directlake/_list_directlake_model_calc_tables.py +70 -0
  142. sempy_labs/directlake/_show_unsupported_directlake_objects.py +90 -0
  143. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +239 -0
  144. sempy_labs/directlake/_update_directlake_partition_entity.py +259 -0
  145. sempy_labs/directlake/_warm_cache.py +236 -0
  146. sempy_labs/dotnet_lib/dotnet.runtime.config.json +10 -0
  147. sempy_labs/environment/__init__.py +23 -0
  148. sempy_labs/environment/_items.py +212 -0
  149. sempy_labs/environment/_pubstage.py +223 -0
  150. sempy_labs/eventstream/__init__.py +37 -0
  151. sempy_labs/eventstream/_items.py +263 -0
  152. sempy_labs/eventstream/_topology.py +652 -0
  153. sempy_labs/graph/__init__.py +59 -0
  154. sempy_labs/graph/_groups.py +651 -0
  155. sempy_labs/graph/_sensitivity_labels.py +120 -0
  156. sempy_labs/graph/_teams.py +125 -0
  157. sempy_labs/graph/_user_licenses.py +96 -0
  158. sempy_labs/graph/_users.py +516 -0
  159. sempy_labs/graph_model/__init__.py +15 -0
  160. sempy_labs/graph_model/_background_jobs.py +63 -0
  161. sempy_labs/graph_model/_items.py +149 -0
  162. sempy_labs/lakehouse/__init__.py +67 -0
  163. sempy_labs/lakehouse/_blobs.py +247 -0
  164. sempy_labs/lakehouse/_get_lakehouse_columns.py +102 -0
  165. sempy_labs/lakehouse/_get_lakehouse_tables.py +274 -0
  166. sempy_labs/lakehouse/_helper.py +250 -0
  167. sempy_labs/lakehouse/_lakehouse.py +351 -0
  168. sempy_labs/lakehouse/_livy_sessions.py +143 -0
  169. sempy_labs/lakehouse/_materialized_lake_views.py +157 -0
  170. sempy_labs/lakehouse/_partitioning.py +165 -0
  171. sempy_labs/lakehouse/_schemas.py +217 -0
  172. sempy_labs/lakehouse/_shortcuts.py +440 -0
  173. sempy_labs/migration/__init__.py +35 -0
  174. sempy_labs/migration/_create_pqt_file.py +238 -0
  175. sempy_labs/migration/_direct_lake_to_import.py +105 -0
  176. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +398 -0
  177. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +148 -0
  178. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +533 -0
  179. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +172 -0
  180. sempy_labs/migration/_migration_validation.py +71 -0
  181. sempy_labs/migration/_refresh_calc_tables.py +131 -0
  182. sempy_labs/mirrored_azure_databricks_catalog/__init__.py +15 -0
  183. sempy_labs/mirrored_azure_databricks_catalog/_discover.py +213 -0
  184. sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +45 -0
  185. sempy_labs/ml_model/__init__.py +23 -0
  186. sempy_labs/ml_model/_functions.py +427 -0
  187. sempy_labs/report/_BPAReportTemplate.json +232 -0
  188. sempy_labs/report/__init__.py +55 -0
  189. sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
  190. sempy_labs/report/_bpareporttemplate/.platform +11 -0
  191. sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
  192. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
  193. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
  194. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
  195. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
  196. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
  197. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
  198. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
  199. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
  200. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
  201. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
  202. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
  203. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
  204. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
  205. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
  206. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
  207. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
  208. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
  209. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
  210. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
  211. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
  212. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
  213. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
  214. sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
  215. sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
  216. sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
  217. sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
  218. sempy_labs/report/_download_report.py +76 -0
  219. sempy_labs/report/_export_report.py +257 -0
  220. sempy_labs/report/_generate_report.py +427 -0
  221. sempy_labs/report/_paginated.py +76 -0
  222. sempy_labs/report/_report_bpa.py +354 -0
  223. sempy_labs/report/_report_bpa_rules.py +115 -0
  224. sempy_labs/report/_report_functions.py +581 -0
  225. sempy_labs/report/_report_helper.py +227 -0
  226. sempy_labs/report/_report_list_functions.py +110 -0
  227. sempy_labs/report/_report_rebind.py +149 -0
  228. sempy_labs/report/_reportwrapper.py +3100 -0
  229. sempy_labs/report/_save_report.py +147 -0
  230. sempy_labs/snowflake_database/__init__.py +10 -0
  231. sempy_labs/snowflake_database/_items.py +105 -0
  232. sempy_labs/sql_database/__init__.py +21 -0
  233. sempy_labs/sql_database/_items.py +201 -0
  234. sempy_labs/sql_database/_mirroring.py +79 -0
  235. sempy_labs/theme/__init__.py +12 -0
  236. sempy_labs/theme/_org_themes.py +129 -0
  237. sempy_labs/tom/__init__.py +3 -0
  238. sempy_labs/tom/_model.py +5977 -0
  239. sempy_labs/variable_library/__init__.py +19 -0
  240. sempy_labs/variable_library/_functions.py +403 -0
  241. sempy_labs/warehouse/__init__.py +28 -0
  242. sempy_labs/warehouse/_items.py +234 -0
  243. sempy_labs/warehouse/_restore_points.py +309 -0
sempy_labs/_notebooks.py
@@ -0,0 +1,441 @@
+ import sempy.fabric as fabric
+ import pandas as pd
+ import sempy_labs._icons as icons
+ from typing import Optional, List
+ import base64
+ import requests
+ from sempy._utils._log import log
+ from sempy_labs._helper_functions import (
+     resolve_workspace_name_and_id,
+     resolve_workspace_id,
+     _decode_b64,
+     _base_api,
+     resolve_item_id,
+     create_item,
+     _create_dataframe,
+ )
+ from sempy.fabric.exceptions import FabricHTTPException
+ from os import PathLike
+ from uuid import UUID
+ import os
+
+ _notebook_prefix = "notebook-content."
+
+
+ def _get_notebook_definition_base(
+     notebook_name: str,
+     workspace: Optional[str | UUID] = None,
+     format: Optional[str] = None,
+ ) -> pd.DataFrame:
+
+     workspace_id = resolve_workspace_id(workspace)
+     item_id = resolve_item_id(item=notebook_name, type="Notebook", workspace=workspace)
+
+     url = f"v1/workspaces/{workspace_id}/notebooks/{item_id}/getDefinition"
+     if format == "ipynb":
+         url += f"?format={format}"
+
+     result = _base_api(
+         request=url,
+         method="post",
+         lro_return_json=True,
+         status_codes=None,
+     )
+
+     return pd.json_normalize(result["definition"]["parts"])
+
+
+ def _get_notebook_type(
+     notebook_name: str, workspace: Optional[str | UUID] = None
+ ) -> str:
+
+     df_items = _get_notebook_definition_base(
+         notebook_name=notebook_name, workspace=workspace
+     )
+
+     file_path = df_items[df_items["path"].str.startswith(_notebook_prefix)][
+         "path"
+     ].iloc[0]
+
+     _, file_extension = os.path.splitext(file_path)
+
+     return file_extension[1:]
+
+
+ @log
+ def get_notebook_definition(
+     notebook_name: str,
+     workspace: Optional[str | UUID] = None,
+     decode: bool = True,
+     format: Optional[str] = None,
+ ) -> str:
+     """
+     Obtains the notebook definition.
+
+     This is a wrapper function for the following API: `Items - Get Notebook Definition <https://learn.microsoft.com/rest/api/fabric/notebook/items/get-notebook-definition>`_.
+
+     Parameters
+     ----------
+     notebook_name : str
+         The name of the notebook.
+     workspace : str | uuid.UUID, default=None
+         The name or ID of the workspace.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     decode : bool, default=True
+         If True, decodes the notebook definition file into .ipynb format.
+         If False, obtains the notebook definition file in base64 format.
+     format : str, default=None
+         The only supported value is 'ipynb'.
+         If provided, the definition is returned in standard .ipynb format; otherwise it is returned in the Git-friendly source code format.
+
+     Returns
+     -------
+     str
+         The notebook definition.
+     """
+
+     df_items = _get_notebook_definition_base(
+         notebook_name=notebook_name, workspace=workspace, format=format
+     )
+     df_items_filt = df_items[df_items["path"].str.startswith(_notebook_prefix)]
+     payload = df_items_filt["payload"].iloc[0]
+
+     if decode:
+         result = _decode_b64(payload)
+     else:
+         result = payload
+
+     return result
+
+
+ @log
+ def import_notebook_from_web(
+     notebook_name: str,
+     url: str,
+     description: Optional[str] = None,
+     workspace: Optional[str | UUID] = None,
+     overwrite: bool = False,
+     folder: Optional[str | PathLike] = None,
+ ):
+     """
+     Creates a new notebook within a workspace based on a Jupyter notebook hosted in the web.
+
+     Note: When specifying a notebook hosted on GitHub, use the raw file URL. If a non-raw GitHub URL is provided, it will be
+     converted to the raw URL, since the raw URL is needed to obtain the notebook content.
+
+     Parameters
+     ----------
+     notebook_name : str
+         The name of the notebook to be created.
+     url : str
+         The URL of the Jupyter notebook (.ipynb file).
+     description : str, default=None
+         The description of the notebook.
+         Defaults to None which does not place a description.
+     workspace : str | uuid.UUID, default=None
+         The name or ID of the workspace.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     overwrite : bool, default=False
+         If set to True, overwrites the existing notebook in the workspace if it exists.
+     folder : str | os.PathLike, default=None
+         The folder within the workspace where the notebook will be created.
+         Defaults to None which places the notebook in the root of the workspace.
+     """
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+
+     # Fix links to go to the raw github file
+     starting_text = "https://github.com/"
+     starting_text_len = len(starting_text)
+     if url.startswith(starting_text):
+         url = f"https://raw.githubusercontent.com/{url[starting_text_len:]}".replace(
+             "/blob/", "/"
+         )
+
+     response = requests.get(url)
+     if response.status_code != 200:
+         raise FabricHTTPException(response)
+
+     dfI = fabric.list_items(workspace=workspace, type="Notebook")
+     dfI_filt = dfI[dfI["Display Name"] == notebook_name]
+     if len(dfI_filt) == 0:
+         create_notebook(
+             name=notebook_name,
+             notebook_content=response.content.decode("utf-8"),
+             workspace=workspace_id,
+             description=description,
+             format="ipynb",
+             folder=folder,
+         )
+     elif len(dfI_filt) > 0 and overwrite:
+         print(f"{icons.info} Overwrite of notebooks is currently not supported.")
+         # update_notebook_definition(
+         #     name=notebook_name, notebook_content=response.content, workspace=workspace
+         # )
+     else:
+         raise ValueError(
+             f"{icons.red_dot} The '{notebook_name}' notebook already exists within the '{workspace_name}' workspace and 'overwrite' is set to False."
+         )
+
+
+ @log
+ def create_notebook(
+     name: str,
+     notebook_content: str,
+     type: str = "py",
+     description: Optional[str] = None,
+     workspace: Optional[str | UUID] = None,
+     format: Optional[str] = None,
+     folder: Optional[str | PathLike] = None,
+ ):
+     """
+     Creates a new notebook with a definition within a workspace.
+
+     Parameters
+     ----------
+     name : str
+         The name of the notebook to be created.
+     notebook_content : str
+         The Jupyter notebook content (not in Base64 format).
+     type : str, default="py"
+         The notebook type.
+     description : str, default=None
+         The description of the notebook.
+         Defaults to None which does not place a description.
+     workspace : str | uuid.UUID, default=None
+         The name or ID of the workspace.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     format : str, default=None
+         If 'ipynb' is provided, notebook_content should be in standard .ipynb format;
+         otherwise notebook_content should be in the Git-friendly source code format.
+     folder : str | os.PathLike, default=None
+         The folder within the workspace where the notebook will be created.
+         Defaults to None which places the notebook in the root of the workspace.
+     """
+
+     notebook_payload = base64.b64encode(notebook_content.encode("utf-8")).decode(
+         "utf-8"
+     )
+
+     definition_payload = {
+         "parts": [
+             {
+                 "path": f"{_notebook_prefix}{type}",
+                 "payload": notebook_payload,
+                 "payloadType": "InlineBase64",
+             }
+         ],
+     }
+
+     if format == "ipynb":
+         definition_payload["format"] = "ipynb"
+
+     create_item(
+         name=name,
+         type="Notebook",
+         workspace=workspace,
+         description=description,
+         definition=definition_payload,
+         folder=folder,
+     )
+
+
+ @log
+ def update_notebook_definition(
+     name: str,
+     notebook_content: str,
+     workspace: Optional[str | UUID] = None,
+     format: Optional[str] = None,
+ ):
+     """
+     Updates an existing notebook with a new definition.
+
+     Parameters
+     ----------
+     name : str
+         The name of the notebook to be updated.
+     notebook_content : str
+         The Jupyter notebook content (not in Base64 format).
+     workspace : str | uuid.UUID, default=None
+         The name or ID of the workspace.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     format : str, default=None
+         If 'ipynb' is provided, notebook_content should be in standard .ipynb format;
+         otherwise notebook_content should be in the Git-friendly source code format.
+     """
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+     notebook_payload = base64.b64encode(notebook_content.encode("utf-8")).decode(
+         "utf-8"
+     )
+     item_id = resolve_item_id(item=name, type="Notebook", workspace=workspace)
+     type = _get_notebook_type(notebook_name=name, workspace=workspace)
+
+     payload = {
+         "definition": {
+             "parts": [
+                 {
+                     "path": f"{_notebook_prefix}{type}",
+                     "payload": notebook_payload,
+                     "payloadType": "InlineBase64",
+                 }
+             ],
+         },
+     }
+
+     if format == "ipynb":
+         payload["definition"]["format"] = "ipynb"
+
+     _base_api(
+         request=f"v1/workspaces/{workspace_id}/notebooks/{item_id}/updateDefinition",
+         payload=payload,
+         method="post",
+         lro_return_status_code=True,
+         status_codes=None,
+     )
+
+     print(
+         f"{icons.green_dot} The '{name}' notebook was updated within the '{workspace_name}' workspace."
+     )
+
+
+ @log
+ def list_notebooks(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
+     """
+     Shows the notebooks within a workspace.
+
+     Parameters
+     ----------
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     pandas.DataFrame
+         A pandas dataframe showing the notebooks within a workspace.
+     """
+
+     columns = {
+         "Notebook Id": "string",
+         "Notebook Name": "string",
+         "Description": "string",
+     }
+     df = _create_dataframe(columns=columns)
+
+     workspace_id = resolve_workspace_id(workspace)
+
+     responses = _base_api(
+         request=f"/v1/workspaces/{workspace_id}/notebooks", uses_pagination=True
+     )
+
+     rows = []
+     for r in responses:
+         for v in r.get("value", []):
+             rows.append(
+                 {
+                     "Notebook Id": v.get("id"),
+                     "Notebook Name": v.get("displayName"),
+                     "Description": v.get("description"),
+                 }
+             )
+
+     if rows:
+         df = pd.DataFrame(rows, columns=list(columns.keys()))
+
+     return df
+
+
+ @log
+ def search_notebooks(
+     search_string: str,
+     notebook: Optional[str | UUID] = None,
+     workspace: Optional[str | UUID | List[str | UUID]] = None,
+ ) -> pd.DataFrame:
+     """
+     Searches notebooks within a workspace or across multiple workspaces for a given search string.
+
+     Parameters
+     ----------
+     search_string : str
+         The string to search for within the notebook definitions.
+     notebook : str | uuid.UUID, default=None
+         The name or ID of a specific notebook to search within.
+         Defaults to None which searches across all notebooks in the specified workspace(s).
+     workspace : str | uuid.UUID | list, default=None
+         The name or ID of the workspace or a list of workspaces to search within.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+         If a list is provided, it should contain workspace names or IDs.
+
+     Returns
+     -------
+     pandas.DataFrame
+         A pandas dataframe showing the notebooks that contain the search string in their definitions.
+         The dataframe includes the workspace name, workspace ID, notebook name, and notebook ID.
+     """
+
+     if not workspace:
+         workspace_id = resolve_workspace_id(workspace)
+         workspace_ids = [workspace_id]
+     elif isinstance(workspace, str):
+         workspace_id = resolve_workspace_id(workspace)
+         workspace_ids = [workspace_id]
+     elif isinstance(workspace, list):
+         workspace_ids = [resolve_workspace_id(ws) for ws in workspace]
+     else:
+         raise ValueError(
+             "Workspace must be a string, UUID, or a list of strings/UUIDs."
+         )
+
+     dfW = fabric.list_workspaces()
+     dfW_filt = dfW[dfW["Id"].isin(workspace_ids)]
+
+     columns = {
+         "Workspace Name": "string",
+         "Workspace Id": "string",
+         "Notebook Name": "string",
+         "Notebook Id": "string",
+     }
+     df = _create_dataframe(columns=columns)
+
+     rows = []
+     for _, r in dfW_filt.iterrows():
+         w_id = r["Id"]
+         w_name = r["Name"]
+         dfN = list_notebooks(workspace=w_id)
+         if notebook is not None:
+             item_id = resolve_item_id(item=notebook, type="Notebook", workspace=w_id)
+             dfN = dfN[dfN["Notebook Id"] == item_id]
+         for _, n in dfN.iterrows():
+             notebook_id = n["Notebook Id"]
+             notebook_name = n["Notebook Name"]
+             definition = _base_api(
+                 request=f"v1/workspaces/{w_id}/notebooks/{notebook_id}/getDefinition",
+                 method="post",
+                 client="fabric_sp",
+                 status_codes=None,
+                 lro_return_json=True,
+             )
+             for part in definition.get("definition").get("parts"):
+                 payload = _decode_b64(part["payload"])
+                 if part["path"] == "notebook-content.py":
+                     if search_string in payload:
+                         rows.append(
+                             {
+                                 "Workspace Name": w_name,
+                                 "Workspace Id": w_id,
+                                 "Notebook Name": notebook_name,
+                                 "Notebook Id": notebook_id,
+                             }
+                         )
+
+     if rows:
+         df = pd.DataFrame(rows, columns=list(columns.keys()))
+
+     return df
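
The hunk above adds the public notebook helpers (get_notebook_definition, import_notebook_from_web, create_notebook, update_notebook_definition, list_notebooks, search_notebooks). The following is a minimal usage sketch only; it is not part of the package diff. It assumes these helpers are re-exported at the sempy_labs top level, that the code runs in a Fabric notebook session, and that the notebook name and GitHub URL shown are placeholders.

import sempy_labs as labs

# List notebooks in the default workspace and fetch one definition as .ipynb text.
notebooks = labs.list_notebooks()
ipynb_text = labs.get_notebook_definition(
    notebook_name="My Notebook", decode=True, format="ipynb"
)

# Re-create the notebook under a new name; format="ipynb" indicates the payload is
# standard .ipynb rather than the Git-friendly source format.
labs.create_notebook(
    name="My Notebook Copy", notebook_content=ipynb_text, format="ipynb"
)

# Import a notebook hosted on GitHub; a /blob/ URL is rewritten to the raw URL
# before the content is downloaded. (Hypothetical placeholder URL.)
labs.import_notebook_from_web(
    notebook_name="Imported Notebook",
    url="https://github.com/<org>/<repo>/blob/main/example.ipynb",
)

# Find notebooks whose notebook-content.py definition contains a given string.
matches = labs.search_notebooks(search_string="fabric.list_items")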
sempy_labs/_one_lake_integration.py
@@ -0,0 +1,151 @@
+ import sempy.fabric as fabric
+ import pandas as pd
+ from typing import Optional
+ from sempy._utils._log import log
+ from sempy_labs._helper_functions import (
+     resolve_workspace_name_and_id,
+     resolve_dataset_name_and_id,
+     resolve_workspace_id,
+ )
+ import sempy_labs._icons as icons
+ from uuid import UUID
+
+
+ @log
+ def export_model_to_onelake(
+     dataset: str | UUID,
+     workspace: Optional[str | UUID] = None,
+     destination_lakehouse: Optional[str] = None,
+     destination_workspace: Optional[str] = None,
+ ):
+     """
+     Exports a semantic model's tables to delta tables in the lakehouse. Creates shortcuts to the tables if a lakehouse is specified.
+
+     Parameters
+     ----------
+     dataset : str | uuid.UUID
+         Name or ID of the semantic model.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     destination_lakehouse : str, default=None
+         The name of the Fabric lakehouse where shortcuts will be created to access the delta tables created by the export. If the lakehouse specified does not exist, one will be created with that name. If no lakehouse is specified, shortcuts will not be created.
+     destination_workspace : str, default=None
+         The name of the Fabric workspace in which the lakehouse resides.
+     """
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+     (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
+
+     fabric.refresh_tom_cache(workspace=workspace)
+
+     if destination_workspace is None:
+         destination_workspace = workspace_name
+         destination_workspace_id = workspace_id
+     else:
+         destination_workspace_id = resolve_workspace_id(workspace=destination_workspace)
+
+     tmsl = f"""
+     {{
+         'export': {{
+             'layout': 'delta',
+             'type': 'full',
+             'objects': [
+                 {{
+                     'database': '{dataset_name}'
+                 }}
+             ]
+         }}
+     }}
+     """
+
+     # Export model's tables as delta tables
+     try:
+         fabric.execute_tmsl(script=tmsl, workspace=workspace_id)
+         print(
+             f"{icons.green_dot} The '{dataset_name}' semantic model's tables have been exported as delta tables to the '{workspace_name}' workspace.\n"
+         )
+     except Exception as e:
+         raise ValueError(
+             f"{icons.red_dot} The '{dataset_name}' semantic model's tables have not been exported as delta tables to the '{workspace_name}' workspace.\nMake sure you enable OneLake integration for the '{dataset_name}' semantic model. Follow the instructions here: https://learn.microsoft.com/power-bi/enterprise/onelake-integration-overview#enable-onelake-integration"
+         ) from e
+
+     # Create shortcuts if destination lakehouse is specified
+     if destination_lakehouse is not None:
+         # Destination...
+         dfI_Dest = fabric.list_items(workspace=destination_workspace, type="Lakehouse")
+         dfI_filt = dfI_Dest[(dfI_Dest["Display Name"] == destination_lakehouse)]
+
+         if len(dfI_filt) == 0:
+             print(
+                 f"{icons.red_dot} The '{destination_lakehouse}' lakehouse does not exist within the '{destination_workspace}' workspace."
+             )
+             # Create lakehouse
+             destination_lakehouse_id = fabric.create_lakehouse(
+                 display_name=destination_lakehouse, workspace=destination_workspace
+             )
+             print(
+                 f"{icons.green_dot} The '{destination_lakehouse}' lakehouse has been created within the '{destination_workspace}' workspace.\n"
+             )
+         else:
+             destination_lakehouse_id = dfI_filt["Id"].iloc[0]
+
+         # Source...
+         dfI_Source = fabric.list_items(workspace=workspace_id, type="SemanticModel")
+         dfI_filtSource = dfI_Source[(dfI_Source["Display Name"] == dataset)]
+         sourceLakehouseId = dfI_filtSource["Id"].iloc[0]
+
+         # Valid tables
+         dfP = fabric.list_partitions(
+             dataset=dataset_id,
+             workspace=workspace_id,
+             additional_xmla_properties=["Parent.SystemManaged"],
+         )
+         dfP_filt = dfP[
+             (dfP["Mode"] == "Import")
+             & (dfP["Source Type"] != "CalculationGroup")
+             & (dfP["Parent System Managed"] == False)
+         ]
+         dfC = fabric.list_columns(dataset=dataset_id, workspace=workspace_id)
+         tmc = pd.DataFrame(dfP.groupby("Table Name")["Mode"].nunique()).reset_index()
+         oneMode = tmc[tmc["Mode"] == 1]
+         tableAll = dfP_filt[
+             dfP_filt["Table Name"].isin(dfC["Table Name"].values)
+             & (dfP_filt["Table Name"].isin(oneMode["Table Name"].values))
+         ]
+         tables = tableAll["Table Name"].unique()
+
+         client = fabric.FabricRestClient()
+
+         print(f"{icons.in_progress} Creating shortcuts...\n")
+         for tableName in tables:
+             tablePath = f"Tables/{tableName}"
+             shortcutName = tableName.replace(" ", "")
+             request_body = {
+                 "path": "Tables",
+                 "name": shortcutName,
+                 "target": {
+                     "oneLake": {
+                         "workspaceId": workspace_id,
+                         "itemId": sourceLakehouseId,
+                         "path": tablePath,
+                     }
+                 },
+             }
+
+             try:
+                 response = client.post(
+                     f"/v1/workspaces/{destination_workspace_id}/items/{destination_lakehouse_id}/shortcuts",
+                     json=request_body,
+                 )
+                 if response.status_code == 201:
+                     print(
+                         f"{icons.bullet} The shortcut '{shortcutName}' was created in the '{destination_lakehouse}' lakehouse within the '{destination_workspace}' workspace. It is based on the '{tableName}' table in the '{dataset_name}' semantic model within the '{workspace_name}' workspace.\n"
+                     )
+                 else:
+                     print(response.status_code)
+             except Exception as e:
+                 raise ValueError(
+                     f"{icons.red_dot} Failed to create a shortcut for the '{tableName}' table."
+                 ) from e
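
A similarly hedged sketch of calling export_model_to_onelake follows; it is illustrative only and not part of the package diff. The workspace, model, and lakehouse names are hypothetical, and the semantic model is assumed to be import mode with OneLake integration enabled.

import sempy_labs as labs

# Export the model's import-mode tables to delta tables in OneLake, then create
# shortcuts to them in the target lakehouse (created if it does not already exist).
labs.export_model_to_onelake(
    dataset="Sales Model",
    workspace="Sales Workspace",
    destination_lakehouse="SalesLakehouse",
    destination_workspace="Sales Workspace",
)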