semantic-link-labs 0.12.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (243)
  1. semantic_link_labs-0.12.8.dist-info/METADATA +354 -0
  2. semantic_link_labs-0.12.8.dist-info/RECORD +243 -0
  3. semantic_link_labs-0.12.8.dist-info/WHEEL +5 -0
  4. semantic_link_labs-0.12.8.dist-info/licenses/LICENSE +21 -0
  5. semantic_link_labs-0.12.8.dist-info/top_level.txt +1 -0
  6. sempy_labs/__init__.py +606 -0
  7. sempy_labs/_a_lib_info.py +2 -0
  8. sempy_labs/_ai.py +437 -0
  9. sempy_labs/_authentication.py +264 -0
  10. sempy_labs/_bpa_translation/_model/_translations_am-ET.po +869 -0
  11. sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +908 -0
  12. sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +968 -0
  13. sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +963 -0
  14. sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +943 -0
  15. sempy_labs/_bpa_translation/_model/_translations_da-DK.po +945 -0
  16. sempy_labs/_bpa_translation/_model/_translations_de-DE.po +988 -0
  17. sempy_labs/_bpa_translation/_model/_translations_el-GR.po +993 -0
  18. sempy_labs/_bpa_translation/_model/_translations_es-ES.po +971 -0
  19. sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +933 -0
  20. sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +942 -0
  21. sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +994 -0
  22. sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +967 -0
  23. sempy_labs/_bpa_translation/_model/_translations_he-IL.po +902 -0
  24. sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +944 -0
  25. sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +963 -0
  26. sempy_labs/_bpa_translation/_model/_translations_id-ID.po +946 -0
  27. sempy_labs/_bpa_translation/_model/_translations_is-IS.po +939 -0
  28. sempy_labs/_bpa_translation/_model/_translations_it-IT.po +986 -0
  29. sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +846 -0
  30. sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +839 -0
  31. sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +967 -0
  32. sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +978 -0
  33. sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +962 -0
  34. sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +962 -0
  35. sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +957 -0
  36. sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +968 -0
  37. sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +964 -0
  38. sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +952 -0
  39. sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +950 -0
  40. sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +942 -0
  41. sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +976 -0
  42. sempy_labs/_bpa_translation/_model/_translations_te-IN.po +947 -0
  43. sempy_labs/_bpa_translation/_model/_translations_th-TH.po +924 -0
  44. sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +953 -0
  45. sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +961 -0
  46. sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +804 -0
  47. sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +969 -0
  48. sempy_labs/_capacities.py +1198 -0
  49. sempy_labs/_capacity_migration.py +660 -0
  50. sempy_labs/_clear_cache.py +351 -0
  51. sempy_labs/_connections.py +610 -0
  52. sempy_labs/_dashboards.py +69 -0
  53. sempy_labs/_data_access_security.py +98 -0
  54. sempy_labs/_data_pipelines.py +162 -0
  55. sempy_labs/_dataflows.py +668 -0
  56. sempy_labs/_dax.py +501 -0
  57. sempy_labs/_daxformatter.py +80 -0
  58. sempy_labs/_delta_analyzer.py +467 -0
  59. sempy_labs/_delta_analyzer_history.py +301 -0
  60. sempy_labs/_dictionary_diffs.py +221 -0
  61. sempy_labs/_documentation.py +147 -0
  62. sempy_labs/_domains.py +51 -0
  63. sempy_labs/_eventhouses.py +182 -0
  64. sempy_labs/_external_data_shares.py +230 -0
  65. sempy_labs/_gateways.py +521 -0
  66. sempy_labs/_generate_semantic_model.py +521 -0
  67. sempy_labs/_get_connection_string.py +84 -0
  68. sempy_labs/_git.py +543 -0
  69. sempy_labs/_graphQL.py +90 -0
  70. sempy_labs/_helper_functions.py +2833 -0
  71. sempy_labs/_icons.py +149 -0
  72. sempy_labs/_job_scheduler.py +609 -0
  73. sempy_labs/_kql_databases.py +149 -0
  74. sempy_labs/_kql_querysets.py +124 -0
  75. sempy_labs/_kusto.py +137 -0
  76. sempy_labs/_labels.py +124 -0
  77. sempy_labs/_list_functions.py +1720 -0
  78. sempy_labs/_managed_private_endpoints.py +253 -0
  79. sempy_labs/_mirrored_databases.py +416 -0
  80. sempy_labs/_mirrored_warehouses.py +60 -0
  81. sempy_labs/_ml_experiments.py +113 -0
  82. sempy_labs/_model_auto_build.py +140 -0
  83. sempy_labs/_model_bpa.py +557 -0
  84. sempy_labs/_model_bpa_bulk.py +378 -0
  85. sempy_labs/_model_bpa_rules.py +859 -0
  86. sempy_labs/_model_dependencies.py +343 -0
  87. sempy_labs/_mounted_data_factories.py +123 -0
  88. sempy_labs/_notebooks.py +441 -0
  89. sempy_labs/_one_lake_integration.py +151 -0
  90. sempy_labs/_onelake.py +131 -0
  91. sempy_labs/_query_scale_out.py +433 -0
  92. sempy_labs/_refresh_semantic_model.py +435 -0
  93. sempy_labs/_semantic_models.py +468 -0
  94. sempy_labs/_spark.py +455 -0
  95. sempy_labs/_sql.py +241 -0
  96. sempy_labs/_sql_audit_settings.py +207 -0
  97. sempy_labs/_sql_endpoints.py +214 -0
  98. sempy_labs/_tags.py +201 -0
  99. sempy_labs/_translations.py +43 -0
  100. sempy_labs/_user_delegation_key.py +44 -0
  101. sempy_labs/_utils.py +79 -0
  102. sempy_labs/_vertipaq.py +1021 -0
  103. sempy_labs/_vpax.py +388 -0
  104. sempy_labs/_warehouses.py +234 -0
  105. sempy_labs/_workloads.py +140 -0
  106. sempy_labs/_workspace_identity.py +72 -0
  107. sempy_labs/_workspaces.py +595 -0
  108. sempy_labs/admin/__init__.py +170 -0
  109. sempy_labs/admin/_activities.py +167 -0
  110. sempy_labs/admin/_apps.py +145 -0
  111. sempy_labs/admin/_artifacts.py +65 -0
  112. sempy_labs/admin/_basic_functions.py +463 -0
  113. sempy_labs/admin/_capacities.py +508 -0
  114. sempy_labs/admin/_dataflows.py +45 -0
  115. sempy_labs/admin/_datasets.py +186 -0
  116. sempy_labs/admin/_domains.py +522 -0
  117. sempy_labs/admin/_external_data_share.py +100 -0
  118. sempy_labs/admin/_git.py +72 -0
  119. sempy_labs/admin/_items.py +265 -0
  120. sempy_labs/admin/_labels.py +211 -0
  121. sempy_labs/admin/_reports.py +241 -0
  122. sempy_labs/admin/_scanner.py +118 -0
  123. sempy_labs/admin/_shared.py +82 -0
  124. sempy_labs/admin/_sharing_links.py +110 -0
  125. sempy_labs/admin/_tags.py +131 -0
  126. sempy_labs/admin/_tenant.py +503 -0
  127. sempy_labs/admin/_tenant_keys.py +89 -0
  128. sempy_labs/admin/_users.py +140 -0
  129. sempy_labs/admin/_workspaces.py +236 -0
  130. sempy_labs/deployment_pipeline/__init__.py +23 -0
  131. sempy_labs/deployment_pipeline/_items.py +580 -0
  132. sempy_labs/directlake/__init__.py +57 -0
  133. sempy_labs/directlake/_autosync.py +58 -0
  134. sempy_labs/directlake/_directlake_schema_compare.py +120 -0
  135. sempy_labs/directlake/_directlake_schema_sync.py +161 -0
  136. sempy_labs/directlake/_dl_helper.py +274 -0
  137. sempy_labs/directlake/_generate_shared_expression.py +94 -0
  138. sempy_labs/directlake/_get_directlake_lakehouse.py +62 -0
  139. sempy_labs/directlake/_get_shared_expression.py +34 -0
  140. sempy_labs/directlake/_guardrails.py +96 -0
  141. sempy_labs/directlake/_list_directlake_model_calc_tables.py +70 -0
  142. sempy_labs/directlake/_show_unsupported_directlake_objects.py +90 -0
  143. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +239 -0
  144. sempy_labs/directlake/_update_directlake_partition_entity.py +259 -0
  145. sempy_labs/directlake/_warm_cache.py +236 -0
  146. sempy_labs/dotnet_lib/dotnet.runtime.config.json +10 -0
  147. sempy_labs/environment/__init__.py +23 -0
  148. sempy_labs/environment/_items.py +212 -0
  149. sempy_labs/environment/_pubstage.py +223 -0
  150. sempy_labs/eventstream/__init__.py +37 -0
  151. sempy_labs/eventstream/_items.py +263 -0
  152. sempy_labs/eventstream/_topology.py +652 -0
  153. sempy_labs/graph/__init__.py +59 -0
  154. sempy_labs/graph/_groups.py +651 -0
  155. sempy_labs/graph/_sensitivity_labels.py +120 -0
  156. sempy_labs/graph/_teams.py +125 -0
  157. sempy_labs/graph/_user_licenses.py +96 -0
  158. sempy_labs/graph/_users.py +516 -0
  159. sempy_labs/graph_model/__init__.py +15 -0
  160. sempy_labs/graph_model/_background_jobs.py +63 -0
  161. sempy_labs/graph_model/_items.py +149 -0
  162. sempy_labs/lakehouse/__init__.py +67 -0
  163. sempy_labs/lakehouse/_blobs.py +247 -0
  164. sempy_labs/lakehouse/_get_lakehouse_columns.py +102 -0
  165. sempy_labs/lakehouse/_get_lakehouse_tables.py +274 -0
  166. sempy_labs/lakehouse/_helper.py +250 -0
  167. sempy_labs/lakehouse/_lakehouse.py +351 -0
  168. sempy_labs/lakehouse/_livy_sessions.py +143 -0
  169. sempy_labs/lakehouse/_materialized_lake_views.py +157 -0
  170. sempy_labs/lakehouse/_partitioning.py +165 -0
  171. sempy_labs/lakehouse/_schemas.py +217 -0
  172. sempy_labs/lakehouse/_shortcuts.py +440 -0
  173. sempy_labs/migration/__init__.py +35 -0
  174. sempy_labs/migration/_create_pqt_file.py +238 -0
  175. sempy_labs/migration/_direct_lake_to_import.py +105 -0
  176. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +398 -0
  177. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +148 -0
  178. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +533 -0
  179. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +172 -0
  180. sempy_labs/migration/_migration_validation.py +71 -0
  181. sempy_labs/migration/_refresh_calc_tables.py +131 -0
  182. sempy_labs/mirrored_azure_databricks_catalog/__init__.py +15 -0
  183. sempy_labs/mirrored_azure_databricks_catalog/_discover.py +213 -0
  184. sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +45 -0
  185. sempy_labs/ml_model/__init__.py +23 -0
  186. sempy_labs/ml_model/_functions.py +427 -0
  187. sempy_labs/report/_BPAReportTemplate.json +232 -0
  188. sempy_labs/report/__init__.py +55 -0
  189. sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
  190. sempy_labs/report/_bpareporttemplate/.platform +11 -0
  191. sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
  192. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
  193. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
  194. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
  195. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
  196. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
  197. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
  198. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
  199. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
  200. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
  201. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
  202. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
  203. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
  204. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
  205. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
  206. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
  207. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
  208. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
  209. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
  210. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
  211. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
  212. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
  213. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
  214. sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
  215. sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
  216. sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
  217. sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
  218. sempy_labs/report/_download_report.py +76 -0
  219. sempy_labs/report/_export_report.py +257 -0
  220. sempy_labs/report/_generate_report.py +427 -0
  221. sempy_labs/report/_paginated.py +76 -0
  222. sempy_labs/report/_report_bpa.py +354 -0
  223. sempy_labs/report/_report_bpa_rules.py +115 -0
  224. sempy_labs/report/_report_functions.py +581 -0
  225. sempy_labs/report/_report_helper.py +227 -0
  226. sempy_labs/report/_report_list_functions.py +110 -0
  227. sempy_labs/report/_report_rebind.py +149 -0
  228. sempy_labs/report/_reportwrapper.py +3100 -0
  229. sempy_labs/report/_save_report.py +147 -0
  230. sempy_labs/snowflake_database/__init__.py +10 -0
  231. sempy_labs/snowflake_database/_items.py +105 -0
  232. sempy_labs/sql_database/__init__.py +21 -0
  233. sempy_labs/sql_database/_items.py +201 -0
  234. sempy_labs/sql_database/_mirroring.py +79 -0
  235. sempy_labs/theme/__init__.py +12 -0
  236. sempy_labs/theme/_org_themes.py +129 -0
  237. sempy_labs/tom/__init__.py +3 -0
  238. sempy_labs/tom/_model.py +5977 -0
  239. sempy_labs/variable_library/__init__.py +19 -0
  240. sempy_labs/variable_library/_functions.py +403 -0
  241. sempy_labs/warehouse/__init__.py +28 -0
  242. sempy_labs/warehouse/_items.py +234 -0
  243. sempy_labs/warehouse/_restore_points.py +309 -0
@@ -0,0 +1,440 @@
1
+ import sempy.fabric as fabric
2
+ import pandas as pd
3
+ from sempy_labs._helper_functions import (
4
+ resolve_lakehouse_name_and_id,
5
+ resolve_workspace_id,
6
+ resolve_workspace_name_and_id,
7
+ _base_api,
8
+ _create_dataframe,
9
+ resolve_workspace_name,
10
+ )
11
+ from sempy._utils._log import log
12
+ from typing import Optional
13
+ import sempy_labs._icons as icons
14
+ from uuid import UUID
15
+ from sempy.fabric.exceptions import FabricHTTPException
16
+
17
+
18
@log
def create_shortcut_onelake(
    table_name: str,
    source_lakehouse: str | UUID,
    source_workspace: str | UUID,
    destination_lakehouse: Optional[str | UUID] = None,
    destination_workspace: Optional[str | UUID] = None,
    shortcut_name: Optional[str] = None,
    source_path: str = "Tables",
    destination_path: str = "Tables",
    shortcut_conflict_policy: Optional[str] = None,
):
    """
    Creates a `shortcut <https://learn.microsoft.com/fabric/onelake/onelake-shortcuts>`_ to a delta table in OneLake.

    This is a wrapper function for the following API: `OneLake Shortcuts - Create Shortcut <https://learn.microsoft.com/rest/api/fabric/core/onelake-shortcuts/create-shortcut>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    table_name : str
        The table name for which a shortcut will be created.
    source_lakehouse : str | uuid.UUID
        The Fabric lakehouse in which the table resides.
    source_workspace : str | uuid.UUID
        The name or ID of the Fabric workspace in which the source lakehouse exists.
    destination_lakehouse : str | uuid.UUID, default=None
        The Fabric lakehouse in which the shortcut will be created.
        Defaults to None which resolves to the lakehouse attached to the notebook.
    destination_workspace : str | uuid.UUID, default=None
        The name or ID of the Fabric workspace in which the shortcut will be created.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    shortcut_name : str, default=None
        The name of the shortcut 'table' to be created. This defaults to the 'table_name' parameter value.
    source_path : str, default="Tables"
        A string representing the full path to the table/file in the source lakehouse, including either "Files" or "Tables". Examples: Tables/FolderName/SubFolderName; Files/FolderName/SubFolderName.
    destination_path: str, default="Tables"
        A string representing the full path where the shortcut is created, including either "Files" or "Tables". Examples: Tables/FolderName/SubFolderName; Files/FolderName/SubFolderName.
    shortcut_conflict_policy : str, default=None
        When provided, it defines the action to take when a shortcut with the same name and path already exists. The default action is 'Abort'. Additional ShortcutConflictPolicy types may be added over time.

    Raises
    ------
    ValueError
        If a path parameter is invalid, if 'shortcut_conflict_policy' is not a
        recognized value, or if a shortcut with the same name already exists
        but points at a different source.
    """

    if not source_path.startswith(("Files", "Tables")):
        raise ValueError(
            f"{icons.red_dot} The 'source_path' parameter must be either 'Files' or 'Tables'."
        )
    if not destination_path.startswith(("Files", "Tables")):
        raise ValueError(
            f"{icons.red_dot} The 'destination_path' parameter must be either 'Files' or 'Tables'."
        )

    (source_workspace_name, source_workspace_id) = resolve_workspace_name_and_id(
        source_workspace
    )
    (source_lakehouse_name, source_lakehouse_id) = resolve_lakehouse_name_and_id(
        lakehouse=source_lakehouse, workspace=source_workspace_id
    )

    (destination_workspace_name, destination_workspace_id) = (
        resolve_workspace_name_and_id(destination_workspace)
    )
    (destination_lakehouse_name, destination_lakehouse_id) = (
        resolve_lakehouse_name_and_id(
            lakehouse=destination_lakehouse, workspace=destination_workspace_id
        )
    )

    if shortcut_name is None:
        shortcut_name = table_name

    source_full_path = f"{source_path}/{table_name}"

    # Shortcut names cannot contain spaces.
    actual_shortcut_name = shortcut_name.replace(" ", "")

    payload = {
        "path": destination_path,
        "name": actual_shortcut_name,
        "target": {
            "oneLake": {
                "itemId": source_lakehouse_id,
                "path": source_full_path,
                "workspaceId": source_workspace_id,
            }
        },
    }

    # Check whether a shortcut with this name/path already exists. A failed
    # GET (FabricHTTPException, e.g. 404) means it does not, in which case we
    # fall through and create it.
    try:
        response = _base_api(
            request=f"/v1/workspaces/{destination_workspace_id}/items/{destination_lakehouse_id}/shortcuts/{destination_path}/{actual_shortcut_name}",
            client="fabric_sp",
        )
        response_json = response.json()
        # Remove the server-added 'type' discriminator before comparing the
        # targets. Use pop with a default (rather than `del`) so a missing key
        # cannot raise an uncaught KeyError.
        response_json.get("target", {}).pop("type", None)
        if response_json.get("target") == payload.get("target"):
            print(
                f"{icons.info} The '{actual_shortcut_name}' shortcut already exists in the '{destination_lakehouse_name}' lakehouse within the '{destination_workspace_name}' workspace."
            )
            return
        else:
            # Fixed message: the original was missing the closing quote after
            # the lakehouse name.
            raise ValueError(
                f"{icons.red_dot} The '{actual_shortcut_name}' shortcut already exists in the '{destination_lakehouse_name}' lakehouse within the '{destination_workspace_name}' workspace but has a different source."
            )
    except FabricHTTPException:
        pass

    url = f"/v1/workspaces/{destination_workspace_id}/items/{destination_lakehouse_id}/shortcuts"

    if shortcut_conflict_policy:
        if shortcut_conflict_policy not in ["Abort", "GenerateUniqueName"]:
            raise ValueError(
                f"{icons.red_dot} The 'shortcut_conflict_policy' parameter must be either 'Abort' or 'GenerateUniqueName'."
            )
        url += f"?shortcutConflictPolicy={shortcut_conflict_policy}"

    _base_api(
        request=url,
        payload=payload,
        status_codes=201,
        method="post",
        client="fabric_sp",
    )

    print(
        f"{icons.green_dot} The shortcut '{shortcut_name}' was created in the '{destination_lakehouse_name}' lakehouse within the '{destination_workspace_name}' workspace. It is based on the '{table_name}' table in the '{source_lakehouse_name}' lakehouse within the '{source_workspace_name}' workspace."
    )
149
+
150
+
151
@log
def create_shortcut(
    shortcut_name: str,
    location: str,
    subpath: str,
    source: str,
    connection_id: str,
    lakehouse: Optional[str] = None,
    workspace: Optional[str] = None,
):
    """
    Creates a `shortcut <https://learn.microsoft.com/fabric/onelake/onelake-shortcuts>`_ to an ADLS Gen2 or Amazon S3 source.

    Parameters
    ----------
    shortcut_name : str
        The name of the shortcut to be created (spaces are removed).
    location : str
        The location of the source; sent as the 'location' property of the
        shortcut target.
    subpath : str
        The subpath within the source location; sent as the 'subpath' property
        of the shortcut target.
    source : str
        The source type. Must be either 'adlsGen2' or 'amazonS3'.
    connection_id: str
        The ID of the connection used to reach the source; sent as the
        'connectionId' property of the shortcut target.
    lakehouse : str, default=None
        The Fabric lakehouse in which the shortcut will be created.
        Defaults to None which resolves to the lakehouse attached to the notebook.
    workspace : str, default=None
        The name of the Fabric workspace in which the shortcut will be created.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Raises
    ------
    ValueError
        If 'source' is not one of the supported source types.
    """

    # Maps the API source type to a human-readable title used in the
    # confirmation message.
    source_titles = {"adlsGen2": "ADLS Gen2", "amazonS3": "Amazon S3"}

    valid_sources = list(source_titles.keys())

    if source not in valid_sources:
        raise ValueError(
            f"{icons.red_dot} The 'source' parameter must be one of these values: {valid_sources}."
        )

    source_title = source_titles[source]

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    (lakehouse_name, lakehouse_id) = resolve_lakehouse_name_and_id(
        lakehouse=lakehouse, workspace=workspace_id
    )

    # Shortcut names cannot contain spaces.
    actual_shortcut_name = shortcut_name.replace(" ", "")

    payload = {
        "path": "Tables",
        "name": actual_shortcut_name,
        "target": {
            source: {
                "location": location,
                "subpath": subpath,
                "connectionId": connection_id,
            }
        },
    }

    _base_api(
        request=f"/v1/workspaces/{workspace_id}/items/{lakehouse_id}/shortcuts",
        method="post",
        payload=payload,
        status_codes=201,
        client="fabric_sp",
    )
    print(
        f"{icons.green_dot} The shortcut '{actual_shortcut_name}' was created in the '{lakehouse_name}' lakehouse within"
        f" the '{workspace_name}' workspace. It is based on the '{subpath}' table in '{source_title}'."
    )
219
+
220
+
221
@log
def delete_shortcut(
    shortcut_name: str,
    shortcut_path: str = "Tables",
    lakehouse: Optional[str] = None,
    workspace: Optional[str | UUID] = None,
):
    """
    Deletes a shortcut.

    This is a wrapper function for the following API: `OneLake Shortcuts - Delete Shortcut <https://learn.microsoft.com/rest/api/fabric/core/onelake-shortcuts/delete-shortcut>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    shortcut_name : str
        The name of the shortcut.
    shortcut_path : str = "Tables"
        The path of the shortcut to be deleted. Must start with either "Files" or "Tables". Examples: Tables/FolderName/SubFolderName; Files/FolderName/SubFolderName.
    lakehouse : str | uuid.UUID, default=None
        The Fabric lakehouse name in which the shortcut resides.
        Defaults to None which resolves to the lakehouse attached to the notebook.
    workspace : str | UUID, default=None
        The name or ID of the Fabric workspace in which lakehouse resides.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    (lakehouse_name, lakehouse_id) = resolve_lakehouse_name_and_id(
        lakehouse=lakehouse, workspace=workspace_id
    )

    _base_api(
        request=f"/v1/workspaces/{workspace_id}/items/{lakehouse_id}/shortcuts/{shortcut_path}/{shortcut_name}",
        method="delete",
        client="fabric_sp",
    )

    # Bug fix: the original interpolated the raw 'lakehouse' argument, which
    # prints 'None' when the parameter is left at its default; use the
    # resolved lakehouse name instead.
    print(
        f"{icons.green_dot} The '{shortcut_name}' shortcut in the '{lakehouse_name}' lakehouse within the '{workspace_name}' workspace has been deleted."
    )
264
+
265
+
266
@log
def reset_shortcut_cache(workspace: Optional[str | UUID] = None):
    """
    Deletes any cached files that were stored while reading from shortcuts.

    This is a wrapper function for the following API: `OneLake Shortcuts - Reset Shortcut Cache <https://learn.microsoft.com/rest/api/fabric/core/onelake-shortcuts/reset-shortcut-cache>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    workspace : str | uuid.UUID, default=None
        The name or ID of the Fabric workspace.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)

    # Long-running operation: _base_api polls via the LRO status code rather
    # than checking a fixed success code.
    request = f"/v1/workspaces/{workspace_id}/onelake/resetShortcutCache"
    _base_api(
        request=request,
        client="fabric_sp",
        method="post",
        status_codes=None,
        lro_return_status_code=True,
    )

    print(
        f"{icons.green_dot} The shortcut cache has been reset for the '{workspace_name}' workspace."
    )
296
+
297
+
298
@log
def list_shortcuts(
    lakehouse: Optional[str | UUID] = None,
    workspace: Optional[str | UUID] = None,
    path: Optional[str] = None,
) -> pd.DataFrame:
    """
    Shows all shortcuts which exist in a Fabric lakehouse and their properties.

    Parameters
    ----------
    lakehouse : str | uuid.UUID, default=None
        The Fabric lakehouse name or ID.
        Defaults to None which resolves to the lakehouse attached to the notebook.
    workspace : str | uuid.UUID, default=None
        The name or ID of the Fabric workspace in which lakehouse resides.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    path: str, default=None
        The path within lakehouse where to look for shortcuts. If provided, must start with either "Files" or "Tables". Examples: Tables/FolderName/SubFolderName; Files/FolderName/SubFolderName.
        Defaults to None which will return all shortcuts on the given lakehouse

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing all the shortcuts which exist in the specified lakehouse.
    """

    workspace_id = resolve_workspace_id(workspace)
    (lakehouse_name, lakehouse_id) = resolve_lakehouse_name_and_id(
        lakehouse=lakehouse, workspace=workspace_id
    )

    columns = {
        "Shortcut Name": "string",
        "Shortcut Path": "string",
        "Source Type": "string",
        "Source Workspace Id": "string",
        "Source Workspace Name": "string",
        "Source Item Id": "string",
        "Source Item Name": "string",
        "Source Item Type": "string",
        "OneLake Path": "string",
        "Connection Id": "string",
        "Location": "string",
        "Bucket": "string",
        "SubPath": "string",
        "Source Properties Raw": "string",
    }
    df = _create_dataframe(columns=columns)

    # To improve performance, cache all items per source workspace so
    # fabric.list_items is called at most once per workspace.
    itm_clms = {
        "Id": "string",
        "Display Name": "string",
        "Description": "string",
        "Type": "string",
        "Workspace Id": "string",
    }
    source_items_df = _create_dataframe(columns=itm_clms)

    url = f"/v1/workspaces/{workspace_id}/items/{lakehouse_id}/shortcuts"

    if path is not None:
        url += f"?parentPath={path}"

    responses = _base_api(
        request=url,
        uses_pagination=True,
        client="fabric_sp",
    )

    # Maps the API's 'type' discriminator to the key under which the
    # type-specific properties are nested inside the 'target' object.
    sources = {
        "AdlsGen2": "adlsGen2",
        "AmazonS3": "amazonS3",
        "Dataverse": "dataverse",
        "ExternalDataShare": "externalDataShare",
        "GoogleCloudStorage": "googleCloudStorage",
        "OneLake": "oneLake",
        "S3Compatible": "s3Compatible",
    }

    rows = []
    for r in responses:
        for i in r.get("value", []):
            tgt = i.get("target", {})
            tgt_type = tgt.get("type")
            # Hoist the per-type property bag instead of re-fetching it for
            # every field; unknown types resolve to an empty dict.
            src_key = sources.get(tgt_type)
            src_props = tgt.get(src_key, {}) if src_key else {}
            connection_id = src_props.get("connectionId")
            location = src_props.get("location")
            sub_path = src_props.get("subpath")
            source_workspace_id = src_props.get("workspaceId")
            source_item_id = src_props.get("itemId")
            bucket = src_props.get("bucket")
            source_workspace_name = (
                resolve_workspace_name(
                    workspace_id=source_workspace_id, throw_error=False
                )
                if source_workspace_id is not None
                else None
            )
            # Best-effort enrichment: look up the source item's type and
            # display name via the per-workspace cache.
            source_item_type = None
            source_item_name = None
            try:
                dfI = source_items_df[
                    source_items_df["Workspace Id"] == source_workspace_id
                ]
                if dfI.empty:
                    dfI = fabric.list_items(workspace=source_workspace_id)
                    source_items_df = pd.concat(
                        [source_items_df, dfI], ignore_index=True
                    )

                dfI_filt = dfI[dfI["Id"] == source_item_id]
                if not dfI_filt.empty:
                    source_item_type = dfI_filt["Type"].iloc[0]
                    source_item_name = dfI_filt["Display Name"].iloc[0]
            except Exception:
                # Enrichment is optional; never fail the listing because of it.
                pass

            rows.append(
                {
                    "Shortcut Name": i.get("name"),
                    "Shortcut Path": i.get("path"),
                    "Source Type": tgt_type,
                    "Source Workspace Id": source_workspace_id,
                    "Source Workspace Name": source_workspace_name,
                    "Source Item Id": source_item_id,
                    "Source Item Name": source_item_name,
                    "Source Item Type": source_item_type,
                    # Bug fix: the original used sources.get("oneLake"), but the
                    # dict's keys are capitalized ("OneLake"), so the lookup
                    # returned None and this column was always empty.
                    "OneLake Path": tgt.get("oneLake", {}).get("path"),
                    "Connection Id": connection_id,
                    "Location": location,
                    "Bucket": bucket,
                    "SubPath": sub_path,
                    "Source Properties Raw": str(tgt),
                }
            )

    if rows:
        df = pd.DataFrame(rows, columns=list(columns.keys()))

    return df
@@ -0,0 +1,35 @@
1
# Re-export the public migration helpers so callers can import them directly
# from `sempy_labs.migration` instead of the private submodules.
from ._create_pqt_file import create_pqt_file
from ._migrate_calctables_to_lakehouse import (
    migrate_calc_tables_to_lakehouse,
    migrate_field_parameters,
)
from ._migrate_calctables_to_semantic_model import (
    migrate_calc_tables_to_semantic_model,
)
from ._migrate_model_objects_to_semantic_model import (
    migrate_model_objects_to_semantic_model,
)
from ._migrate_tables_columns_to_semantic_model import (
    migrate_tables_columns_to_semantic_model,
)
from ._migration_validation import (
    migration_validation,
)
from ._refresh_calc_tables import (
    refresh_calc_tables,
)
from ._direct_lake_to_import import (
    migrate_direct_lake_to_import,
)

# Explicit public API of this subpackage (also controls `from ... import *`).
__all__ = [
    "create_pqt_file",
    "migrate_calc_tables_to_lakehouse",
    "migrate_field_parameters",
    "migrate_calc_tables_to_semantic_model",
    "migrate_model_objects_to_semantic_model",
    "migrate_tables_columns_to_semantic_model",
    "migration_validation",
    "refresh_calc_tables",
    "migrate_direct_lake_to_import",
]