semantic-link-labs 0.12.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (243)
  1. semantic_link_labs-0.12.8.dist-info/METADATA +354 -0
  2. semantic_link_labs-0.12.8.dist-info/RECORD +243 -0
  3. semantic_link_labs-0.12.8.dist-info/WHEEL +5 -0
  4. semantic_link_labs-0.12.8.dist-info/licenses/LICENSE +21 -0
  5. semantic_link_labs-0.12.8.dist-info/top_level.txt +1 -0
  6. sempy_labs/__init__.py +606 -0
  7. sempy_labs/_a_lib_info.py +2 -0
  8. sempy_labs/_ai.py +437 -0
  9. sempy_labs/_authentication.py +264 -0
  10. sempy_labs/_bpa_translation/_model/_translations_am-ET.po +869 -0
  11. sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +908 -0
  12. sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +968 -0
  13. sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +963 -0
  14. sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +943 -0
  15. sempy_labs/_bpa_translation/_model/_translations_da-DK.po +945 -0
  16. sempy_labs/_bpa_translation/_model/_translations_de-DE.po +988 -0
  17. sempy_labs/_bpa_translation/_model/_translations_el-GR.po +993 -0
  18. sempy_labs/_bpa_translation/_model/_translations_es-ES.po +971 -0
  19. sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +933 -0
  20. sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +942 -0
  21. sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +994 -0
  22. sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +967 -0
  23. sempy_labs/_bpa_translation/_model/_translations_he-IL.po +902 -0
  24. sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +944 -0
  25. sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +963 -0
  26. sempy_labs/_bpa_translation/_model/_translations_id-ID.po +946 -0
  27. sempy_labs/_bpa_translation/_model/_translations_is-IS.po +939 -0
  28. sempy_labs/_bpa_translation/_model/_translations_it-IT.po +986 -0
  29. sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +846 -0
  30. sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +839 -0
  31. sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +967 -0
  32. sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +978 -0
  33. sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +962 -0
  34. sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +962 -0
  35. sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +957 -0
  36. sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +968 -0
  37. sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +964 -0
  38. sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +952 -0
  39. sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +950 -0
  40. sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +942 -0
  41. sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +976 -0
  42. sempy_labs/_bpa_translation/_model/_translations_te-IN.po +947 -0
  43. sempy_labs/_bpa_translation/_model/_translations_th-TH.po +924 -0
  44. sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +953 -0
  45. sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +961 -0
  46. sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +804 -0
  47. sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +969 -0
  48. sempy_labs/_capacities.py +1198 -0
  49. sempy_labs/_capacity_migration.py +660 -0
  50. sempy_labs/_clear_cache.py +351 -0
  51. sempy_labs/_connections.py +610 -0
  52. sempy_labs/_dashboards.py +69 -0
  53. sempy_labs/_data_access_security.py +98 -0
  54. sempy_labs/_data_pipelines.py +162 -0
  55. sempy_labs/_dataflows.py +668 -0
  56. sempy_labs/_dax.py +501 -0
  57. sempy_labs/_daxformatter.py +80 -0
  58. sempy_labs/_delta_analyzer.py +467 -0
  59. sempy_labs/_delta_analyzer_history.py +301 -0
  60. sempy_labs/_dictionary_diffs.py +221 -0
  61. sempy_labs/_documentation.py +147 -0
  62. sempy_labs/_domains.py +51 -0
  63. sempy_labs/_eventhouses.py +182 -0
  64. sempy_labs/_external_data_shares.py +230 -0
  65. sempy_labs/_gateways.py +521 -0
  66. sempy_labs/_generate_semantic_model.py +521 -0
  67. sempy_labs/_get_connection_string.py +84 -0
  68. sempy_labs/_git.py +543 -0
  69. sempy_labs/_graphQL.py +90 -0
  70. sempy_labs/_helper_functions.py +2833 -0
  71. sempy_labs/_icons.py +149 -0
  72. sempy_labs/_job_scheduler.py +609 -0
  73. sempy_labs/_kql_databases.py +149 -0
  74. sempy_labs/_kql_querysets.py +124 -0
  75. sempy_labs/_kusto.py +137 -0
  76. sempy_labs/_labels.py +124 -0
  77. sempy_labs/_list_functions.py +1720 -0
  78. sempy_labs/_managed_private_endpoints.py +253 -0
  79. sempy_labs/_mirrored_databases.py +416 -0
  80. sempy_labs/_mirrored_warehouses.py +60 -0
  81. sempy_labs/_ml_experiments.py +113 -0
  82. sempy_labs/_model_auto_build.py +140 -0
  83. sempy_labs/_model_bpa.py +557 -0
  84. sempy_labs/_model_bpa_bulk.py +378 -0
  85. sempy_labs/_model_bpa_rules.py +859 -0
  86. sempy_labs/_model_dependencies.py +343 -0
  87. sempy_labs/_mounted_data_factories.py +123 -0
  88. sempy_labs/_notebooks.py +441 -0
  89. sempy_labs/_one_lake_integration.py +151 -0
  90. sempy_labs/_onelake.py +131 -0
  91. sempy_labs/_query_scale_out.py +433 -0
  92. sempy_labs/_refresh_semantic_model.py +435 -0
  93. sempy_labs/_semantic_models.py +468 -0
  94. sempy_labs/_spark.py +455 -0
  95. sempy_labs/_sql.py +241 -0
  96. sempy_labs/_sql_audit_settings.py +207 -0
  97. sempy_labs/_sql_endpoints.py +214 -0
  98. sempy_labs/_tags.py +201 -0
  99. sempy_labs/_translations.py +43 -0
  100. sempy_labs/_user_delegation_key.py +44 -0
  101. sempy_labs/_utils.py +79 -0
  102. sempy_labs/_vertipaq.py +1021 -0
  103. sempy_labs/_vpax.py +388 -0
  104. sempy_labs/_warehouses.py +234 -0
  105. sempy_labs/_workloads.py +140 -0
  106. sempy_labs/_workspace_identity.py +72 -0
  107. sempy_labs/_workspaces.py +595 -0
  108. sempy_labs/admin/__init__.py +170 -0
  109. sempy_labs/admin/_activities.py +167 -0
  110. sempy_labs/admin/_apps.py +145 -0
  111. sempy_labs/admin/_artifacts.py +65 -0
  112. sempy_labs/admin/_basic_functions.py +463 -0
  113. sempy_labs/admin/_capacities.py +508 -0
  114. sempy_labs/admin/_dataflows.py +45 -0
  115. sempy_labs/admin/_datasets.py +186 -0
  116. sempy_labs/admin/_domains.py +522 -0
  117. sempy_labs/admin/_external_data_share.py +100 -0
  118. sempy_labs/admin/_git.py +72 -0
  119. sempy_labs/admin/_items.py +265 -0
  120. sempy_labs/admin/_labels.py +211 -0
  121. sempy_labs/admin/_reports.py +241 -0
  122. sempy_labs/admin/_scanner.py +118 -0
  123. sempy_labs/admin/_shared.py +82 -0
  124. sempy_labs/admin/_sharing_links.py +110 -0
  125. sempy_labs/admin/_tags.py +131 -0
  126. sempy_labs/admin/_tenant.py +503 -0
  127. sempy_labs/admin/_tenant_keys.py +89 -0
  128. sempy_labs/admin/_users.py +140 -0
  129. sempy_labs/admin/_workspaces.py +236 -0
  130. sempy_labs/deployment_pipeline/__init__.py +23 -0
  131. sempy_labs/deployment_pipeline/_items.py +580 -0
  132. sempy_labs/directlake/__init__.py +57 -0
  133. sempy_labs/directlake/_autosync.py +58 -0
  134. sempy_labs/directlake/_directlake_schema_compare.py +120 -0
  135. sempy_labs/directlake/_directlake_schema_sync.py +161 -0
  136. sempy_labs/directlake/_dl_helper.py +274 -0
  137. sempy_labs/directlake/_generate_shared_expression.py +94 -0
  138. sempy_labs/directlake/_get_directlake_lakehouse.py +62 -0
  139. sempy_labs/directlake/_get_shared_expression.py +34 -0
  140. sempy_labs/directlake/_guardrails.py +96 -0
  141. sempy_labs/directlake/_list_directlake_model_calc_tables.py +70 -0
  142. sempy_labs/directlake/_show_unsupported_directlake_objects.py +90 -0
  143. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +239 -0
  144. sempy_labs/directlake/_update_directlake_partition_entity.py +259 -0
  145. sempy_labs/directlake/_warm_cache.py +236 -0
  146. sempy_labs/dotnet_lib/dotnet.runtime.config.json +10 -0
  147. sempy_labs/environment/__init__.py +23 -0
  148. sempy_labs/environment/_items.py +212 -0
  149. sempy_labs/environment/_pubstage.py +223 -0
  150. sempy_labs/eventstream/__init__.py +37 -0
  151. sempy_labs/eventstream/_items.py +263 -0
  152. sempy_labs/eventstream/_topology.py +652 -0
  153. sempy_labs/graph/__init__.py +59 -0
  154. sempy_labs/graph/_groups.py +651 -0
  155. sempy_labs/graph/_sensitivity_labels.py +120 -0
  156. sempy_labs/graph/_teams.py +125 -0
  157. sempy_labs/graph/_user_licenses.py +96 -0
  158. sempy_labs/graph/_users.py +516 -0
  159. sempy_labs/graph_model/__init__.py +15 -0
  160. sempy_labs/graph_model/_background_jobs.py +63 -0
  161. sempy_labs/graph_model/_items.py +149 -0
  162. sempy_labs/lakehouse/__init__.py +67 -0
  163. sempy_labs/lakehouse/_blobs.py +247 -0
  164. sempy_labs/lakehouse/_get_lakehouse_columns.py +102 -0
  165. sempy_labs/lakehouse/_get_lakehouse_tables.py +274 -0
  166. sempy_labs/lakehouse/_helper.py +250 -0
  167. sempy_labs/lakehouse/_lakehouse.py +351 -0
  168. sempy_labs/lakehouse/_livy_sessions.py +143 -0
  169. sempy_labs/lakehouse/_materialized_lake_views.py +157 -0
  170. sempy_labs/lakehouse/_partitioning.py +165 -0
  171. sempy_labs/lakehouse/_schemas.py +217 -0
  172. sempy_labs/lakehouse/_shortcuts.py +440 -0
  173. sempy_labs/migration/__init__.py +35 -0
  174. sempy_labs/migration/_create_pqt_file.py +238 -0
  175. sempy_labs/migration/_direct_lake_to_import.py +105 -0
  176. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +398 -0
  177. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +148 -0
  178. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +533 -0
  179. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +172 -0
  180. sempy_labs/migration/_migration_validation.py +71 -0
  181. sempy_labs/migration/_refresh_calc_tables.py +131 -0
  182. sempy_labs/mirrored_azure_databricks_catalog/__init__.py +15 -0
  183. sempy_labs/mirrored_azure_databricks_catalog/_discover.py +213 -0
  184. sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +45 -0
  185. sempy_labs/ml_model/__init__.py +23 -0
  186. sempy_labs/ml_model/_functions.py +427 -0
  187. sempy_labs/report/_BPAReportTemplate.json +232 -0
  188. sempy_labs/report/__init__.py +55 -0
  189. sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
  190. sempy_labs/report/_bpareporttemplate/.platform +11 -0
  191. sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
  192. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
  193. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
  194. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
  195. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
  196. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
  197. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
  198. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
  199. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
  200. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
  201. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
  202. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
  203. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
  204. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
  205. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
  206. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
  207. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
  208. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
  209. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
  210. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
  211. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
  212. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
  213. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
  214. sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
  215. sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
  216. sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
  217. sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
  218. sempy_labs/report/_download_report.py +76 -0
  219. sempy_labs/report/_export_report.py +257 -0
  220. sempy_labs/report/_generate_report.py +427 -0
  221. sempy_labs/report/_paginated.py +76 -0
  222. sempy_labs/report/_report_bpa.py +354 -0
  223. sempy_labs/report/_report_bpa_rules.py +115 -0
  224. sempy_labs/report/_report_functions.py +581 -0
  225. sempy_labs/report/_report_helper.py +227 -0
  226. sempy_labs/report/_report_list_functions.py +110 -0
  227. sempy_labs/report/_report_rebind.py +149 -0
  228. sempy_labs/report/_reportwrapper.py +3100 -0
  229. sempy_labs/report/_save_report.py +147 -0
  230. sempy_labs/snowflake_database/__init__.py +10 -0
  231. sempy_labs/snowflake_database/_items.py +105 -0
  232. sempy_labs/sql_database/__init__.py +21 -0
  233. sempy_labs/sql_database/_items.py +201 -0
  234. sempy_labs/sql_database/_mirroring.py +79 -0
  235. sempy_labs/theme/__init__.py +12 -0
  236. sempy_labs/theme/_org_themes.py +129 -0
  237. sempy_labs/tom/__init__.py +3 -0
  238. sempy_labs/tom/_model.py +5977 -0
  239. sempy_labs/variable_library/__init__.py +19 -0
  240. sempy_labs/variable_library/_functions.py +403 -0
  241. sempy_labs/warehouse/__init__.py +28 -0
  242. sempy_labs/warehouse/_items.py +234 -0
  243. sempy_labs/warehouse/_restore_points.py +309 -0
@@ -0,0 +1,147 @@
1
+ import os
2
+ import base64
3
+ import json
4
+ import sempy.fabric as fabric
5
+ import sempy_labs._icons as icons
6
+ from sempy_labs.report._generate_report import get_report_definition
7
+ from sempy_labs._generate_semantic_model import get_semantic_model_definition
8
+ from sempy_labs._helper_functions import (
9
+ _mount,
10
+ resolve_workspace_name_and_id,
11
+ resolve_item_name,
12
+ resolve_workspace_name,
13
+ resolve_item_name_and_id,
14
+ )
15
+ from uuid import UUID
16
+ from sempy._utils._log import log
17
+ from typing import Optional
18
+
19
+
20
@log
def save_report_as_pbip(
    report: str | UUID,
    workspace: Optional[str | UUID] = None,
    thick_report: bool = True,
    live_connect: bool = True,
    lakehouse: Optional[str | UUID] = None,
    lakehouse_workspace: Optional[str | UUID] = None,
):
    """
    Saves a report as a .pbip file to the default lakehouse attached to the notebook.

    Parameters
    ----------
    report : str | uuid.UUID
        Name or ID of the Power BI report.
    workspace : str | uuid.UUID, default=None
        The name or ID of the Fabric workspace in which the report resides.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    thick_report : bool, default=True
        If set to True, saves the report and underlying semantic model.
        If set to False, saves just the report.
    live_connect : bool, default=True
        If set to True, saves a .pbip live-connected to the workspace in the Power BI / Fabric service.
        If set to False, saves a .pbip with a local model, independent from the Power BI / Fabric service.
    lakehouse : str | uuid.UUID, default=None
        The Fabric lakehouse name or ID. This will be the lakehouse to which the report is saved.
        Defaults to None which resolves to the lakehouse attached to the notebook.
    lakehouse_workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID used by the lakehouse.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (report_workspace_name, report_workspace_id) = resolve_workspace_name_and_id(
        workspace
    )
    (report_name, report_id) = resolve_item_name_and_id(
        item=report, type="Report", workspace=workspace
    )
    indent = 2

    # Mount the target lakehouse locally; everything is written under its Files area.
    local_path = _mount(lakehouse=lakehouse, workspace=lakehouse_workspace)
    save_location = f"{local_path}/Files"

    # Find semantic model info
    dfR = fabric.list_reports(workspace=workspace)
    dfR_filt = dfR[dfR["Id"] == report_id]
    if dfR_filt.empty:
        # Bug fix: the report and workspace names were missing their closing
        # quotes in the original error message.
        raise ValueError(
            f"{icons.red_dot} The '{report}' report does not exist within the '{report_workspace_name}' workspace."
        )

    dataset_id = dfR_filt["Dataset Id"].iloc[0]
    dataset_workspace_id = dfR_filt["Dataset Workspace Id"].iloc[0]
    dataset_name = resolve_item_name(item_id=dataset_id, workspace=dataset_workspace_id)
    dataset_workspace_name = resolve_workspace_name(dataset_workspace_id)
    path_prefix = f"{save_location}/{report_workspace_name}/{report_name}/{report_name}"

    # Local model not supported if the report and model are in different workspaces
    if dataset_workspace_name != report_workspace_name and not live_connect:
        live_connect = True
        print(
            f"{icons.warning} The '{report_name}' report from the '{report_workspace_name}' workspace is being saved as a live-connected report/model."
        )

    def add_files(name, type, object_workspace):
        # Download the item definition (report or semantic model) and write each
        # of its files beneath "<path_prefix>.<type>"; for reports, also emit
        # the .pbip pointer file.

        path_prefix_full = f"{path_prefix}.{type}"

        if type == "Report":
            dataframe = get_report_definition(report=name, workspace=workspace)
        elif type == "SemanticModel":
            dataframe = get_semantic_model_definition(
                dataset=name, workspace=object_workspace
            )
        else:
            raise NotImplementedError

        # Create and save files based on dataset/report definition
        for _, r in dataframe.iterrows():
            path = r["path"]
            file_content = base64.b64decode(r["payload"])
            file_path = f"{path_prefix_full}/{path}"
            os.makedirs(os.path.dirname(file_path), exist_ok=True)

            # Update the definition.pbir file for local models so the report
            # points at the sibling .SemanticModel folder instead of the service.
            if not live_connect and type == "Report" and path == "definition.pbir":
                file_content = {
                    "version": "1.0",
                    "datasetReference": {
                        "byPath": {"path": f"../{report_name}.SemanticModel"},
                        "byConnection": None,
                    },
                }

                with open(file_path, "w") as f:
                    json.dump(file_content, f, indent=indent)
            else:
                with open(file_path, "wb") as f:
                    f.write(file_content)

        # Create and save .pbip file for report, converting the file extension
        if type == "Report":
            # Standard .pbip file content
            pbip = {
                "version": "1.0",
                "artifacts": [{"report": {"path": f"{report_name}.Report"}}],
                "settings": {"enableAutoRecovery": True},
            }
            # Ensure the directory exists
            os.makedirs(os.path.dirname(path_prefix), exist_ok=True)
            # Write the .pbip file directly
            pbip_final = f"{path_prefix}.pbip"
            with open(pbip_final, "w") as file:
                json.dump(pbip, file, indent=indent)

    add_files(name=report_name, type="Report", object_workspace=workspace)
    if thick_report:
        add_files(
            name=dataset_name,
            type="SemanticModel",
            object_workspace=dataset_workspace_name,
        )
    print(
        f"{icons.green_dot} The '{report_name}' report within the '{report_workspace_name}' workspace has been saved to this location: {save_location}."
    )
@@ -0,0 +1,10 @@
1
"""Public API of the ``sempy_labs.snowflake_database`` subpackage."""

from ._items import (
    list_snowflake_databases,
    delete_snowflake_database,
)


# Explicit public surface of this subpackage.
__all__ = [
    "list_snowflake_databases",
    "delete_snowflake_database",
]
@@ -0,0 +1,105 @@
1
+ from sempy_labs._helper_functions import (
2
+ _base_api,
3
+ _create_dataframe,
4
+ _update_dataframe_datatypes,
5
+ delete_item,
6
+ resolve_workspace_id,
7
+ )
8
+ import pandas as pd
9
+ from typing import Optional
10
+ from uuid import UUID
11
+ from sempy._utils._log import log
12
+
13
+
14
@log
def list_snowflake_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
    """
    Shows the snowflake databases within a workspace.

    This is a wrapper function for the following API: `Items - List Snowflake Databases <https://learn.microsoft.com/rest/api/fabric/snowflakedatabase/items/list-snowflake-databases>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the snowflake databases within a workspace.
    """

    columns = {
        "Snowflake Database Display Name": "string",
        "Snowflake Database Id": "string",
        "Description": "string",
        "Snowflake Database Name": "string",
        "OneLake Tables Path": "string",
        "Snowflake Volume Path": "string",
        "SQL Endpoint Connection String": "string",
        "SQL Endpoint Id": "string",
        "SQL Endpoint Provisioning Status": "string",
        "Default Schema": "string",
    }
    df = _create_dataframe(columns=columns)

    workspace_id = resolve_workspace_id(workspace)

    responses = _base_api(
        request=f"/v1/workspaces/{workspace_id}/snowflakeDatabases",
        uses_pagination=True,
        client="fabric_sp",
    )

    def _to_row(item):
        # Flatten one API item (plus its nested properties) into a dataframe row.
        props = item.get("properties", {})
        endpoint = props.get("sqlEndpointProperties", {})
        return {
            "Snowflake Database Display Name": item.get("displayName"),
            "Snowflake Database Id": item.get("id"),
            "Description": item.get("description"),
            "Snowflake Database Name": props.get("snowflakeDatabaseName"),
            "OneLake Tables Path": props.get("onelakeTablesPath"),
            "Snowflake Volume Path": props.get("snowflakeVolumePath"),
            "SQL Endpoint Connection String": endpoint.get("connectionString"),
            "SQL Endpoint Id": endpoint.get("id"),
            "SQL Endpoint Provisioning Status": endpoint.get("provisioningStatus"),
            "Default Schema": props.get("defaultSchema"),
        }

    # Collect rows across all paginated responses.
    rows = [_to_row(v) for r in responses for v in r.get("value", [])]

    if rows:
        df = pd.DataFrame(rows, columns=list(columns.keys()))
        _update_dataframe_datatypes(dataframe=df, column_map=columns)

    return df
87
+
88
+
89
@log
def delete_snowflake_database(
    snowflake_database: str | UUID, workspace: Optional[str | UUID] = None
):
    """
    Deletes a Fabric snowflake database.

    This is a wrapper function for the following API: `Items - Delete Snowflake Database <https://learn.microsoft.com/rest/api/fabric/snowflakedatabase/items/delete-snowflake-database>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    snowflake_database : str | uuid.UUID
        Name or ID of the snowflake database.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    # Delegate to the generic item-deletion helper.
    delete_item(item=snowflake_database, type="SnowflakeDatabase", workspace=workspace)
@@ -0,0 +1,21 @@
1
"""Public API of the ``sempy_labs.sql_database`` subpackage."""

from sempy_labs.sql_database._items import (
    get_sql_database_columns,
    get_sql_database_tables,
    create_sql_database,
    delete_sql_database,
    list_sql_databases,
)
from sempy_labs.sql_database._mirroring import (
    start_mirroring,
    stop_mirroring,
)

# Explicit public surface of this subpackage.
__all__ = [
    "get_sql_database_columns",
    "get_sql_database_tables",
    "create_sql_database",
    "delete_sql_database",
    "list_sql_databases",
    "start_mirroring",
    "stop_mirroring",
]
@@ -0,0 +1,201 @@
1
+ from sempy_labs._helper_functions import (
2
+ resolve_workspace_id,
3
+ _base_api,
4
+ _create_dataframe,
5
+ _update_dataframe_datatypes,
6
+ create_item,
7
+ delete_item,
8
+ )
9
+ import pandas as pd
10
+ from typing import Optional
11
+ from uuid import UUID
12
+ from sempy._utils._log import log
13
+
14
+
15
@log
def create_sql_database(
    name: str, description: Optional[str] = None, workspace: Optional[str | UUID] = None
):
    """
    Creates a SQL database.

    This is a wrapper function for the following API: `Items - Create SQL Database <https://learn.microsoft.com/rest/api/fabric/sqldatabase/items/create-sql-database>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    name: str
        Name of the SQL database.
    description : str, default=None
        A description of the SQL database.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    # Delegate to the generic item-creation helper.
    create_item(
        name=name,
        description=description,
        type="SQLDatabase",
        workspace=workspace,
    )
41
+
42
+
43
@log
def delete_sql_database(
    sql_database: str | UUID, workspace: Optional[str | UUID] = None
):
    """
    Deletes a SQL Database.

    This is a wrapper function for the following API: `Items - Delete SQL Database <https://learn.microsoft.com/rest/api/fabric/sqldatabase/items/delete-sql-database>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    sql_database: str | uuid.UUID
        Name of the SQL database.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    # Delegate to the generic item-deletion helper.
    delete_item(
        item=sql_database,
        type="SQLDatabase",
        workspace=workspace,
    )
65
+
66
+
67
@log
def list_sql_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
    """
    Lists all SQL databases in the Fabric workspace.

    This is a wrapper function for the following API: `Items - List SQL Databases <https://learn.microsoft.com/rest/api/fabric/sqldatabase/items/list-sql-databases>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing a list of SQL databases in the Fabric workspace.
    """

    columns = {
        "SQL Database Name": "string",
        "SQL Database Id": "string",
        "Description": "string",
        "Connection Info": "string",
        "Database Name": "string",
        "Server FQDN": "string",
    }
    df = _create_dataframe(columns=columns)

    workspace_id = resolve_workspace_id(workspace)

    responses = _base_api(
        request=f"/v1/workspaces/{workspace_id}/SQLDatabases",
        uses_pagination=True,
        client="fabric_sp",
    )

    def _to_row(item):
        # Flatten one API item (plus its nested properties) into a dataframe row.
        props = item.get("properties", {})
        return {
            "SQL Database Name": item.get("displayName"),
            "SQL Database Id": item.get("id"),
            "Description": item.get("description"),
            "Connection Info": props.get("connectionInfo"),
            "Database Name": props.get("databaseName"),
            "Server FQDN": props.get("serverFqdn"),
        }

    # Collect rows across all paginated responses.
    rows = [_to_row(v) for r in responses for v in r.get("value", [])]

    if rows:
        df = pd.DataFrame(rows, columns=list(columns.keys()))
        _update_dataframe_datatypes(dataframe=df, column_map=columns)

    return df
127
+
128
+
129
@log
def get_sql_database_tables(
    sql_database: str | UUID, workspace: Optional[str | UUID] = None
) -> pd.DataFrame:
    """
    Shows a list of the tables in the Fabric SQL database. This function is based on INFORMATION_SCHEMA.TABLES.

    Parameters
    ----------
    sql_database : str | uuid.UUID
        Name or ID of the Fabric SQL database.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing a list of the tables in the Fabric SQL database.
    """

    # Local import — presumably to avoid a circular dependency with _sql; confirm.
    from sempy_labs._sql import ConnectSQLDatabase

    # The context manager owns the connection; only base tables are returned
    # (views are filtered out by TABLE_TYPE = 'BASE TABLE').
    with ConnectSQLDatabase(sql_database=sql_database, workspace=workspace) as sql:
        df = sql.query(
            """
        SELECT TABLE_SCHEMA AS [Schema], TABLE_NAME AS [Table Name], TABLE_TYPE AS [Table Type]
        FROM INFORMATION_SCHEMA.TABLES
        WHERE TABLE_TYPE = 'BASE TABLE'
        """
        )

    return df
163
+
164
+
165
@log
def get_sql_database_columns(
    sql_database: str | UUID, workspace: Optional[str | UUID] = None
) -> pd.DataFrame:
    """
    Shows a list of the columns in each table within the Fabric SQL database. This function is based on INFORMATION_SCHEMA.COLUMNS.

    Parameters
    ----------
    sql_database : str | uuid.UUID
        Name or ID of the Fabric SQL database.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing a list of the columns in each table within the Fabric SQL database.
    """

    # Local import — presumably to avoid a circular dependency with _sql; confirm.
    from sempy_labs._sql import ConnectSQLDatabase

    # LEFT JOIN on table name + schema yields one row per column; only base
    # tables are included (TABLE_TYPE = 'BASE TABLE' filters out views).
    with ConnectSQLDatabase(sql_database=sql_database, workspace=workspace) as sql:
        df = sql.query(
            """
        SELECT t.TABLE_SCHEMA AS [Schema], t.TABLE_NAME AS [Table Name], c.COLUMN_NAME AS [Column Name], c.DATA_TYPE AS [Data Type], c.IS_NULLABLE AS [Is Nullable], c.CHARACTER_MAXIMUM_LENGTH AS [Character Max Length]
        FROM INFORMATION_SCHEMA.TABLES AS t
        LEFT JOIN INFORMATION_SCHEMA.COLUMNS AS c
        ON t.TABLE_NAME = c.TABLE_NAME
        AND t.TABLE_SCHEMA = c.TABLE_SCHEMA
        WHERE t.TABLE_TYPE = 'BASE TABLE'
        """
        )

    return df
@@ -0,0 +1,79 @@
1
+ from sempy_labs._helper_functions import (
2
+ resolve_item_id,
3
+ _base_api,
4
+ resolve_workspace_name_and_id,
5
+ )
6
+ from typing import Optional
7
+ from uuid import UUID
8
+ from sempy._utils._log import log
9
+ import sempy_labs._icons as icons
10
+
11
+
12
@log
def start_mirroring(sql_database: str | UUID, workspace: Optional[str | UUID] = None):
    """
    Starts data mirroring for the specified SQL Database.

    This is a wrapper function for the following API: `Mirroring - Start Mirroring <https://learn.microsoft.com/rest/api/fabric/sqldatabase/mirroring/start-mirroring>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    sql_database : str | uuid.UUID
        Name or ID of the SQL Database.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    # Resolve names/IDs up front so both the request and the message use them.
    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    item_id = resolve_item_id(
        item=sql_database, type="SQLDatabase", workspace=workspace_id
    )
    endpoint = f"/v1/workspaces/{workspace_id}/sqlDatabases/{item_id}/startMirroring"

    # Issue the POST that starts mirroring; no request body is needed.
    _base_api(request=endpoint, method="post", client="fabric_sp")

    print(
        f"{icons.green_dot} The SQL Database '{sql_database}' in the '{workspace_name}' workspace is now being mirrored."
    )
45
+
46
+
47
@log
def stop_mirroring(sql_database: str | UUID, workspace: Optional[str | UUID] = None):
    """
    Stops data mirroring for the specified SQL Database.

    This is a wrapper function for the following API: `Mirroring - Stop Mirroring <https://learn.microsoft.com/rest/api/fabric/sqldatabase/mirroring/stop-mirroring>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    sql_database : str | uuid.UUID
        Name or ID of the SQL Database.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    # Resolve names/IDs up front so both the request and the message use them.
    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    item_id = resolve_item_id(
        item=sql_database, type="SQLDatabase", workspace=workspace_id
    )
    endpoint = f"/v1/workspaces/{workspace_id}/sqlDatabases/{item_id}/stopMirroring"

    # Issue the POST that stops mirroring; no request body is needed.
    _base_api(request=endpoint, method="post", client="fabric_sp")

    print(
        f"{icons.green_dot} The SQL Database '{sql_database}' in the '{workspace_name}' workspace is no longer being mirrored."
    )
@@ -0,0 +1,12 @@
1
"""Public API of the ``sempy_labs.theme`` subpackage."""

from ._org_themes import (
    list_org_themes,
    get_org_theme_json,
    resolve_theme_id,
)


# Explicit public surface of this subpackage.
__all__ = [
    "list_org_themes",
    "get_org_theme_json",
    "resolve_theme_id",
]