semantic_link_labs-0.12.8-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (243)
  1. semantic_link_labs-0.12.8.dist-info/METADATA +354 -0
  2. semantic_link_labs-0.12.8.dist-info/RECORD +243 -0
  3. semantic_link_labs-0.12.8.dist-info/WHEEL +5 -0
  4. semantic_link_labs-0.12.8.dist-info/licenses/LICENSE +21 -0
  5. semantic_link_labs-0.12.8.dist-info/top_level.txt +1 -0
  6. sempy_labs/__init__.py +606 -0
  7. sempy_labs/_a_lib_info.py +2 -0
  8. sempy_labs/_ai.py +437 -0
  9. sempy_labs/_authentication.py +264 -0
  10. sempy_labs/_bpa_translation/_model/_translations_am-ET.po +869 -0
  11. sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +908 -0
  12. sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +968 -0
  13. sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +963 -0
  14. sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +943 -0
  15. sempy_labs/_bpa_translation/_model/_translations_da-DK.po +945 -0
  16. sempy_labs/_bpa_translation/_model/_translations_de-DE.po +988 -0
  17. sempy_labs/_bpa_translation/_model/_translations_el-GR.po +993 -0
  18. sempy_labs/_bpa_translation/_model/_translations_es-ES.po +971 -0
  19. sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +933 -0
  20. sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +942 -0
  21. sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +994 -0
  22. sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +967 -0
  23. sempy_labs/_bpa_translation/_model/_translations_he-IL.po +902 -0
  24. sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +944 -0
  25. sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +963 -0
  26. sempy_labs/_bpa_translation/_model/_translations_id-ID.po +946 -0
  27. sempy_labs/_bpa_translation/_model/_translations_is-IS.po +939 -0
  28. sempy_labs/_bpa_translation/_model/_translations_it-IT.po +986 -0
  29. sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +846 -0
  30. sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +839 -0
  31. sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +967 -0
  32. sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +978 -0
  33. sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +962 -0
  34. sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +962 -0
  35. sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +957 -0
  36. sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +968 -0
  37. sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +964 -0
  38. sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +952 -0
  39. sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +950 -0
  40. sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +942 -0
  41. sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +976 -0
  42. sempy_labs/_bpa_translation/_model/_translations_te-IN.po +947 -0
  43. sempy_labs/_bpa_translation/_model/_translations_th-TH.po +924 -0
  44. sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +953 -0
  45. sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +961 -0
  46. sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +804 -0
  47. sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +969 -0
  48. sempy_labs/_capacities.py +1198 -0
  49. sempy_labs/_capacity_migration.py +660 -0
  50. sempy_labs/_clear_cache.py +351 -0
  51. sempy_labs/_connections.py +610 -0
  52. sempy_labs/_dashboards.py +69 -0
  53. sempy_labs/_data_access_security.py +98 -0
  54. sempy_labs/_data_pipelines.py +162 -0
  55. sempy_labs/_dataflows.py +668 -0
  56. sempy_labs/_dax.py +501 -0
  57. sempy_labs/_daxformatter.py +80 -0
  58. sempy_labs/_delta_analyzer.py +467 -0
  59. sempy_labs/_delta_analyzer_history.py +301 -0
  60. sempy_labs/_dictionary_diffs.py +221 -0
  61. sempy_labs/_documentation.py +147 -0
  62. sempy_labs/_domains.py +51 -0
  63. sempy_labs/_eventhouses.py +182 -0
  64. sempy_labs/_external_data_shares.py +230 -0
  65. sempy_labs/_gateways.py +521 -0
  66. sempy_labs/_generate_semantic_model.py +521 -0
  67. sempy_labs/_get_connection_string.py +84 -0
  68. sempy_labs/_git.py +543 -0
  69. sempy_labs/_graphQL.py +90 -0
  70. sempy_labs/_helper_functions.py +2833 -0
  71. sempy_labs/_icons.py +149 -0
  72. sempy_labs/_job_scheduler.py +609 -0
  73. sempy_labs/_kql_databases.py +149 -0
  74. sempy_labs/_kql_querysets.py +124 -0
  75. sempy_labs/_kusto.py +137 -0
  76. sempy_labs/_labels.py +124 -0
  77. sempy_labs/_list_functions.py +1720 -0
  78. sempy_labs/_managed_private_endpoints.py +253 -0
  79. sempy_labs/_mirrored_databases.py +416 -0
  80. sempy_labs/_mirrored_warehouses.py +60 -0
  81. sempy_labs/_ml_experiments.py +113 -0
  82. sempy_labs/_model_auto_build.py +140 -0
  83. sempy_labs/_model_bpa.py +557 -0
  84. sempy_labs/_model_bpa_bulk.py +378 -0
  85. sempy_labs/_model_bpa_rules.py +859 -0
  86. sempy_labs/_model_dependencies.py +343 -0
  87. sempy_labs/_mounted_data_factories.py +123 -0
  88. sempy_labs/_notebooks.py +441 -0
  89. sempy_labs/_one_lake_integration.py +151 -0
  90. sempy_labs/_onelake.py +131 -0
  91. sempy_labs/_query_scale_out.py +433 -0
  92. sempy_labs/_refresh_semantic_model.py +435 -0
  93. sempy_labs/_semantic_models.py +468 -0
  94. sempy_labs/_spark.py +455 -0
  95. sempy_labs/_sql.py +241 -0
  96. sempy_labs/_sql_audit_settings.py +207 -0
  97. sempy_labs/_sql_endpoints.py +214 -0
  98. sempy_labs/_tags.py +201 -0
  99. sempy_labs/_translations.py +43 -0
  100. sempy_labs/_user_delegation_key.py +44 -0
  101. sempy_labs/_utils.py +79 -0
  102. sempy_labs/_vertipaq.py +1021 -0
  103. sempy_labs/_vpax.py +388 -0
  104. sempy_labs/_warehouses.py +234 -0
  105. sempy_labs/_workloads.py +140 -0
  106. sempy_labs/_workspace_identity.py +72 -0
  107. sempy_labs/_workspaces.py +595 -0
  108. sempy_labs/admin/__init__.py +170 -0
  109. sempy_labs/admin/_activities.py +167 -0
  110. sempy_labs/admin/_apps.py +145 -0
  111. sempy_labs/admin/_artifacts.py +65 -0
  112. sempy_labs/admin/_basic_functions.py +463 -0
  113. sempy_labs/admin/_capacities.py +508 -0
  114. sempy_labs/admin/_dataflows.py +45 -0
  115. sempy_labs/admin/_datasets.py +186 -0
  116. sempy_labs/admin/_domains.py +522 -0
  117. sempy_labs/admin/_external_data_share.py +100 -0
  118. sempy_labs/admin/_git.py +72 -0
  119. sempy_labs/admin/_items.py +265 -0
  120. sempy_labs/admin/_labels.py +211 -0
  121. sempy_labs/admin/_reports.py +241 -0
  122. sempy_labs/admin/_scanner.py +118 -0
  123. sempy_labs/admin/_shared.py +82 -0
  124. sempy_labs/admin/_sharing_links.py +110 -0
  125. sempy_labs/admin/_tags.py +131 -0
  126. sempy_labs/admin/_tenant.py +503 -0
  127. sempy_labs/admin/_tenant_keys.py +89 -0
  128. sempy_labs/admin/_users.py +140 -0
  129. sempy_labs/admin/_workspaces.py +236 -0
  130. sempy_labs/deployment_pipeline/__init__.py +23 -0
  131. sempy_labs/deployment_pipeline/_items.py +580 -0
  132. sempy_labs/directlake/__init__.py +57 -0
  133. sempy_labs/directlake/_autosync.py +58 -0
  134. sempy_labs/directlake/_directlake_schema_compare.py +120 -0
  135. sempy_labs/directlake/_directlake_schema_sync.py +161 -0
  136. sempy_labs/directlake/_dl_helper.py +274 -0
  137. sempy_labs/directlake/_generate_shared_expression.py +94 -0
  138. sempy_labs/directlake/_get_directlake_lakehouse.py +62 -0
  139. sempy_labs/directlake/_get_shared_expression.py +34 -0
  140. sempy_labs/directlake/_guardrails.py +96 -0
  141. sempy_labs/directlake/_list_directlake_model_calc_tables.py +70 -0
  142. sempy_labs/directlake/_show_unsupported_directlake_objects.py +90 -0
  143. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +239 -0
  144. sempy_labs/directlake/_update_directlake_partition_entity.py +259 -0
  145. sempy_labs/directlake/_warm_cache.py +236 -0
  146. sempy_labs/dotnet_lib/dotnet.runtime.config.json +10 -0
  147. sempy_labs/environment/__init__.py +23 -0
  148. sempy_labs/environment/_items.py +212 -0
  149. sempy_labs/environment/_pubstage.py +223 -0
  150. sempy_labs/eventstream/__init__.py +37 -0
  151. sempy_labs/eventstream/_items.py +263 -0
  152. sempy_labs/eventstream/_topology.py +652 -0
  153. sempy_labs/graph/__init__.py +59 -0
  154. sempy_labs/graph/_groups.py +651 -0
  155. sempy_labs/graph/_sensitivity_labels.py +120 -0
  156. sempy_labs/graph/_teams.py +125 -0
  157. sempy_labs/graph/_user_licenses.py +96 -0
  158. sempy_labs/graph/_users.py +516 -0
  159. sempy_labs/graph_model/__init__.py +15 -0
  160. sempy_labs/graph_model/_background_jobs.py +63 -0
  161. sempy_labs/graph_model/_items.py +149 -0
  162. sempy_labs/lakehouse/__init__.py +67 -0
  163. sempy_labs/lakehouse/_blobs.py +247 -0
  164. sempy_labs/lakehouse/_get_lakehouse_columns.py +102 -0
  165. sempy_labs/lakehouse/_get_lakehouse_tables.py +274 -0
  166. sempy_labs/lakehouse/_helper.py +250 -0
  167. sempy_labs/lakehouse/_lakehouse.py +351 -0
  168. sempy_labs/lakehouse/_livy_sessions.py +143 -0
  169. sempy_labs/lakehouse/_materialized_lake_views.py +157 -0
  170. sempy_labs/lakehouse/_partitioning.py +165 -0
  171. sempy_labs/lakehouse/_schemas.py +217 -0
  172. sempy_labs/lakehouse/_shortcuts.py +440 -0
  173. sempy_labs/migration/__init__.py +35 -0
  174. sempy_labs/migration/_create_pqt_file.py +238 -0
  175. sempy_labs/migration/_direct_lake_to_import.py +105 -0
  176. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +398 -0
  177. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +148 -0
  178. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +533 -0
  179. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +172 -0
  180. sempy_labs/migration/_migration_validation.py +71 -0
  181. sempy_labs/migration/_refresh_calc_tables.py +131 -0
  182. sempy_labs/mirrored_azure_databricks_catalog/__init__.py +15 -0
  183. sempy_labs/mirrored_azure_databricks_catalog/_discover.py +213 -0
  184. sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +45 -0
  185. sempy_labs/ml_model/__init__.py +23 -0
  186. sempy_labs/ml_model/_functions.py +427 -0
  187. sempy_labs/report/_BPAReportTemplate.json +232 -0
  188. sempy_labs/report/__init__.py +55 -0
  189. sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
  190. sempy_labs/report/_bpareporttemplate/.platform +11 -0
  191. sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
  192. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
  193. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
  194. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
  195. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
  196. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
  197. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
  198. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
  199. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
  200. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
  201. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
  202. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
  203. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
  204. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
  205. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
  206. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
  207. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
  208. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
  209. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
  210. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
  211. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
  212. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
  213. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
  214. sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
  215. sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
  216. sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
  217. sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
  218. sempy_labs/report/_download_report.py +76 -0
  219. sempy_labs/report/_export_report.py +257 -0
  220. sempy_labs/report/_generate_report.py +427 -0
  221. sempy_labs/report/_paginated.py +76 -0
  222. sempy_labs/report/_report_bpa.py +354 -0
  223. sempy_labs/report/_report_bpa_rules.py +115 -0
  224. sempy_labs/report/_report_functions.py +581 -0
  225. sempy_labs/report/_report_helper.py +227 -0
  226. sempy_labs/report/_report_list_functions.py +110 -0
  227. sempy_labs/report/_report_rebind.py +149 -0
  228. sempy_labs/report/_reportwrapper.py +3100 -0
  229. sempy_labs/report/_save_report.py +147 -0
  230. sempy_labs/snowflake_database/__init__.py +10 -0
  231. sempy_labs/snowflake_database/_items.py +105 -0
  232. sempy_labs/sql_database/__init__.py +21 -0
  233. sempy_labs/sql_database/_items.py +201 -0
  234. sempy_labs/sql_database/_mirroring.py +79 -0
  235. sempy_labs/theme/__init__.py +12 -0
  236. sempy_labs/theme/_org_themes.py +129 -0
  237. sempy_labs/tom/__init__.py +3 -0
  238. sempy_labs/tom/_model.py +5977 -0
  239. sempy_labs/variable_library/__init__.py +19 -0
  240. sempy_labs/variable_library/_functions.py +403 -0
  241. sempy_labs/warehouse/__init__.py +28 -0
  242. sempy_labs/warehouse/_items.py +234 -0
  243. sempy_labs/warehouse/_restore_points.py +309 -0
sempy_labs/deployment_pipeline/_items.py
@@ -0,0 +1,580 @@
+ import pandas as pd
+ from sempy_labs._helper_functions import (
+     _is_valid_uuid,
+     _base_api,
+     _update_dataframe_datatypes,
+     _create_dataframe,
+     resolve_workspace_id,
+     resolve_capacity_id,
+ )
+ from sempy._utils._log import log
+ import sempy_labs._icons as icons
+ from uuid import UUID
+ from typing import List, Optional
+
+
+ @log
+ def resolve_deployment_pipeline_id(deployment_pipeline: str | UUID) -> UUID:
+     """
+     Obtains the Id for a given deployment pipeline.
+
+     Parameters
+     ----------
+     deployment_pipeline : str | uuid.UUID
+         The deployment pipeline name or ID.
+
+     Returns
+     -------
+     uuid.UUID
+         The deployment pipeline Id.
+     """
+
+     if _is_valid_uuid(deployment_pipeline):
+         return deployment_pipeline
+     else:
+         dfP = list_deployment_pipelines()
+         dfP_filt = dfP[dfP["Deployment Pipeline Name"] == deployment_pipeline]
+         if len(dfP_filt) == 0:
+             raise ValueError(
+                 f"{icons.red_dot} The '{deployment_pipeline}' deployment pipeline is not valid."
+             )
+         return dfP_filt["Deployment Pipeline Id"].iloc[0]
+
+
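The resolver accepts either a display name or a UUID string, so callers can pass whichever they have; name lookups go through list_deployment_pipelines(). A minimal usage sketch (the pipeline name is hypothetical, assuming a Fabric notebook session where the module is importable):

    from sempy_labs.deployment_pipeline._items import resolve_deployment_pipeline_id

    # A display name triggers a lookup; a valid UUID string passes straight through.
    pipeline_id = resolve_deployment_pipeline_id("Sales Deployment Pipeline")
    same_id = resolve_deployment_pipeline_id(pipeline_id)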
+ @log
+ def resolve_stage_id(deployment_pipeline_id: UUID, stage: str | UUID) -> UUID:
+
+     dfPS = list_deployment_pipeline_stages(deployment_pipeline=deployment_pipeline_id)
+
+     if _is_valid_uuid(stage):
+         dfPS_filt = dfPS[dfPS["Deployment Pipeline Stage Id"] == stage]
+     else:
+         dfPS_filt = dfPS[dfPS["Deployment Pipeline Stage Name"] == stage]
+     if dfPS_filt.empty:
+         raise ValueError(
+             f"{icons.red_dot} The '{stage}' stage does not exist within the '{deployment_pipeline_id}' deployment pipeline."
+         )
+     return dfPS_filt["Deployment Pipeline Stage Id"].iloc[0]
+
+
+ @log
+ def list_deployment_pipelines() -> pd.DataFrame:
+     """
+     Shows a list of deployment pipelines the user can access.
+
+     This is a wrapper function for the following API: `Deployment Pipelines - List Deployment Pipelines <https://learn.microsoft.com/rest/api/fabric/core/deployment-pipelines/list-deployment-pipelines>`_.
+
+     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+     Returns
+     -------
+     pandas.DataFrame
+         A pandas dataframe showing a list of deployment pipelines the user can access.
+     """
+
+     columns = {
+         "Deployment Pipeline Id": "string",
+         "Deployment Pipeline Name": "string",
+         "Description": "string",
+     }
+     df = _create_dataframe(columns=columns)
+
+     responses = _base_api(
+         request="/v1/deploymentPipelines",
+         status_codes=200,
+         uses_pagination=True,
+         client="fabric_sp",
+     )
+
+     rows = []
+     for r in responses:
+         for v in r.get("value", []):
+             rows.append(
+                 {
+                     "Deployment Pipeline Id": v.get("id"),
+                     "Deployment Pipeline Name": v.get("displayName"),
+                     "Description": v.get("description"),
+                 }
+             )
+
+     if rows:
+         df = pd.DataFrame(rows, columns=list(columns.keys()))
+
+     return df
+
+
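A quick check of what comes back; this and the later sketches assume the module's functions are already imported and the session is authenticated against Fabric:

    df = list_deployment_pipelines()
    # One row per pipeline the caller can access.
    print(df[["Deployment Pipeline Id", "Deployment Pipeline Name"]])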
+ @log
+ def list_deployment_pipeline_stages(deployment_pipeline: str | UUID) -> pd.DataFrame:
+     """
+     Shows the specified deployment pipeline stages.
+
+     This is a wrapper function for the following API: `Deployment Pipelines - List Deployment Pipeline Stages <https://learn.microsoft.com/rest/api/fabric/core/deployment-pipelines/list-deployment-pipeline-stages>`_.
+
+     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+     Parameters
+     ----------
+     deployment_pipeline : str | uuid.UUID
+         The deployment pipeline name or ID.
+
+     Returns
+     -------
+     pandas.DataFrame
+         A pandas dataframe showing the specified deployment pipeline stages.
+     """
+
+     columns = {
+         "Deployment Pipeline Stage Id": "string",
+         "Deployment Pipeline Stage Name": "string",
+         "Order": "int",
+         "Description": "string",
+         "Workspace Id": "string",
+         "Workspace Name": "string",
+         "Public": "bool",
+     }
+     df = _create_dataframe(columns=columns)
+
+     deployment_pipeline_id = resolve_deployment_pipeline_id(
+         deployment_pipeline=deployment_pipeline
+     )
+
+     responses = _base_api(
+         request=f"/v1/deploymentPipelines/{deployment_pipeline_id}/stages",
+         status_codes=200,
+         uses_pagination=True,
+         client="fabric_sp",
+     )
+
+     rows = []
+     for r in responses:
+         for v in r.get("value", []):
+             rows.append(
+                 {
+                     "Deployment Pipeline Stage Id": v.get("id"),
+                     "Deployment Pipeline Stage Name": v.get("displayName"),
+                     "Description": v.get("description"),
+                     "Order": v.get("order"),
+                     "Workspace Id": v.get("workspaceId"),
+                     "Workspace Name": v.get("workspaceName"),
+                     "Public": v.get("isPublic"),
+                 }
+             )
+
+     if rows:
+         df = pd.DataFrame(rows, columns=list(columns.keys()))
+         _update_dataframe_datatypes(dataframe=df, column_map=columns)
+
+     return df
+
+
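The Order column makes it easy to view stages in pipeline sequence, and a blank Workspace Id indicates a stage with no workspace assigned yet. A short sketch (pipeline name hypothetical):

    stages = list_deployment_pipeline_stages("Sales Deployment Pipeline")
    print(stages.sort_values("Order")[["Deployment Pipeline Stage Name", "Workspace Name", "Public"]])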
+ @log
+ def list_deployment_pipeline_stage_items(
+     deployment_pipeline: str | UUID,
+     stage: str | UUID,
+ ) -> pd.DataFrame:
+     """
+     Shows the supported items from the workspace assigned to the specified stage of the specified deployment pipeline.
+
+     This is a wrapper function for the following API: `Deployment Pipelines - List Deployment Pipeline Stage Items <https://learn.microsoft.com/rest/api/fabric/core/deployment-pipelines/list-deployment-pipeline-stage-items>`_.
+
+     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+     Parameters
+     ----------
+     deployment_pipeline : str | uuid.UUID
+         The deployment pipeline name or ID.
+     stage : str | uuid.UUID
+         The deployment pipeline stage name or ID.
+
+     Returns
+     -------
+     pandas.DataFrame
+         A pandas dataframe showing the supported items from the workspace assigned to the specified stage of the specified deployment pipeline.
+     """
+
+     columns = {
+         "Deployment Pipeline Stage Item Id": "string",
+         "Deployment Pipeline Stage Item Name": "string",
+         "Item Type": "string",
+         "Source Item Id": "string",
+         "Target Item Id": "string",
+         "Last Deployment Time": "string",
+     }
+     df = _create_dataframe(columns=columns)
+
+     deployment_pipeline_id = resolve_deployment_pipeline_id(
+         deployment_pipeline=deployment_pipeline
+     )
+
+     stage_id = resolve_stage_id(deployment_pipeline_id, stage)
+
+     responses = _base_api(
+         request=f"/v1/deploymentPipelines/{deployment_pipeline_id}/stages/{stage_id}/items",
+         status_codes=200,
+         uses_pagination=True,
+         client="fabric_sp",
+     )
+
+     rows = []
+     for r in responses:
+         for v in r.get("value", []):
+             rows.append(
+                 {
+                     "Deployment Pipeline Stage Item Id": v.get("itemId"),
+                     "Deployment Pipeline Stage Item Name": v.get("itemDisplayName"),
+                     "Item Type": v.get("itemType"),
+                     "Source Item Id": v.get("sourceItemId"),
+                     "Target Item Id": v.get("targetItemId"),
+                     "Last Deployment Time": v.get("lastDeploymentTime"),
+                 }
+             )
+
+     if rows:
+         df = pd.DataFrame(rows, columns=list(columns.keys()))
+
+     return df
+
+
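Source Item Id and Target Item Id pair each item with its counterpart in the adjacent stage, which is useful when preparing a selective deployment. A sketch with hypothetical names:

    items = list_deployment_pipeline_stage_items(
        deployment_pipeline="Sales Deployment Pipeline",
        stage="Development",
    )
    print(items[["Deployment Pipeline Stage Item Name", "Item Type", "Last Deployment Time"]])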
+ @log
+ def list_deployment_pipeline_role_assignments(
+     deployment_pipeline: str | UUID,
+ ) -> pd.DataFrame:
+     """
+     Shows the role assignments for the specified deployment pipeline.
+
+     This is a wrapper function for the following API: `Deployment Pipelines - List Deployment Pipeline Role Assignments <https://learn.microsoft.com/rest/api/fabric/core/deployment-pipelines/list-deployment-pipeline-role-assignments>`_.
+
+     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+     Parameters
+     ----------
+     deployment_pipeline : str | uuid.UUID
+         The deployment pipeline name or ID.
+
+     Returns
+     -------
+     pandas.DataFrame
+         A pandas dataframe showing the role assignments for the specified deployment pipeline.
+     """
+
+     columns = {
+         "Role": "string",
+         "Principal Id": "string",
+         "Principal Type": "string",
+     }
+     df = _create_dataframe(columns=columns)
+
+     deployment_pipeline_id = resolve_deployment_pipeline_id(
+         deployment_pipeline=deployment_pipeline
+     )
+
+     responses = _base_api(
+         request=f"/v1/deploymentPipelines/{deployment_pipeline_id}/roleAssignments",
+         uses_pagination=True,
+         client="fabric_sp",
+     )
+
+     rows = []
+     for r in responses:
+         for v in r.get("value", []):
+             principal = v.get("principal", {})
+             rows.append(
+                 {
+                     "Role": v.get("role"),
+                     "Principal Id": principal.get("id"),
+                     "Principal Type": principal.get("type"),
+                 }
+             )
+
+     if rows:
+         df = pd.DataFrame(rows, columns=list(columns.keys()))
+
+     return df
+
+
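This lends itself to quick access audits. A sketch, assuming the API reports the pipeline admin role as 'Admin' (pipeline name hypothetical):

    roles = list_deployment_pipeline_role_assignments("Sales Deployment Pipeline")
    print(roles[roles["Role"] == "Admin"])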
+ @log
+ def delete_deployment_pipeline(
+     deployment_pipeline: str | UUID,
+ ):
+     """
+     Deletes the specified deployment pipeline.
+
+     This is a wrapper function for the following API: `Deployment Pipelines - Delete Deployment Pipeline <https://learn.microsoft.com/rest/api/fabric/core/deployment-pipelines/delete-deployment-pipeline>`_.
+
+     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+     Parameters
+     ----------
+     deployment_pipeline : str | uuid.UUID
+         The deployment pipeline name or ID.
+     """
+
+     deployment_pipeline_id = resolve_deployment_pipeline_id(
+         deployment_pipeline=deployment_pipeline
+     )
+
+     _base_api(
+         request=f"/v1/deploymentPipelines/{deployment_pipeline_id}",
+         method="delete",
+         client="fabric_sp",
+     )
+
+     print(
+         f"{icons.green_dot} The '{deployment_pipeline}' deployment pipeline has been deleted successfully."
+     )
+
+
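Deletion takes a name or ID like the other functions; a one-line sketch (name hypothetical):

    delete_deployment_pipeline("Obsolete Sales Pipeline")  # irreversible; verify the target first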
+ @log
+ def list_deployment_pipeline_operations(
+     deployment_pipeline: str | UUID,
+ ) -> pd.DataFrame:
+     """
+     Shows the operations for the specified deployment pipeline.
+
+     This is a wrapper function for the following API: `Deployment Pipelines - List Deployment Pipeline Operations <https://learn.microsoft.com/rest/api/fabric/core/deployment-pipelines/list-deployment-pipeline-operations>`_.
+
+     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+     Parameters
+     ----------
+     deployment_pipeline : str | uuid.UUID
+         The deployment pipeline name or ID.
+
+     Returns
+     -------
+     pandas.DataFrame
+         A pandas dataframe showing the operations for the specified deployment pipeline.
+     """
+
+     columns = {
+         "Operation Id": "string",
+         "Type": "string",
+         "Status": "string",
+         "Last Updated Time": "string",
+         "Execution Start Time": "datetime_coerce",
+         "Execution End Time": "datetime_coerce",
+         "Source Stage Id": "string",
+         "Target Stage Id": "string",
+         "Note": "string",
+         "New Items Count": "int",
+         "Different Items Count": "int",
+         "No Difference Items Count": "int",
+         "Performed By Id": "string",
+         "Performed By Type": "string",
+     }
+     df = _create_dataframe(columns=columns)
+
+     deployment_pipeline_id = resolve_deployment_pipeline_id(
+         deployment_pipeline=deployment_pipeline
+     )
+
+     responses = _base_api(
+         request=f"/v1/deploymentPipelines/{deployment_pipeline_id}/operations",
+         uses_pagination=True,
+         client="fabric_sp",
+     )
+
+     rows = []
+     for r in responses:
+         for v in r.get("value", []):
+             p = v.get("preDeploymentDiffInformation") or {}
+             performed_by = v.get("performedBy") or {}
+             rows.append(
+                 {
+                     "Operation Id": v.get("id"),
+                     "Type": v.get("type"),
+                     "Status": v.get("status"),
+                     "Last Updated Time": v.get("lastUpdatedTime"),
+                     "Execution Start Time": v.get("executionStartTime"),
+                     "Execution End Time": v.get("executionEndTime"),
+                     "Source Stage Id": v.get("sourceStageId"),
+                     "Target Stage Id": v.get("targetStageId"),
+                     "Note": (v.get("note") or {}).get("content"),
+                     "New Items Count": p.get("newItemsCount"),
+                     "Different Items Count": p.get("differentItemsCount"),
+                     "No Difference Items Count": p.get("noDifferenceItemsCount"),
+                     "Performed By Id": performed_by.get("id"),
+                     "Performed By Type": performed_by.get("type"),
+                 }
+             )
+
+     if rows:
+         df = pd.DataFrame(rows, columns=list(columns.keys()))
+         _update_dataframe_datatypes(dataframe=df, column_map=columns)
+
+     return df
+
+
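Sorting by Last Updated Time (kept as a string, typically ISO-8601, so lexicographic order works) surfaces the most recent deployment's status. A sketch with a hypothetical name:

    ops = list_deployment_pipeline_operations("Sales Deployment Pipeline")
    recent = ops.sort_values("Last Updated Time", ascending=False)
    print(recent[["Type", "Status", "Source Stage Id", "Target Stage Id"]].head())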
+ @log
+ def unassign_workspace_from_stage(
+     deployment_pipeline: str | UUID,
+     stage: str | UUID,
+ ):
+     """
+     Unassigns the workspace from the specified stage of the specified deployment pipeline.
+
+     This is a wrapper function for the following API: `Deployment Pipelines - Unassign Workspace From Stage <https://learn.microsoft.com/rest/api/fabric/core/deployment-pipelines/unassign-workspace-from-stage>`_.
+
+     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+     Parameters
+     ----------
+     deployment_pipeline : str | uuid.UUID
+         The deployment pipeline name or ID.
+     stage : str | uuid.UUID
+         The deployment pipeline stage name or ID.
+     """
+
+     deployment_pipeline_id = resolve_deployment_pipeline_id(
+         deployment_pipeline=deployment_pipeline
+     )
+
+     stage_id = resolve_stage_id(deployment_pipeline_id, stage)
+
+     _base_api(
+         request=f"/v1/deploymentPipelines/{deployment_pipeline_id}/stages/{stage_id}/unassignWorkspace",
+         method="post",
+         client="fabric_sp",
+     )
+
+     print(
+         f"{icons.green_dot} The workspace has been unassigned from the '{stage}' stage of the '{deployment_pipeline}' deployment pipeline successfully."
+     )
+
+
+ @log
+ def assign_workspace_to_stage(
+     deployment_pipeline: str | UUID,
+     stage: str | UUID,
+     workspace: Optional[str | UUID] = None,
+ ):
+     """
+     Assigns the workspace to the specified stage of the specified deployment pipeline.
+
+     This is a wrapper function for the following API: `Deployment Pipelines - Assign Workspace To Stage <https://learn.microsoft.com/rest/api/fabric/core/deployment-pipelines/assign-workspace-to-stage>`_.
+
+     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+     Parameters
+     ----------
+     deployment_pipeline : str | uuid.UUID
+         The deployment pipeline name or ID.
+     stage : str | uuid.UUID
+         The deployment pipeline stage name or ID.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     """
+
+     deployment_pipeline_id = resolve_deployment_pipeline_id(
+         deployment_pipeline=deployment_pipeline
+     )
+
+     stage_id = resolve_stage_id(deployment_pipeline_id, stage)
+     workspace_id = resolve_workspace_id(workspace=workspace)
+
+     payload = {"workspaceId": workspace_id}
+
+     _base_api(
+         request=f"/v1/deploymentPipelines/{deployment_pipeline_id}/stages/{stage_id}/assignWorkspace",
+         method="post",
+         client="fabric_sp",
+         payload=payload,
+     )
+
+     print(
+         f"{icons.green_dot} The workspace has been assigned to the '{stage}' stage of the '{deployment_pipeline}' deployment pipeline successfully."
+     )
+
+
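Re-pointing a stage at a different workspace is an unassign-then-assign sequence; if workspace is omitted, it resolves per the docstring default. A sketch (all names hypothetical):

    pipeline = "Sales Deployment Pipeline"
    unassign_workspace_from_stage(pipeline, stage="Test")
    assign_workspace_to_stage(pipeline, stage="Test", workspace="Sales [Test]")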
+ @log
+ def deploy_stage_content(
+     deployment_pipeline: str | UUID,
+     source_stage_id: UUID,
+     target_stage_id: UUID,
+     items: Optional[dict | List[dict]] = None,
+     note: Optional[str] = None,
+     allow_cross_region_deployment: Optional[bool] = False,
+     capacity: Optional[str | UUID] = None,
+     workspace_name: Optional[str] = None,
+ ):
+     """
+     Deploys items from the specified stage of the specified deployment pipeline.
+
+     This is a wrapper function for the following API: `Deployment Pipelines - Deploy Stage Content <https://learn.microsoft.com/rest/api/fabric/core/deployment-pipelines/deploy-stage-content>`_.
+
+     Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).
+
+     Parameters
+     ----------
+     deployment_pipeline : str | uuid.UUID
+         The deployment pipeline name or ID.
+     source_stage_id : uuid.UUID
+         The source deployment pipeline stage ID.
+     target_stage_id : uuid.UUID
+         The target deployment pipeline stage ID.
+     items : dict | List[dict], default=None
+         A list of items to deploy. Each item should be a dictionary with the following structure:
+         {
+             "itemId": "1a201f2a-d1d8-45c0-8c61-1676338517de",
+             "itemType": "SemanticModel"
+         }
+         If None, all items will be deployed.
+     note : str, default=None
+         An optional note to include with the deployment.
+     allow_cross_region_deployment : bool, default=False
+         Indicates whether cross region deployment is enabled. True - enabled, False - disabled.
+     capacity : str | uuid.UUID, default=None
+         The capacity name or ID to use for the deployment operation if creating a new workspace. Required when deploying to a stage that has no assigned workspaces, otherwise it is ignored. The deployment will fail if the new workspace configuration details aren't provided when required.
+     workspace_name : str, default=None
+         The workspace name to use for the deployment operation if creating a new workspace. Required when deploying to a stage that has no assigned workspaces, otherwise it is ignored. The deployment will fail if the new workspace configuration details aren't provided when required.
+     """
+
+     deployment_pipeline_id = resolve_deployment_pipeline_id(
+         deployment_pipeline=deployment_pipeline
+     )
+
+     payload = {
+         "sourceStageId": source_stage_id,
+         "targetStageId": target_stage_id,
+     }
+     if note:
+         payload["note"] = note
+
+     if items:
+         if isinstance(items, dict):
+             items = [items]
+
+         if not isinstance(items, list):
+             raise ValueError(
+                 f"{icons.red_dot} The 'items' parameter must be a list of dictionaries."
+             )
+
+         payload["items"] = items
+
+     if allow_cross_region_deployment:
+         payload["options"] = {"allowCrossRegionDeployment": True}
+
+     if capacity and workspace_name:
+         capacity_id = resolve_capacity_id(capacity)
+         payload["createdWorkspaceDetails"] = {
+             "capacityId": capacity_id,
+             "name": workspace_name,
+         }
+
+     _base_api(
+         request=f"/v1/deploymentPipelines/{deployment_pipeline_id}/deploy",
+         method="post",
+         payload=payload,
+         status_codes=[200, 202],
+         lro_return_status_code=True,
+     )
+
+     print(
+         f"{icons.green_dot} The deployment from stage '{source_stage_id}' to stage '{target_stage_id}' in the '{deployment_pipeline}' deployment pipeline has been initiated successfully."
+     )
+
+     if capacity and workspace_name:
+         print(
+             f"{icons.info} A new workspace '{workspace_name}' will be created in the specified capacity for the deployment."
+         )
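Putting the pieces together: resolve the stage IDs, then deploy either everything or a named subset. A sketch using the item structure from the docstring (pipeline and stage names hypothetical):

    pipeline = "Sales Deployment Pipeline"
    pipeline_id = resolve_deployment_pipeline_id(pipeline)
    dev_id = resolve_stage_id(pipeline_id, "Development")
    test_id = resolve_stage_id(pipeline_id, "Test")

    # Deploy a single semantic model; omit 'items' to deploy the whole stage.
    deploy_stage_content(
        deployment_pipeline=pipeline,
        source_stage_id=dev_id,
        target_stage_id=test_id,
        items={"itemId": "1a201f2a-d1d8-45c0-8c61-1676338517de", "itemType": "SemanticModel"},
        note="Promote sales model to Test",
    )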
sempy_labs/directlake/__init__.py
@@ -0,0 +1,57 @@
+ from ._generate_shared_expression import generate_shared_expression
+ from ._directlake_schema_compare import direct_lake_schema_compare
+ from ._directlake_schema_sync import direct_lake_schema_sync
+ from ._dl_helper import (
+     check_fallback_reason,
+     generate_direct_lake_semantic_model,
+     get_direct_lake_source,
+ )
+ from ._get_directlake_lakehouse import get_direct_lake_lakehouse
+ from ._get_shared_expression import get_shared_expression
+ from ._guardrails import (
+     get_direct_lake_guardrails,
+     get_sku_size,
+     get_directlake_guardrails_for_sku,
+ )
+ from ._list_directlake_model_calc_tables import (
+     list_direct_lake_model_calc_tables,
+ )
+ from ._show_unsupported_directlake_objects import (
+     show_unsupported_direct_lake_objects,
+ )
+ from ._update_directlake_model_lakehouse_connection import (
+     update_direct_lake_model_lakehouse_connection,
+     update_direct_lake_model_connection,
+ )
+ from ._update_directlake_partition_entity import (
+     update_direct_lake_partition_entity,
+     add_table_to_direct_lake_semantic_model,
+ )
+ from ._warm_cache import (
+     warm_direct_lake_cache_isresident,
+     warm_direct_lake_cache_perspective,
+ )
+ from ._autosync import set_autosync
+
+ __all__ = [
+     "generate_shared_expression",
+     "direct_lake_schema_compare",
+     "direct_lake_schema_sync",
+     "check_fallback_reason",
+     "get_direct_lake_lakehouse",
+     "get_shared_expression",
+     "get_direct_lake_guardrails",
+     "get_sku_size",
+     "get_directlake_guardrails_for_sku",
+     "list_direct_lake_model_calc_tables",
+     "show_unsupported_direct_lake_objects",
+     "update_direct_lake_model_lakehouse_connection",
+     "update_direct_lake_partition_entity",
+     "warm_direct_lake_cache_isresident",
+     "warm_direct_lake_cache_perspective",
+     "add_table_to_direct_lake_semantic_model",
+     "generate_direct_lake_semantic_model",
+     "get_direct_lake_source",
+     "update_direct_lake_model_connection",
+     "set_autosync",
+ ]
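The package __init__ flattens the underscore-prefixed submodules into one public namespace, so callers import from sempy_labs.directlake directly:

    from sempy_labs.directlake import direct_lake_schema_sync, set_autosync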
sempy_labs/directlake/_autosync.py
@@ -0,0 +1,58 @@
+ import requests
+ from sempy_labs._helper_functions import (
+     _get_url_prefix,
+     resolve_workspace_name_and_id,
+     resolve_dataset_name_and_id,
+ )
+ from typing import Optional
+ import sempy_labs._icons as icons
+ from sempy._utils._log import log
+ from uuid import UUID
+ from sempy.fabric.exceptions import FabricHTTPException
+
+
+ @log
+ def set_autosync(
+     dataset: str | UUID, workspace: Optional[str | UUID] = None, enable: bool = True
+ ):
+     """
+     Enables or disables AutoSync for a Direct Lake semantic model.
+
+     Parameters
+     ----------
+     dataset : str | uuid.UUID
+         Name or ID of the semantic model.
+     workspace : str | uuid.UUID, default=None
+         The Fabric workspace name or ID.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+     enable : bool, default=True
+         Whether to enable (True) or disable (False) AutoSync.
+     """
+
+     (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
+     (dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
+
+     import notebookutils
+
+     token = notebookutils.credentials.getToken("pbi")
+     headers = {"Authorization": f"Bearer {token}"}
+
+     prefix = _get_url_prefix()
+
+     response = requests.get(
+         url=f"{prefix}/metadata/models/{dataset_id}", headers=headers
+     )
+     model_id = response.json().get("model", {}).get("id")
+
+     payload = {"directLakeAutoSync": enable}
+     response = requests.post(
+         url=f"{prefix}/metadata/models/{model_id}/settings", headers=headers, json=payload
+     )
+
+     if response.status_code != 204:
+         raise FabricHTTPException(f"Failed to update the AutoSync setting: {response.text}")
+
+     print(
+         f"{icons.green_dot} Direct Lake AutoSync has been {'enabled' if enable else 'disabled'} for the '{dataset_name}' semantic model within the '{workspace_name}' workspace."
+     )
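One plausible pattern is pausing AutoSync around bulk lakehouse table changes and re-enabling it afterwards (dataset and workspace names hypothetical):

    set_autosync("Sales Model", workspace="Sales [Dev]", enable=False)
    # ... perform table maintenance / bulk loads ...
    set_autosync("Sales Model", workspace="Sales [Dev]", enable=True)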