semantic-link-labs 0.12.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (243)
  1. semantic_link_labs-0.12.8.dist-info/METADATA +354 -0
  2. semantic_link_labs-0.12.8.dist-info/RECORD +243 -0
  3. semantic_link_labs-0.12.8.dist-info/WHEEL +5 -0
  4. semantic_link_labs-0.12.8.dist-info/licenses/LICENSE +21 -0
  5. semantic_link_labs-0.12.8.dist-info/top_level.txt +1 -0
  6. sempy_labs/__init__.py +606 -0
  7. sempy_labs/_a_lib_info.py +2 -0
  8. sempy_labs/_ai.py +437 -0
  9. sempy_labs/_authentication.py +264 -0
  10. sempy_labs/_bpa_translation/_model/_translations_am-ET.po +869 -0
  11. sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +908 -0
  12. sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +968 -0
  13. sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +963 -0
  14. sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +943 -0
  15. sempy_labs/_bpa_translation/_model/_translations_da-DK.po +945 -0
  16. sempy_labs/_bpa_translation/_model/_translations_de-DE.po +988 -0
  17. sempy_labs/_bpa_translation/_model/_translations_el-GR.po +993 -0
  18. sempy_labs/_bpa_translation/_model/_translations_es-ES.po +971 -0
  19. sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +933 -0
  20. sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +942 -0
  21. sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +994 -0
  22. sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +967 -0
  23. sempy_labs/_bpa_translation/_model/_translations_he-IL.po +902 -0
  24. sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +944 -0
  25. sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +963 -0
  26. sempy_labs/_bpa_translation/_model/_translations_id-ID.po +946 -0
  27. sempy_labs/_bpa_translation/_model/_translations_is-IS.po +939 -0
  28. sempy_labs/_bpa_translation/_model/_translations_it-IT.po +986 -0
  29. sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +846 -0
  30. sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +839 -0
  31. sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +967 -0
  32. sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +978 -0
  33. sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +962 -0
  34. sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +962 -0
  35. sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +957 -0
  36. sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +968 -0
  37. sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +964 -0
  38. sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +952 -0
  39. sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +950 -0
  40. sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +942 -0
  41. sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +976 -0
  42. sempy_labs/_bpa_translation/_model/_translations_te-IN.po +947 -0
  43. sempy_labs/_bpa_translation/_model/_translations_th-TH.po +924 -0
  44. sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +953 -0
  45. sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +961 -0
  46. sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +804 -0
  47. sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +969 -0
  48. sempy_labs/_capacities.py +1198 -0
  49. sempy_labs/_capacity_migration.py +660 -0
  50. sempy_labs/_clear_cache.py +351 -0
  51. sempy_labs/_connections.py +610 -0
  52. sempy_labs/_dashboards.py +69 -0
  53. sempy_labs/_data_access_security.py +98 -0
  54. sempy_labs/_data_pipelines.py +162 -0
  55. sempy_labs/_dataflows.py +668 -0
  56. sempy_labs/_dax.py +501 -0
  57. sempy_labs/_daxformatter.py +80 -0
  58. sempy_labs/_delta_analyzer.py +467 -0
  59. sempy_labs/_delta_analyzer_history.py +301 -0
  60. sempy_labs/_dictionary_diffs.py +221 -0
  61. sempy_labs/_documentation.py +147 -0
  62. sempy_labs/_domains.py +51 -0
  63. sempy_labs/_eventhouses.py +182 -0
  64. sempy_labs/_external_data_shares.py +230 -0
  65. sempy_labs/_gateways.py +521 -0
  66. sempy_labs/_generate_semantic_model.py +521 -0
  67. sempy_labs/_get_connection_string.py +84 -0
  68. sempy_labs/_git.py +543 -0
  69. sempy_labs/_graphQL.py +90 -0
  70. sempy_labs/_helper_functions.py +2833 -0
  71. sempy_labs/_icons.py +149 -0
  72. sempy_labs/_job_scheduler.py +609 -0
  73. sempy_labs/_kql_databases.py +149 -0
  74. sempy_labs/_kql_querysets.py +124 -0
  75. sempy_labs/_kusto.py +137 -0
  76. sempy_labs/_labels.py +124 -0
  77. sempy_labs/_list_functions.py +1720 -0
  78. sempy_labs/_managed_private_endpoints.py +253 -0
  79. sempy_labs/_mirrored_databases.py +416 -0
  80. sempy_labs/_mirrored_warehouses.py +60 -0
  81. sempy_labs/_ml_experiments.py +113 -0
  82. sempy_labs/_model_auto_build.py +140 -0
  83. sempy_labs/_model_bpa.py +557 -0
  84. sempy_labs/_model_bpa_bulk.py +378 -0
  85. sempy_labs/_model_bpa_rules.py +859 -0
  86. sempy_labs/_model_dependencies.py +343 -0
  87. sempy_labs/_mounted_data_factories.py +123 -0
  88. sempy_labs/_notebooks.py +441 -0
  89. sempy_labs/_one_lake_integration.py +151 -0
  90. sempy_labs/_onelake.py +131 -0
  91. sempy_labs/_query_scale_out.py +433 -0
  92. sempy_labs/_refresh_semantic_model.py +435 -0
  93. sempy_labs/_semantic_models.py +468 -0
  94. sempy_labs/_spark.py +455 -0
  95. sempy_labs/_sql.py +241 -0
  96. sempy_labs/_sql_audit_settings.py +207 -0
  97. sempy_labs/_sql_endpoints.py +214 -0
  98. sempy_labs/_tags.py +201 -0
  99. sempy_labs/_translations.py +43 -0
  100. sempy_labs/_user_delegation_key.py +44 -0
  101. sempy_labs/_utils.py +79 -0
  102. sempy_labs/_vertipaq.py +1021 -0
  103. sempy_labs/_vpax.py +388 -0
  104. sempy_labs/_warehouses.py +234 -0
  105. sempy_labs/_workloads.py +140 -0
  106. sempy_labs/_workspace_identity.py +72 -0
  107. sempy_labs/_workspaces.py +595 -0
  108. sempy_labs/admin/__init__.py +170 -0
  109. sempy_labs/admin/_activities.py +167 -0
  110. sempy_labs/admin/_apps.py +145 -0
  111. sempy_labs/admin/_artifacts.py +65 -0
  112. sempy_labs/admin/_basic_functions.py +463 -0
  113. sempy_labs/admin/_capacities.py +508 -0
  114. sempy_labs/admin/_dataflows.py +45 -0
  115. sempy_labs/admin/_datasets.py +186 -0
  116. sempy_labs/admin/_domains.py +522 -0
  117. sempy_labs/admin/_external_data_share.py +100 -0
  118. sempy_labs/admin/_git.py +72 -0
  119. sempy_labs/admin/_items.py +265 -0
  120. sempy_labs/admin/_labels.py +211 -0
  121. sempy_labs/admin/_reports.py +241 -0
  122. sempy_labs/admin/_scanner.py +118 -0
  123. sempy_labs/admin/_shared.py +82 -0
  124. sempy_labs/admin/_sharing_links.py +110 -0
  125. sempy_labs/admin/_tags.py +131 -0
  126. sempy_labs/admin/_tenant.py +503 -0
  127. sempy_labs/admin/_tenant_keys.py +89 -0
  128. sempy_labs/admin/_users.py +140 -0
  129. sempy_labs/admin/_workspaces.py +236 -0
  130. sempy_labs/deployment_pipeline/__init__.py +23 -0
  131. sempy_labs/deployment_pipeline/_items.py +580 -0
  132. sempy_labs/directlake/__init__.py +57 -0
  133. sempy_labs/directlake/_autosync.py +58 -0
  134. sempy_labs/directlake/_directlake_schema_compare.py +120 -0
  135. sempy_labs/directlake/_directlake_schema_sync.py +161 -0
  136. sempy_labs/directlake/_dl_helper.py +274 -0
  137. sempy_labs/directlake/_generate_shared_expression.py +94 -0
  138. sempy_labs/directlake/_get_directlake_lakehouse.py +62 -0
  139. sempy_labs/directlake/_get_shared_expression.py +34 -0
  140. sempy_labs/directlake/_guardrails.py +96 -0
  141. sempy_labs/directlake/_list_directlake_model_calc_tables.py +70 -0
  142. sempy_labs/directlake/_show_unsupported_directlake_objects.py +90 -0
  143. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +239 -0
  144. sempy_labs/directlake/_update_directlake_partition_entity.py +259 -0
  145. sempy_labs/directlake/_warm_cache.py +236 -0
  146. sempy_labs/dotnet_lib/dotnet.runtime.config.json +10 -0
  147. sempy_labs/environment/__init__.py +23 -0
  148. sempy_labs/environment/_items.py +212 -0
  149. sempy_labs/environment/_pubstage.py +223 -0
  150. sempy_labs/eventstream/__init__.py +37 -0
  151. sempy_labs/eventstream/_items.py +263 -0
  152. sempy_labs/eventstream/_topology.py +652 -0
  153. sempy_labs/graph/__init__.py +59 -0
  154. sempy_labs/graph/_groups.py +651 -0
  155. sempy_labs/graph/_sensitivity_labels.py +120 -0
  156. sempy_labs/graph/_teams.py +125 -0
  157. sempy_labs/graph/_user_licenses.py +96 -0
  158. sempy_labs/graph/_users.py +516 -0
  159. sempy_labs/graph_model/__init__.py +15 -0
  160. sempy_labs/graph_model/_background_jobs.py +63 -0
  161. sempy_labs/graph_model/_items.py +149 -0
  162. sempy_labs/lakehouse/__init__.py +67 -0
  163. sempy_labs/lakehouse/_blobs.py +247 -0
  164. sempy_labs/lakehouse/_get_lakehouse_columns.py +102 -0
  165. sempy_labs/lakehouse/_get_lakehouse_tables.py +274 -0
  166. sempy_labs/lakehouse/_helper.py +250 -0
  167. sempy_labs/lakehouse/_lakehouse.py +351 -0
  168. sempy_labs/lakehouse/_livy_sessions.py +143 -0
  169. sempy_labs/lakehouse/_materialized_lake_views.py +157 -0
  170. sempy_labs/lakehouse/_partitioning.py +165 -0
  171. sempy_labs/lakehouse/_schemas.py +217 -0
  172. sempy_labs/lakehouse/_shortcuts.py +440 -0
  173. sempy_labs/migration/__init__.py +35 -0
  174. sempy_labs/migration/_create_pqt_file.py +238 -0
  175. sempy_labs/migration/_direct_lake_to_import.py +105 -0
  176. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +398 -0
  177. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +148 -0
  178. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +533 -0
  179. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +172 -0
  180. sempy_labs/migration/_migration_validation.py +71 -0
  181. sempy_labs/migration/_refresh_calc_tables.py +131 -0
  182. sempy_labs/mirrored_azure_databricks_catalog/__init__.py +15 -0
  183. sempy_labs/mirrored_azure_databricks_catalog/_discover.py +213 -0
  184. sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +45 -0
  185. sempy_labs/ml_model/__init__.py +23 -0
  186. sempy_labs/ml_model/_functions.py +427 -0
  187. sempy_labs/report/_BPAReportTemplate.json +232 -0
  188. sempy_labs/report/__init__.py +55 -0
  189. sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
  190. sempy_labs/report/_bpareporttemplate/.platform +11 -0
  191. sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
  192. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
  193. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
  194. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
  195. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
  196. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
  197. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
  198. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
  199. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
  200. sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
  201. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
  202. sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
  203. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
  204. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
  205. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
  206. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
  207. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
  208. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
  209. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
  210. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
  211. sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
  212. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
  213. sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
  214. sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
  215. sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
  216. sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
  217. sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
  218. sempy_labs/report/_download_report.py +76 -0
  219. sempy_labs/report/_export_report.py +257 -0
  220. sempy_labs/report/_generate_report.py +427 -0
  221. sempy_labs/report/_paginated.py +76 -0
  222. sempy_labs/report/_report_bpa.py +354 -0
  223. sempy_labs/report/_report_bpa_rules.py +115 -0
  224. sempy_labs/report/_report_functions.py +581 -0
  225. sempy_labs/report/_report_helper.py +227 -0
  226. sempy_labs/report/_report_list_functions.py +110 -0
  227. sempy_labs/report/_report_rebind.py +149 -0
  228. sempy_labs/report/_reportwrapper.py +3100 -0
  229. sempy_labs/report/_save_report.py +147 -0
  230. sempy_labs/snowflake_database/__init__.py +10 -0
  231. sempy_labs/snowflake_database/_items.py +105 -0
  232. sempy_labs/sql_database/__init__.py +21 -0
  233. sempy_labs/sql_database/_items.py +201 -0
  234. sempy_labs/sql_database/_mirroring.py +79 -0
  235. sempy_labs/theme/__init__.py +12 -0
  236. sempy_labs/theme/_org_themes.py +129 -0
  237. sempy_labs/tom/__init__.py +3 -0
  238. sempy_labs/tom/_model.py +5977 -0
  239. sempy_labs/variable_library/__init__.py +19 -0
  240. sempy_labs/variable_library/_functions.py +403 -0
  241. sempy_labs/warehouse/__init__.py +28 -0
  242. sempy_labs/warehouse/_items.py +234 -0
  243. sempy_labs/warehouse/_restore_points.py +309 -0
@@ -0,0 +1,149 @@
1
+ import pandas as pd
2
+ from typing import Optional
3
+ from sempy_labs._helper_functions import (
4
+ _base_api,
5
+ _create_dataframe,
6
+ delete_item,
7
+ create_item,
8
+ resolve_item_id,
9
+ resolve_workspace_id,
10
+ )
11
+ from uuid import UUID
12
+ import sempy_labs._icons as icons
13
+ from sempy._utils._log import log
14
+
15
+
16
@log
def list_kql_databases(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
    """
    Shows the KQL databases within a workspace.

    This is a wrapper function for the following API: `Items - List KQL Databases <https://learn.microsoft.com/rest/api/fabric/kqldatabase/items/list-kql-databases>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the KQL databases within a workspace.
    """

    columns = {
        "KQL Database Name": "string",
        "KQL Database Id": "string",
        "Description": "string",
        "Parent Eventhouse Item Id": "string",
        "Query Service URI": "string",
        "Ingestion Service URI": "string",
        "Database Type": "string",
    }

    workspace_id = resolve_workspace_id(workspace)

    pages = _base_api(
        request=f"v1/workspaces/{workspace_id}/kqlDatabases",
        uses_pagination=True,
        client="fabric_sp",
    )

    def _to_record(item: dict) -> dict:
        # Service URIs and the parent eventhouse live under 'properties'.
        props = item.get("properties", {})
        return {
            "KQL Database Name": item.get("displayName"),
            "KQL Database Id": item.get("id"),
            "Description": item.get("description"),
            "Parent Eventhouse Item Id": props.get("parentEventhouseItemId"),
            "Query Service URI": props.get("queryServiceUri"),
            "Ingestion Service URI": props.get("ingestionServiceUri"),
            "Database Type": props.get("databaseType"),
        }

    records = [
        _to_record(item) for page in pages for item in page.get("value", [])
    ]

    if not records:
        # Preserve the typed, empty schema when the workspace has no KQL databases.
        return _create_dataframe(columns=columns)

    return pd.DataFrame(records, columns=list(columns.keys()))
77
+
78
+
79
@log
def _create_kql_database(
    name: str, description: Optional[str] = None, workspace: Optional[str | UUID] = None
):
    """
    Creates a KQL database.

    This is a wrapper function for the following API: `Items - Create KQL Database <https://learn.microsoft.com/rest/api/fabric/kqldatabase/items/create-kql-database>`_.

    Parameters
    ----------
    name: str
        Name of the KQL database.
    description : str, default=None
        A description of the KQL database.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    # Thin wrapper: the generic item-creation helper handles the API call.
    create_item(
        name=name, description=description, type="KQLDatabase", workspace=workspace
    )
103
+
104
+
105
@log
def delete_kql_database(
    kql_database: str | UUID,
    workspace: Optional[str | UUID] = None,
    **kwargs,
):
    """
    Deletes a KQL database.

    This is a wrapper function for the following API: `Items - Delete KQL Database <https://learn.microsoft.com/rest/api/fabric/kqldatabase/items/delete-kql-database>`_.

    Parameters
    ----------
    kql_database: str | uuid.UUID
        Name or ID of the KQL database.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    # Back-compat: honor the deprecated 'name' keyword but warn callers.
    if "name" in kwargs:
        print(
            f"{icons.warning} The 'name' parameter is deprecated. Please use 'kql_database' instead."
        )
        kql_database = kwargs["name"]

    delete_item(item=kql_database, type="KQLDatabase", workspace=workspace)
133
+
134
+
135
@log
def _resolve_cluster_uri(
    kql_database: str | UUID, workspace: Optional[str | UUID] = None
) -> str:
    """
    Resolves the Kusto cluster query URI for a KQL database.

    Parameters
    ----------
    kql_database : str | uuid.UUID
        Name or ID of the KQL database.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    str
        The 'queryServiceUri' property of the KQL database.
    """

    workspace_id = resolve_workspace_id(workspace=workspace)
    item_id = resolve_item_id(
        item=kql_database, type="KQLDatabase", workspace=workspace
    )
    # No leading slash: all other _base_api calls in this module pass
    # relative paths of the form "v1/workspaces/...".
    response = _base_api(
        request=f"v1/workspaces/{workspace_id}/kqlDatabases/{item_id}",
        client="fabric_sp",
    )

    return response.json().get("properties", {}).get("queryServiceUri")
@@ -0,0 +1,124 @@
1
+ import pandas as pd
2
+ import sempy_labs._icons as icons
3
+ from typing import Optional
4
+ from sempy_labs._helper_functions import (
5
+ resolve_workspace_id,
6
+ _base_api,
7
+ _create_dataframe,
8
+ delete_item,
9
+ create_item,
10
+ )
11
+ from uuid import UUID
12
+ from sempy._utils._log import log
13
+
14
+
15
@log
def list_kql_querysets(workspace: Optional[str | UUID] = None) -> pd.DataFrame:
    """
    Shows the KQL querysets within a workspace.

    This is a wrapper function for the following API: `Items - List KQL Querysets <https://learn.microsoft.com/rest/api/fabric/kqlqueryset/items/list-kql-querysets>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the KQL querysets within a workspace.
    """

    columns = {
        "KQL Queryset Name": "string",
        "KQL Queryset Id": "string",
        "Description": "string",
    }

    workspace_id = resolve_workspace_id(workspace)

    pages = _base_api(
        request=f"v1/workspaces/{workspace_id}/kqlQuerysets",
        uses_pagination=True,
        client="fabric_sp",
    )

    records = [
        {
            "KQL Queryset Name": item.get("displayName"),
            "KQL Queryset Id": item.get("id"),
            "Description": item.get("description"),
        }
        for page in pages
        for item in page.get("value", [])
    ]

    if not records:
        # Preserve the typed, empty schema when the workspace has no querysets.
        return _create_dataframe(columns=columns)

    return pd.DataFrame(records, columns=list(columns.keys()))
67
+
68
+
69
@log
def create_kql_queryset(
    name: str, description: Optional[str] = None, workspace: Optional[str | UUID] = None
):
    """
    Creates a KQL queryset.

    This is a wrapper function for the following API: `Items - Create KQL Queryset <https://learn.microsoft.com/rest/api/fabric/kqlqueryset/items/create-kql-queryset>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    name: str
        Name of the KQL queryset.
    description : str, default=None
        A description of the KQL queryset.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    # Thin wrapper: the generic item-creation helper handles the API call.
    create_item(
        name=name, description=description, type="KQLQueryset", workspace=workspace
    )
95
+
96
+
97
@log
def delete_kql_queryset(
    kql_queryset: str | UUID, workspace: Optional[str | UUID] = None, **kwargs
):
    """
    Deletes a KQL queryset.

    This is a wrapper function for the following API: `Items - Delete KQL Queryset <https://learn.microsoft.com/rest/api/fabric/kqlqueryset/items/delete-kql-queryset>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    kql_queryset: str | uuid.UUID
        Name or ID of the KQL queryset.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    # Back-compat: honor the deprecated 'name' keyword but warn callers.
    if "name" in kwargs:
        print(
            f"{icons.warning} The 'name' parameter is deprecated. Please use 'kql_queryset' instead."
        )
        kql_queryset = kwargs["name"]

    delete_item(item=kql_queryset, type="KQLQueryset", workspace=workspace)
sempy_labs/_kusto.py ADDED
@@ -0,0 +1,137 @@
1
+ import requests
2
+ import pandas as pd
3
+ from sempy.fabric.exceptions import FabricHTTPException
4
+ from sempy._utils._log import log
5
+ import sempy_labs._icons as icons
6
+ from typing import Optional
7
+ from uuid import UUID
8
+ from sempy_labs._kql_databases import _resolve_cluster_uri
9
+ from sempy_labs._helper_functions import resolve_item_id
10
+
11
+
12
@log
def query_kusto(
    query: str,
    kql_database: str | UUID,
    workspace: Optional[str | UUID] = None,
    language: str = "kql",
) -> pd.DataFrame:
    """
    Runs a KQL query against a KQL database.

    Parameters
    ----------
    query : str
        The query (supports KQL or SQL - make sure to specify the language parameter accordingly).
    kql_database : str | uuid.UUID
        The KQL database name or ID.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    language : str, default="kql"
        The language of the query. Currently "kql" and "sql" are supported.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the result of the KQL query.
    """

    import notebookutils

    language = language.lower()
    if language not in ["kql", "sql"]:
        raise ValueError(
            f"{icons._red_dot} Invalid language '{language}'. Only 'kql' and 'sql' are supported."
        )

    cluster_uri = _resolve_cluster_uri(kql_database=kql_database, workspace=workspace)
    # The token must be scoped to the cluster URI itself, not the Fabric API.
    token = notebookutils.credentials.getToken(cluster_uri)

    headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json",
        "Accept": "application/json",
    }

    kql_database_id = resolve_item_id(
        item=kql_database, type="KQLDatabase", workspace=workspace
    )
    # Kusto REST API: database goes in 'db', the query text in 'csl'.
    payload = {"db": kql_database_id, "csl": query}
    if language == "sql":
        payload["properties"] = {"Options": {"query_language": "sql"}}

    response = requests.post(
        f"{cluster_uri}/v1/rest/query",
        headers=headers,
        json=payload,
    )

    if response.status_code != 200:
        raise FabricHTTPException(response)

    results = response.json()
    # The first table of the response carries the query's primary result set.
    # Columns are returned as strings by default; no dtype coercion is applied.
    columns_info = results["Tables"][0]["Columns"]
    rows = results["Tables"][0]["Rows"]

    return pd.DataFrame(rows, columns=[col["ColumnName"] for col in columns_info])
106
+
107
+
108
@log
def query_workspace_monitoring(
    query: str, workspace: Optional[str | UUID] = None, language: str = "kql"
) -> pd.DataFrame:
    """
    Runs a query against the Fabric workspace monitoring database. Workspace monitoring must be enabled on the workspace to use this function.

    Parameters
    ----------
    query : str
        The query (supports KQL or SQL - make sure to specify the language parameter accordingly).
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    language : str, default="kql"
        The language of the query. Currently "kql" and "sql" are supported.

    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing the result of the query.
    """

    # Workspace monitoring always provisions a database with this fixed name.
    return query_kusto(
        query=query,
        kql_database="Monitoring KQL database",
        workspace=workspace,
        language=language,
    )
sempy_labs/_labels.py ADDED
@@ -0,0 +1,124 @@
1
+ import sempy.fabric as fabric
2
+ import requests
3
+ import pandas as pd
4
+ from typing import Optional, Union
5
+ from uuid import UUID
6
+ from sempy.fabric.exceptions import FabricHTTPException
7
+ from sempy._utils._log import log
8
+ from sempy_labs._helper_functions import (
9
+ _get_url_prefix,
10
+ )
11
+
12
+
13
@log
def list_item_labels(workspace: Optional[Union[str, UUID]] = None) -> pd.DataFrame:
    """
    List all items within a workspace and shows their sensitivity labels.

    NOTE: This function uses an internal API and is subject to change/break without notice.

    Parameters
    ----------
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    Returns
    -------
    pandas.DataFrame
        A pandas dataframe showing a list of all items within a workspace and their sensitivity labels.
    """

    import notebookutils

    token = notebookutils.credentials.getToken("pbi")
    headers = {"Authorization": f"Bearer {token}"}

    # Item types handled in special payload fields
    grouped_types = {
        "dashboards": "Dashboard",
        "reports": "Report",
        "models": "SemanticModel",
        "dataflows": "Dataflow",
        "datamarts": "Datamart",
    }

    # All other item types go into 'artifacts'
    fabric_items = [
        "Datamart",
        "Lakehouse",
        "Eventhouse",
        "Environment",
        "KQLDatabase",
        "KQLQueryset",
        "KQLDashboard",
        "DataPipeline",
        "Notebook",
        "SparkJobDefinition",
        "MLExperiment",
        "MLModel",
        "Warehouse",
        "Eventstream",
        "SQLEndpoint",
        "MirroredWarehouse",
        "MirroredDatabase",
        "Reflex",
        "GraphQLApi",
        "MountedDataFactory",
        "SQLDatabase",
        "CopyJob",
        "VariableLibrary",
        "Dataflow",
        "ApacheAirflowJob",
        "WarehouseSnapshot",
        "DigitalTwinBuilder",
        "DigitalTwinBuilderFlow",
        "MirroredAzureDatabricksCatalog",
        "DataAgent",
        "UserDataFunction",
    ]

    dfI = fabric.list_items(workspace=workspace)

    payload = {
        key: [{"artifactId": i} for i in dfI[dfI["Type"] == value]["Id"].tolist()]
        for key, value in grouped_types.items()
    }

    # Add generic artifact types
    artifact_ids = dfI[dfI["Type"].isin(fabric_items)]["Id"].tolist()
    if artifact_ids:
        payload["artifacts"] = [{"artifactId": i} for i in artifact_ids]

    prefix = _get_url_prefix()

    response = requests.post(
        f"{prefix}/metadata/informationProtection/artifacts",
        json=payload,
        headers=headers,
    )
    if response.status_code != 200:
        # Pass the response object, consistent with the other FabricHTTPException
        # call sites in this package (the exception renders the details itself).
        raise FabricHTTPException(response)
    result = response.json()

    label_keys = [
        "artifactInformationProtections",
        "datasetInformationProtections",
        "reportInformationProtections",
        "dashboardInformationProtections",
    ]

    rows = [
        {
            "Id": item.get("artifactObjectId"),
            "Label Id": item.get("labelId"),
            "Label Name": item.get("name"),
            "Parent Label Name": item.get("parent", {}).get("name"),
            "Label Description": item.get("tooltip"),
        }
        for key in label_keys
        for item in result.get(key, [])
    ]

    # Always build the frame with an explicit column list: when no item has a
    # label, pd.DataFrame([]) would have no 'Id' column and merge() would raise.
    label_columns = [
        "Id",
        "Label Id",
        "Label Name",
        "Parent Label Name",
        "Label Description",
    ]
    df_labels = pd.DataFrame(rows, columns=label_columns)
    return dfI.merge(df_labels, on="Id", how="left")