semantic-link-labs 0.8.3__py3-none-any.whl → 0.8.5__py3-none-any.whl

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.

Potentially problematic release: this version of semantic-link-labs might be problematic.
Files changed (109)
  1. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/METADATA +38 -8
  2. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/RECORD +109 -104
  3. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +53 -1
  5. sempy_labs/_bpa_translation/_model/_translations_am-ET.po +24 -5
  6. sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +28 -4
  7. sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +34 -4
  8. sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +33 -4
  9. sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +31 -4
  10. sempy_labs/_bpa_translation/_model/_translations_da-DK.po +31 -4
  11. sempy_labs/_bpa_translation/_model/_translations_de-DE.po +34 -4
  12. sempy_labs/_bpa_translation/_model/_translations_el-GR.po +36 -4
  13. sempy_labs/_bpa_translation/_model/_translations_es-ES.po +90 -58
  14. sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +31 -5
  15. sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +31 -4
  16. sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +34 -4
  17. sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +34 -4
  18. sempy_labs/_bpa_translation/_model/_translations_he-IL.po +28 -4
  19. sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +32 -4
  20. sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +32 -4
  21. sempy_labs/_bpa_translation/_model/_translations_id-ID.po +32 -4
  22. sempy_labs/_bpa_translation/_model/_translations_is-IS.po +31 -4
  23. sempy_labs/_bpa_translation/_model/_translations_it-IT.po +34 -4
  24. sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +24 -4
  25. sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +72 -56
  26. sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +34 -4
  27. sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +34 -4
  28. sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +95 -71
  29. sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +32 -4
  30. sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +32 -4
  31. sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +33 -4
  32. sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +34 -4
  33. sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +31 -4
  34. sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +32 -4
  35. sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +32 -4
  36. sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +32 -4
  37. sempy_labs/_bpa_translation/_model/_translations_te-IN.po +31 -4
  38. sempy_labs/_bpa_translation/_model/_translations_th-TH.po +31 -4
  39. sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +32 -4
  40. sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +100 -72
  41. sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +23 -5
  42. sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +32 -4
  43. sempy_labs/_capacities.py +49 -14
  44. sempy_labs/_capacity_migration.py +1 -7
  45. sempy_labs/_data_pipelines.py +6 -0
  46. sempy_labs/_dataflows.py +118 -1
  47. sempy_labs/_dax.py +189 -3
  48. sempy_labs/_deployment_pipelines.py +13 -7
  49. sempy_labs/_environments.py +6 -0
  50. sempy_labs/_eventhouses.py +6 -0
  51. sempy_labs/_eventstreams.py +6 -0
  52. sempy_labs/_external_data_shares.py +6 -4
  53. sempy_labs/_generate_semantic_model.py +26 -3
  54. sempy_labs/_git.py +14 -14
  55. sempy_labs/_helper_functions.py +197 -1
  56. sempy_labs/_icons.py +55 -22
  57. sempy_labs/_kql_databases.py +6 -0
  58. sempy_labs/_kql_querysets.py +6 -0
  59. sempy_labs/_list_functions.py +1 -1
  60. sempy_labs/_managed_private_endpoints.py +166 -0
  61. sempy_labs/_mirrored_databases.py +428 -0
  62. sempy_labs/_mirrored_warehouses.py +2 -0
  63. sempy_labs/_ml_experiments.py +6 -0
  64. sempy_labs/_ml_models.py +7 -1
  65. sempy_labs/_model_bpa.py +215 -181
  66. sempy_labs/_model_bpa_bulk.py +46 -42
  67. sempy_labs/_model_bpa_rules.py +8 -3
  68. sempy_labs/_model_dependencies.py +41 -87
  69. sempy_labs/_notebooks.py +107 -12
  70. sempy_labs/_query_scale_out.py +8 -6
  71. sempy_labs/_refresh_semantic_model.py +299 -49
  72. sempy_labs/_spark.py +12 -5
  73. sempy_labs/_translations.py +2 -0
  74. sempy_labs/_vertipaq.py +89 -86
  75. sempy_labs/_warehouses.py +79 -0
  76. sempy_labs/_workloads.py +128 -0
  77. sempy_labs/_workspace_identity.py +4 -4
  78. sempy_labs/_workspaces.py +14 -1
  79. sempy_labs/admin/__init__.py +2 -0
  80. sempy_labs/admin/_basic_functions.py +131 -43
  81. sempy_labs/admin/_domains.py +18 -18
  82. sempy_labs/directlake/__init__.py +2 -0
  83. sempy_labs/directlake/_directlake_schema_sync.py +2 -1
  84. sempy_labs/directlake/_dl_helper.py +4 -1
  85. sempy_labs/directlake/_get_shared_expression.py +7 -1
  86. sempy_labs/directlake/_guardrails.py +2 -1
  87. sempy_labs/directlake/_show_unsupported_directlake_objects.py +1 -7
  88. sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +78 -0
  89. sempy_labs/directlake/_update_directlake_partition_entity.py +13 -32
  90. sempy_labs/directlake/_warm_cache.py +10 -9
  91. sempy_labs/lakehouse/_get_lakehouse_tables.py +6 -2
  92. sempy_labs/lakehouse/_shortcuts.py +4 -0
  93. sempy_labs/migration/_create_pqt_file.py +5 -2
  94. sempy_labs/migration/_migrate_calctables_to_lakehouse.py +3 -2
  95. sempy_labs/migration/_migrate_calctables_to_semantic_model.py +2 -0
  96. sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +2 -8
  97. sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +17 -0
  98. sempy_labs/migration/_migration_validation.py +2 -0
  99. sempy_labs/migration/_refresh_calc_tables.py +1 -0
  100. sempy_labs/report/__init__.py +6 -1
  101. sempy_labs/report/_download_report.py +75 -0
  102. sempy_labs/report/_generate_report.py +6 -0
  103. sempy_labs/report/_paginated.py +74 -0
  104. sempy_labs/report/_report_functions.py +6 -0
  105. sempy_labs/report/_report_rebind.py +2 -0
  106. sempy_labs/report/_reportwrapper.py +4 -2
  107. sempy_labs/tom/_model.py +135 -68
  108. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/LICENSE +0 -0
  109. {semantic_link_labs-0.8.3.dist-info → semantic_link_labs-0.8.5.dist-info}/top_level.txt +0 -0
sempy_labs/_refresh_semantic_model.py CHANGED
@@ -1,11 +1,19 @@
  import sempy.fabric as fabric
  import time
- from sempy_labs._helper_functions import resolve_dataset_id
+ from sempy_labs._helper_functions import (
+     resolve_dataset_id,
+     resolve_workspace_name_and_id,
+     _get_partition_map,
+     _process_and_display_chart,
+ )
  from typing import Any, List, Optional, Union
  from sempy._utils._log import log
  import sempy_labs._icons as icons
- from sempy_labs._helper_functions import resolve_workspace_name_and_id
  from sempy.fabric.exceptions import FabricHTTPException
+ import pandas as pd
+ import warnings
+ import ipywidgets as widgets
+ import json


  @log
@@ -18,7 +26,8 @@ def refresh_semantic_model(
      apply_refresh_policy: bool = True,
      max_parallelism: int = 10,
      workspace: Optional[str] = None,
- ):
+     visualize: bool = False,
+ ) -> pd.DataFrame | None:
      """
      Refreshes a semantic model.

@@ -44,6 +53,13 @@ def refresh_semantic_model(
          The Fabric workspace name.
          Defaults to None which resolves to the workspace of the attached lakehouse
          or if no lakehouse attached, resolves to the workspace of the notebook.
+     visualize : bool, default=False
+         If True, displays a Gantt chart showing the refresh statistics for each table/partition.
+
+     Returns
+     -------
+     pandas.DataFrame | None
+         If 'visualize' is set to True, returns a pandas dataframe showing the SSAS trace output used to generate the visualization.
      """

      workspace = fabric.resolve_workspace_name(workspace)
@@ -67,65 +83,186 @@ def refresh_semantic_model(

      objects = objects + [extract_names(partition) for partition in partitions]

-     refresh_type = (
-         refresh_type.lower().replace("only", "Only").replace("values", "Values")
-     )
+     refresh_type = refresh_type.lower()
+     for prefix, mapped_value in icons.refresh_type_mapping.items():
+         if refresh_type.startswith(prefix):
+             refresh_type = mapped_value
+             break

-     if refresh_type not in icons.refreshTypes:
+     valid_refresh_types = list(icons.refresh_type_mapping.values())
+     if refresh_type not in valid_refresh_types:
          raise ValueError(
-             f"{icons.red_dot} Invalid refresh type. Refresh type must be one of these values: {icons.refreshTypes}."
+             f"{icons.red_dot} Invalid refresh type. Refresh type must be one of these values: {valid_refresh_types}."
          )

-     if len(objects) == 0:
-         requestID = fabric.refresh_dataset(
-             dataset=dataset,
-             workspace=workspace,
-             refresh_type=refresh_type,
-             retry_count=retry_count,
-             apply_refresh_policy=apply_refresh_policy,
-             max_parallelism=max_parallelism,
+     def refresh_and_trace_dataset(
+         dataset,
+         workspace,
+         refresh_type,
+         retry_count,
+         apply_refresh_policy,
+         max_parallelism,
+         objects,
+         visualize,
+     ):
+         # Ignore specific warnings
+         warnings.filterwarnings(
+             "ignore",
+             message="No trace logs have been recorded. Try starting the trace with a larger 'delay'",
          )
-     else:
-         requestID = fabric.refresh_dataset(
-             dataset=dataset,
-             workspace=workspace,
-             refresh_type=refresh_type,
-             retry_count=retry_count,
-             apply_refresh_policy=apply_refresh_policy,
-             max_parallelism=max_parallelism,
-             objects=objects,
-         )
-     print(
-         f"{icons.in_progress} Refresh of the '{dataset}' semantic model within the '{workspace}' workspace is in progress..."
-     )
-     if len(objects) != 0:
-         print(objects)

-     while True:
-         requestDetails = fabric.get_refresh_execution_details(
-             dataset=dataset, refresh_request_id=requestID, workspace=workspace
-         )
-         status = requestDetails.status
+         def extract_failure_error():
+             error_messages = []
+             combined_messages = ""
+             final_message = f"{icons.red_dot} The refresh of the '{dataset}' semantic model within the '{workspace}' workspace has failed."
+             for _, r in fabric.get_refresh_execution_details(
+                 refresh_request_id=request_id,
+                 dataset=dataset,
+                 workspace=workspace,
+             ).messages.iterrows():
+                 error_messages.append(f"{r['Type']}: {r['Message']}")

-         # Check if the refresh has completed
-         if status == "Completed":
-             break
-         elif status == "Failed":
-             raise ValueError(
-                 f"{icons.red_dot} The refresh of the '{dataset}' semantic model within the '{workspace}' workspace has failed."
+             if error_messages:
+                 combined_messages = "\n".join(error_messages)
+                 final_message += f"'\n' {combined_messages}"
+
+             return final_message
+
+         # Function to perform dataset refresh
+         def refresh_dataset():
+             return fabric.refresh_dataset(
+                 dataset=dataset,
+                 workspace=workspace,
+                 refresh_type=refresh_type,
+                 retry_count=retry_count,
+                 apply_refresh_policy=apply_refresh_policy,
+                 max_parallelism=max_parallelism,
+                 objects=objects if objects else None,
              )
-         elif status == "Cancelled":
-             print(
-                 f"{icons.yellow_dot} The refresh of the '{dataset}' semantic model within the '{workspace}' workspace has been cancelled."
+
+         def check_refresh_status(request_id):
+             request_details = fabric.get_refresh_execution_details(
+                 dataset=dataset, refresh_request_id=request_id, workspace=workspace
              )
-             return
+             return request_details.status

-         time.sleep(3)
+         def display_trace_logs(trace, partition_map, widget, title, stop=False):
+             if stop:
+                 df = trace.stop()
+             else:
+                 df = trace.get_trace_logs()
+             if not df.empty:
+                 df = df[
+                     df["Event Subclass"].isin(["ExecuteSql", "Process"])
+                 ].reset_index(drop=True)
+                 df = pd.merge(
+                     df,
+                     partition_map[
+                         ["PartitionID", "Object Name", "TableName", "PartitionName"]
+                     ],
+                     left_on="Object ID",
+                     right_on="PartitionID",
+                     how="left",
+                 )
+                 _process_and_display_chart(df, title=title, widget=widget)
+             if stop:
+                 df.drop(["Object Name", "PartitionID"], axis=1, inplace=True)
+                 df.rename(columns={"TableName": "Table Name"}, inplace=True)
+                 df.rename(columns={"PartitionName": "Partition Name"}, inplace=True)
+             return df

-     print(
-         f"{icons.green_dot} Refresh of the '{dataset}' semantic model within the '{workspace}' workspace is complete."
+         # Start the refresh process
+         if not visualize:
+             request_id = refresh_dataset()
+             print(
+                 f"{icons.in_progress} Refresh of the '{dataset}' semantic model within the '{workspace}' workspace is in progress..."
+             )
+
+         # Monitor refresh progress and handle tracing if visualize is enabled
+         if visualize:
+             partition_map = _get_partition_map(dataset, workspace)
+             widget = widgets.Output()
+
+             with fabric.create_trace_connection(
+                 dataset=dataset, workspace=workspace
+             ) as trace_connection:
+                 with trace_connection.create_trace(icons.refresh_event_schema) as trace:
+                     trace.start()
+                     request_id = refresh_dataset()
+
+                     while True:
+                         status = check_refresh_status(request_id)
+                         # Check if the refresh has completed
+                         if status == "Completed":
+                             break
+                         elif status == "Failed":
+                             raise ValueError(extract_failure_error())
+                         elif status == "Cancelled":
+                             print(
+                                 f"{icons.yellow_dot} The refresh of the '{dataset}' semantic model within the '{workspace}' workspace has been cancelled."
+                             )
+                             return
+
+                         # Capture and display logs in real-time
+                         display_trace_logs(
+                             trace,
+                             partition_map,
+                             widget,
+                             title="Refresh in progress...",
+                         )
+
+                         time.sleep(3)  # Wait before the next check
+
+                     # Final log display after completion
+                     time.sleep(5)
+
+                     # Stop trace and display final chart
+                     final_df = display_trace_logs(
+                         trace,
+                         partition_map,
+                         widget,
+                         title="Refresh Completed",
+                         stop=True,
+                     )
+
+                     print(
+                         f"{icons.green_dot} Refresh '{refresh_type}' of the '{dataset}' semantic model within the '{workspace}' workspace is complete."
+                     )
+                     return final_df
+
+         # For non-visualize case, only check refresh status
+         else:
+             while True:
+                 status = check_refresh_status(request_id)
+                 if status == "Completed":
+                     break
+                 elif status == "Failed":
+                     raise ValueError(extract_failure_error())
+                 elif status == "Cancelled":
+                     print(
+                         f"{icons.yellow_dot} The refresh of the '{dataset}' semantic model within the '{workspace}' workspace has been cancelled."
+                     )
+                     return
+
+                 time.sleep(3)
+
+             print(
+                 f"{icons.green_dot} Refresh '{refresh_type}' of the '{dataset}' semantic model within the '{workspace}' workspace is complete."
+             )
+
+     final_output = refresh_and_trace_dataset(
+         dataset=dataset,
+         workspace=workspace,
+         refresh_type=refresh_type,
+         retry_count=retry_count,
+         apply_refresh_policy=apply_refresh_policy,
+         max_parallelism=max_parallelism,
+         objects=objects,
+         visualize=visualize,
      )

+     return final_output
+

  @log
  def cancel_dataset_refresh(
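
For context on how the new parameter is meant to be used, a hedged usage sketch follows. The model name "AdventureWorks" and workspace "Sales Workspace" are placeholders, "full" is one of the standard enhanced-refresh types, and the call assumes refresh_semantic_model is exposed from the sempy_labs package root and is run inside a Fabric notebook:

    import sempy_labs as labs

    # With visualize=True (new in 0.8.5) the refresh renders a Gantt chart of the
    # per-table/partition progress and returns the SSAS trace output as a DataFrame;
    # with the default visualize=False it behaves as before and returns None.
    trace_df = labs.refresh_semantic_model(
        dataset="AdventureWorks",       # placeholder semantic model name
        workspace="Sales Workspace",    # placeholder workspace name
        refresh_type="full",
        visualize=True,
    )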
@@ -173,3 +310,116 @@ def cancel_dataset_refresh(
      print(
          f"{icons.green_dot} The '{request_id}' refresh request for the '{dataset}' semantic model within the '{workspace}' workspace has been cancelled."
      )
+
+
+ def get_semantic_model_refresh_history(
+     dataset: str, request_id: Optional[str] = None, workspace: Optional[str] = None
+ ) -> pd.DataFrame:
+     """
+     Obtains the semantic model refresh history (refreshes executed via the Enhanced Refresh API).
+
+     This is a wrapper function for the following API: `Datasets - Get Refresh History In Group <https://learn.microsoft.com/rest/api/power-bi/datasets/get-refresh-history-in-group>`_.
+
+     Parameters
+     ----------
+     dataset : str
+         Name of the semantic model.
+     request_id : str, default=None
+         The request id of a semantic model refresh.
+         Defaults to None which resolves to showing all refresh requests for the given semantic model.
+     workspace : str, default=None
+         The Fabric workspace name.
+         Defaults to None which resolves to the workspace of the attached lakehouse
+         or if no lakehouse attached, resolves to the workspace of the notebook.
+
+     Returns
+     -------
+     pandas.DataFrame
+         A pandas dataframe showing the semantic model refresh history.
+     """
+
+     workspace_name = fabric.resolve_workspace_name(workspace)
+     workspace_id = fabric.resolve_workspace_id(workspace_name)
+     df = pd.DataFrame(
+         columns=[
+             "Request Id",
+             "Refresh Type",
+             "Start Time",
+             "End Time",
+             "Status",
+             "Extended Status",
+         ]
+     )
+
+     dataset_id = fabric.resolve_item_id(
+         item_name=dataset, workspace=workspace_id, type="SemanticModel"
+     )
+     client = fabric.PowerBIRestClient()
+     response = client.get(
+         f"/v1.0/myorg/groups/{workspace_id}/datasets/{dataset_id}/refreshes"
+     )
+     data = []
+
+     for i in response.json().get("value", []):
+         error = i.get("serviceExceptionJson")
+         if error:
+             error_json = json.loads(error)
+         if request_id is None:
+             new_data = {
+                 "Request Id": i.get("requestId"),
+                 "Refresh Type": i.get("refreshType"),
+                 "Start Time": i.get("startTime"),
+                 "End Time": i.get("endTime"),
+                 "Error Code": error_json.get("errorCode") if error else None,
+                 "Error Description": (
+                     error_json.get("errorDescription") if error else None
+                 ),
+                 "Status": i.get("status"),
+                 "Extended Status": i.get("extendedStatus"),
+                 "Attempts": i.get("refreshAttempts"),
+             }
+             data.append(new_data)
+
+         elif request_id == i.get("requestId"):
+             for attempt in i.get("refreshAttempts", []):
+                 attempt_error = attempt.get("serviceExceptionJson")
+                 if attempt_error:
+                     attempt_error_json = json.loads(attempt_error)
+                 new_data = {
+                     "Request Id": i.get("requestId"),
+                     "Refresh Type": i.get("refreshType"),
+                     "Start Time": i.get("startTime"),
+                     "End Time": i.get("endTime"),
+                     "Error Code": error_json.get("errorCode") if error else None,
+                     "Error Description": (
+                         error_json.get("errorDescription") if error else None
+                     ),
+                     "Status": i.get("status"),
+                     "Extended Status": i.get("extendedStatus"),
+                     "Attempt Id": attempt.get("attemptId"),
+                     "Attempt Start Time": attempt.get("startTime"),
+                     "Attempt End Time": attempt.get("endTime"),
+                     "Attempt Error Code": (
+                         attempt_error_json.get("errorCode") if attempt_error else None
+                     ),
+                     "Attempt Error Description": (
+                         attempt_error_json.get("errorDescription")
+                         if attempt_error
+                         else None
+                     ),
+                     "Type": attempt.get("type"),
+                 }
+                 data.append(new_data)
+
+     if data:
+         df = pd.DataFrame(data)
+
+         # date_cols = ["Start Time", "End Time"]
+         # df[date_cols] = df[date_cols].apply(pd.to_datetime)
+
+         if "Attempt Id" in df.columns:
+             df["Attempt Id"] = df["Attempt Id"].astype(int)
+             # date_cols = ["Attempt Start Time", "Attempt End Time"]
+             # df[date_cols] = df[date_cols].apply(pd.to_datetime)
+
+     return df
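
A minimal sketch of the new refresh-history helper, again with placeholder names; it assumes the function is re-exported from the sempy_labs package root alongside the other refresh helpers (otherwise it can be imported from sempy_labs._refresh_semantic_model):

    import sempy_labs as labs

    # All enhanced-refresh requests recorded for the model:
    history = labs.get_semantic_model_refresh_history(
        dataset="AdventureWorks", workspace="Sales Workspace"
    )

    # Attempt-level detail for a single request id taken from the frame above:
    detail = labs.get_semantic_model_refresh_history(
        dataset="AdventureWorks",
        workspace="Sales Workspace",
        request_id=history["Request Id"].iloc[0],
    )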
sempy_labs/_spark.py CHANGED
@@ -12,6 +12,8 @@ def list_custom_pools(workspace: Optional[str] = None) -> pd.DataFrame:
      """
      Lists all `custom pools <https://learn.microsoft.com/fabric/data-engineering/create-custom-spark-pools>`_ within a workspace.

+     This is a wrapper function for the following API: `Custom Pools - List Workspace Custom Pools <https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/list-workspace-custom-pools>`_.
+
      Parameters
      ----------
      workspace : str, default=None
@@ -25,7 +27,6 @@ def list_custom_pools(workspace: Optional[str] = None) -> pd.DataFrame:
          A pandas dataframe showing all the custom pools within the Fabric workspace.
      """

-     # https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/list-workspace-custom-pools
      (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

      df = pd.DataFrame(
@@ -99,6 +100,8 @@
      """
      Creates a `custom pool <https://learn.microsoft.com/fabric/data-engineering/create-custom-spark-pools>`_ within a workspace.

+     This is a wrapper function for the following API: `Custom Pools - Create Workspace Custom Pool <https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool>`_.
+
      Parameters
      ----------
      pool_name : str
@@ -125,7 +128,6 @@
          or if no lakehouse attached, resolves to the workspace of the notebook.
      """

-     # https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/create-workspace-custom-pool
      (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

      request_body = {
@@ -171,6 +173,8 @@
      """
      Updates the properties of a `custom pool <https://learn.microsoft.com/fabric/data-engineering/create-custom-spark-pools>`_ within a workspace.

+     This is a wrapper function for the following API: `Custom Pools - Update Workspace Custom Pool <https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/update-workspace-custom-pool>`_.
+
      Parameters
      ----------
      pool_name : str
@@ -205,7 +209,6 @@
          or if no lakehouse attached, resolves to the workspace of the notebook.
      """

-     # https://learn.microsoft.com/en-us/rest/api/fabric/spark/custom-pools/update-workspace-custom-pool?tabs=HTTP
      (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

      df = list_custom_pools(workspace=workspace)
@@ -267,6 +270,8 @@ def delete_custom_pool(pool_name: str, workspace: Optional[str] = None):
      """
      Deletes a `custom pool <https://learn.microsoft.com/fabric/data-engineering/create-custom-spark-pools>`_ within a workspace.

+     This is a wrapper function for the following API: `Custom Pools - Delete Workspace Custom Pool <https://learn.microsoft.com/rest/api/fabric/spark/custom-pools/delete-workspace-custom-pool>`_.
+
      Parameters
      ----------
      pool_name : str
@@ -304,6 +309,8 @@
      """
      Shows the spark settings for a workspace.

+     This is a wrapper function for the following API: `Workspace Settings - Get Spark Settings <https://learn.microsoft.com/rest/api/fabric/spark/workspace-settings/get-spark-settings>`_.
+
      Parameters
      ----------
      workspace : str, default=None
@@ -319,7 +326,6 @@
          A pandas dataframe showing the spark settings for a workspace.
      """

-     # https://learn.microsoft.com/en-us/rest/api/fabric/spark/workspace-settings/get-spark-settings?tabs=HTTP
      (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

      df = pd.DataFrame(
@@ -392,6 +398,8 @@
      """
      Updates the spark settings for a workspace.

+     This is a wrapper function for the following API: `Workspace Settings - Update Spark Settings <https://learn.microsoft.com/rest/api/fabric/spark/workspace-settings/update-spark-settings>`_.
+
      Parameters
      ----------
      automatic_log_enabled : bool, default=None
@@ -424,7 +432,6 @@
          or if no lakehouse attached, resolves to the workspace of the notebook.
      """

-     # https://learn.microsoft.com/en-us/rest/api/fabric/spark/workspace-settings/update-spark-settings?tabs=HTTP
      (workspace, workspace_id) = resolve_workspace_name_and_id(workspace)

      request_body = get_spark_settings(workspace=workspace, return_dataframe=False)
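
The _spark.py changes above only add docstring references to the Fabric REST endpoints each helper wraps; behaviour is unchanged. For orientation, a minimal sketch of two of the wrapped calls (the workspace name is a placeholder, and the helpers are assumed to be exposed from the sempy_labs package root):

    import sempy_labs as labs

    # Wraps "Custom Pools - List Workspace Custom Pools":
    pools = labs.list_custom_pools(workspace="Sales Workspace")

    # Wraps "Workspace Settings - Get Spark Settings":
    settings = labs.get_spark_settings(workspace="Sales Workspace")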
sempy_labs/_translations.py CHANGED
@@ -55,6 +55,8 @@ def translate_semantic_model(
          columns=["Object Type", "Name", "Description", "Display Folder"]
      )

+     icons.sll_tags.append("TranslateSemanticModel")
+
      with connect_semantic_model(
          dataset=dataset, readonly=False, workspace=workspace
      ) as tom: