semantic-link-labs 0.9.3__py3-none-any.whl → 0.9.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of semantic-link-labs might be problematic; see the release advisory for more details.

Files changed (68)
  1. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/METADATA +25 -6
  2. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/RECORD +68 -52
  3. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/WHEEL +1 -1
  4. sempy_labs/__init__.py +45 -4
  5. sempy_labs/_capacities.py +22 -127
  6. sempy_labs/_capacity_migration.py +11 -9
  7. sempy_labs/_dashboards.py +60 -0
  8. sempy_labs/_data_pipelines.py +5 -31
  9. sempy_labs/_dax.py +17 -3
  10. sempy_labs/_delta_analyzer.py +279 -127
  11. sempy_labs/_environments.py +20 -48
  12. sempy_labs/_eventhouses.py +69 -30
  13. sempy_labs/_eventstreams.py +16 -34
  14. sempy_labs/_gateways.py +4 -4
  15. sempy_labs/_generate_semantic_model.py +30 -10
  16. sempy_labs/_git.py +90 -1
  17. sempy_labs/_graphQL.py +3 -20
  18. sempy_labs/_helper_functions.py +201 -44
  19. sempy_labs/_job_scheduler.py +226 -2
  20. sempy_labs/_kql_databases.py +19 -34
  21. sempy_labs/_kql_querysets.py +15 -32
  22. sempy_labs/_list_functions.py +14 -133
  23. sempy_labs/_mirrored_databases.py +14 -48
  24. sempy_labs/_ml_experiments.py +5 -30
  25. sempy_labs/_ml_models.py +4 -28
  26. sempy_labs/_model_bpa.py +17 -0
  27. sempy_labs/_model_bpa_rules.py +12 -2
  28. sempy_labs/_mounted_data_factories.py +119 -0
  29. sempy_labs/_notebooks.py +16 -26
  30. sempy_labs/_semantic_models.py +117 -0
  31. sempy_labs/_sql.py +78 -10
  32. sempy_labs/_sqldatabase.py +227 -0
  33. sempy_labs/_utils.py +42 -0
  34. sempy_labs/_vertipaq.py +17 -2
  35. sempy_labs/_warehouses.py +5 -17
  36. sempy_labs/_workloads.py +23 -9
  37. sempy_labs/_workspaces.py +13 -5
  38. sempy_labs/admin/__init__.py +70 -9
  39. sempy_labs/admin/_activities.py +166 -0
  40. sempy_labs/admin/_apps.py +143 -0
  41. sempy_labs/admin/_artifacts.py +62 -0
  42. sempy_labs/admin/_basic_functions.py +32 -704
  43. sempy_labs/admin/_capacities.py +311 -0
  44. sempy_labs/admin/_datasets.py +184 -0
  45. sempy_labs/admin/_domains.py +1 -1
  46. sempy_labs/admin/_items.py +3 -1
  47. sempy_labs/admin/_reports.py +239 -0
  48. sempy_labs/admin/_scanner.py +0 -1
  49. sempy_labs/admin/_shared.py +76 -0
  50. sempy_labs/admin/_tenant.py +489 -0
  51. sempy_labs/admin/_users.py +133 -0
  52. sempy_labs/admin/_workspaces.py +148 -0
  53. sempy_labs/directlake/_dl_helper.py +0 -1
  54. sempy_labs/directlake/_update_directlake_partition_entity.py +14 -0
  55. sempy_labs/graph/_teams.py +1 -1
  56. sempy_labs/graph/_users.py +9 -1
  57. sempy_labs/lakehouse/__init__.py +2 -0
  58. sempy_labs/lakehouse/_lakehouse.py +6 -7
  59. sempy_labs/lakehouse/_shortcuts.py +216 -64
  60. sempy_labs/report/__init__.py +3 -1
  61. sempy_labs/report/_download_report.py +4 -1
  62. sempy_labs/report/_export_report.py +272 -0
  63. sempy_labs/report/_generate_report.py +9 -17
  64. sempy_labs/report/_report_bpa.py +12 -19
  65. sempy_labs/report/_report_functions.py +9 -261
  66. sempy_labs/tom/_model.py +307 -40
  67. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/LICENSE +0 -0
  68. {semantic_link_labs-0.9.3.dist-info → semantic_link_labs-0.9.5.dist-info}/top_level.txt +0 -0
@@ -30,6 +30,11 @@ def _build_url(url: str, params: dict) -> str:
30
30
  return url
31
31
 
32
32
 
33
+ def _encode_user(user: str) -> str:
34
+
35
+ return urllib.parse.quote(user, safe="@")
36
+
37
+
33
38
  def create_abfss_path(
34
39
  lakehouse_id: UUID,
35
40
  lakehouse_workspace_id: UUID,
@@ -130,14 +135,16 @@ def create_relationship_name(
130
135
  )
131
136
 
132
137
 
133
def resolve_report_id(
    report: str | UUID, workspace: Optional[str | UUID] = None
) -> UUID:
    """
    Obtains the ID of the Power BI report.

    Parameters
    ----------
    report : str | uuid.UUID
        The name or ID of the Power BI report.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    uuid.UUID
        The ID of the Power BI report.
    """

    # Delegation keeps name-vs-UUID handling in one place (resolve_item_id).
    report_id = resolve_item_id(item=report, type="Report", workspace=workspace)
    return report_id
153
160
 
154
161
 
155
162
  def resolve_report_name(report_id: UUID, workspace: Optional[str | UUID] = None) -> str:
@@ -176,6 +183,127 @@ def resolve_report_name(report_id: UUID, workspace: Optional[str | UUID] = None)
176
183
  )
177
184
 
178
185
 
186
def delete_item(
    item: str | UUID, type: str, workspace: Optional[str | UUID] = None
) -> None:
    """
    Deletes an item from a Fabric workspace.

    Parameters
    ----------
    item : str | uuid.UUID
        The name or ID of the item to be deleted.
    type : str
        The type of the item to be deleted.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    from sempy_labs._utils import item_types

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    (item_name, item_id) = resolve_item_name_and_id(item, type, workspace_id)

    # Friendly lower-case display name for the item type (first tuple element).
    item_type = item_types.get(type)[0].lower()

    fabric.delete_item(item_id=item_id, workspace=workspace_id)

    message = (
        f"{icons.green_dot} The '{item_name}' {item_type} has been successfully "
        f"deleted from the '{workspace_name}' workspace."
    )
    print(message)
215
+
216
+
217
def create_item(
    name: str,
    type: str,
    description: Optional[str] = None,
    definition: Optional[dict] = None,
    workspace: Optional[str | UUID] = None,
):
    """
    Creates an item in a Fabric workspace.

    Parameters
    ----------
    name : str
        The name of the item to be created.
    type : str
        The type of the item to be created.
    description : str, default=None
        A description of the item to be created.
    definition : dict, default=None
        The definition of the item to be created.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """
    from sempy_labs._utils import item_types

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    # item_types maps a type name to (display name, REST URL segment, ...).
    item_type = item_types.get(type)[0].lower()
    item_type_url = item_types.get(type)[1]

    payload = {
        "displayName": name,
    }
    # Optional fields are only sent when provided, per the Fabric REST contract.
    if description:
        payload["description"] = description
    if definition:
        payload["definition"] = definition

    _base_api(
        request=f"/v1/workspaces/{workspace_id}/{item_type_url}",
        method="post",
        payload=payload,
        status_codes=[201, 202],
        lro_return_status_code=True,
    )
    # Fix: message previously read "within the in the '{workspace_name}' workspace."
    print(
        f"{icons.green_dot} The '{name}' {item_type} has been successfully created within the '{workspace_name}' workspace."
    )
266
+
267
+
268
def get_item_definition(
    item: str | UUID,
    type: str,
    workspace: Optional[str | UUID] = None,
    format: Optional[str] = None,
    return_dataframe: bool = True,
    decode: bool = True,
):
    """
    Gets the definition of a Fabric item.

    Parameters
    ----------
    item : str | uuid.UUID
        The name or ID of the item.
    type : str
        The type of the item.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    format : str, default=None
        The output format requested from the getDefinition API, if any.
    return_dataframe : bool, default=True
        If True, returns a pandas DataFrame of all definition parts.
    decode : bool, default=True
        If True (and return_dataframe is False), base64-decodes the matching
        part's payload and parses it as JSON; otherwise returns the raw payload.

    Returns
    -------
    pandas.DataFrame | dict | str
        The definition parts as a DataFrame, the decoded JSON payload, or the
        raw base64 payload, depending on the flags above.
    """

    from sempy_labs._utils import item_types

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    item_id = resolve_item_id(item, type, workspace_id)
    item_type_url = item_types.get(type)[1]
    # The definition part path to extract for this item type.
    path = item_types.get(type)[2]

    url = f"/v1/workspaces/{workspace_id}/{item_type_url}/{item_id}/getDefinition"
    if format:
        url += f"?format={format}"

    result = _base_api(
        request=url,
        method="post",
        status_codes=None,
        lro_return_json=True,
    )

    if return_dataframe:
        return pd.json_normalize(result["definition"]["parts"])

    value = next(
        p.get("payload") for p in result["definition"]["parts"] if p.get("path") == path
    )
    if decode:
        # Fix: the decoded value was previously computed but not returned,
        # making decode=True silently return None.
        return json.loads(_decode_b64(value))
    else:
        return value
305
+
306
+
179
307
  def resolve_item_id(
180
308
  item: str | UUID, type: str, workspace: Optional[str] = None
181
309
  ) -> UUID:
@@ -268,14 +396,7 @@ def resolve_dataset_id(
268
396
  The ID of the semantic model.
269
397
  """
270
398
 
271
- if _is_valid_uuid(dataset):
272
- dataset_id = dataset
273
- else:
274
- dataset_id = fabric.resolve_item_id(
275
- item_name=dataset, type="SemanticModel", workspace=workspace
276
- )
277
-
278
- return dataset_id
399
+ return resolve_item_id(item=dataset, type="SemanticModel", workspace=workspace)
279
400
 
280
401
 
281
402
  def resolve_dataset_name(
@@ -512,7 +633,6 @@ def save_as_delta_table(
512
633
  or if no lakehouse attached, resolves to the workspace of the notebook.
513
634
  """
514
635
 
515
- from pyspark.sql import SparkSession
516
636
  from pyspark.sql.types import (
517
637
  StringType,
518
638
  IntegerType,
@@ -544,8 +664,6 @@ def save_as_delta_table(
544
664
  f"{icons.red_dot} Invalid 'delta_table_name'. Delta tables in the lakehouse cannot have spaces in their names."
545
665
  )
546
666
 
547
- dataframe.columns = [col.replace(" ", "_") for col in dataframe.columns]
548
-
549
667
  spark = _create_spark_session()
550
668
 
551
669
  type_mapping = {
@@ -563,6 +681,7 @@ def save_as_delta_table(
563
681
  }
564
682
 
565
683
  if isinstance(dataframe, pd.DataFrame):
684
+ dataframe.columns = [col.replace(" ", "_") for col in dataframe.columns]
566
685
  if schema is None:
567
686
  spark_df = spark.createDataFrame(dataframe)
568
687
  else:
@@ -574,6 +693,9 @@ def save_as_delta_table(
574
693
  )
575
694
  spark_df = spark.createDataFrame(dataframe, schema_map)
576
695
  else:
696
+ for col_name in dataframe.columns:
697
+ new_name = col_name.replace(" ", "_")
698
+ dataframe = dataframe.withColumnRenamed(col_name, new_name)
577
699
  spark_df = dataframe
578
700
 
579
701
  filePath = create_abfss_path(
@@ -644,7 +766,7 @@ def resolve_workspace_name_and_id(
644
766
 
645
767
  Returns
646
768
  -------
647
- str, str
769
+ str, uuid.UUID
648
770
  The name and ID of the Fabric workspace.
649
771
  """
650
772
 
@@ -909,14 +1031,14 @@ def resolve_capacity_name(capacity_id: Optional[UUID] = None) -> str:
909
1031
  return dfC_filt["Display Name"].iloc[0]
910
1032
 
911
1033
 
912
def resolve_capacity_id(capacity: Optional[str | UUID] = None, **kwargs) -> UUID:
    """
    Obtains the capacity Id for a given capacity name.

    Parameters
    ----------
    capacity : str | uuid.UUID, default=None
        The capacity name or ID.
        Defaults to None which resolves to the capacity id of the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the capacity name of the workspace of the notebook.

    Returns
    -------
    UUID
        The capacity Id.
    """

    # Backward-compatibility shim for the deprecated 'capacity_name' keyword.
    if "capacity_name" in kwargs:
        capacity = kwargs["capacity_name"]
        print(
            f"{icons.warning} The 'capacity_name' parameter is deprecated. Please use 'capacity' instead."
        )

    # Guard clauses: default capacity, then pass-through for an ID.
    if capacity is None:
        return get_capacity_id()
    if _is_valid_uuid(capacity):
        return capacity

    capacities = fabric.list_capacities()
    matches = capacities[capacities["Display Name"] == capacity]

    if matches.empty:
        raise ValueError(f"{icons.red_dot} The '{capacity}' capacity does not exist.")

    return matches["Id"].iloc[0]
941
1069
 
@@ -1098,12 +1226,7 @@ def resolve_warehouse_id(
1098
1226
  The warehouse Id.
1099
1227
  """
1100
1228
 
1101
- if _is_valid_uuid(warehouse):
1102
- return warehouse
1103
- else:
1104
- return fabric.resolve_item_id(
1105
- item_name=warehouse, type="Warehouse", workspace=workspace
1106
- )
1229
+ return resolve_item_id(item=warehouse, type="Warehouse", workspace=workspace)
1107
1230
 
1108
1231
 
1109
1232
  def get_language_codes(languages: str | List[str]):
@@ -1163,14 +1286,14 @@ def convert_to_alphanumeric_lowercase(input_string):
1163
1286
 
1164
1287
 
1165
1288
def resolve_environment_id(
    environment: str | UUID, workspace: Optional[str | UUID] = None
) -> UUID:
    """
    Obtains the environment Id for a given environment.

    Parameters
    ----------
    environment: str | uuid.UUID
        Name of the environment.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID in which the semantic model resides.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    uuid.UUID
        The environment Id.
    """

    # Name-vs-UUID resolution is centralised in resolve_item_id.
    environment_id = resolve_item_id(
        item=environment, type="Environment", workspace=workspace
    )
    return environment_id
1189
1310
 
1190
1311
 
1191
1312
  def _make_clickable(val):
@@ -1217,14 +1338,16 @@ def convert_to_friendly_case(text: str) -> str:
1217
1338
  return text
1218
1339
 
1219
1340
 
1220
- def resolve_notebook_id(notebook: str, workspace: Optional[str | UUID] = None) -> UUID:
1341
def resolve_notebook_id(
    notebook: str | UUID, workspace: Optional[str | UUID] = None
) -> UUID:
    """
    Obtains the notebook Id for a given notebook.

    Parameters
    ----------
    notebook: str | uuid.UUID
        Name or ID of the notebook.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID in which the semantic model resides.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    UUID
        The notebook Id.
    """

    # Name-vs-UUID resolution is centralised in resolve_item_id.
    notebook_id = resolve_item_id(item=notebook, type="Notebook", workspace=workspace)
    return notebook_id
1242
1363
 
1243
1364
 
1244
1365
  def generate_guid():
@@ -1524,9 +1645,15 @@ def _base_api(
1524
1645
  raise NotImplementedError
1525
1646
  else:
1526
1647
  headers = _get_headers(auth.token_provider.get(), audience=client)
1648
+ if client == "graph":
1649
+ url = f"https://graph.microsoft.com/v1.0/{request}"
1650
+ elif client == "azure":
1651
+ url = request
1652
+ else:
1653
+ raise NotImplementedError
1527
1654
  response = requests.request(
1528
1655
  method.upper(),
1529
- f"https://graph.microsoft.com/v1.0/{request}",
1656
+ url,
1530
1657
  headers=headers,
1531
1658
  json=payload,
1532
1659
  )
@@ -1637,3 +1764,33 @@ def _run_spark_sql_query(query):
1637
1764
  spark = _create_spark_session()
1638
1765
 
1639
1766
  return spark.sql(query)
1767
+
1768
+
1769
def _mount(lakehouse, workspace) -> str:
    """
    Mounts a lakehouse to a notebook if it is not already mounted. Returns the local path to the lakehouse.
    """

    import notebookutils

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace=workspace)
    (lakehouse_name, lakehouse_id) = resolve_lakehouse_name_and_id(
        lakehouse=lakehouse, workspace=workspace
    )

    lake_path = create_abfss_path(lakehouse_id, workspace_id)
    mount_point = f"/{workspace_name.replace(' ', '')}{lakehouse_name.replace(' ', '')}"

    already_mounted = any(
        entry.get("source") == lake_path for entry in notebookutils.fs.mounts()
    )
    if not already_mounted:
        notebookutils.fs.mount(lake_path, mount_point)
        print(
            f"{icons.green_dot} Mounted the '{lakehouse_name}' lakehouse within the '{workspace_name}' to the notebook."
        )

    # Re-read the mount table so a mount created above is included.
    local_path = next(
        entry.get("localPath")
        for entry in notebookutils.fs.mounts()
        if entry.get("source") == lake_path
    )

    return local_path
@@ -1,6 +1,6 @@
1
1
  from sempy._utils._log import log
2
2
  import pandas as pd
3
- from typing import Optional
3
+ from typing import Optional, List
4
4
  from sempy_labs._helper_functions import (
5
5
  resolve_workspace_name_and_id,
6
6
  resolve_item_name_and_id,
@@ -189,7 +189,7 @@ def run_on_demand_item_job(
189
189
  Parameters
190
190
  ----------
191
191
  item : str | uuid.UUID
192
- The item name or ID
192
+ The item name or ID.
193
193
  type : str, default=None
194
194
  The item `type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_. If specifying the item name as the item, the item type is required.
195
195
  job_type : str, default="DefaultJob"
@@ -213,3 +213,227 @@ def run_on_demand_item_job(
213
213
  )
214
214
 
215
215
  print(f"{icons.green_dot} The '{item_name}' {type.lower()} has been executed.")
216
+
217
+
218
def create_item_schedule_cron(
    item: str | UUID,
    type: str,
    start_date_time: str,
    end_date_time: str,
    local_time_zone: str,
    job_type: str = "DefaultJob",
    interval_minutes: int = 10,
    enabled: bool = True,
    workspace: Optional[str | UUID] = None,
):
    """
    Create a new schedule for an item based on a `chronological time <https://learn.microsoft.com/rest/api/fabric/core/job-scheduler/create-item-schedule?tabs=HTTP#cronscheduleconfig>`_.

    This is a wrapper function for the following API: `Job Scheduler - Create Item Schedule <https://learn.microsoft.com/rest/api/fabric/core/job-scheduler/create-item-schedule>`_.

    Parameters
    ----------
    item : str | uuid.UUID
        The item name or ID.
    type : str
        The item `type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_. If specifying the item name as the item, the item type is required.
    start_date_time: str
        The start date and time of the schedule. Example: "2024-04-28T00:00:00".
    end_date_time: str
        The end date and time of the schedule. Must be later than the start_date_time. Example: "2024-04-30T23:59:00".
    local_time_zone: str
        The `time zone <https://learn.microsoft.com/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11>`_ of the schedule. Example: "Central Standard Time".
    job_type : str, default="DefaultJob"
        The job type.
    interval_minutes: int, default=10
        The schedule interval (in minutes).
    enabled: bool, default=True
        Whether the schedule is enabled.
    workspace : str | uuid.UUID, default=None
        The workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    (item_name, item_id) = resolve_item_name_and_id(
        item=item, type=type, workspace=workspace
    )

    # Cron-style configuration: fire every `interval_minutes` within the window.
    configuration = {
        "startDateTime": start_date_time,
        "endDateTime": end_date_time,
        "localTimeZoneId": local_time_zone,
        "type": "Cron",
        "interval": interval_minutes,
    }

    _base_api(
        request=f"v1/workspaces/{workspace_id}/items/{item_id}/jobs/{job_type}/schedules",
        method="post",
        payload={"enabled": enabled, "configuration": configuration},
        status_codes=201,
    )

    print(
        f"{icons.green_dot} The schedule for the '{item_name}' {type.lower()} has been created."
    )
284
+
285
+
286
def create_item_schedule_daily(
    item: str | UUID,
    type: str,
    start_date_time: str,
    end_date_time: str,
    local_time_zone: str,
    times: List[str],
    job_type: str = "DefaultJob",
    enabled: bool = True,
    workspace: Optional[str | UUID] = None,
):
    """
    Create a new daily schedule for an item.

    This is a wrapper function for the following API: `Job Scheduler - Create Item Schedule <https://learn.microsoft.com/rest/api/fabric/core/job-scheduler/create-item-schedule>`_.

    Parameters
    ----------
    item : str | uuid.UUID
        The item name or ID.
    type : str
        The item `type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_. If specifying the item name as the item, the item type is required.
    start_date_time: str
        The start date and time of the schedule. Example: "2024-04-28T00:00:00".
    end_date_time: str
        The end date and time of the schedule. Must be later than the start_date_time. Example: "2024-04-30T23:59:00".
    local_time_zone: str
        The `time zone <https://learn.microsoft.com/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11>`_ of the schedule. Example: "Central Standard Time".
    times : List[str]
        A list of time slots in hh:mm format, at most 100 elements are allowed. Example: ["00:00", "12:00"].
    job_type : str, default="DefaultJob"
        The job type.
    enabled: bool, default=True
        Whether the schedule is enabled.
    workspace : str | uuid.UUID, default=None
        The workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    (item_name, item_id) = resolve_item_name_and_id(
        item=item, type=type, workspace=workspace
    )

    # Daily configuration: fire at each listed hh:mm slot within the window.
    configuration = {
        "startDateTime": start_date_time,
        "endDateTime": end_date_time,
        "localTimeZoneId": local_time_zone,
        "type": "Daily",
        "times": times,
    }

    _base_api(
        request=f"v1/workspaces/{workspace_id}/items/{item_id}/jobs/{job_type}/schedules",
        method="post",
        payload={"enabled": enabled, "configuration": configuration},
        status_codes=201,
    )

    print(
        f"{icons.green_dot} The schedule for the '{item_name}' {type.lower()} has been created."
    )
352
+
353
+
354
def create_item_schedule_weekly(
    item: str | UUID,
    type: str,
    start_date_time: str,
    end_date_time: str,
    local_time_zone: str,
    times: List[str],
    weekdays: List[str],
    job_type: str = "DefaultJob",
    enabled: bool = True,
    workspace: Optional[str | UUID] = None,
):
    """
    Create a new weekly schedule for an item.

    This is a wrapper function for the following API: `Job Scheduler - Create Item Schedule <https://learn.microsoft.com/rest/api/fabric/core/job-scheduler/create-item-schedule>`_.

    Parameters
    ----------
    item : str | uuid.UUID
        The item name or ID.
    type : str
        The item `type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_. If specifying the item name as the item, the item type is required.
    start_date_time: str
        The start date and time of the schedule. Example: "2024-04-28T00:00:00".
    end_date_time: str
        The end date and time of the schedule. Must be later than the start_date_time. Example: "2024-04-30T23:59:00".
    local_time_zone: str
        The `time zone <https://learn.microsoft.com/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11>`_ of the schedule. Example: "Central Standard Time".
    times : List[str]
        A list of time slots in hh:mm format, at most 100 elements are allowed. Example: ["00:00", "12:00"].
    weekdays : List[str]
        A list of weekdays. Example: ["Monday", "Tuesday"].
    job_type : str, default="DefaultJob"
        The job type.
    enabled: bool, default=True
        Whether the schedule is enabled.
    workspace : str | uuid.UUID, default=None
        The workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    (item_name, item_id) = resolve_item_name_and_id(
        item=item, type=type, workspace=workspace
    )

    # Normalize casing so "monday" etc. is accepted, then validate.
    weekdays = [w.capitalize() for w in weekdays]
    weekday_list = [
        "Sunday",
        "Monday",
        "Tuesday",
        "Wednesday",
        "Thursday",
        "Friday",
        "Saturday",
    ]
    for weekday in weekdays:
        if weekday not in weekday_list:
            raise ValueError(
                f"{icons.red_dot} Invalid weekday: {weekday}. Must be one of {weekday_list}."
            )

    payload = {
        "enabled": enabled,
        "configuration": {
            "startDateTime": start_date_time,
            "endDateTime": end_date_time,
            "localTimeZoneId": local_time_zone,
            "type": "Weekly",
            "times": times,
            "weekdays": weekdays,
        },
    }

    _base_api(
        request=f"v1/workspaces/{workspace_id}/items/{item_id}/jobs/{job_type}/schedules",
        method="post",
        payload=payload,
        status_codes=201,
    )

    print(
        f"{icons.green_dot} The schedule for the '{item_name}' {type.lower()} has been created."
    )