semantic-link-labs 0.12.8 (py3-none-any.whl)
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- semantic_link_labs-0.12.8.dist-info/METADATA +354 -0
- semantic_link_labs-0.12.8.dist-info/RECORD +243 -0
- semantic_link_labs-0.12.8.dist-info/WHEEL +5 -0
- semantic_link_labs-0.12.8.dist-info/licenses/LICENSE +21 -0
- semantic_link_labs-0.12.8.dist-info/top_level.txt +1 -0
- sempy_labs/__init__.py +606 -0
- sempy_labs/_a_lib_info.py +2 -0
- sempy_labs/_ai.py +437 -0
- sempy_labs/_authentication.py +264 -0
- sempy_labs/_bpa_translation/_model/_translations_am-ET.po +869 -0
- sempy_labs/_bpa_translation/_model/_translations_ar-AE.po +908 -0
- sempy_labs/_bpa_translation/_model/_translations_bg-BG.po +968 -0
- sempy_labs/_bpa_translation/_model/_translations_ca-ES.po +963 -0
- sempy_labs/_bpa_translation/_model/_translations_cs-CZ.po +943 -0
- sempy_labs/_bpa_translation/_model/_translations_da-DK.po +945 -0
- sempy_labs/_bpa_translation/_model/_translations_de-DE.po +988 -0
- sempy_labs/_bpa_translation/_model/_translations_el-GR.po +993 -0
- sempy_labs/_bpa_translation/_model/_translations_es-ES.po +971 -0
- sempy_labs/_bpa_translation/_model/_translations_fa-IR.po +933 -0
- sempy_labs/_bpa_translation/_model/_translations_fi-FI.po +942 -0
- sempy_labs/_bpa_translation/_model/_translations_fr-FR.po +994 -0
- sempy_labs/_bpa_translation/_model/_translations_ga-IE.po +967 -0
- sempy_labs/_bpa_translation/_model/_translations_he-IL.po +902 -0
- sempy_labs/_bpa_translation/_model/_translations_hi-IN.po +944 -0
- sempy_labs/_bpa_translation/_model/_translations_hu-HU.po +963 -0
- sempy_labs/_bpa_translation/_model/_translations_id-ID.po +946 -0
- sempy_labs/_bpa_translation/_model/_translations_is-IS.po +939 -0
- sempy_labs/_bpa_translation/_model/_translations_it-IT.po +986 -0
- sempy_labs/_bpa_translation/_model/_translations_ja-JP.po +846 -0
- sempy_labs/_bpa_translation/_model/_translations_ko-KR.po +839 -0
- sempy_labs/_bpa_translation/_model/_translations_mt-MT.po +967 -0
- sempy_labs/_bpa_translation/_model/_translations_nl-NL.po +978 -0
- sempy_labs/_bpa_translation/_model/_translations_pl-PL.po +962 -0
- sempy_labs/_bpa_translation/_model/_translations_pt-BR.po +962 -0
- sempy_labs/_bpa_translation/_model/_translations_pt-PT.po +957 -0
- sempy_labs/_bpa_translation/_model/_translations_ro-RO.po +968 -0
- sempy_labs/_bpa_translation/_model/_translations_ru-RU.po +964 -0
- sempy_labs/_bpa_translation/_model/_translations_sk-SK.po +952 -0
- sempy_labs/_bpa_translation/_model/_translations_sl-SL.po +950 -0
- sempy_labs/_bpa_translation/_model/_translations_sv-SE.po +942 -0
- sempy_labs/_bpa_translation/_model/_translations_ta-IN.po +976 -0
- sempy_labs/_bpa_translation/_model/_translations_te-IN.po +947 -0
- sempy_labs/_bpa_translation/_model/_translations_th-TH.po +924 -0
- sempy_labs/_bpa_translation/_model/_translations_tr-TR.po +953 -0
- sempy_labs/_bpa_translation/_model/_translations_uk-UA.po +961 -0
- sempy_labs/_bpa_translation/_model/_translations_zh-CN.po +804 -0
- sempy_labs/_bpa_translation/_model/_translations_zu-ZA.po +969 -0
- sempy_labs/_capacities.py +1198 -0
- sempy_labs/_capacity_migration.py +660 -0
- sempy_labs/_clear_cache.py +351 -0
- sempy_labs/_connections.py +610 -0
- sempy_labs/_dashboards.py +69 -0
- sempy_labs/_data_access_security.py +98 -0
- sempy_labs/_data_pipelines.py +162 -0
- sempy_labs/_dataflows.py +668 -0
- sempy_labs/_dax.py +501 -0
- sempy_labs/_daxformatter.py +80 -0
- sempy_labs/_delta_analyzer.py +467 -0
- sempy_labs/_delta_analyzer_history.py +301 -0
- sempy_labs/_dictionary_diffs.py +221 -0
- sempy_labs/_documentation.py +147 -0
- sempy_labs/_domains.py +51 -0
- sempy_labs/_eventhouses.py +182 -0
- sempy_labs/_external_data_shares.py +230 -0
- sempy_labs/_gateways.py +521 -0
- sempy_labs/_generate_semantic_model.py +521 -0
- sempy_labs/_get_connection_string.py +84 -0
- sempy_labs/_git.py +543 -0
- sempy_labs/_graphQL.py +90 -0
- sempy_labs/_helper_functions.py +2833 -0
- sempy_labs/_icons.py +149 -0
- sempy_labs/_job_scheduler.py +609 -0
- sempy_labs/_kql_databases.py +149 -0
- sempy_labs/_kql_querysets.py +124 -0
- sempy_labs/_kusto.py +137 -0
- sempy_labs/_labels.py +124 -0
- sempy_labs/_list_functions.py +1720 -0
- sempy_labs/_managed_private_endpoints.py +253 -0
- sempy_labs/_mirrored_databases.py +416 -0
- sempy_labs/_mirrored_warehouses.py +60 -0
- sempy_labs/_ml_experiments.py +113 -0
- sempy_labs/_model_auto_build.py +140 -0
- sempy_labs/_model_bpa.py +557 -0
- sempy_labs/_model_bpa_bulk.py +378 -0
- sempy_labs/_model_bpa_rules.py +859 -0
- sempy_labs/_model_dependencies.py +343 -0
- sempy_labs/_mounted_data_factories.py +123 -0
- sempy_labs/_notebooks.py +441 -0
- sempy_labs/_one_lake_integration.py +151 -0
- sempy_labs/_onelake.py +131 -0
- sempy_labs/_query_scale_out.py +433 -0
- sempy_labs/_refresh_semantic_model.py +435 -0
- sempy_labs/_semantic_models.py +468 -0
- sempy_labs/_spark.py +455 -0
- sempy_labs/_sql.py +241 -0
- sempy_labs/_sql_audit_settings.py +207 -0
- sempy_labs/_sql_endpoints.py +214 -0
- sempy_labs/_tags.py +201 -0
- sempy_labs/_translations.py +43 -0
- sempy_labs/_user_delegation_key.py +44 -0
- sempy_labs/_utils.py +79 -0
- sempy_labs/_vertipaq.py +1021 -0
- sempy_labs/_vpax.py +388 -0
- sempy_labs/_warehouses.py +234 -0
- sempy_labs/_workloads.py +140 -0
- sempy_labs/_workspace_identity.py +72 -0
- sempy_labs/_workspaces.py +595 -0
- sempy_labs/admin/__init__.py +170 -0
- sempy_labs/admin/_activities.py +167 -0
- sempy_labs/admin/_apps.py +145 -0
- sempy_labs/admin/_artifacts.py +65 -0
- sempy_labs/admin/_basic_functions.py +463 -0
- sempy_labs/admin/_capacities.py +508 -0
- sempy_labs/admin/_dataflows.py +45 -0
- sempy_labs/admin/_datasets.py +186 -0
- sempy_labs/admin/_domains.py +522 -0
- sempy_labs/admin/_external_data_share.py +100 -0
- sempy_labs/admin/_git.py +72 -0
- sempy_labs/admin/_items.py +265 -0
- sempy_labs/admin/_labels.py +211 -0
- sempy_labs/admin/_reports.py +241 -0
- sempy_labs/admin/_scanner.py +118 -0
- sempy_labs/admin/_shared.py +82 -0
- sempy_labs/admin/_sharing_links.py +110 -0
- sempy_labs/admin/_tags.py +131 -0
- sempy_labs/admin/_tenant.py +503 -0
- sempy_labs/admin/_tenant_keys.py +89 -0
- sempy_labs/admin/_users.py +140 -0
- sempy_labs/admin/_workspaces.py +236 -0
- sempy_labs/deployment_pipeline/__init__.py +23 -0
- sempy_labs/deployment_pipeline/_items.py +580 -0
- sempy_labs/directlake/__init__.py +57 -0
- sempy_labs/directlake/_autosync.py +58 -0
- sempy_labs/directlake/_directlake_schema_compare.py +120 -0
- sempy_labs/directlake/_directlake_schema_sync.py +161 -0
- sempy_labs/directlake/_dl_helper.py +274 -0
- sempy_labs/directlake/_generate_shared_expression.py +94 -0
- sempy_labs/directlake/_get_directlake_lakehouse.py +62 -0
- sempy_labs/directlake/_get_shared_expression.py +34 -0
- sempy_labs/directlake/_guardrails.py +96 -0
- sempy_labs/directlake/_list_directlake_model_calc_tables.py +70 -0
- sempy_labs/directlake/_show_unsupported_directlake_objects.py +90 -0
- sempy_labs/directlake/_update_directlake_model_lakehouse_connection.py +239 -0
- sempy_labs/directlake/_update_directlake_partition_entity.py +259 -0
- sempy_labs/directlake/_warm_cache.py +236 -0
- sempy_labs/dotnet_lib/dotnet.runtime.config.json +10 -0
- sempy_labs/environment/__init__.py +23 -0
- sempy_labs/environment/_items.py +212 -0
- sempy_labs/environment/_pubstage.py +223 -0
- sempy_labs/eventstream/__init__.py +37 -0
- sempy_labs/eventstream/_items.py +263 -0
- sempy_labs/eventstream/_topology.py +652 -0
- sempy_labs/graph/__init__.py +59 -0
- sempy_labs/graph/_groups.py +651 -0
- sempy_labs/graph/_sensitivity_labels.py +120 -0
- sempy_labs/graph/_teams.py +125 -0
- sempy_labs/graph/_user_licenses.py +96 -0
- sempy_labs/graph/_users.py +516 -0
- sempy_labs/graph_model/__init__.py +15 -0
- sempy_labs/graph_model/_background_jobs.py +63 -0
- sempy_labs/graph_model/_items.py +149 -0
- sempy_labs/lakehouse/__init__.py +67 -0
- sempy_labs/lakehouse/_blobs.py +247 -0
- sempy_labs/lakehouse/_get_lakehouse_columns.py +102 -0
- sempy_labs/lakehouse/_get_lakehouse_tables.py +274 -0
- sempy_labs/lakehouse/_helper.py +250 -0
- sempy_labs/lakehouse/_lakehouse.py +351 -0
- sempy_labs/lakehouse/_livy_sessions.py +143 -0
- sempy_labs/lakehouse/_materialized_lake_views.py +157 -0
- sempy_labs/lakehouse/_partitioning.py +165 -0
- sempy_labs/lakehouse/_schemas.py +217 -0
- sempy_labs/lakehouse/_shortcuts.py +440 -0
- sempy_labs/migration/__init__.py +35 -0
- sempy_labs/migration/_create_pqt_file.py +238 -0
- sempy_labs/migration/_direct_lake_to_import.py +105 -0
- sempy_labs/migration/_migrate_calctables_to_lakehouse.py +398 -0
- sempy_labs/migration/_migrate_calctables_to_semantic_model.py +148 -0
- sempy_labs/migration/_migrate_model_objects_to_semantic_model.py +533 -0
- sempy_labs/migration/_migrate_tables_columns_to_semantic_model.py +172 -0
- sempy_labs/migration/_migration_validation.py +71 -0
- sempy_labs/migration/_refresh_calc_tables.py +131 -0
- sempy_labs/mirrored_azure_databricks_catalog/__init__.py +15 -0
- sempy_labs/mirrored_azure_databricks_catalog/_discover.py +213 -0
- sempy_labs/mirrored_azure_databricks_catalog/_refresh_catalog_metadata.py +45 -0
- sempy_labs/ml_model/__init__.py +23 -0
- sempy_labs/ml_model/_functions.py +427 -0
- sempy_labs/report/_BPAReportTemplate.json +232 -0
- sempy_labs/report/__init__.py +55 -0
- sempy_labs/report/_bpareporttemplate/.pbi/localSettings.json +9 -0
- sempy_labs/report/_bpareporttemplate/.platform +11 -0
- sempy_labs/report/_bpareporttemplate/StaticResources/SharedResources/BaseThemes/CY24SU06.json +710 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/1b08bce3bebabb0a27a8/visual.json +191 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/2f22ddb70c301693c165/visual.json +438 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/3b1182230aa6c600b43a/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/58577ba6380c69891500/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/a2a8fa5028b3b776c96c/visual.json +207 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/adfd47ef30652707b987/visual.json +506 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/b6a80ee459e716e170b1/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/01d72098bda5055bd500/visuals/ce3130a721c020cc3d81/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/page.json +8 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/92735ae19b31712208ad/visuals/66e60dfb526437cd78d1/visual.json +112 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/page.json +11 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/07deb8bce824e1be37d7/visual.json +513 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0b1c68838818b32ad03b/visual.json +352 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0c171de9d2683d10b930/visual.json +37 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/0efa01be0510e40a645e/visual.json +542 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/6bf2f0eb830ab53cc668/visual.json +221 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/88d8141cb8500b60030c/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/a753273590beed656a03/visual.json +576 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/c597da16dc7e63222a82/visuals/b8fdc82cddd61ac447bc/visual.json +127 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/page.json +9 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/d37dce724a0ccc30044b/visuals/ce8532a7e25020271077/visual.json +38 -0
- sempy_labs/report/_bpareporttemplate/definition/pages/pages.json +10 -0
- sempy_labs/report/_bpareporttemplate/definition/report.json +176 -0
- sempy_labs/report/_bpareporttemplate/definition/version.json +4 -0
- sempy_labs/report/_bpareporttemplate/definition.pbir +14 -0
- sempy_labs/report/_download_report.py +76 -0
- sempy_labs/report/_export_report.py +257 -0
- sempy_labs/report/_generate_report.py +427 -0
- sempy_labs/report/_paginated.py +76 -0
- sempy_labs/report/_report_bpa.py +354 -0
- sempy_labs/report/_report_bpa_rules.py +115 -0
- sempy_labs/report/_report_functions.py +581 -0
- sempy_labs/report/_report_helper.py +227 -0
- sempy_labs/report/_report_list_functions.py +110 -0
- sempy_labs/report/_report_rebind.py +149 -0
- sempy_labs/report/_reportwrapper.py +3100 -0
- sempy_labs/report/_save_report.py +147 -0
- sempy_labs/snowflake_database/__init__.py +10 -0
- sempy_labs/snowflake_database/_items.py +105 -0
- sempy_labs/sql_database/__init__.py +21 -0
- sempy_labs/sql_database/_items.py +201 -0
- sempy_labs/sql_database/_mirroring.py +79 -0
- sempy_labs/theme/__init__.py +12 -0
- sempy_labs/theme/_org_themes.py +129 -0
- sempy_labs/tom/__init__.py +3 -0
- sempy_labs/tom/_model.py +5977 -0
- sempy_labs/variable_library/__init__.py +19 -0
- sempy_labs/variable_library/_functions.py +403 -0
- sempy_labs/warehouse/__init__.py +28 -0
- sempy_labs/warehouse/_items.py +234 -0
- sempy_labs/warehouse/_restore_points.py +309 -0
sempy_labs/_job_scheduler.py
@@ -0,0 +1,609 @@
from sempy._utils._log import log
import pandas as pd
from typing import Optional, List
from sempy_labs._helper_functions import (
    resolve_workspace_name_and_id,
    resolve_item_name_and_id,
    _update_dataframe_datatypes,
    _base_api,
    _create_dataframe,
    resolve_workspace_id,
    resolve_item_id,
)
from uuid import UUID
import sempy_labs._icons as icons
import time

@log
def list_item_job_instances(
    item: str | UUID, type: Optional[str] = None, workspace: Optional[str | UUID] = None
) -> pd.DataFrame:
    """
    Returns a list of job instances for the specified item.

    This is a wrapper function for the following API: `Job Scheduler - List Item Job Instances <https://learn.microsoft.com/rest/api/fabric/core/job-scheduler/list-item-job-instances>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    item : str | uuid.UUID
        The item name or ID.
    type : str, default=None
        The item `type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_. If specifying the item name as the item, the item type is required.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID used by the lakehouse.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        Shows a list of job instances for the specified item.
    """

    workspace_id = resolve_workspace_id(workspace)
    (item_name, item_id) = resolve_item_name_and_id(
        item=item, type=type, workspace=workspace_id
    )

    columns = {
        "Job Instance Id": "string",
        "Item Name": "string",
        "Item Id": "string",
        "Item Type": "string",
        "Job Type": "string",
        "Invoke Type": "string",
        "Status": "string",
        "Root Activity Id": "string",
        "Start Time UTC": "datetime",
        "End Time UTC": "string",
        "Error Message": "string",
    }
    df = _create_dataframe(columns=columns)

    responses = _base_api(
        request=f"v1/workspaces/{workspace_id}/items/{item_id}/jobs/instances",
        uses_pagination=True,
        client="fabric_sp",
    )

    if not responses[0].get("value"):
        return df

    rows = []
    for r in responses:
        for v in r.get("value", []):
            fail = v.get("failureReason", {})
            rows.append(
                {
                    "Job Instance Id": v.get("id"),
                    "Item Name": item_name,
                    "Item Id": v.get("itemId"),
                    "Item Type": type,
                    "Job Type": v.get("jobType"),
                    "Invoke Type": v.get("invokeType"),
                    "Status": v.get("status"),
                    "Root Activity Id": v.get("rootActivityId"),
                    "Start Time UTC": v.get("startTimeUtc"),
                    "End Time UTC": v.get("endTimeUtc"),
                    "Error Message": fail.get("message") if fail is not None else "",
                }
            )

    if rows:
        df = pd.DataFrame(rows, columns=list(columns.keys()))
        _update_dataframe_datatypes(dataframe=df, column_map=columns)

    return df

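For illustration, a minimal usage sketch (editorial, not part of the package diff): the item and workspace names below are hypothetical, and the import assumes the function is re-exported from the top-level package, as the 606-line sempy_labs/__init__.py in the manifest above suggests.

import sempy_labs as labs

# Hypothetical item and workspace names; "Notebook" is a documented Fabric item type.
jobs = labs.list_item_job_instances(
    item="Sales ETL",
    type="Notebook",
    workspace="Analytics",
)
print(jobs[["Job Instance Id", "Job Type", "Status", "Start Time UTC"]])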
@log
def _get_item_job_instance(url: str) -> pd.DataFrame:

    columns = {
        "Job Instance Id": "string",
        "Item Id": "string",
        "Job Type": "string",
        "Invoke Type": "string",
        "Status": "string",
        "Root Activity Id": "string",
        "Start Time UTC": "datetime",
        "End Time UTC": "string",
        "Error Message": "string",
    }
    df = _create_dataframe(columns=columns)

    response = _base_api(request=url, client="fabric_sp")

    rows = []
    v = response.json()
    fail = v.get("failureReason", {})
    rows.append(
        {
            "Job Instance Id": v.get("id"),
            "Item Id": v.get("itemId"),
            "Job Type": v.get("jobType"),
            "Invoke Type": v.get("invokeType"),
            "Status": v.get("status"),
            "Root Activity Id": v.get("rootActivityId"),
            "Start Time UTC": v.get("startTimeUtc"),
            "End Time UTC": v.get("endTimeUtc"),
            "Error Message": fail.get("message") if fail is not None else "",
        }
    )

    if rows:
        df = pd.DataFrame(rows, columns=list(columns.keys()))
        _update_dataframe_datatypes(dataframe=df, column_map=columns)

    return df

@log
def list_item_schedules(
    item: str | UUID,
    type: Optional[str] = None,
    job_type: str = "DefaultJob",
    workspace: Optional[str | UUID] = None,
) -> pd.DataFrame:
    """
    Get scheduling settings for one specific item.

    This is a wrapper function for the following API: `Job Scheduler - List Item Schedules <https://learn.microsoft.com/rest/api/fabric/core/job-scheduler/list-item-schedules>`_.

    Parameters
    ----------
    item : str | uuid.UUID
        The item name or ID.
    type : str, default=None
        The item `type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_. If specifying the item name as the item, the item type is required.
    job_type : str, default="DefaultJob"
        The job type.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID used by the lakehouse.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.

    Returns
    -------
    pandas.DataFrame
        Shows a list of scheduling settings for one specific item.
    """

    workspace_id = resolve_workspace_id(workspace)
    item_id = resolve_item_id(item=item, type=type, workspace=workspace_id)

    base_columns = {
        "Job Schedule Id": "string",
        "Enabled": "bool",
        "Created Date Time": "datetime",
        "Start Date Time": "datetime",
        "End Date Time": "string",
        "Local Time Zone Id": "string",
        "Type": "string",
        "Owner Id": "string",
        "Owner Type": "string",
    }

    optional_columns = {
        "Occurrence Day of Month": "int_fillna",
        "Occurrence Week Index": "string",
        "Occurrence Weekday": "string",
        "Occurrence Type": "string",
        "Interval": "int_fillna",
        "Times": "string",
        "Recurrence": "int_fillna",
        "Weekdays": "string",
    }

    response = _base_api(
        request=f"v1/workspaces/{workspace_id}/items/{item_id}/jobs/{job_type}/schedules",
        client="fabric_sp",
    )

    rows = []
    for v in response.json().get("value", []):
        config = v.get("configuration", {})
        own = v.get("owner", {})
        occurrence = config.get("occurrence", {})
        type = config.get("type")

        row = {
            "Job Schedule Id": v.get("id"),
            "Enabled": v.get("enabled"),
            "Created Date Time": v.get("createdDateTime"),
            "Start Date Time": config.get("startDateTime"),
            "End Date Time": config.get("endDateTime"),
            "Local Time Zone Id": config.get("localTimeZoneId"),
            "Type": type,
            "Owner Id": own.get("id"),
            "Owner Type": own.get("type"),
        }

        if type == "Cron":
            row["Interval"] = config.get("interval")
        elif type == "Daily":
            row["Times"] = config.get("times")
        elif type == "Weekly":
            row["Times"] = config.get("times")
            row["Weekdays"] = config.get("weekdays")
        elif type == "Monthly":
            occurrence_type = occurrence.get("occurrenceType")
            row["Times"] = config.get("times")
            row["Recurrence"] = config.get("recurrence")
            row["Occurrence Type"] = occurrence_type

            if occurrence_type == "OrdinalWeekday":
                row["Occurrence Week Index"] = occurrence.get("weekIndex")
                row["Occurrence Weekday"] = occurrence.get("weekday")
            elif occurrence_type == "DayOfMonth":
                row["Occurrence Day of Month"] = occurrence.get("dayOfMonth")

        rows.append(row)

    # Build final column map based on what was actually present
    columns = base_columns.copy()

    if rows:
        # Find which optional columns were actually included in rows
        all_used_columns = set().union(*(r.keys() for r in rows))
        for col in all_used_columns:
            if col in optional_columns:
                columns[col] = optional_columns[col]

    df = pd.DataFrame(rows, columns=list(columns.keys()))
    _update_dataframe_datatypes(dataframe=df, column_map=columns)

    return df

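A sketch along the same lines (hypothetical names, top-level re-export assumed): the optional occurrence columns only show up when a returned schedule's configuration type populates them.

import sempy_labs as labs

# With the default job_type of "DefaultJob"; columns such as Times, Weekdays or
# Interval appear only for the matching schedule types (Daily/Weekly/Cron/Monthly).
schedules = labs.list_item_schedules(
    item="Sales ETL",
    type="Notebook",
    workspace="Analytics",
)
print(schedules[["Job Schedule Id", "Enabled", "Type", "Local Time Zone Id"]])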
@log
def run_on_demand_item_job(
    item: str | UUID,
    type: Optional[str] = None,
    job_type: str = "DefaultJob",
    workspace: Optional[str | UUID] = None,
):
    """
    Run on-demand item job instance.

    This is a wrapper function for the following API: `Job Scheduler - Run On Demand Item Job <https://learn.microsoft.com/rest/api/fabric/core/job-scheduler/run-on-demand-item-job>`_.

    Parameters
    ----------
    item : str | uuid.UUID
        The item name or ID.
    type : str, default=None
        The item `type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_. If specifying the item name as the item, the item type is required.
    job_type : str, default="DefaultJob"
        The job type.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID used by the lakehouse.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    (item_name, item_id) = resolve_item_name_and_id(
        item=item, type=type, workspace=workspace
    )

    _base_api(
        request=f"v1/workspaces/{workspace_id}/items/{item_id}/jobs/instances?jobType={job_type}",
        method="post",
        lro_return_status_code=True,
        status_codes=202,
        client="fabric_sp",
    )

    print(f"{icons.green_dot} The '{item_name}' {type.lower()} has been executed.")

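A sketch of triggering a run and checking on it afterwards (hypothetical names, top-level re-export assumed); the wrapper accepts a 202 status code, so the job runs asynchronously.

import sempy_labs as labs

# Kick off the default job for a hypothetical notebook; the service accepts the
# request with 202 and the run proceeds in the background.
labs.run_on_demand_item_job(item="Sales ETL", type="Notebook", workspace="Analytics")

# Poll the instance list to watch the new run's status.
runs = labs.list_item_job_instances(item="Sales ETL", type="Notebook", workspace="Analytics")
print(runs[["Job Instance Id", "Status"]].head())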
@log
def create_item_schedule_cron(
    item: str | UUID,
    type: str,
    start_date_time: str,
    end_date_time: str,
    local_time_zone: str,
    job_type: str = "DefaultJob",
    interval_minutes: int = 10,
    enabled: bool = True,
    workspace: Optional[str | UUID] = None,
):
    """
    Create a new schedule for an item based on a `chronological time <https://learn.microsoft.com/rest/api/fabric/core/job-scheduler/create-item-schedule?tabs=HTTP#cronscheduleconfig>`_.

    This is a wrapper function for the following API: `Job Scheduler - Create Item Schedule <https://learn.microsoft.com/rest/api/fabric/core/job-scheduler/create-item-schedule>`_.

    Parameters
    ----------
    item : str | uuid.UUID
        The item name or ID.
    type : str
        The item `type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_. If specifying the item name as the item, the item type is required.
    start_date_time: str
        The start date and time of the schedule. Example: "2024-04-28T00:00:00".
    end_date_time: str
        The end date and time of the schedule. Must be later than the start_date_time. Example: "2024-04-30T23:59:00".
    local_time_zone: str
        The `time zone <https://learn.microsoft.com/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11>`_ of the schedule. Example: "Central Standard Time".
    job_type : str, default="DefaultJob"
        The job type.
    interval_minutes: int, default=10
        The schedule interval (in minutes).
    enabled: bool, default=True
        Whether the schedule is enabled.
    workspace : str | uuid.UUID, default=None
        The workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    (item_name, item_id) = resolve_item_name_and_id(
        item=item, type=type, workspace=workspace
    )

    payload = {
        "enabled": enabled,
        "configuration": {
            "startDateTime": start_date_time,
            "endDateTime": end_date_time,
            "localTimeZoneId": local_time_zone,
            "type": "Cron",
            "interval": interval_minutes,
        },
    }

    _base_api(
        request=f"v1/workspaces/{workspace_id}/items/{item_id}/jobs/{job_type}/schedules",
        method="post",
        payload=payload,
        status_codes=201,
        client="fabric_sp",
    )

    print(
        f"{icons.green_dot} The schedule for the '{item_name}' {type.lower()} has been created."
    )

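A usage sketch for the cron-style schedule, reusing the docstring's own example values (the item and workspace names are hypothetical):

import sempy_labs as labs

# Run a hypothetical notebook every 30 minutes within the given window.
labs.create_item_schedule_cron(
    item="Sales ETL",
    type="Notebook",
    start_date_time="2024-04-28T00:00:00",
    end_date_time="2024-04-30T23:59:00",
    local_time_zone="Central Standard Time",
    interval_minutes=30,
    workspace="Analytics",
)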
@log
def create_item_schedule_daily(
    item: str | UUID,
    type: str,
    start_date_time: str,
    end_date_time: str,
    local_time_zone: str,
    times: List[str],
    job_type: str = "DefaultJob",
    enabled: bool = True,
    workspace: Optional[str | UUID] = None,
):
    """
    Create a new daily schedule for an item.

    This is a wrapper function for the following API: `Job Scheduler - Create Item Schedule <https://learn.microsoft.com/rest/api/fabric/core/job-scheduler/create-item-schedule>`_.

    Parameters
    ----------
    item : str | uuid.UUID
        The item name or ID.
    type : str
        The item `type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_. If specifying the item name as the item, the item type is required.
    start_date_time: str
        The start date and time of the schedule. Example: "2024-04-28T00:00:00".
    end_date_time: str
        The end date and time of the schedule. Must be later than the start_date_time. Example: "2024-04-30T23:59:00".
    local_time_zone: str
        The `time zone <https://learn.microsoft.com/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11>`_ of the schedule. Example: "Central Standard Time".
    times : List[str]
        A list of time slots in hh:mm format, at most 100 elements are allowed. Example: ["00:00", "12:00"].
    job_type : str, default="DefaultJob"
        The job type.
    enabled: bool, default=True
        Whether the schedule is enabled.
    workspace : str | uuid.UUID, default=None
        The workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    (workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
    (item_name, item_id) = resolve_item_name_and_id(
        item=item, type=type, workspace=workspace
    )

    payload = {
        "enabled": enabled,
        "configuration": {
            "startDateTime": start_date_time,
            "endDateTime": end_date_time,
            "localTimeZoneId": local_time_zone,
            "type": "Daily",
            "times": times,
        },
    }

    _base_api(
        request=f"v1/workspaces/{workspace_id}/items/{item_id}/jobs/{job_type}/schedules",
        method="post",
        payload=payload,
        status_codes=201,
        client="fabric_sp",
    )

    print(
        f"{icons.green_dot} The schedule for the '{item_name}' {type.lower()} has been created."
    )

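The daily variant, again sketched with the docstring's example values and hypothetical names:

import sempy_labs as labs

# Two daily runs, at midnight and noon, for a hypothetical notebook.
labs.create_item_schedule_daily(
    item="Sales ETL",
    type="Notebook",
    start_date_time="2024-04-28T00:00:00",
    end_date_time="2024-04-30T23:59:00",
    local_time_zone="Central Standard Time",
    times=["00:00", "12:00"],
    workspace="Analytics",
)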
@log
def create_item_schedule_weekly(
    item: str | UUID,
    type: str,
    start_date_time: str,
    end_date_time: str,
    local_time_zone: str,
    times: List[str],
    weekdays: List[str],
    job_type: str = "DefaultJob",
    enabled: bool = True,
    workspace: Optional[str | UUID] = None,
):
    """
    Create a new weekly schedule for an item.

    This is a wrapper function for the following API: `Job Scheduler - Create Item Schedule <https://learn.microsoft.com/rest/api/fabric/core/job-scheduler/create-item-schedule>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    item : str | uuid.UUID
        The item name or ID.
    type : str
        The item `type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_. If specifying the item name as the item, the item type is required.
    start_date_time: str
        The start date and time of the schedule. Example: "2024-04-28T00:00:00".
    end_date_time: str
        The end date and time of the schedule. Must be later than the start_date_time. Example: "2024-04-30T23:59:00".
    local_time_zone: str
        The `time zone <https://learn.microsoft.com/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11>`_ of the schedule. Example: "Central Standard Time".
    times : List[str]
        A list of time slots in hh:mm format, at most 100 elements are allowed. Example: ["00:00", "12:00"].
    weekdays : List[str]
        A list of weekdays. Example: ["Monday", "Tuesday"].
    job_type : str, default="DefaultJob"
        The job type.
    enabled: bool, default=True
        Whether the schedule is enabled.
    workspace : str | uuid.UUID, default=None
        The workspace name or ID.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    workspace_id = resolve_workspace_id(workspace)
    (item_name, item_id) = resolve_item_name_and_id(
        item=item, type=type, workspace=workspace
    )

    weekdays = [w.capitalize() for w in weekdays]
    weekday_list = [
        "Sunday",
        "Monday",
        "Tuesday",
        "Wednesday",
        "Thursday",
        "Friday",
        "Saturday",
    ]
    for weekday in weekdays:
        if weekday not in weekday_list:
            raise ValueError(
                f"{icons.red_dot} Invalid weekday: {weekday}. Must be one of {weekday_list}."
            )

    payload = {
        "enabled": enabled,
        "configuration": {
            "startDateTime": start_date_time,
            "endDateTime": end_date_time,
            "localTimeZoneId": local_time_zone,
            "type": "Weekly",
            "times": times,
            "weekdays": weekdays,
        },
    }

    _base_api(
        request=f"v1/workspaces/{workspace_id}/items/{item_id}/jobs/{job_type}/schedules",
        method="post",
        payload=payload,
        status_codes=201,
        client="fabric_sp",
    )

    print(
        f"{icons.green_dot} The schedule for the '{item_name}' {type.lower()} has been created."
    )

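A weekly-schedule sketch (hypothetical names): lowercase weekday names are accepted because the wrapper capitalizes each entry before validating it against the weekday list.

import sempy_labs as labs

# Weekday-only runs at 06:00; "monday" etc. pass validation since the
# wrapper calls str.capitalize() on each weekday first.
labs.create_item_schedule_weekly(
    item="Sales ETL",
    type="Notebook",
    start_date_time="2024-04-28T00:00:00",
    end_date_time="2024-04-30T23:59:00",
    local_time_zone="Central Standard Time",
    times=["06:00"],
    weekdays=["monday", "tuesday", "wednesday", "thursday", "friday"],
    workspace="Analytics",
)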
@log
def cancel_item_job_instance(
    item: str | UUID,
    job_instance_id: UUID,
    type: str,
    workspace: Optional[str | UUID] = None,
):
    """
    Cancel an item's job instance.

    This is a wrapper function for the following API: `Job Scheduler - Cancel Item Job Instance <https://learn.microsoft.com/rest/api/fabric/core/job-scheduler/cancel-item-job-instance>`_.

    Service Principal Authentication is supported (see `here <https://github.com/microsoft/semantic-link-labs/blob/main/notebooks/Service%20Principal.ipynb>`_ for examples).

    Parameters
    ----------
    item : str | uuid.UUID
        The item name or ID.
    job_instance_id : uuid.UUID
        The job instance ID to cancel.
    type : str
        The item `type <https://learn.microsoft.com/rest/api/fabric/core/items/list-items?tabs=HTTP#itemtype>`_. If specifying the item name as the item, the item type is required.
    workspace : str | uuid.UUID, default=None
        The Fabric workspace name or ID used by the lakehouse.
        Defaults to None which resolves to the workspace of the attached lakehouse
        or if no lakehouse attached, resolves to the workspace of the notebook.
    """

    workspace_id = resolve_workspace_id(workspace)
    (item_name, item_id) = resolve_item_name_and_id(
        item=item, type=type, workspace=workspace
    )

    response = _base_api(
        request=f"v1/workspaces/{workspace_id}/items/{item_id}/jobs/instances/{job_instance_id}",
        client="fabric_sp",
    )
    current_status = response.json().get("status")

    if current_status not in ["NotStarted", "InProgress"]:
        print(
            f"{icons.info} The job instance '{job_instance_id}' for the '{item_name}' {type.lower()} is in status '{current_status}' and cannot be cancelled."
        )
        return
    else:
        # Cancel the job instance
        response = _base_api(
            request=f"v1/workspaces/{workspace_id}/items/{item_id}/jobs/instances/{job_instance_id}/cancel",
            method="post",
            status_codes=202,
            client="fabric_sp",
        )

        status_url = response.headers.get("Location").split("fabric.microsoft.com")[1]
        status = None
        while status not in ["Completed", "Failed", "Cancelled"]:
            response = _base_api(request=status_url)
            status = response.json().get("status")
            time.sleep(3)

        if status == "Cancelled":
            print(
                f"{icons.green_dot} The job instance '{job_instance_id}' for the '{item_name}' {type.lower()} has been cancelled."
            )
            return
        elif status == "Failed":
            print(
                f"{icons.info} The job instance '{job_instance_id}' for the '{item_name}' {type.lower()} could not be cancelled and has failed."
            )
            return
        elif status == "Completed":
            print(
                f"{icons.info} The job instance '{job_instance_id}' for the '{item_name}' {type.lower()} has already completed before it could be cancelled."
            )
            return