pyworkflow_engine-0.1.7-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dashboard/backend/app/__init__.py +1 -0
- dashboard/backend/app/config.py +32 -0
- dashboard/backend/app/controllers/__init__.py +6 -0
- dashboard/backend/app/controllers/run_controller.py +86 -0
- dashboard/backend/app/controllers/workflow_controller.py +33 -0
- dashboard/backend/app/dependencies/__init__.py +5 -0
- dashboard/backend/app/dependencies/storage.py +50 -0
- dashboard/backend/app/repositories/__init__.py +6 -0
- dashboard/backend/app/repositories/run_repository.py +80 -0
- dashboard/backend/app/repositories/workflow_repository.py +27 -0
- dashboard/backend/app/rest/__init__.py +8 -0
- dashboard/backend/app/rest/v1/__init__.py +12 -0
- dashboard/backend/app/rest/v1/health.py +33 -0
- dashboard/backend/app/rest/v1/runs.py +133 -0
- dashboard/backend/app/rest/v1/workflows.py +41 -0
- dashboard/backend/app/schemas/__init__.py +23 -0
- dashboard/backend/app/schemas/common.py +16 -0
- dashboard/backend/app/schemas/event.py +24 -0
- dashboard/backend/app/schemas/hook.py +25 -0
- dashboard/backend/app/schemas/run.py +54 -0
- dashboard/backend/app/schemas/step.py +28 -0
- dashboard/backend/app/schemas/workflow.py +31 -0
- dashboard/backend/app/server.py +87 -0
- dashboard/backend/app/services/__init__.py +6 -0
- dashboard/backend/app/services/run_service.py +240 -0
- dashboard/backend/app/services/workflow_service.py +155 -0
- dashboard/backend/main.py +18 -0
- docs/concepts/cancellation.mdx +362 -0
- docs/concepts/continue-as-new.mdx +434 -0
- docs/concepts/events.mdx +266 -0
- docs/concepts/fault-tolerance.mdx +370 -0
- docs/concepts/hooks.mdx +552 -0
- docs/concepts/limitations.mdx +167 -0
- docs/concepts/schedules.mdx +775 -0
- docs/concepts/sleep.mdx +312 -0
- docs/concepts/steps.mdx +301 -0
- docs/concepts/workflows.mdx +255 -0
- docs/guides/cli.mdx +942 -0
- docs/guides/configuration.mdx +560 -0
- docs/introduction.mdx +155 -0
- docs/quickstart.mdx +279 -0
- examples/__init__.py +1 -0
- examples/celery/__init__.py +1 -0
- examples/celery/durable/docker-compose.yml +55 -0
- examples/celery/durable/pyworkflow.config.yaml +12 -0
- examples/celery/durable/workflows/__init__.py +122 -0
- examples/celery/durable/workflows/basic.py +87 -0
- examples/celery/durable/workflows/batch_processing.py +102 -0
- examples/celery/durable/workflows/cancellation.py +273 -0
- examples/celery/durable/workflows/child_workflow_patterns.py +240 -0
- examples/celery/durable/workflows/child_workflows.py +202 -0
- examples/celery/durable/workflows/continue_as_new.py +260 -0
- examples/celery/durable/workflows/fault_tolerance.py +210 -0
- examples/celery/durable/workflows/hooks.py +211 -0
- examples/celery/durable/workflows/idempotency.py +112 -0
- examples/celery/durable/workflows/long_running.py +99 -0
- examples/celery/durable/workflows/retries.py +101 -0
- examples/celery/durable/workflows/schedules.py +209 -0
- examples/celery/transient/01_basic_workflow.py +91 -0
- examples/celery/transient/02_fault_tolerance.py +257 -0
- examples/celery/transient/__init__.py +20 -0
- examples/celery/transient/pyworkflow.config.yaml +25 -0
- examples/local/__init__.py +1 -0
- examples/local/durable/01_basic_workflow.py +94 -0
- examples/local/durable/02_file_storage.py +132 -0
- examples/local/durable/03_retries.py +169 -0
- examples/local/durable/04_long_running.py +119 -0
- examples/local/durable/05_event_log.py +145 -0
- examples/local/durable/06_idempotency.py +148 -0
- examples/local/durable/07_hooks.py +334 -0
- examples/local/durable/08_cancellation.py +233 -0
- examples/local/durable/09_child_workflows.py +198 -0
- examples/local/durable/10_child_workflow_patterns.py +265 -0
- examples/local/durable/11_continue_as_new.py +249 -0
- examples/local/durable/12_schedules.py +198 -0
- examples/local/durable/__init__.py +1 -0
- examples/local/transient/01_quick_tasks.py +87 -0
- examples/local/transient/02_retries.py +130 -0
- examples/local/transient/03_sleep.py +141 -0
- examples/local/transient/__init__.py +1 -0
- pyworkflow/__init__.py +256 -0
- pyworkflow/aws/__init__.py +68 -0
- pyworkflow/aws/context.py +234 -0
- pyworkflow/aws/handler.py +184 -0
- pyworkflow/aws/testing.py +310 -0
- pyworkflow/celery/__init__.py +41 -0
- pyworkflow/celery/app.py +198 -0
- pyworkflow/celery/scheduler.py +315 -0
- pyworkflow/celery/tasks.py +1746 -0
- pyworkflow/cli/__init__.py +132 -0
- pyworkflow/cli/__main__.py +6 -0
- pyworkflow/cli/commands/__init__.py +1 -0
- pyworkflow/cli/commands/hooks.py +640 -0
- pyworkflow/cli/commands/quickstart.py +495 -0
- pyworkflow/cli/commands/runs.py +773 -0
- pyworkflow/cli/commands/scheduler.py +130 -0
- pyworkflow/cli/commands/schedules.py +794 -0
- pyworkflow/cli/commands/setup.py +703 -0
- pyworkflow/cli/commands/worker.py +413 -0
- pyworkflow/cli/commands/workflows.py +1257 -0
- pyworkflow/cli/output/__init__.py +1 -0
- pyworkflow/cli/output/formatters.py +321 -0
- pyworkflow/cli/output/styles.py +121 -0
- pyworkflow/cli/utils/__init__.py +1 -0
- pyworkflow/cli/utils/async_helpers.py +30 -0
- pyworkflow/cli/utils/config.py +130 -0
- pyworkflow/cli/utils/config_generator.py +344 -0
- pyworkflow/cli/utils/discovery.py +53 -0
- pyworkflow/cli/utils/docker_manager.py +651 -0
- pyworkflow/cli/utils/interactive.py +364 -0
- pyworkflow/cli/utils/storage.py +115 -0
- pyworkflow/config.py +329 -0
- pyworkflow/context/__init__.py +63 -0
- pyworkflow/context/aws.py +230 -0
- pyworkflow/context/base.py +416 -0
- pyworkflow/context/local.py +930 -0
- pyworkflow/context/mock.py +381 -0
- pyworkflow/core/__init__.py +0 -0
- pyworkflow/core/exceptions.py +353 -0
- pyworkflow/core/registry.py +313 -0
- pyworkflow/core/scheduled.py +328 -0
- pyworkflow/core/step.py +494 -0
- pyworkflow/core/workflow.py +294 -0
- pyworkflow/discovery.py +248 -0
- pyworkflow/engine/__init__.py +0 -0
- pyworkflow/engine/events.py +879 -0
- pyworkflow/engine/executor.py +682 -0
- pyworkflow/engine/replay.py +273 -0
- pyworkflow/observability/__init__.py +19 -0
- pyworkflow/observability/logging.py +234 -0
- pyworkflow/primitives/__init__.py +33 -0
- pyworkflow/primitives/child_handle.py +174 -0
- pyworkflow/primitives/child_workflow.py +372 -0
- pyworkflow/primitives/continue_as_new.py +101 -0
- pyworkflow/primitives/define_hook.py +150 -0
- pyworkflow/primitives/hooks.py +97 -0
- pyworkflow/primitives/resume_hook.py +210 -0
- pyworkflow/primitives/schedule.py +545 -0
- pyworkflow/primitives/shield.py +96 -0
- pyworkflow/primitives/sleep.py +100 -0
- pyworkflow/runtime/__init__.py +21 -0
- pyworkflow/runtime/base.py +179 -0
- pyworkflow/runtime/celery.py +310 -0
- pyworkflow/runtime/factory.py +101 -0
- pyworkflow/runtime/local.py +706 -0
- pyworkflow/scheduler/__init__.py +9 -0
- pyworkflow/scheduler/local.py +248 -0
- pyworkflow/serialization/__init__.py +0 -0
- pyworkflow/serialization/decoder.py +146 -0
- pyworkflow/serialization/encoder.py +162 -0
- pyworkflow/storage/__init__.py +54 -0
- pyworkflow/storage/base.py +612 -0
- pyworkflow/storage/config.py +185 -0
- pyworkflow/storage/dynamodb.py +1315 -0
- pyworkflow/storage/file.py +827 -0
- pyworkflow/storage/memory.py +549 -0
- pyworkflow/storage/postgres.py +1161 -0
- pyworkflow/storage/schemas.py +486 -0
- pyworkflow/storage/sqlite.py +1136 -0
- pyworkflow/utils/__init__.py +0 -0
- pyworkflow/utils/duration.py +177 -0
- pyworkflow/utils/schedule.py +391 -0
- pyworkflow_engine-0.1.7.dist-info/METADATA +687 -0
- pyworkflow_engine-0.1.7.dist-info/RECORD +196 -0
- pyworkflow_engine-0.1.7.dist-info/WHEEL +5 -0
- pyworkflow_engine-0.1.7.dist-info/entry_points.txt +2 -0
- pyworkflow_engine-0.1.7.dist-info/licenses/LICENSE +21 -0
- pyworkflow_engine-0.1.7.dist-info/top_level.txt +5 -0
- tests/examples/__init__.py +0 -0
- tests/integration/__init__.py +0 -0
- tests/integration/test_cancellation.py +330 -0
- tests/integration/test_child_workflows.py +439 -0
- tests/integration/test_continue_as_new.py +428 -0
- tests/integration/test_dynamodb_storage.py +1146 -0
- tests/integration/test_fault_tolerance.py +369 -0
- tests/integration/test_schedule_storage.py +484 -0
- tests/unit/__init__.py +0 -0
- tests/unit/backends/__init__.py +1 -0
- tests/unit/backends/test_dynamodb_storage.py +1554 -0
- tests/unit/backends/test_postgres_storage.py +1281 -0
- tests/unit/backends/test_sqlite_storage.py +1460 -0
- tests/unit/conftest.py +41 -0
- tests/unit/test_cancellation.py +364 -0
- tests/unit/test_child_workflows.py +680 -0
- tests/unit/test_continue_as_new.py +441 -0
- tests/unit/test_event_limits.py +316 -0
- tests/unit/test_executor.py +320 -0
- tests/unit/test_fault_tolerance.py +334 -0
- tests/unit/test_hooks.py +495 -0
- tests/unit/test_registry.py +261 -0
- tests/unit/test_replay.py +420 -0
- tests/unit/test_schedule_schemas.py +285 -0
- tests/unit/test_schedule_utils.py +286 -0
- tests/unit/test_scheduled_workflow.py +274 -0
- tests/unit/test_step.py +353 -0
- tests/unit/test_workflow.py +243 -0
pyworkflow/cli/commands/schedules.py
@@ -0,0 +1,794 @@
"""Schedule management CLI commands."""

from datetime import datetime

import click

from pyworkflow import OverlapPolicy, ScheduleSpec, ScheduleStatus
from pyworkflow.cli.output.formatters import (
    format_json,
    format_key_value,
    format_plain,
    format_table,
    print_error,
    print_info,
    print_success,
    print_warning,
)
from pyworkflow.cli.utils.async_helpers import async_command
from pyworkflow.cli.utils.storage import create_storage
from pyworkflow.utils.schedule import describe_schedule


@click.group(name="schedules")
def schedules() -> None:
    """Manage workflow schedules (cron, interval, calendar-based)."""
    pass


@schedules.command(name="list")
@click.option(
    "--workflow",
    help="Filter by workflow name",
)
@click.option(
    "--status",
    type=click.Choice([s.value for s in ScheduleStatus], case_sensitive=False),
    help="Filter by schedule status",
)
@click.option(
    "--limit",
    type=int,
    default=20,
    help="Maximum number of schedules to display (default: 20)",
)
@click.pass_context
@async_command
async def list_schedules_cmd(
    ctx: click.Context,
    workflow: str | None,
    status: str | None,
    limit: int,
) -> None:
    """
    List workflow schedules.

    Examples:

        # List all schedules
        pyworkflow schedules list

        # List schedules for specific workflow
        pyworkflow schedules list --workflow my_workflow

        # List only active schedules
        pyworkflow schedules list --status active

        # JSON output
        pyworkflow --output json schedules list
    """
    from pyworkflow.primitives.schedule import list_schedules

    # Get context data
    config = ctx.obj["config"]
    output = ctx.obj["output"]
    storage_type = ctx.obj["storage_type"]
    storage_path = ctx.obj["storage_path"]

    # Create storage backend
    storage = create_storage(storage_type, storage_path, config)

    # Parse status filter
    status_filter = ScheduleStatus(status) if status else None

    try:
        schedules_list = await list_schedules(
            workflow_name=workflow,
            status=status_filter,
            limit=limit,
            storage=storage,
        )

        if not schedules_list:
            print_info("No schedules found")
            return

        # Format output
        if output == "json":
            data = [
                {
                    "schedule_id": s.schedule_id,
                    "workflow_name": s.workflow_name,
                    "status": s.status.value,
                    "spec": describe_schedule(s.spec),
                    "overlap_policy": s.overlap_policy.value,
                    "next_run_time": s.next_run_time.isoformat() if s.next_run_time else None,
                    "total_runs": s.total_runs,
                    "successful_runs": s.successful_runs,
                    "failed_runs": s.failed_runs,
                    "created_at": s.created_at.isoformat() if s.created_at else None,
                }
                for s in schedules_list
            ]
            format_json(data)

        elif output == "plain":
            schedule_ids = [s.schedule_id for s in schedules_list]
            format_plain(schedule_ids)

        else:  # table
            data = [
                {
                    "Schedule ID": s.schedule_id,
                    "Workflow": s.workflow_name,
                    "Status": s.status.value,
                    "Schedule": describe_schedule(s.spec),
                    "Next Run": s.next_run_time.strftime("%Y-%m-%d %H:%M:%S")
                    if s.next_run_time
                    else "-",
                    "Runs": f"{s.successful_runs}/{s.total_runs}",
                }
                for s in schedules_list
            ]
            format_table(
                data,
                ["Schedule ID", "Workflow", "Status", "Schedule", "Next Run", "Runs"],
                title="Workflow Schedules",
            )

    except Exception as e:
        print_error(f"Failed to list schedules: {e}")
        if ctx.obj["verbose"]:
            raise
        raise click.Abort()


@schedules.command(name="create")
@click.argument("workflow_name")
@click.option(
    "--cron",
    help="Cron expression (e.g., '0 9 * * *' for daily at 9 AM)",
)
@click.option(
    "--interval",
    help="Interval duration (e.g., '5m', '1h', '30s')",
)
@click.option(
    "--timezone",
    default="UTC",
    help="Timezone for schedule (default: UTC)",
)
@click.option(
    "--overlap",
    type=click.Choice([p.value for p in OverlapPolicy], case_sensitive=False),
    default="skip",
    help="Overlap policy (default: skip)",
)
@click.option(
    "--schedule-id",
    help="Custom schedule ID (optional)",
)
@click.pass_context
@async_command
async def create_schedule_cmd(
    ctx: click.Context,
    workflow_name: str,
    cron: str | None,
    interval: str | None,
    timezone: str,
    overlap: str,
    schedule_id: str | None,
) -> None:
    """
    Create a new schedule for a workflow.

    Examples:

        # Create cron schedule (daily at 9 AM)
        pyworkflow schedules create my_workflow --cron "0 9 * * *"

        # Create interval schedule (every 5 minutes)
        pyworkflow schedules create my_workflow --interval 5m

        # Create with custom ID and overlap policy
        pyworkflow schedules create my_workflow --cron "0 0 * * 0" \\
            --schedule-id weekly_job --overlap buffer_one

        # Different timezone
        pyworkflow schedules create my_workflow --cron "0 9 * * *" \\
            --timezone America/New_York
    """
    from pyworkflow.primitives.schedule import create_schedule

    if not cron and not interval:
        print_error("Either --cron or --interval must be provided")
        raise click.Abort()

    # Get context data
    config = ctx.obj["config"]
    output = ctx.obj["output"]
    storage_type = ctx.obj["storage_type"]
    storage_path = ctx.obj["storage_path"]

    # Create storage backend
    storage = create_storage(storage_type, storage_path, config)

    # Parse overlap policy
    overlap_policy = OverlapPolicy(overlap)

    # Create schedule spec
    spec = ScheduleSpec(
        cron=cron,
        interval=interval,
        timezone=timezone,
    )

    try:
        schedule = await create_schedule(
            workflow_name=workflow_name,
            spec=spec,
            overlap_policy=overlap_policy,
            schedule_id=schedule_id,
            storage=storage,
        )

        print_success(f"Created schedule: {schedule.schedule_id}")

        if output == "json":
            data = {
                "schedule_id": schedule.schedule_id,
                "workflow_name": schedule.workflow_name,
                "status": schedule.status.value,
                "spec": describe_schedule(schedule.spec),
                "overlap_policy": schedule.overlap_policy.value,
                "next_run_time": schedule.next_run_time.isoformat()
                if schedule.next_run_time
                else None,
            }
            format_json(data)
        else:
            print_info(f"Schedule: {describe_schedule(schedule.spec)}")
            if schedule.next_run_time:
                print_info(f"Next run: {schedule.next_run_time.strftime('%Y-%m-%d %H:%M:%S')}")

    except ValueError as e:
        print_error(str(e))
        raise click.Abort()
    except Exception as e:
        print_error(f"Failed to create schedule: {e}")
        if ctx.obj["verbose"]:
            raise
        raise click.Abort()


@schedules.command(name="show")
@click.argument("schedule_id")
@click.pass_context
@async_command
async def show_schedule_cmd(
    ctx: click.Context,
    schedule_id: str,
) -> None:
    """
    Show schedule details.

    Examples:

        pyworkflow schedules show sched_abc123

        # JSON output
        pyworkflow --output json schedules show sched_abc123
    """
    from pyworkflow.primitives.schedule import get_schedule

    # Get context data
    config = ctx.obj["config"]
    output = ctx.obj["output"]
    storage_type = ctx.obj["storage_type"]
    storage_path = ctx.obj["storage_path"]

    # Create storage backend
    storage = create_storage(storage_type, storage_path, config)

    try:
        schedule = await get_schedule(schedule_id, storage=storage)

        if not schedule:
            print_error(f"Schedule '{schedule_id}' not found")
            raise click.Abort()

        if output == "json":
            data = {
                "schedule_id": schedule.schedule_id,
                "workflow_name": schedule.workflow_name,
                "status": schedule.status.value,
                "spec": {
                    "cron": schedule.spec.cron,
                    "interval": schedule.spec.interval,
                    "timezone": schedule.spec.timezone,
                },
                "overlap_policy": schedule.overlap_policy.value,
                "next_run_time": schedule.next_run_time.isoformat()
                if schedule.next_run_time
                else None,
                "last_run_at": schedule.last_run_at.isoformat() if schedule.last_run_at else None,
                "total_runs": schedule.total_runs,
                "successful_runs": schedule.successful_runs,
                "failed_runs": schedule.failed_runs,
                "skipped_runs": schedule.skipped_runs,
                "created_at": schedule.created_at.isoformat() if schedule.created_at else None,
                "updated_at": schedule.updated_at.isoformat() if schedule.updated_at else None,
            }
            format_json(data)
        else:
            data = {
                "Schedule ID": schedule.schedule_id,
                "Workflow": schedule.workflow_name,
                "Status": schedule.status.value,
                "Schedule": describe_schedule(schedule.spec),
                "Overlap Policy": schedule.overlap_policy.value,
                "Next Run": schedule.next_run_time.strftime("%Y-%m-%d %H:%M:%S")
                if schedule.next_run_time
                else "-",
                "Last Run": schedule.last_run_at.strftime("%Y-%m-%d %H:%M:%S")
                if schedule.last_run_at
                else "-",
                "Total Runs": schedule.total_runs,
                "Successful": schedule.successful_runs,
                "Failed": schedule.failed_runs,
                "Skipped": schedule.skipped_runs,
                "Created": schedule.created_at.strftime("%Y-%m-%d %H:%M:%S")
                if schedule.created_at
                else "-",
            }
            format_key_value(data, title=f"Schedule: {schedule_id}")

    except click.Abort:
        raise
    except Exception as e:
        print_error(f"Failed to get schedule: {e}")
        if ctx.obj["verbose"]:
            raise
        raise click.Abort()


@schedules.command(name="pause")
@click.argument("schedule_id")
@click.pass_context
@async_command
async def pause_schedule_cmd(
    ctx: click.Context,
    schedule_id: str,
) -> None:
    """
    Pause a schedule.

    A paused schedule will not trigger any new workflow runs until resumed.

    Examples:

        pyworkflow schedules pause sched_abc123
    """
    from pyworkflow.primitives.schedule import pause_schedule

    # Get context data
    config = ctx.obj["config"]
    output = ctx.obj["output"]
    storage_type = ctx.obj["storage_type"]
    storage_path = ctx.obj["storage_path"]

    # Create storage backend
    storage = create_storage(storage_type, storage_path, config)

    try:
        schedule = await pause_schedule(schedule_id, storage=storage)
        print_success(f"Paused schedule: {schedule_id}")

        if output == "json":
            data = {
                "schedule_id": schedule.schedule_id,
                "status": schedule.status.value,
            }
            format_json(data)

    except ValueError as e:
        print_error(str(e))
        raise click.Abort()
    except Exception as e:
        print_error(f"Failed to pause schedule: {e}")
        if ctx.obj["verbose"]:
            raise
        raise click.Abort()


@schedules.command(name="resume")
@click.argument("schedule_id")
@click.pass_context
@async_command
async def resume_schedule_cmd(
    ctx: click.Context,
    schedule_id: str,
) -> None:
    """
    Resume a paused schedule.

    Recalculates the next run time from now.

    Examples:

        pyworkflow schedules resume sched_abc123
    """
    from pyworkflow.primitives.schedule import resume_schedule

    # Get context data
    config = ctx.obj["config"]
    output = ctx.obj["output"]
    storage_type = ctx.obj["storage_type"]
    storage_path = ctx.obj["storage_path"]

    # Create storage backend
    storage = create_storage(storage_type, storage_path, config)

    try:
        schedule = await resume_schedule(schedule_id, storage=storage)
        print_success(f"Resumed schedule: {schedule_id}")

        if schedule.next_run_time:
            print_info(f"Next run: {schedule.next_run_time.strftime('%Y-%m-%d %H:%M:%S')}")

        if output == "json":
            data = {
                "schedule_id": schedule.schedule_id,
                "status": schedule.status.value,
                "next_run_time": schedule.next_run_time.isoformat()
                if schedule.next_run_time
                else None,
            }
            format_json(data)

    except ValueError as e:
        print_error(str(e))
        raise click.Abort()
    except Exception as e:
        print_error(f"Failed to resume schedule: {e}")
        if ctx.obj["verbose"]:
            raise
        raise click.Abort()


@schedules.command(name="delete")
@click.argument("schedule_id")
@click.option(
    "--force",
    is_flag=True,
    help="Delete without confirmation",
)
@click.pass_context
@async_command
async def delete_schedule_cmd(
    ctx: click.Context,
    schedule_id: str,
    force: bool,
) -> None:
    """
    Delete a schedule (soft delete).

    The schedule record is preserved for audit purposes but marked as deleted.

    Examples:

        # Delete with confirmation
        pyworkflow schedules delete sched_abc123

        # Force delete
        pyworkflow schedules delete sched_abc123 --force
    """
    from pyworkflow.primitives.schedule import delete_schedule, get_schedule

    # Get context data
    config = ctx.obj["config"]
    output = ctx.obj["output"]
    storage_type = ctx.obj["storage_type"]
    storage_path = ctx.obj["storage_path"]

    # Create storage backend
    storage = create_storage(storage_type, storage_path, config)

    try:
        # Check if schedule exists
        schedule = await get_schedule(schedule_id, storage=storage)
        if not schedule:
            print_error(f"Schedule '{schedule_id}' not found")
            raise click.Abort()

        # Confirm deletion
        if not force:
            click.confirm(
                f"Delete schedule '{schedule_id}' for workflow '{schedule.workflow_name}'?",
                abort=True,
            )

        await delete_schedule(schedule_id, storage=storage)
        print_success(f"Deleted schedule: {schedule_id}")

        if output == "json":
            data = {
                "schedule_id": schedule_id,
                "deleted": True,
            }
            format_json(data)

    except click.Abort:
        raise
    except ValueError as e:
        print_error(str(e))
        raise click.Abort()
    except Exception as e:
        print_error(f"Failed to delete schedule: {e}")
        if ctx.obj["verbose"]:
            raise
        raise click.Abort()


@schedules.command(name="trigger")
@click.argument("schedule_id")
@click.pass_context
@async_command
async def trigger_schedule_cmd(
    ctx: click.Context,
    schedule_id: str,
) -> None:
    """
    Manually trigger a schedule immediately.

    This bypasses the normal scheduling and executes the workflow immediately.
    Does not affect the regular schedule timing.

    Examples:

        pyworkflow schedules trigger sched_abc123
    """
    from pyworkflow.primitives.schedule import get_schedule, trigger_schedule

    # Get context data
    config = ctx.obj["config"]
    output = ctx.obj["output"]
    storage_type = ctx.obj["storage_type"]
    storage_path = ctx.obj["storage_path"]

    # Create storage backend
    storage = create_storage(storage_type, storage_path, config)

    try:
        # Check if schedule exists
        schedule = await get_schedule(schedule_id, storage=storage)
        if not schedule:
            print_error(f"Schedule '{schedule_id}' not found")
            raise click.Abort()

        await trigger_schedule(schedule_id, storage=storage)
        print_success(f"Triggered schedule: {schedule_id}")
        print_info(f"Workflow '{schedule.workflow_name}' execution queued")

        if output == "json":
            data = {
                "schedule_id": schedule_id,
                "triggered": True,
                "workflow_name": schedule.workflow_name,
            }
            format_json(data)

    except ValueError as e:
        print_error(str(e))
        raise click.Abort()
    except Exception as e:
        print_error(f"Failed to trigger schedule: {e}")
        if ctx.obj["verbose"]:
            raise
        raise click.Abort()


@schedules.command(name="backfill")
@click.argument("schedule_id")
@click.option(
    "--start",
    required=True,
    help="Start time for backfill (ISO format, e.g., 2024-01-01T00:00:00)",
)
@click.option(
    "--end",
    required=True,
    help="End time for backfill (ISO format, e.g., 2024-01-31T23:59:59)",
)
@click.pass_context
@async_command
async def backfill_schedule_cmd(
    ctx: click.Context,
    schedule_id: str,
    start: str,
    end: str,
) -> None:
    """
    Backfill missed runs for a schedule.

    Creates workflow runs for all scheduled times between start and end times.
    Useful for catching up after scheduler downtime.

    Examples:

        # Backfill a specific time range
        pyworkflow schedules backfill sched_abc123 \\
            --start 2024-01-01T00:00:00 \\
            --end 2024-01-31T23:59:59
    """
    from pyworkflow.primitives.schedule import backfill_schedule, get_schedule

    # Get context data
    config = ctx.obj["config"]
    output = ctx.obj["output"]
    storage_type = ctx.obj["storage_type"]
    storage_path = ctx.obj["storage_path"]

    # Create storage backend
    storage = create_storage(storage_type, storage_path, config)

    try:
        # Check if schedule exists
        schedule = await get_schedule(schedule_id, storage=storage)
        if not schedule:
            print_error(f"Schedule '{schedule_id}' not found")
            raise click.Abort()

        # Parse timestamps
        try:
            start_time = datetime.fromisoformat(start)
        except ValueError:
            print_error(f"Invalid start time format: {start}")
            print_info("Expected ISO format (e.g., 2024-01-01T00:00:00)")
            raise click.Abort()

        try:
            end_time = datetime.fromisoformat(end)
        except ValueError:
            print_error(f"Invalid end time format: {end}")
            print_info("Expected ISO format (e.g., 2024-01-31T23:59:59)")
            raise click.Abort()

        if start_time >= end_time:
            print_error("Start time must be before end time")
            raise click.Abort()

        # Perform backfill
        run_ids = await backfill_schedule(
            schedule_id=schedule_id,
            start_time=start_time,
            end_time=end_time,
            storage=storage,
        )

        if run_ids:
            print_success(f"Started backfill for schedule: {schedule_id}")
            print_info(f"Created {len(run_ids)} workflow run(s)")
        else:
            print_warning("No runs to backfill in the specified time range")

        if output == "json":
            data = {
                "schedule_id": schedule_id,
                "start_time": start_time.isoformat(),
                "end_time": end_time.isoformat(),
                "runs_created": len(run_ids),
                "run_ids": run_ids,
            }
            format_json(data)

    except click.Abort:
        raise
    except ValueError as e:
        print_error(str(e))
        raise click.Abort()
    except Exception as e:
        print_error(f"Failed to backfill schedule: {e}")
        if ctx.obj["verbose"]:
            raise
        raise click.Abort()


@schedules.command(name="update")
@click.argument("schedule_id")
@click.option(
    "--cron",
    help="New cron expression",
)
@click.option(
    "--interval",
    help="New interval duration",
)
@click.option(
    "--overlap",
    type=click.Choice([p.value for p in OverlapPolicy], case_sensitive=False),
    help="New overlap policy",
)
@click.pass_context
@async_command
async def update_schedule_cmd(
    ctx: click.Context,
    schedule_id: str,
    cron: str | None,
    interval: str | None,
    overlap: str | None,
) -> None:
    """
    Update an existing schedule.

    Examples:

        # Update cron expression
        pyworkflow schedules update sched_abc123 --cron "0 10 * * *"

        # Update overlap policy
        pyworkflow schedules update sched_abc123 --overlap buffer_one

        # Update both
        pyworkflow schedules update sched_abc123 --interval 10m --overlap allow_all
    """
    from pyworkflow.primitives.schedule import update_schedule

    if not cron and not interval and not overlap:
        print_error("At least one of --cron, --interval, or --overlap must be provided")
        raise click.Abort()

    # Get context data
    config = ctx.obj["config"]
    output = ctx.obj["output"]
    storage_type = ctx.obj["storage_type"]
    storage_path = ctx.obj["storage_path"]

    # Create storage backend
    storage = create_storage(storage_type, storage_path, config)

    try:
        # Build new spec if schedule timing is being updated
        new_spec = None
        if cron or interval:
            new_spec = ScheduleSpec(
                cron=cron,
                interval=interval,
            )

        # Parse overlap policy
        overlap_policy = OverlapPolicy(overlap) if overlap else None

        schedule = await update_schedule(
            schedule_id=schedule_id,
            spec=new_spec,
            overlap_policy=overlap_policy,
            storage=storage,
        )

        print_success(f"Updated schedule: {schedule_id}")

        if output == "json":
            data = {
                "schedule_id": schedule.schedule_id,
                "workflow_name": schedule.workflow_name,
                "spec": describe_schedule(schedule.spec),
                "overlap_policy": schedule.overlap_policy.value,
                "next_run_time": schedule.next_run_time.isoformat()
                if schedule.next_run_time
                else None,
            }
            format_json(data)
        else:
            print_info(f"Schedule: {describe_schedule(schedule.spec)}")
            if schedule.next_run_time:
                print_info(f"Next run: {schedule.next_run_time.strftime('%Y-%m-%d %H:%M:%S')}")

    except ValueError as e:
        print_error(str(e))
        raise click.Abort()
    except Exception as e:
        print_error(f"Failed to update schedule: {e}")
        if ctx.obj["verbose"]:
            raise
        raise click.Abort()