pyworkflow_engine-0.1.7-py3-none-any.whl
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their public registries.
- dashboard/backend/app/__init__.py +1 -0
- dashboard/backend/app/config.py +32 -0
- dashboard/backend/app/controllers/__init__.py +6 -0
- dashboard/backend/app/controllers/run_controller.py +86 -0
- dashboard/backend/app/controllers/workflow_controller.py +33 -0
- dashboard/backend/app/dependencies/__init__.py +5 -0
- dashboard/backend/app/dependencies/storage.py +50 -0
- dashboard/backend/app/repositories/__init__.py +6 -0
- dashboard/backend/app/repositories/run_repository.py +80 -0
- dashboard/backend/app/repositories/workflow_repository.py +27 -0
- dashboard/backend/app/rest/__init__.py +8 -0
- dashboard/backend/app/rest/v1/__init__.py +12 -0
- dashboard/backend/app/rest/v1/health.py +33 -0
- dashboard/backend/app/rest/v1/runs.py +133 -0
- dashboard/backend/app/rest/v1/workflows.py +41 -0
- dashboard/backend/app/schemas/__init__.py +23 -0
- dashboard/backend/app/schemas/common.py +16 -0
- dashboard/backend/app/schemas/event.py +24 -0
- dashboard/backend/app/schemas/hook.py +25 -0
- dashboard/backend/app/schemas/run.py +54 -0
- dashboard/backend/app/schemas/step.py +28 -0
- dashboard/backend/app/schemas/workflow.py +31 -0
- dashboard/backend/app/server.py +87 -0
- dashboard/backend/app/services/__init__.py +6 -0
- dashboard/backend/app/services/run_service.py +240 -0
- dashboard/backend/app/services/workflow_service.py +155 -0
- dashboard/backend/main.py +18 -0
- docs/concepts/cancellation.mdx +362 -0
- docs/concepts/continue-as-new.mdx +434 -0
- docs/concepts/events.mdx +266 -0
- docs/concepts/fault-tolerance.mdx +370 -0
- docs/concepts/hooks.mdx +552 -0
- docs/concepts/limitations.mdx +167 -0
- docs/concepts/schedules.mdx +775 -0
- docs/concepts/sleep.mdx +312 -0
- docs/concepts/steps.mdx +301 -0
- docs/concepts/workflows.mdx +255 -0
- docs/guides/cli.mdx +942 -0
- docs/guides/configuration.mdx +560 -0
- docs/introduction.mdx +155 -0
- docs/quickstart.mdx +279 -0
- examples/__init__.py +1 -0
- examples/celery/__init__.py +1 -0
- examples/celery/durable/docker-compose.yml +55 -0
- examples/celery/durable/pyworkflow.config.yaml +12 -0
- examples/celery/durable/workflows/__init__.py +122 -0
- examples/celery/durable/workflows/basic.py +87 -0
- examples/celery/durable/workflows/batch_processing.py +102 -0
- examples/celery/durable/workflows/cancellation.py +273 -0
- examples/celery/durable/workflows/child_workflow_patterns.py +240 -0
- examples/celery/durable/workflows/child_workflows.py +202 -0
- examples/celery/durable/workflows/continue_as_new.py +260 -0
- examples/celery/durable/workflows/fault_tolerance.py +210 -0
- examples/celery/durable/workflows/hooks.py +211 -0
- examples/celery/durable/workflows/idempotency.py +112 -0
- examples/celery/durable/workflows/long_running.py +99 -0
- examples/celery/durable/workflows/retries.py +101 -0
- examples/celery/durable/workflows/schedules.py +209 -0
- examples/celery/transient/01_basic_workflow.py +91 -0
- examples/celery/transient/02_fault_tolerance.py +257 -0
- examples/celery/transient/__init__.py +20 -0
- examples/celery/transient/pyworkflow.config.yaml +25 -0
- examples/local/__init__.py +1 -0
- examples/local/durable/01_basic_workflow.py +94 -0
- examples/local/durable/02_file_storage.py +132 -0
- examples/local/durable/03_retries.py +169 -0
- examples/local/durable/04_long_running.py +119 -0
- examples/local/durable/05_event_log.py +145 -0
- examples/local/durable/06_idempotency.py +148 -0
- examples/local/durable/07_hooks.py +334 -0
- examples/local/durable/08_cancellation.py +233 -0
- examples/local/durable/09_child_workflows.py +198 -0
- examples/local/durable/10_child_workflow_patterns.py +265 -0
- examples/local/durable/11_continue_as_new.py +249 -0
- examples/local/durable/12_schedules.py +198 -0
- examples/local/durable/__init__.py +1 -0
- examples/local/transient/01_quick_tasks.py +87 -0
- examples/local/transient/02_retries.py +130 -0
- examples/local/transient/03_sleep.py +141 -0
- examples/local/transient/__init__.py +1 -0
- pyworkflow/__init__.py +256 -0
- pyworkflow/aws/__init__.py +68 -0
- pyworkflow/aws/context.py +234 -0
- pyworkflow/aws/handler.py +184 -0
- pyworkflow/aws/testing.py +310 -0
- pyworkflow/celery/__init__.py +41 -0
- pyworkflow/celery/app.py +198 -0
- pyworkflow/celery/scheduler.py +315 -0
- pyworkflow/celery/tasks.py +1746 -0
- pyworkflow/cli/__init__.py +132 -0
- pyworkflow/cli/__main__.py +6 -0
- pyworkflow/cli/commands/__init__.py +1 -0
- pyworkflow/cli/commands/hooks.py +640 -0
- pyworkflow/cli/commands/quickstart.py +495 -0
- pyworkflow/cli/commands/runs.py +773 -0
- pyworkflow/cli/commands/scheduler.py +130 -0
- pyworkflow/cli/commands/schedules.py +794 -0
- pyworkflow/cli/commands/setup.py +703 -0
- pyworkflow/cli/commands/worker.py +413 -0
- pyworkflow/cli/commands/workflows.py +1257 -0
- pyworkflow/cli/output/__init__.py +1 -0
- pyworkflow/cli/output/formatters.py +321 -0
- pyworkflow/cli/output/styles.py +121 -0
- pyworkflow/cli/utils/__init__.py +1 -0
- pyworkflow/cli/utils/async_helpers.py +30 -0
- pyworkflow/cli/utils/config.py +130 -0
- pyworkflow/cli/utils/config_generator.py +344 -0
- pyworkflow/cli/utils/discovery.py +53 -0
- pyworkflow/cli/utils/docker_manager.py +651 -0
- pyworkflow/cli/utils/interactive.py +364 -0
- pyworkflow/cli/utils/storage.py +115 -0
- pyworkflow/config.py +329 -0
- pyworkflow/context/__init__.py +63 -0
- pyworkflow/context/aws.py +230 -0
- pyworkflow/context/base.py +416 -0
- pyworkflow/context/local.py +930 -0
- pyworkflow/context/mock.py +381 -0
- pyworkflow/core/__init__.py +0 -0
- pyworkflow/core/exceptions.py +353 -0
- pyworkflow/core/registry.py +313 -0
- pyworkflow/core/scheduled.py +328 -0
- pyworkflow/core/step.py +494 -0
- pyworkflow/core/workflow.py +294 -0
- pyworkflow/discovery.py +248 -0
- pyworkflow/engine/__init__.py +0 -0
- pyworkflow/engine/events.py +879 -0
- pyworkflow/engine/executor.py +682 -0
- pyworkflow/engine/replay.py +273 -0
- pyworkflow/observability/__init__.py +19 -0
- pyworkflow/observability/logging.py +234 -0
- pyworkflow/primitives/__init__.py +33 -0
- pyworkflow/primitives/child_handle.py +174 -0
- pyworkflow/primitives/child_workflow.py +372 -0
- pyworkflow/primitives/continue_as_new.py +101 -0
- pyworkflow/primitives/define_hook.py +150 -0
- pyworkflow/primitives/hooks.py +97 -0
- pyworkflow/primitives/resume_hook.py +210 -0
- pyworkflow/primitives/schedule.py +545 -0
- pyworkflow/primitives/shield.py +96 -0
- pyworkflow/primitives/sleep.py +100 -0
- pyworkflow/runtime/__init__.py +21 -0
- pyworkflow/runtime/base.py +179 -0
- pyworkflow/runtime/celery.py +310 -0
- pyworkflow/runtime/factory.py +101 -0
- pyworkflow/runtime/local.py +706 -0
- pyworkflow/scheduler/__init__.py +9 -0
- pyworkflow/scheduler/local.py +248 -0
- pyworkflow/serialization/__init__.py +0 -0
- pyworkflow/serialization/decoder.py +146 -0
- pyworkflow/serialization/encoder.py +162 -0
- pyworkflow/storage/__init__.py +54 -0
- pyworkflow/storage/base.py +612 -0
- pyworkflow/storage/config.py +185 -0
- pyworkflow/storage/dynamodb.py +1315 -0
- pyworkflow/storage/file.py +827 -0
- pyworkflow/storage/memory.py +549 -0
- pyworkflow/storage/postgres.py +1161 -0
- pyworkflow/storage/schemas.py +486 -0
- pyworkflow/storage/sqlite.py +1136 -0
- pyworkflow/utils/__init__.py +0 -0
- pyworkflow/utils/duration.py +177 -0
- pyworkflow/utils/schedule.py +391 -0
- pyworkflow_engine-0.1.7.dist-info/METADATA +687 -0
- pyworkflow_engine-0.1.7.dist-info/RECORD +196 -0
- pyworkflow_engine-0.1.7.dist-info/WHEEL +5 -0
- pyworkflow_engine-0.1.7.dist-info/entry_points.txt +2 -0
- pyworkflow_engine-0.1.7.dist-info/licenses/LICENSE +21 -0
- pyworkflow_engine-0.1.7.dist-info/top_level.txt +5 -0
- tests/examples/__init__.py +0 -0
- tests/integration/__init__.py +0 -0
- tests/integration/test_cancellation.py +330 -0
- tests/integration/test_child_workflows.py +439 -0
- tests/integration/test_continue_as_new.py +428 -0
- tests/integration/test_dynamodb_storage.py +1146 -0
- tests/integration/test_fault_tolerance.py +369 -0
- tests/integration/test_schedule_storage.py +484 -0
- tests/unit/__init__.py +0 -0
- tests/unit/backends/__init__.py +1 -0
- tests/unit/backends/test_dynamodb_storage.py +1554 -0
- tests/unit/backends/test_postgres_storage.py +1281 -0
- tests/unit/backends/test_sqlite_storage.py +1460 -0
- tests/unit/conftest.py +41 -0
- tests/unit/test_cancellation.py +364 -0
- tests/unit/test_child_workflows.py +680 -0
- tests/unit/test_continue_as_new.py +441 -0
- tests/unit/test_event_limits.py +316 -0
- tests/unit/test_executor.py +320 -0
- tests/unit/test_fault_tolerance.py +334 -0
- tests/unit/test_hooks.py +495 -0
- tests/unit/test_registry.py +261 -0
- tests/unit/test_replay.py +420 -0
- tests/unit/test_schedule_schemas.py +285 -0
- tests/unit/test_schedule_utils.py +286 -0
- tests/unit/test_scheduled_workflow.py +274 -0
- tests/unit/test_step.py +353 -0
- tests/unit/test_workflow.py +243 -0
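--- /dev/null
+++ b/pyworkflow/cli/commands/runs.py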
@@ -0,0 +1,773 @@
+"""Workflow run management commands."""
+
+import json
+from datetime import datetime
+
+import click
+
+import pyworkflow
+from pyworkflow import RunStatus, WorkflowRun
+from pyworkflow.cli.output.formatters import (
+    format_json,
+    format_key_value,
+    format_plain,
+    format_table,
+    print_error,
+    print_info,
+    print_success,
+    print_warning,
+)
+from pyworkflow.cli.utils.async_helpers import async_command
+from pyworkflow.cli.utils.storage import create_storage
+
+
+@click.group(name="runs")
+def runs() -> None:
+    """Manage workflow runs (list, status, logs)."""
+    pass
+
+
+@runs.command(name="list")
+@click.option(
+    "-q",
+    "--query",
+    help="Search in workflow name and input kwargs (case-insensitive)",
+)
+@click.option(
+    "--status",
+    type=click.Choice([s.value for s in RunStatus], case_sensitive=False),
+    help="Filter by run status",
+)
+@click.option(
+    "--start-time",
+    type=click.DateTime(),
+    help="Filter runs started at or after this time (ISO 8601 format)",
+)
+@click.option(
+    "--end-time",
+    type=click.DateTime(),
+    help="Filter runs started before this time (ISO 8601 format)",
+)
+@click.option(
+    "--limit",
+    type=int,
+    default=20,
+    help="Maximum number of runs to display (default: 20)",
+)
+@click.pass_context
+@async_command
+async def list_runs(
+    ctx: click.Context,
+    query: str | None,
+    status: str | None,
+    start_time: datetime | None,
+    end_time: datetime | None,
+    limit: int,
+) -> None:
+    """
+    List workflow runs.
+
+    Examples:
+
+        # List all runs
+        pyworkflow runs list
+
+        # Search runs by workflow name or input
+        pyworkflow runs list --query order
+
+        # List failed runs
+        pyworkflow runs list --status failed
+
+        # List runs from today
+        pyworkflow runs list --start-time 2025-01-01
+
+        # List runs in a time range
+        pyworkflow runs list --start-time 2025-01-01T00:00:00 --end-time 2025-01-02T00:00:00
+
+        # List with limit
+        pyworkflow runs list --limit 10
+    """
+    # Get context data
+    config = ctx.obj["config"]
+    output = ctx.obj["output"]
+    storage_type = ctx.obj["storage_type"]
+    storage_path = ctx.obj["storage_path"]
+
+    # Create storage backend
+    storage = create_storage(storage_type, storage_path, config)
+
+    # Parse status filter
+    status_filter = RunStatus(status) if status else None
+
+    # List runs
+    try:
+        runs_list, _next_cursor = await storage.list_runs(
+            query=query,
+            status=status_filter,
+            start_time=start_time,
+            end_time=end_time,
+            limit=limit,
+        )
+
+        if not runs_list:
+            print_info("No workflow runs found")
+            return
+
+        # Calculate durations (stored as dynamic attribute for display)
+        durations: dict[str, str] = {}
+        for run in runs_list:
+            if run.started_at and run.completed_at:
+                dur = (run.completed_at - run.started_at).total_seconds()
+                durations[run.run_id] = f"{dur:.1f}s"
+            elif run.started_at:
+                dur = (datetime.now() - run.started_at.replace(tzinfo=None)).total_seconds()
+                durations[run.run_id] = f"{dur:.1f}s (ongoing)"
+            else:
+                durations[run.run_id] = "-"
+
+        # Format output
+        if output == "json":
+            data = [
+                {
+                    "run_id": run.run_id,
+                    "workflow_name": run.workflow_name,
+                    "status": run.status.value,
+                    "created_at": run.created_at.isoformat() if run.created_at else None,
+                    "started_at": run.started_at.isoformat() if run.started_at else None,
+                    "completed_at": run.completed_at.isoformat() if run.completed_at else None,
+                    "duration": durations.get(run.run_id, "-"),
+                }
+                for run in runs_list
+            ]
+            format_json(data)
+
+        elif output == "plain":
+            run_ids = [run.run_id for run in runs_list]
+            format_plain(run_ids)
+
+        else:  # table (displays as list)
+            data = [
+                {
+                    "Run ID": run.run_id,
+                    "Workflow": run.workflow_name,
+                    "Status": run.status.value,
+                    "Started": run.started_at.strftime("%Y-%m-%d %H:%M:%S")
+                    if run.started_at
+                    else "-",
+                    "Duration": durations.get(run.run_id, "-"),
+                }
+                for run in runs_list
+            ]
+            format_table(
+                data,
+                ["Run ID", "Workflow", "Status", "Started", "Duration"],
+                title="Workflow Runs",
+            )
+
+    except Exception as e:
+        print_error(f"Failed to list runs: {e}")
+        if ctx.obj["verbose"]:
+            raise
+        raise click.Abort()
+
+
+@runs.command(name="status")
+@click.argument("run_id")
+@click.pass_context
+@async_command
+async def run_status(ctx: click.Context, run_id: str) -> None:
+    """
+    Show workflow run status and details.
+
+    Args:
+        RUN_ID: Workflow run identifier
+
+    Examples:
+
+        pyworkflow runs status run_abc123def456
+    """
+    # Get context data
+    config = ctx.obj["config"]
+    output = ctx.obj["output"]
+    storage_type = ctx.obj["storage_type"]
+    storage_path = ctx.obj["storage_path"]
+
+    # Create storage backend
+    storage = create_storage(storage_type, storage_path, config)
+
+    # Get workflow run
+    try:
+        run = await pyworkflow.get_workflow_run(run_id, storage=storage)
+
+        if not run:
+            print_error(f"Workflow run '{run_id}' not found")
+            raise click.Abort()
+
+        # Calculate duration
+        if run.started_at and run.completed_at:
+            duration = (run.completed_at - run.started_at).total_seconds()
+            duration_str = f"{duration:.1f}s"
+        elif run.started_at:
+            duration = (datetime.now() - run.started_at.replace(tzinfo=None)).total_seconds()
+            duration_str = f"{duration:.1f}s (ongoing)"
+        else:
+            duration_str = "-"
+
+        # Format output
+        if output == "json":
+            data = {
+                "run_id": run.run_id,
+                "workflow_name": run.workflow_name,
+                "status": run.status.value,
+                "created_at": run.created_at.isoformat() if run.created_at else None,
+                "started_at": run.started_at.isoformat() if run.started_at else None,
+                "completed_at": run.completed_at.isoformat() if run.completed_at else None,
+                "duration": duration_str,
+                "input_args": json.loads(run.input_args) if run.input_args else None,
+                "input_kwargs": json.loads(run.input_kwargs) if run.input_kwargs else None,
+                "result": json.loads(run.result) if run.result else None,
+                "error": run.error,
+                "metadata": run.metadata,
+            }
+            format_json(data)
+
+        else:  # table or plain (use key-value format)
+            data = {
+                "Run ID": run.run_id,
+                "Workflow": run.workflow_name,
+                "Status": run.status.value,
+                "Created": run.created_at.strftime("%Y-%m-%d %H:%M:%S") if run.created_at else "-",
+                "Started": run.started_at.strftime("%Y-%m-%d %H:%M:%S") if run.started_at else "-",
+                "Completed": run.completed_at.strftime("%Y-%m-%d %H:%M:%S")
+                if run.completed_at
+                else "-",
+                "Duration": duration_str,
+            }
+
+            # Add input args if present
+            if run.input_kwargs:
+                try:
+                    kwargs = json.loads(run.input_kwargs)
+                    if kwargs:
+                        data["Input Arguments"] = json.dumps(kwargs, indent=2)
+                except Exception:
+                    pass
+
+            # Add result or error
+            if run.result:
+                try:
+                    result = json.loads(run.result)
+                    data["Result"] = (
+                        json.dumps(result, indent=2) if not isinstance(result, str) else result
+                    )
+                except Exception:
+                    data["Result"] = run.result
+
+            if run.error:
+                data["Error"] = run.error
+
+            # Add metadata if present
+            if run.metadata:
+                data["Metadata"] = json.dumps(run.metadata, indent=2)
+
+            format_key_value(data, title=f"Workflow Run: {run_id}")
+
+    except Exception as e:
+        print_error(f"Failed to get run status: {e}")
+        if ctx.obj["verbose"]:
+            raise
+        raise click.Abort()
+
+
+@runs.command(name="logs")
+@click.argument("run_id")
+@click.option(
+    "--filter",
+    "event_filter",
+    help="Filter events by type (e.g., step_completed, workflow_failed)",
+)
+@click.pass_context
+@async_command
+async def run_logs(
+    ctx: click.Context,
+    run_id: str,
+    event_filter: str | None,
+) -> None:
+    """
+    Show workflow execution event log.
+
+    Args:
+        RUN_ID: Workflow run identifier
+
+    Examples:
+
+        # Show all events
+        pyworkflow runs logs run_abc123def456
+
+        # Filter step completion events
+        pyworkflow runs logs run_abc123def456 --filter step_completed
+
+        # JSON output
+        pyworkflow --output json runs logs run_abc123def456
+    """
+    # Get context data
+    config = ctx.obj["config"]
+    output = ctx.obj["output"]
+    storage_type = ctx.obj["storage_type"]
+    storage_path = ctx.obj["storage_path"]
+
+    # Create storage backend
+    storage = create_storage(storage_type, storage_path, config)
+
+    # Get events
+    try:
+        events = await pyworkflow.get_workflow_events(run_id, storage=storage)
+
+        if not events:
+            print_info(f"No events found for run: {run_id}")
+            return
+
+        # Filter events if requested
+        if event_filter:
+            events = [e for e in events if event_filter.lower() in e.type.value.lower()]
+
+            if not events:
+                print_info(f"No events matching filter: {event_filter}")
+                return
+
+        # Format output
+        if output == "json":
+            data = [
+                {
+                    "event_id": event.event_id,
+                    "sequence": event.sequence,
+                    "type": event.type.value,
+                    "timestamp": event.timestamp.isoformat() if event.timestamp else None,
+                    "data": event.data,
+                }
+                for event in events
+            ]
+            format_json(data)
+
+        elif output == "plain":
+            lines = [f"{event.sequence}: {event.type.value}" for event in events]
+            format_plain(lines)
+
+        else:  # table (displays as list with full data)
+            from pyworkflow.cli.output.styles import DIM, RESET, Colors
+
+            print(f"\n{Colors.PRIMARY}{Colors.bold(f'Event Log: {run_id}')}{RESET}")
+            print(f"{DIM}{'─' * 60}{RESET}")
+            print(f"Total events: {len(events)}\n")
+
+            for event in events:
+                seq = event.sequence or "-"
+                event_type = event.type.value
+                timestamp = event.timestamp.strftime("%H:%M:%S.%f")[:-3] if event.timestamp else "-"
+
+                # Color code event types
+                type_color = {
+                    "workflow.started": Colors.BLUE,
+                    "workflow.completed": Colors.GREEN,
+                    "workflow.failed": Colors.RED,
+                    "workflow.interrupted": Colors.YELLOW,
+                    "step.started": Colors.CYAN,
+                    "step.completed": Colors.GREEN,
+                    "step.failed": Colors.RED,
+                    "step.retrying": Colors.YELLOW,
+                    "sleep.started": Colors.MAGENTA,
+                    "sleep.completed": Colors.MAGENTA,
+                    "hook.created": Colors.YELLOW,
+                    "hook.received": Colors.GREEN,
+                }.get(event_type, "")
+
+                print(f"{Colors.bold(str(seq))}")
+                print(f" Type: {type_color}{event_type}{RESET}")
+                print(f" Timestamp: {timestamp}")
+
+                # Pretty print data if not empty
+                if event.data:
+                    data_str = json.dumps(event.data, indent=6)
+                    # Indent each line of the JSON
+                    data_lines = data_str.split("\n")
+                    print(f" Data: {data_lines[0]}")
+                    for line in data_lines[1:]:
+                        print(f" {line}")
+                else:
+                    print(f" Data: {DIM}{{}}{RESET}")
+
+                print()  # Blank line between events
+
+    except Exception as e:
+        print_error(f"Failed to get event log: {e}")
+        if ctx.obj["verbose"]:
+            raise
+        raise click.Abort()
+
+
+@runs.command(name="cancel")
+@click.argument("run_id")
+@click.option(
+    "--wait/--no-wait",
+    default=False,
+    help="Wait for cancellation to complete",
+)
+@click.option(
+    "--timeout",
+    type=int,
+    default=30,
+    help="Timeout in seconds when waiting (default: 30)",
+)
+@click.option(
+    "--reason",
+    help="Reason for cancellation",
+)
+@click.pass_context
+@async_command
+async def cancel_run(
+    ctx: click.Context,
+    run_id: str,
+    wait: bool,
+    timeout: int,
+    reason: str | None,
+) -> None:
+    """
+    Cancel a running or suspended workflow.
+
+    Gracefully terminates workflow execution. The workflow will receive
+    a CancellationError at the next checkpoint (step execution, sleep, or hook).
+
+    Args:
+        RUN_ID: Workflow run identifier
+
+    Examples:
+
+        # Cancel a workflow
+        pyworkflow runs cancel run_abc123def456
+
+        # Cancel and wait for completion
+        pyworkflow runs cancel run_abc123def456 --wait
+
+        # Cancel with timeout
+        pyworkflow runs cancel run_abc123def456 --wait --timeout 60
+
+        # Cancel with reason
+        pyworkflow runs cancel run_abc123def456 --reason "User requested"
+    """
+    from pyworkflow.engine.executor import cancel_workflow
+
+    # Get context data
+    config = ctx.obj["config"]
+    output = ctx.obj["output"]
+    storage_type = ctx.obj["storage_type"]
+    storage_path = ctx.obj["storage_path"]
+
+    # Create storage backend
+    storage = create_storage(storage_type, storage_path, config)
+
+    try:
+        # First check if workflow exists
+        run = await storage.get_run(run_id)
+        if not run:
+            print_error(f"Workflow run '{run_id}' not found")
+            raise click.Abort()
+
+        # Check if already in terminal state
+        terminal_states = {RunStatus.COMPLETED, RunStatus.FAILED, RunStatus.CANCELLED}
+        if run.status in terminal_states:
+            print_warning(f"Workflow is already in terminal state: {run.status.value}")
+            return
+
+        # Cancel the workflow
+        print_info(f"Cancelling workflow: {run_id}")
+
+        cancelled = await cancel_workflow(
+            run_id=run_id,
+            reason=reason,
+            wait=wait,
+            timeout=float(timeout) if wait else None,
+            storage=storage,
+        )
+
+        if cancelled:
+            if wait:
+                # Get updated status
+                run = await storage.get_run(run_id)
+                if run and run.status == RunStatus.CANCELLED:
+                    print_success(f"Workflow cancelled successfully: {run_id}")
+                else:
+                    print_warning("Cancellation requested but workflow may still be running")
+            else:
+                print_success(f"Cancellation requested for workflow: {run_id}")
+                print_info("Use --wait to wait for cancellation to complete")
+        else:
+            print_warning("Could not cancel workflow (may already be in terminal state)")
+
+        # Output in different formats
+        if output == "json":
+            run = await storage.get_run(run_id)
+            data = {
+                "run_id": run_id,
+                "cancelled": cancelled,
+                "status": run.status.value if run else None,
+            }
+            format_json(data)
+
+    except click.Abort:
+        raise
+    except Exception as e:
+        print_error(f"Failed to cancel workflow: {e}")
+        if ctx.obj["verbose"]:
+            raise
+        raise click.Abort()
+
+
+@runs.command(name="children")
+@click.argument("run_id")
+@click.option(
+    "--status",
+    type=click.Choice([s.value for s in RunStatus], case_sensitive=False),
+    help="Filter by child run status",
+)
+@click.pass_context
+@async_command
+async def list_children(
+    ctx: click.Context,
+    run_id: str,
+    status: str | None,
+) -> None:
+    """
+    List child workflows spawned by a parent workflow.
+
+    Shows all child workflows that were started by the specified parent workflow
+    using start_child_workflow(). Displays run_id, workflow name, status, and
+    timing information for each child.
+
+    Args:
+        RUN_ID: Parent workflow run identifier
+
+    Examples:
+
+        # List all children of a workflow
+        pyworkflow runs children run_abc123def456
+
+        # List only running children
+        pyworkflow runs children run_abc123def456 --status running
+
+        # JSON output
+        pyworkflow --output json runs children run_abc123def456
+    """
+    # Get context data
+    config = ctx.obj["config"]
+    output = ctx.obj["output"]
+    storage_type = ctx.obj["storage_type"]
+    storage_path = ctx.obj["storage_path"]
+
+    # Create storage backend
+    storage = create_storage(storage_type, storage_path, config)
+
+    try:
+        # Check if parent workflow exists
+        parent_run = await storage.get_run(run_id)
+        if not parent_run:
+            print_error(f"Parent workflow run '{run_id}' not found")
+            raise click.Abort()
+
+        # Parse status filter
+        status_filter = RunStatus(status) if status else None
+
+        # Get children
+        children = await storage.get_children(run_id, status=status_filter)
+
+        if not children:
+            print_info(f"No child workflows found for run: {run_id}")
+            return
+
+        def _calc_duration(child: WorkflowRun) -> str:
+            """Calculate duration for display."""
+            if child.started_at and child.completed_at:
+                duration = (child.completed_at - child.started_at).total_seconds()
+                return f"{duration:.1f}s"
+            elif child.started_at:
+                duration = (datetime.now() - child.started_at.replace(tzinfo=None)).total_seconds()
+                return f"{duration:.1f}s (ongoing)"
+            else:
+                return "-"
+
+        # Format output
+        if output == "json":
+            data = [
+                {
+                    "run_id": child.run_id,
+                    "workflow_name": child.workflow_name,
+                    "status": child.status.value,
+                    "nesting_depth": child.nesting_depth,
+                    "created_at": child.created_at.isoformat() if child.created_at else None,
+                    "started_at": child.started_at.isoformat() if child.started_at else None,
+                    "completed_at": child.completed_at.isoformat() if child.completed_at else None,
+                    "duration": _calc_duration(child),
+                }
+                for child in children
+            ]
+            format_json(data)
+
+        elif output == "plain":
+            child_ids = [child.run_id for child in children]
+            format_plain(child_ids)
+
+        else:  # table
+            data = [
+                {
+                    "Run ID": child.run_id,
+                    "Workflow": child.workflow_name,
+                    "Status": child.status.value,
+                    "Depth": child.nesting_depth,
+                    "Started": child.started_at.strftime("%Y-%m-%d %H:%M:%S")
+                    if child.started_at
+                    else "-",
+                    "Duration": _calc_duration(child),
+                }
+                for child in children
+            ]
+            format_table(
+                data,
+                ["Run ID", "Workflow", "Status", "Depth", "Started", "Duration"],
+                title=f"Child Workflows of {run_id}",
+            )
+
+    except click.Abort:
+        raise
+    except Exception as e:
+        print_error(f"Failed to list child workflows: {e}")
+        if ctx.obj["verbose"]:
+            raise
+        raise click.Abort()
+
+
+@runs.command(name="chain")
+@click.argument("run_id")
+@click.pass_context
+@async_command
+async def run_chain(
+    ctx: click.Context,
+    run_id: str,
+) -> None:
+    """
+    Show the continue-as-new chain for a workflow run.
+
+    Displays all workflow runs in a continue-as-new chain, from the original
+    run to the latest continuation. Useful for tracking long-running workflows
+    that use continue_as_new() to reset their event history.
+
+    Args:
+        RUN_ID: Any workflow run identifier in the chain
+
+    Examples:
+
+        # Show chain for a workflow
+        pyworkflow runs chain run_abc123def456
+
+        # JSON output
+        pyworkflow --output json runs chain run_abc123def456
+    """
+    # Get context data
+    config = ctx.obj["config"]
+    output = ctx.obj["output"]
+    storage_type = ctx.obj["storage_type"]
+    storage_path = ctx.obj["storage_path"]
+
+    # Create storage backend
+    storage = create_storage(storage_type, storage_path, config)
+
+    try:
+        # Get the chain
+        chain = await storage.get_workflow_chain(run_id)
+
+        if not chain:
+            print_error(f"Workflow run '{run_id}' not found")
+            raise click.Abort()
+
+        def _calc_duration(run: WorkflowRun) -> str:
+            """Calculate duration for display."""
+            if run.started_at and run.completed_at:
+                duration = (run.completed_at - run.started_at).total_seconds()
+                return f"{duration:.1f}s"
+            elif run.started_at:
+                duration = (datetime.now() - run.started_at.replace(tzinfo=None)).total_seconds()
+                return f"{duration:.1f}s (ongoing)"
+            else:
+                return "-"
+
+        # Format output
+        if output == "json":
+            data = [
+                {
+                    "run_id": run.run_id,
+                    "workflow_name": run.workflow_name,
+                    "status": run.status.value,
+                    "continued_from_run_id": run.continued_from_run_id,
+                    "continued_to_run_id": run.continued_to_run_id,
+                    "created_at": run.created_at.isoformat() if run.created_at else None,
+                    "started_at": run.started_at.isoformat() if run.started_at else None,
+                    "completed_at": run.completed_at.isoformat() if run.completed_at else None,
+                    "duration": _calc_duration(run),
+                }
+                for run in chain
+            ]
+            format_json(data)
+
+        elif output == "plain":
+            run_ids = [run.run_id for run in chain]
+            format_plain(run_ids)
+
+        else:  # table
+            from pyworkflow.cli.output.styles import DIM, RESET, Colors
+
+            print(f"\n{Colors.PRIMARY}{Colors.bold('Continue-As-New Chain')}{RESET}")
+            print(f"{DIM}{'─' * 60}{RESET}")
+            print(f"Chain length: {len(chain)} run(s)\n")
+
+            for i, run in enumerate(chain):
+                # Indicate position in chain
+                if i == 0:
+                    position = "START"
+                elif i == len(chain) - 1:
+                    position = "CURRENT"
+                else:
+                    position = f"#{i + 1}"
+
+                # Color code status
+                status_color = {
+                    "completed": Colors.GREEN,
+                    "failed": Colors.RED,
+                    "running": Colors.BLUE,
+                    "suspended": Colors.YELLOW,
+                    "cancelled": Colors.RED,
+                    "continued_as_new": Colors.CYAN,
+                }.get(run.status.value, "")
+
+                # Mark the queried run
+                marker = " <--" if run.run_id == run_id else ""
+
+                print(f"{Colors.bold(position)}{marker}")
+                print(f" Run ID: {run.run_id}")
+                print(f" Workflow: {run.workflow_name}")
+                print(f" Status: {status_color}{run.status.value}{RESET}")
+                print(f" Duration: {_calc_duration(run)}")
+
+                if run.started_at:
+                    print(f" Started: {run.started_at.strftime('%Y-%m-%d %H:%M:%S')}")
+
+                # Show arrow to next run if not last
+                if i < len(chain) - 1:
+                    print(f"\n {DIM}↓ continued as new{RESET}\n")
+                else:
+                    print()
+
+    except click.Abort:
+        raise
+    except Exception as e:
+        print_error(f"Failed to get workflow chain: {e}")
+        if ctx.obj["verbose"]:
+            raise
+        raise click.Abort()