pyworkflow-engine 0.1.7__py3-none-any.whl → 0.1.10__py3-none-any.whl
This diff shows the contents of publicly released package versions as they appear in their respective public registries and is provided for informational purposes only.
- pyworkflow/__init__.py +10 -1
- pyworkflow/celery/tasks.py +272 -24
- pyworkflow/cli/__init__.py +4 -1
- pyworkflow/cli/commands/runs.py +4 -4
- pyworkflow/cli/commands/setup.py +203 -4
- pyworkflow/cli/utils/config_generator.py +76 -3
- pyworkflow/cli/utils/docker_manager.py +232 -0
- pyworkflow/config.py +94 -17
- pyworkflow/context/__init__.py +13 -0
- pyworkflow/context/base.py +26 -0
- pyworkflow/context/local.py +80 -0
- pyworkflow/context/step_context.py +295 -0
- pyworkflow/core/registry.py +6 -1
- pyworkflow/core/step.py +141 -0
- pyworkflow/core/workflow.py +56 -0
- pyworkflow/engine/events.py +30 -0
- pyworkflow/engine/replay.py +39 -0
- pyworkflow/primitives/child_workflow.py +1 -1
- pyworkflow/runtime/local.py +1 -1
- pyworkflow/storage/__init__.py +14 -0
- pyworkflow/storage/base.py +35 -0
- pyworkflow/storage/cassandra.py +1747 -0
- pyworkflow/storage/config.py +69 -0
- pyworkflow/storage/dynamodb.py +31 -2
- pyworkflow/storage/file.py +28 -0
- pyworkflow/storage/memory.py +18 -0
- pyworkflow/storage/mysql.py +1159 -0
- pyworkflow/storage/postgres.py +27 -2
- pyworkflow/storage/schemas.py +4 -3
- pyworkflow/storage/sqlite.py +25 -2
- {pyworkflow_engine-0.1.7.dist-info → pyworkflow_engine-0.1.10.dist-info}/METADATA +7 -4
- pyworkflow_engine-0.1.10.dist-info/RECORD +91 -0
- pyworkflow_engine-0.1.10.dist-info/top_level.txt +1 -0
- dashboard/backend/app/__init__.py +0 -1
- dashboard/backend/app/config.py +0 -32
- dashboard/backend/app/controllers/__init__.py +0 -6
- dashboard/backend/app/controllers/run_controller.py +0 -86
- dashboard/backend/app/controllers/workflow_controller.py +0 -33
- dashboard/backend/app/dependencies/__init__.py +0 -5
- dashboard/backend/app/dependencies/storage.py +0 -50
- dashboard/backend/app/repositories/__init__.py +0 -6
- dashboard/backend/app/repositories/run_repository.py +0 -80
- dashboard/backend/app/repositories/workflow_repository.py +0 -27
- dashboard/backend/app/rest/__init__.py +0 -8
- dashboard/backend/app/rest/v1/__init__.py +0 -12
- dashboard/backend/app/rest/v1/health.py +0 -33
- dashboard/backend/app/rest/v1/runs.py +0 -133
- dashboard/backend/app/rest/v1/workflows.py +0 -41
- dashboard/backend/app/schemas/__init__.py +0 -23
- dashboard/backend/app/schemas/common.py +0 -16
- dashboard/backend/app/schemas/event.py +0 -24
- dashboard/backend/app/schemas/hook.py +0 -25
- dashboard/backend/app/schemas/run.py +0 -54
- dashboard/backend/app/schemas/step.py +0 -28
- dashboard/backend/app/schemas/workflow.py +0 -31
- dashboard/backend/app/server.py +0 -87
- dashboard/backend/app/services/__init__.py +0 -6
- dashboard/backend/app/services/run_service.py +0 -240
- dashboard/backend/app/services/workflow_service.py +0 -155
- dashboard/backend/main.py +0 -18
- docs/concepts/cancellation.mdx +0 -362
- docs/concepts/continue-as-new.mdx +0 -434
- docs/concepts/events.mdx +0 -266
- docs/concepts/fault-tolerance.mdx +0 -370
- docs/concepts/hooks.mdx +0 -552
- docs/concepts/limitations.mdx +0 -167
- docs/concepts/schedules.mdx +0 -775
- docs/concepts/sleep.mdx +0 -312
- docs/concepts/steps.mdx +0 -301
- docs/concepts/workflows.mdx +0 -255
- docs/guides/cli.mdx +0 -942
- docs/guides/configuration.mdx +0 -560
- docs/introduction.mdx +0 -155
- docs/quickstart.mdx +0 -279
- examples/__init__.py +0 -1
- examples/celery/__init__.py +0 -1
- examples/celery/durable/docker-compose.yml +0 -55
- examples/celery/durable/pyworkflow.config.yaml +0 -12
- examples/celery/durable/workflows/__init__.py +0 -122
- examples/celery/durable/workflows/basic.py +0 -87
- examples/celery/durable/workflows/batch_processing.py +0 -102
- examples/celery/durable/workflows/cancellation.py +0 -273
- examples/celery/durable/workflows/child_workflow_patterns.py +0 -240
- examples/celery/durable/workflows/child_workflows.py +0 -202
- examples/celery/durable/workflows/continue_as_new.py +0 -260
- examples/celery/durable/workflows/fault_tolerance.py +0 -210
- examples/celery/durable/workflows/hooks.py +0 -211
- examples/celery/durable/workflows/idempotency.py +0 -112
- examples/celery/durable/workflows/long_running.py +0 -99
- examples/celery/durable/workflows/retries.py +0 -101
- examples/celery/durable/workflows/schedules.py +0 -209
- examples/celery/transient/01_basic_workflow.py +0 -91
- examples/celery/transient/02_fault_tolerance.py +0 -257
- examples/celery/transient/__init__.py +0 -20
- examples/celery/transient/pyworkflow.config.yaml +0 -25
- examples/local/__init__.py +0 -1
- examples/local/durable/01_basic_workflow.py +0 -94
- examples/local/durable/02_file_storage.py +0 -132
- examples/local/durable/03_retries.py +0 -169
- examples/local/durable/04_long_running.py +0 -119
- examples/local/durable/05_event_log.py +0 -145
- examples/local/durable/06_idempotency.py +0 -148
- examples/local/durable/07_hooks.py +0 -334
- examples/local/durable/08_cancellation.py +0 -233
- examples/local/durable/09_child_workflows.py +0 -198
- examples/local/durable/10_child_workflow_patterns.py +0 -265
- examples/local/durable/11_continue_as_new.py +0 -249
- examples/local/durable/12_schedules.py +0 -198
- examples/local/durable/__init__.py +0 -1
- examples/local/transient/01_quick_tasks.py +0 -87
- examples/local/transient/02_retries.py +0 -130
- examples/local/transient/03_sleep.py +0 -141
- examples/local/transient/__init__.py +0 -1
- pyworkflow_engine-0.1.7.dist-info/RECORD +0 -196
- pyworkflow_engine-0.1.7.dist-info/top_level.txt +0 -5
- tests/examples/__init__.py +0 -0
- tests/integration/__init__.py +0 -0
- tests/integration/test_cancellation.py +0 -330
- tests/integration/test_child_workflows.py +0 -439
- tests/integration/test_continue_as_new.py +0 -428
- tests/integration/test_dynamodb_storage.py +0 -1146
- tests/integration/test_fault_tolerance.py +0 -369
- tests/integration/test_schedule_storage.py +0 -484
- tests/unit/__init__.py +0 -0
- tests/unit/backends/__init__.py +0 -1
- tests/unit/backends/test_dynamodb_storage.py +0 -1554
- tests/unit/backends/test_postgres_storage.py +0 -1281
- tests/unit/backends/test_sqlite_storage.py +0 -1460
- tests/unit/conftest.py +0 -41
- tests/unit/test_cancellation.py +0 -364
- tests/unit/test_child_workflows.py +0 -680
- tests/unit/test_continue_as_new.py +0 -441
- tests/unit/test_event_limits.py +0 -316
- tests/unit/test_executor.py +0 -320
- tests/unit/test_fault_tolerance.py +0 -334
- tests/unit/test_hooks.py +0 -495
- tests/unit/test_registry.py +0 -261
- tests/unit/test_replay.py +0 -420
- tests/unit/test_schedule_schemas.py +0 -285
- tests/unit/test_schedule_utils.py +0 -286
- tests/unit/test_scheduled_workflow.py +0 -274
- tests/unit/test_step.py +0 -353
- tests/unit/test_workflow.py +0 -243
- {pyworkflow_engine-0.1.7.dist-info → pyworkflow_engine-0.1.10.dist-info}/WHEEL +0 -0
- {pyworkflow_engine-0.1.7.dist-info → pyworkflow_engine-0.1.10.dist-info}/entry_points.txt +0 -0
- {pyworkflow_engine-0.1.7.dist-info → pyworkflow_engine-0.1.10.dist-info}/licenses/LICENSE +0 -0
pyworkflow/storage/postgres.py
CHANGED

@@ -290,7 +290,7 @@ class PostgresStorageBackend(StorageBackend):
                 run.error,
                 run.idempotency_key,
                 run.max_duration,
-                json.dumps(run.
+                json.dumps(run.context),
                 run.recovery_attempts,
                 run.max_recovery_attempts,
                 run.recover_on_worker_loss,
@@ -385,6 +385,31 @@ class PostgresStorageBackend(StorageBackend):
                 run_id,
             )

+    async def update_run_context(
+        self,
+        run_id: str,
+        context: dict,
+    ) -> None:
+        """Update the step context for a workflow run."""
+        pool = self._ensure_connected()
+
+        async with pool.acquire() as conn:
+            await conn.execute(
+                """
+                UPDATE workflow_runs
+                SET metadata = $1, updated_at = $2
+                WHERE run_id = $3
+                """,
+                json.dumps(context),
+                datetime.now(UTC),
+                run_id,
+            )
+
+    async def get_run_context(self, run_id: str) -> dict:
+        """Get the current step context for a workflow run."""
+        run = await self.get_run(run_id)
+        return run.context if run else {}
+
     async def list_runs(
         self,
         query: str | None = None,
@@ -1087,7 +1112,7 @@ class PostgresStorageBackend(StorageBackend):
             error=row["error"],
             idempotency_key=row["idempotency_key"],
             max_duration=row["max_duration"],
-
+            context=json.loads(row["metadata"]) if row["metadata"] else {},
             recovery_attempts=row["recovery_attempts"],
             max_recovery_attempts=row["max_recovery_attempts"],
             recover_on_worker_loss=row["recover_on_worker_loss"],
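
For orientation, a minimal usage sketch of the two helpers added above. Only the method names and signatures come from the diff; how a connected backend instance is obtained is outside this diff, so `storage` and `run_id` below are placeholders assumed to be supplied by the caller.

# Minimal sketch, not package code: `storage` is assumed to be an already-connected
# PostgresStorageBackend (the SQLite variant later in this diff has the same surface)
# and `run_id` an existing workflow run.
async def checkpoint(storage, run_id: str) -> dict:
    # Persist step-level context; per the hunk above it is written to the existing
    # `metadata` column of workflow_runs.
    await storage.update_run_context(run_id, {"last_step": "charge_card", "attempt": 2})
    # Read it back; get_run_context returns {} when the run does not exist.
    return await storage.get_run_context(run_id)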
pyworkflow/storage/schemas.py
CHANGED

@@ -86,7 +86,7 @@ class WorkflowRun:
     # Configuration
     idempotency_key: str | None = None
     max_duration: str | None = None  # e.g., "1h", "30m"
-
+    context: dict[str, Any] = field(default_factory=dict)  # Step context data

     # Recovery tracking for fault tolerance
     recovery_attempts: int = 0  # Number of recovery attempts after worker failures
@@ -117,7 +117,7 @@ class WorkflowRun:
             "error": self.error,
             "idempotency_key": self.idempotency_key,
             "max_duration": self.max_duration,
-            "
+            "context": self.context,
             "recovery_attempts": self.recovery_attempts,
             "max_recovery_attempts": self.max_recovery_attempts,
             "recover_on_worker_loss": self.recover_on_worker_loss,
@@ -148,7 +148,8 @@ class WorkflowRun:
             error=data.get("error"),
             idempotency_key=data.get("idempotency_key"),
             max_duration=data.get("max_duration"),
-            metadata
+            # Support both 'context' and legacy 'metadata' key for backward compatibility
+            context=data.get("context", data.get("metadata", {})),
             recovery_attempts=data.get("recovery_attempts", 0),
             max_recovery_attempts=data.get("max_recovery_attempts", 3),
             recover_on_worker_loss=data.get("recover_on_worker_loss", True),
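
The from_dict hunk above is the compatibility point between the two releases: runs serialized by 0.1.7 still carry the legacy 'metadata' key, while 0.1.10 writes 'context'. The lookup rule, reproduced as a standalone illustration (not package code) so it runs without installing anything:

# Illustration of the key-resolution rule from WorkflowRun.from_dict above.
def resolve_context(data: dict) -> dict:
    # Prefer the new "context" key, fall back to the legacy "metadata" key, else {}.
    return data.get("context", data.get("metadata", {}))

assert resolve_context({"context": {"step": 3}}) == {"step": 3}
assert resolve_context({"metadata": {"step": 3}}) == {"step": 3}  # record written by 0.1.7
assert resolve_context({}) == {}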
pyworkflow/storage/sqlite.py
CHANGED

@@ -264,7 +264,7 @@ class SQLiteStorageBackend(StorageBackend):
                 run.error,
                 run.idempotency_key,
                 run.max_duration,
-                json.dumps(run.
+                json.dumps(run.context),
                 run.recovery_attempts,
                 run.max_recovery_attempts,
                 1 if run.recover_on_worker_loss else 0,
@@ -357,6 +357,29 @@ class SQLiteStorageBackend(StorageBackend):
             )
             await db.commit()

+    async def update_run_context(
+        self,
+        run_id: str,
+        context: dict,
+    ) -> None:
+        """Update the step context for a workflow run."""
+        db = self._ensure_connected()
+
+        await db.execute(
+            """
+            UPDATE workflow_runs
+            SET metadata = ?, updated_at = ?
+            WHERE run_id = ?
+            """,
+            (json.dumps(context), datetime.now(UTC).isoformat(), run_id),
+        )
+        await db.commit()
+
+    async def get_run_context(self, run_id: str) -> dict:
+        """Get the current step context for a workflow run."""
+        run = await self.get_run(run_id)
+        return run.context if run else {}
+
     async def list_runs(
         self,
         query: str | None = None,
@@ -1047,7 +1070,7 @@ class SQLiteStorageBackend(StorageBackend):
             error=row[10],
             idempotency_key=row[11],
             max_duration=row[12],
-
+            context=json.loads(row[13]) if row[13] else {},
             recovery_attempts=row[14],
             max_recovery_attempts=row[15],
             recover_on_worker_loss=bool(row[16]),
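
The SQLite change mirrors the Postgres one; the differences are the placeholder style (`?` instead of `$n`) and that the timestamp is stored as an ISO-8601 string. The surface the two backends gain in this release can be summarized as a typing.Protocol; the Protocol itself is illustrative and not shipped in the package:

# Illustrative only: the methods this release adds to both SQLiteStorageBackend
# and PostgresStorageBackend, expressed as a structural type.
from typing import Protocol

class SupportsRunContext(Protocol):
    async def update_run_context(self, run_id: str, context: dict) -> None:
        """Persist step context for a run (written to the metadata column)."""
        ...

    async def get_run_context(self, run_id: str) -> dict:
        """Return the stored context, or {} if the run does not exist."""
        ...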

{pyworkflow_engine-0.1.7.dist-info → pyworkflow_engine-0.1.10.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pyworkflow-engine
-Version: 0.1.7
+Version: 0.1.10
 Summary: A Python implementation of durable, event-sourced workflows inspired by Vercel Workflow
 Author: PyWorkflow Contributors
 License: MIT
@@ -35,7 +35,6 @@ Requires-Dist: pyyaml>=6.0.0
 Requires-Dist: croniter>=2.0.0
 Provides-Extra: redis
 Requires-Dist: redis>=5.0.0; extra == "redis"
-Requires-Dist: celery[redis]<6.0.0,>=5.3.0; extra == "redis"
 Provides-Extra: sqlite
 Requires-Dist: aiosqlite>=0.19.0; extra == "sqlite"
 Provides-Extra: postgres
@@ -44,12 +43,17 @@ Provides-Extra: aws
 Requires-Dist: aws-durable-execution-sdk-python>=0.1.0; extra == "aws"
 Provides-Extra: dynamodb
 Requires-Dist: aiobotocore>=2.5.0; extra == "dynamodb"
+Provides-Extra: cassandra
+Requires-Dist: cassandra-driver>=3.29.0; extra == "cassandra"
+Provides-Extra: mysql
+Requires-Dist: aiomysql>=0.2.0; extra == "mysql"
 Provides-Extra: all
 Requires-Dist: redis>=5.0.0; extra == "all"
-Requires-Dist: celery[redis]<6.0.0,>=5.3.0; extra == "all"
 Requires-Dist: aiosqlite>=0.19.0; extra == "all"
 Requires-Dist: asyncpg>=0.29.0; extra == "all"
+Requires-Dist: aiomysql>=0.2.0; extra == "all"
 Requires-Dist: aws-durable-execution-sdk-python>=0.1.0; extra == "all"
+Requires-Dist: cassandra-driver>=3.29.0; extra == "all"
 Provides-Extra: dev
 Requires-Dist: pytest>=7.4.0; extra == "dev"
 Requires-Dist: pytest-asyncio>=0.21.0; extra == "dev"
@@ -66,7 +70,6 @@ Requires-Dist: types-python-dateutil>=2.8.0; extra == "dev"
 Requires-Dist: types-PyYAML>=6.0.0; extra == "dev"
 Requires-Dist: flower>=2.0.0; extra == "dev"
 Requires-Dist: redis>=5.0.0; extra == "dev"
-Requires-Dist: celery[redis]<6.0.0,>=5.3.0; extra == "dev"
 Requires-Dist: aiosqlite>=0.19.0; extra == "dev"
 Requires-Dist: asyncpg>=0.29.0; extra == "dev"
 Dynamic: license-file

pyworkflow_engine-0.1.10.dist-info/RECORD
ADDED

@@ -0,0 +1,91 @@
+pyworkflow/__init__.py,sha256=x1UEkpGJteYrluuAL54uE2I3rk2TRRcGeTuH5ZSMH7s,6281
+pyworkflow/config.py,sha256=yw_3sJNzBanI9xIqU0kh__QL4hs3UVUBXkeCEmw5cfA,14164
+pyworkflow/discovery.py,sha256=snW3l4nvY3Nc067TGlwtn_qdzTU9ybN7YPr8FbvY8iM,8066
+pyworkflow/aws/__init__.py,sha256=Ak_xHcR9LTRX-CwcS0XecYmzrXZw4EM3V9aKBBDEmIk,1741
+pyworkflow/aws/context.py,sha256=Vjyjip6U1Emg-WA5TlBaxFhcg15rf9mVJiPfT4VywHc,8217
+pyworkflow/aws/handler.py,sha256=0SnQuIfQVD99QKMCRFPtrsrV_l1LYKFkzPIRx_2UkSI,5849
+pyworkflow/aws/testing.py,sha256=WrRk9wjbycM-UyHFQWNnA83UE9IrYnhfT38WrbxQT2U,8844
+pyworkflow/celery/__init__.py,sha256=FywVyqnT8AYz9cXkr-wel7_-N7dHFsPNASEPMFESf4Q,1179
+pyworkflow/celery/app.py,sha256=EsmRqervXqnJn7Jl76ZDV9OIcNnIb6fRjDeuZEfYJL8,6456
+pyworkflow/celery/scheduler.py,sha256=Ms4rqRpdpMiLM8l4y3DK-Divunj9afYuUaGGoNQe7P4,11288
+pyworkflow/celery/tasks.py,sha256=HJXwJjlhWu9aKQKZn4Os8b3y5OOHK9CUnHMH6xVnlyk,64740
+pyworkflow/cli/__init__.py,sha256=tcbe-fcZmyeEKUy_aEo8bsEF40HsNKOwvyMBZIJZPwc,3844
+pyworkflow/cli/__main__.py,sha256=LxLLS4FEEPXa5rWpLTtKuivn6Xp9pGia-QKGoxt9SS0,148
+pyworkflow/cli/commands/__init__.py,sha256=IXvnTgukALckkO8fTlZhVRq80ojSqpnIIgboAg_-yZU,39
+pyworkflow/cli/commands/hooks.py,sha256=UtTjfuo4qMwkV0UfdvgWLIqhTw69hnHLjg6-KTVZyRY,21139
+pyworkflow/cli/commands/quickstart.py,sha256=4i_eiCLPAkzXpVoDtlEMgU_JtJpmcffKlgb2dIag5aw,14352
+pyworkflow/cli/commands/runs.py,sha256=dkAx0WSBLyooD-vUUDPqgrmM3ElFwqO4nycEZGkNq4Q,25103
+pyworkflow/cli/commands/scheduler.py,sha256=w2iUoJ1CtEtOg_4TWslTHbzEPVsV-YybqWU9jkf38gs,3706
+pyworkflow/cli/commands/schedules.py,sha256=UCKZLTWsiLwCewCEXmqOVQnptvvuIKsWSTXai61RYbM,23466
+pyworkflow/cli/commands/setup.py,sha256=J-9lvz3m2sZiiLzQtQIfjmX0l8IpJ4L-xp5U4P7UmRY,32256
+pyworkflow/cli/commands/worker.py,sha256=UJ8bQJTXMEk3BoMiivClTCKNt_f-g75jJ5O-khfcfsY,12110
+pyworkflow/cli/commands/workflows.py,sha256=zRBFeqCa4Uo_wwEjgk0SBmkqgcaMznS6ghe1N0ub8Zs,42673
+pyworkflow/cli/output/__init__.py,sha256=5VxKL3mXah5rCKmctxcAKVwp42T47qT1oBK5LFVHHEg,48
+pyworkflow/cli/output/formatters.py,sha256=QzsgPR3cjIbH0723wuG_HzUx9xC7XMA6-NkT2y2lwtM,8785
+pyworkflow/cli/output/styles.py,sha256=WK6GHq_zQGMITGf16U3Vhc4gG4E7YFlE8_d-uBLGbBk,3035
+pyworkflow/cli/utils/__init__.py,sha256=yzEuZNPwj-ts1oR8QBD3pObRmGbL0oullXF-jIve5wo,40
+pyworkflow/cli/utils/async_helpers.py,sha256=B7bPBiUWV9rzHjtXnINEh7AHO57ytQH1xU6TtxnA2dU,786
+pyworkflow/cli/utils/config.py,sha256=Lb75T6-y45v0JiLS-fNNvFU2KDoMIBWyNqmFG_jsEeU,3928
+pyworkflow/cli/utils/config_generator.py,sha256=cnmvVRou9UTjpblMilYa_YJD80htd8PdzsqvUXgR0pA,13952
+pyworkflow/cli/utils/discovery.py,sha256=v-5FMresm-kYPW-hQbql6dOIu6sqIoW_TddD16B8SKo,1467
+pyworkflow/cli/utils/docker_manager.py,sha256=Uif4yVMuAUGlwMM1WDBK7Va-c05Mqe9skg6o-qPcXeU,25588
+pyworkflow/cli/utils/interactive.py,sha256=S2Ell-rUzzt3V10diGo5XCgiDcYFYSxoXNYkJ5EQ_Yc,9740
+pyworkflow/cli/utils/storage.py,sha256=a5Iu2Xe1_mPgBVYc8B6I63MFfW12ko7wURqcpq3RBPA,4018
+pyworkflow/context/__init__.py,sha256=dI5zW1lAFGw68jI2UpKUqyADozDboGNl-RmhEvSTuCI,2150
+pyworkflow/context/aws.py,sha256=MYxrFsRzCgaZ0YQAyE26UOT_ryxuag5DwiDSodclQIg,7571
+pyworkflow/context/base.py,sha256=sq2L5odO3IIzgAd_I_ww1-3hCOe3tyJtjrunriPAI7o,13570
+pyworkflow/context/local.py,sha256=jXlY5h3EisP-7TqNVUSMi7mzHOCZNjAMZgCNf6R5OfU,35991
+pyworkflow/context/mock.py,sha256=TJzQ3P3_ZHm1lCJZJACIFFvz2ydFxz2cT9eEGOQS5I0,12061
+pyworkflow/context/step_context.py,sha256=fW0I1t5A-rWqaBN85MNlNmSLAs3W-qf4arcYne0J4Xw,9261
+pyworkflow/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pyworkflow/core/exceptions.py,sha256=F2nbXyoed7wlIJMeGfpgsIC8ZyWcYN0iKtOnBA7-xnQ,10719
+pyworkflow/core/registry.py,sha256=ZUf2YTpBvWpC9EehRbMF8soXOk9VsjNruoi6lR4O33M,9361
+pyworkflow/core/scheduled.py,sha256=479A7IvjHiMob7ZrZtfE6VqtypG6DLIGMGhh16jLIWM,10522
+pyworkflow/core/step.py,sha256=RY7i0j44Gjg2aziLrnSpdz63fn5GFas4XVb-PTZw2jQ,22473
+pyworkflow/core/workflow.py,sha256=dlcICq1B69-nxUJth_n-H8U9TjP3QZyjvquQXxWHcxs,12076
+pyworkflow/engine/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pyworkflow/engine/events.py,sha256=-ix7EZnNRLfSRk4GJAl5-18uela0BDoCghI_4p5UVKc,25114
+pyworkflow/engine/executor.py,sha256=5b50m-a4XjrOoIS9hS4Rsgk_N114s5js6b-LuW2L0Jw,20333
+pyworkflow/engine/replay.py,sha256=SYHR5PkbyZp1cUdGNwbzx-VKNHus7-SohCR6Tox26vA,10875
+pyworkflow/observability/__init__.py,sha256=M_Uc3WdtshQSxLnj3T8D0M7f4zcCuFzVs8e8PKCuXDc,380
+pyworkflow/observability/logging.py,sha256=4b_N4bIHUxlgOzEn5u1uB-ngCWPNDSU7daKAKxkjBUM,7018
+pyworkflow/primitives/__init__.py,sha256=rEahSVLhG3nSxvcRhJeM1LBSBIV7AkcRTnxuMLmZMTM,1041
+pyworkflow/primitives/child_handle.py,sha256=7NcIaNUQdZEoxmk5gQH1CJ6uQzpro3eFo-sEaM6l6w0,5466
+pyworkflow/primitives/child_workflow.py,sha256=_T7PCqiH0tjIm_lpJ6NmfUPWCFx-MjH6t-C1orwohKs,13134
+pyworkflow/primitives/continue_as_new.py,sha256=NKcimHsgr5ExkvRvfO28hxgPw_I7Q74Vz9WL8r0PhPc,3329
+pyworkflow/primitives/define_hook.py,sha256=gNzk7DuObfWG1T9AdHnDnGLHNKjnApiVRlCKPObugfY,4443
+pyworkflow/primitives/hooks.py,sha256=ws9U81ymsY8M4FFTvJ2X4EMGmIrilb3vCKZ0V_EGZdE,3085
+pyworkflow/primitives/resume_hook.py,sha256=q6gb0qsAhOkFRKMs-PkbLSFLnLerx0VGMkPp9CbkXZQ,6192
+pyworkflow/primitives/schedule.py,sha256=2hVM2Swl9dRx3RHd5nblJLaU8HaSy-NHYue2Cf9TOcU,14961
+pyworkflow/primitives/shield.py,sha256=MUYakU0euZoYNb6MbFyRfJN8GEXsRFkIbZEo84vRN9c,2924
+pyworkflow/primitives/sleep.py,sha256=iH1e5CoWY-jZbYNAU3GRW1xR_8EtCuPIcIohzU4jWJo,3097
+pyworkflow/runtime/__init__.py,sha256=DkwTgFCMRGyyW8NGcW7Nyy9beOg5kO1TXhqhysj1-aY,649
+pyworkflow/runtime/base.py,sha256=-X2pct03XuA3o1P6yD5ywTDgegN6_a450gG8MBVeKRE,5190
+pyworkflow/runtime/celery.py,sha256=0hSwN4alL69ZgnIgYiITcJ0s_iTi8A_xrsdKo89k4Hs,9431
+pyworkflow/runtime/factory.py,sha256=TRbqWPfyZ0tPFKb0faI9SkBRXxE5AEVTwGW4pS2diM8,2684
+pyworkflow/runtime/local.py,sha256=WsjaNcS_aJKLIdglaukeOj_YZqrQ75glsv1MeqS6VhM,24128
+pyworkflow/scheduler/__init__.py,sha256=lQQo0Cia_ULIg-KPIrqILV30rUIzybxj1k_ZZTQNZyg,222
+pyworkflow/scheduler/local.py,sha256=CnK4UC6ofD3_AZJUlO9iUAdgAnbMmJvPaL_VucNKs5Q,8154
+pyworkflow/serialization/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pyworkflow/serialization/decoder.py,sha256=F7Ofuw1Yzo82iSFFXiK2yoW_v2YRbLMpX3CQbKjm0Ls,3860
+pyworkflow/serialization/encoder.py,sha256=ZBwAxe5Bb4MCfFJePHw7ArJlIbBieSwUgsysGCI2iPU,4108
+pyworkflow/storage/__init__.py,sha256=LhVjLNZdo4Mi5dEC75hjSPnbQr9jBoIsTOrC8vzTGOM,1924
+pyworkflow/storage/base.py,sha256=DxgOB9kr3i1uaitY_E9PzhnNWxaq1U5EvbbSjKyoH8M,16104
+pyworkflow/storage/cassandra.py,sha256=Nig0SUlTyxuNgPjOXnVBlzDq3PAGci4jIT1JI0i-GOk,61428
+pyworkflow/storage/config.py,sha256=M5s0ekcGN1Hnj0UkIeaHE4-n-SpsyawN47LyKxKnIqo,9787
+pyworkflow/storage/dynamodb.py,sha256=tGNQQqESxhZzOP5NJULCZKcQf9UuSQNL17TJo6R1jlw,53301
+pyworkflow/storage/file.py,sha256=lKilavXn_CRiIVL5XeV7tY9lm2vJADH-h9Teg0gA84A,28842
+pyworkflow/storage/memory.py,sha256=r2z6LiRw8J2AbO9Qw2wtYjzGfX-VJlRX_RVI2U8c-hs,19753
+pyworkflow/storage/mysql.py,sha256=f1aGyAL8fGsLnmHkpEwP4MFSwvYTpQxOBECHKCnetGI,42904
+pyworkflow/storage/postgres.py,sha256=s9NRBBHRhCOWbqhhg6vXe_8oX8ld0BgrsKW131j0A5s,41327
+pyworkflow/storage/schemas.py,sha256=o1ntTYNgQQ5YVuXtPCShtENEsndVjdrXclWrkCgkitg,18002
+pyworkflow/storage/sqlite.py,sha256=oBzJnnOp2uk0-U7hMTQk9QgJq3RBwXPQfrmYpivjdgE,39529
+pyworkflow/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pyworkflow/utils/duration.py,sha256=C-itmiSQQlplw7j6XB679hLF9xYGnyCwm7twO88OF8U,3978
+pyworkflow/utils/schedule.py,sha256=dO_MkGFyfwZpb0LDlW6BGyZzlPuQIA6dc6j9nk9lc4Y,10691
+pyworkflow_engine-0.1.10.dist-info/licenses/LICENSE,sha256=Y49RCTZ5ayn_yzBcRxnyIFdcMCyuYm150aty_FIznfY,1080
+pyworkflow_engine-0.1.10.dist-info/METADATA,sha256=8mRzFtjSIFyJmSui5vzZzzl1jf8KmDM-wQ2gCRKeDbA,19628
+pyworkflow_engine-0.1.10.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+pyworkflow_engine-0.1.10.dist-info/entry_points.txt,sha256=3IGAfuylnS39U0YX0pxnjrj54kB4iT_bNYrmsiDB-dE,51
+pyworkflow_engine-0.1.10.dist-info/top_level.txt,sha256=FLTv9pQmLDBXrQdLOhTMIS3njFibliMsQEfumqmdzBE,11
+pyworkflow_engine-0.1.10.dist-info/RECORD,,

pyworkflow_engine-0.1.10.dist-info/top_level.txt
ADDED

@@ -0,0 +1 @@
+pyworkflow

dashboard/backend/app/__init__.py
DELETED

@@ -1 +0,0 @@
-"""PyWorkflow Dashboard Backend."""
dashboard/backend/app/config.py
DELETED

@@ -1,32 +0,0 @@
-"""Dashboard configuration using pydantic-settings."""
-
-from pydantic_settings import BaseSettings
-
-
-class Settings(BaseSettings):
-    """Dashboard settings loaded from environment variables."""
-
-    # PyWorkflow configuration
-    pyworkflow_config_path: str | None = None  # Path to pyworkflow.config.yaml
-
-    # Storage configuration (fallback if pyworkflow config not set)
-    storage_type: str = "file"
-    storage_path: str = "./pyworkflow_data"
-
-    # Server configuration
-    host: str = "0.0.0.0"
-    port: int = 8585
-
-    # CORS configuration
-    cors_origins: list[str] = ["http://localhost:5173", "http://localhost:3000"]
-
-    # Debug mode
-    debug: bool = False
-
-    class Config:
-        env_prefix = "DASHBOARD_"
-        env_file = ".env"
-        env_file_encoding = "utf-8"
-
-
-settings = Settings()

dashboard/backend/app/controllers/run_controller.py
DELETED

@@ -1,86 +0,0 @@
-"""Controller for workflow run endpoints."""
-
-from datetime import datetime
-
-from app.repositories.run_repository import RunRepository
-from app.schemas.event import EventListResponse
-from app.schemas.run import RunDetailResponse, RunListResponse, StartRunRequest, StartRunResponse
-from app.services.run_service import RunService
-from pyworkflow.storage.base import StorageBackend
-
-
-class RunController:
-    """Controller handling workflow run-related requests."""
-
-    def __init__(self, storage: StorageBackend):
-        """Initialize controller with storage backend.
-
-        Args:
-            storage: PyWorkflow storage backend.
-        """
-        self.repository = RunRepository(storage)
-        self.service = RunService(self.repository)
-
-    async def list_runs(
-        self,
-        query: str | None = None,
-        status: str | None = None,
-        start_time: datetime | None = None,
-        end_time: datetime | None = None,
-        limit: int = 100,
-        cursor: str | None = None,
-    ) -> RunListResponse:
-        """List workflow runs with optional filtering and cursor-based pagination.
-
-        Args:
-            query: Case-insensitive search in workflow name and input kwargs.
-            status: Filter by status.
-            start_time: Filter runs started at or after this time.
-            end_time: Filter runs started before this time.
-            limit: Maximum results.
-            cursor: Run ID to start after (for pagination).
-
-        Returns:
-            RunListResponse with matching runs and next_cursor.
-        """
-        return await self.service.list_runs(
-            query=query,
-            status=status,
-            start_time=start_time,
-            end_time=end_time,
-            limit=limit,
-            cursor=cursor,
-        )
-
-    async def get_run(self, run_id: str) -> RunDetailResponse | None:
-        """Get detailed information about a run.
-
-        Args:
-            run_id: The run ID.
-
-        Returns:
-            RunDetailResponse if found, None otherwise.
-        """
-        return await self.service.get_run(run_id)
-
-    async def get_events(self, run_id: str) -> EventListResponse:
-        """Get events for a run.
-
-        Args:
-            run_id: The run ID.
-
-        Returns:
-            EventListResponse with run events.
-        """
-        return await self.service.get_events(run_id)
-
-    async def start_run(self, request: StartRunRequest) -> StartRunResponse:
-        """Start a new workflow run.
-
-        Args:
-            request: The start run request containing workflow name and kwargs.
-
-        Returns:
-            StartRunResponse with run_id and workflow_name.
-        """
-        return await self.service.start_run(request)

dashboard/backend/app/controllers/workflow_controller.py
DELETED

@@ -1,33 +0,0 @@
-"""Controller for workflow endpoints."""
-
-from app.repositories.workflow_repository import WorkflowRepository
-from app.schemas.workflow import WorkflowListResponse, WorkflowResponse
-from app.services.workflow_service import WorkflowService
-
-
-class WorkflowController:
-    """Controller handling workflow-related requests."""
-
-    def __init__(self):
-        """Initialize controller with service layer."""
-        self.repository = WorkflowRepository()
-        self.service = WorkflowService(self.repository)
-
-    def list_workflows(self) -> WorkflowListResponse:
-        """Get all registered workflows.
-
-        Returns:
-            WorkflowListResponse with all workflows.
-        """
-        return self.service.list_workflows()
-
-    def get_workflow(self, name: str) -> WorkflowResponse | None:
-        """Get a specific workflow by name.
-
-        Args:
-            name: Workflow name.
-
-        Returns:
-            WorkflowResponse if found, None otherwise.
-        """
-        return self.service.get_workflow(name)

dashboard/backend/app/dependencies/storage.py
DELETED

@@ -1,50 +0,0 @@
-"""Storage dependency for FastAPI."""
-
-from app.config import settings
-from pyworkflow import get_storage as pyworkflow_get_storage
-from pyworkflow.storage.base import StorageBackend
-from pyworkflow.storage.file import FileStorageBackend
-from pyworkflow.storage.memory import InMemoryStorageBackend
-
-_storage_instance: StorageBackend | None = None
-
-
-def reset_storage_cache() -> None:
-    """Reset the cached storage instance.
-
-    Called during application startup to ensure fresh initialization
-    after pyworkflow configuration is loaded.
-    """
-    global _storage_instance
-    _storage_instance = None
-
-
-async def get_storage() -> StorageBackend:
-    """Get or create the storage backend instance.
-
-    First tries to get storage from pyworkflow configuration.
-    Falls back to creating based on dashboard settings.
-    """
-    global _storage_instance
-
-    if _storage_instance is None:
-        # Try to get from pyworkflow config first
-        storage = pyworkflow_get_storage()
-
-        if storage is None:
-            # Create based on dashboard config
-            if settings.storage_type == "file":
-                storage = FileStorageBackend(settings.storage_path)
-            elif settings.storage_type == "sqlite":
-                from pyworkflow.storage.sqlite import SQLiteStorageBackend
-
-                db_path = f"{settings.storage_path}/pyworkflow.db"
-                storage = SQLiteStorageBackend(db_path)
-            elif settings.storage_type == "memory":
-                storage = InMemoryStorageBackend()
-            else:
-                raise ValueError(f"Unknown storage type: {settings.storage_type}")
-
-        _storage_instance = storage
-
-    return _storage_instance

dashboard/backend/app/repositories/run_repository.py
DELETED

@@ -1,80 +0,0 @@
-"""Repository for workflow run data access."""
-
-from datetime import datetime
-
-from pyworkflow.engine.events import Event
-from pyworkflow.storage.base import StorageBackend
-from pyworkflow.storage.schemas import (
-    RunStatus,
-    WorkflowRun,
-)
-
-
-class RunRepository:
-    """Repository for accessing workflow run data via pyworkflow storage."""
-
-    def __init__(self, storage: StorageBackend):
-        """Initialize with a storage backend.
-
-        Args:
-            storage: PyWorkflow storage backend instance.
-        """
-        self.storage = storage
-
-    async def list_runs(
-        self,
-        query: str | None = None,
-        status: RunStatus | None = None,
-        start_time: datetime | None = None,
-        end_time: datetime | None = None,
-        limit: int = 100,
-        cursor: str | None = None,
-    ) -> tuple[list[WorkflowRun], str | None]:
-        """List workflow runs with optional filtering and cursor-based pagination.
-
-        Args:
-            query: Case-insensitive search in workflow name and input kwargs.
-            status: Filter by run status.
-            start_time: Filter runs started at or after this time.
-            end_time: Filter runs started before this time.
-            limit: Maximum number of results.
-            cursor: Run ID to start after (for pagination).
-
-        Returns:
-            Tuple of (list of workflow runs, next_cursor or None).
-        """
-        return await self.storage.list_runs(
-            query=query,
-            status=status,
-            start_time=start_time,
-            end_time=end_time,
-            limit=limit,
-            cursor=cursor,
-        )
-
-    async def get_run(self, run_id: str) -> WorkflowRun | None:
-        """Get a workflow run by ID.
-
-        Args:
-            run_id: The run ID.
-
-        Returns:
-            WorkflowRun if found, None otherwise.
-        """
-        return await self.storage.get_run(run_id)
-
-    async def get_events(
-        self,
-        run_id: str,
-        event_types: list[str] | None = None,
-    ) -> list[Event]:
-        """Get all events for a workflow run.
-
-        Args:
-            run_id: The run ID.
-            event_types: Optional filter by event types.
-
-        Returns:
-            List of events ordered by sequence.
-        """
-        return await self.storage.get_events(run_id, event_types=event_types)

dashboard/backend/app/repositories/workflow_repository.py
DELETED

@@ -1,27 +0,0 @@
-"""Repository for workflow metadata access."""
-
-from pyworkflow import get_workflow, list_workflows
-from pyworkflow.core.registry import WorkflowMetadata
-
-
-class WorkflowRepository:
-    """Repository for accessing registered workflow metadata."""
-
-    def list_all(self) -> dict[str, WorkflowMetadata]:
-        """Get all registered workflows.
-
-        Returns:
-            Dictionary mapping workflow names to their metadata.
-        """
-        return list_workflows()
-
-    def get_by_name(self, name: str) -> WorkflowMetadata | None:
-        """Get a specific workflow by name.
-
-        Args:
-            name: The workflow name.
-
-        Returns:
-            WorkflowMetadata if found, None otherwise.
-        """
-        return get_workflow(name)

dashboard/backend/app/rest/v1/__init__.py
DELETED

@@ -1,12 +0,0 @@
-"""V1 API routes."""
-
-from fastapi import APIRouter
-
-from app.rest.v1.health import router as health_router
-from app.rest.v1.runs import router as runs_router
-from app.rest.v1.workflows import router as workflows_router
-
-router = APIRouter()
-router.include_router(health_router, tags=["health"])
-router.include_router(workflows_router, prefix="/workflows", tags=["workflows"])
-router.include_router(runs_router, prefix="/runs", tags=["runs"])

dashboard/backend/app/rest/v1/health.py
DELETED

@@ -1,33 +0,0 @@
-"""Health check endpoint."""
-
-from fastapi import APIRouter, Depends
-from pydantic import BaseModel
-
-from app.dependencies import get_storage
-from pyworkflow.storage.base import StorageBackend
-
-router = APIRouter()
-
-
-class HealthResponse(BaseModel):
-    """Health check response."""
-
-    status: str
-    storage_healthy: bool
-
-
-@router.get("/health", response_model=HealthResponse)
-async def health_check(
-    storage: StorageBackend = Depends(get_storage),
-) -> HealthResponse:
-    """Check API and storage health.
-
-    Returns:
-        HealthResponse with status information.
-    """
-    storage_healthy = await storage.health_check()
-
-    return HealthResponse(
-        status="healthy" if storage_healthy else "degraded",
-        storage_healthy=storage_healthy,
-    )