pyworkflow-engine 0.1.7 (pyworkflow_engine-0.1.7-py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dashboard/backend/app/__init__.py +1 -0
- dashboard/backend/app/config.py +32 -0
- dashboard/backend/app/controllers/__init__.py +6 -0
- dashboard/backend/app/controllers/run_controller.py +86 -0
- dashboard/backend/app/controllers/workflow_controller.py +33 -0
- dashboard/backend/app/dependencies/__init__.py +5 -0
- dashboard/backend/app/dependencies/storage.py +50 -0
- dashboard/backend/app/repositories/__init__.py +6 -0
- dashboard/backend/app/repositories/run_repository.py +80 -0
- dashboard/backend/app/repositories/workflow_repository.py +27 -0
- dashboard/backend/app/rest/__init__.py +8 -0
- dashboard/backend/app/rest/v1/__init__.py +12 -0
- dashboard/backend/app/rest/v1/health.py +33 -0
- dashboard/backend/app/rest/v1/runs.py +133 -0
- dashboard/backend/app/rest/v1/workflows.py +41 -0
- dashboard/backend/app/schemas/__init__.py +23 -0
- dashboard/backend/app/schemas/common.py +16 -0
- dashboard/backend/app/schemas/event.py +24 -0
- dashboard/backend/app/schemas/hook.py +25 -0
- dashboard/backend/app/schemas/run.py +54 -0
- dashboard/backend/app/schemas/step.py +28 -0
- dashboard/backend/app/schemas/workflow.py +31 -0
- dashboard/backend/app/server.py +87 -0
- dashboard/backend/app/services/__init__.py +6 -0
- dashboard/backend/app/services/run_service.py +240 -0
- dashboard/backend/app/services/workflow_service.py +155 -0
- dashboard/backend/main.py +18 -0
- docs/concepts/cancellation.mdx +362 -0
- docs/concepts/continue-as-new.mdx +434 -0
- docs/concepts/events.mdx +266 -0
- docs/concepts/fault-tolerance.mdx +370 -0
- docs/concepts/hooks.mdx +552 -0
- docs/concepts/limitations.mdx +167 -0
- docs/concepts/schedules.mdx +775 -0
- docs/concepts/sleep.mdx +312 -0
- docs/concepts/steps.mdx +301 -0
- docs/concepts/workflows.mdx +255 -0
- docs/guides/cli.mdx +942 -0
- docs/guides/configuration.mdx +560 -0
- docs/introduction.mdx +155 -0
- docs/quickstart.mdx +279 -0
- examples/__init__.py +1 -0
- examples/celery/__init__.py +1 -0
- examples/celery/durable/docker-compose.yml +55 -0
- examples/celery/durable/pyworkflow.config.yaml +12 -0
- examples/celery/durable/workflows/__init__.py +122 -0
- examples/celery/durable/workflows/basic.py +87 -0
- examples/celery/durable/workflows/batch_processing.py +102 -0
- examples/celery/durable/workflows/cancellation.py +273 -0
- examples/celery/durable/workflows/child_workflow_patterns.py +240 -0
- examples/celery/durable/workflows/child_workflows.py +202 -0
- examples/celery/durable/workflows/continue_as_new.py +260 -0
- examples/celery/durable/workflows/fault_tolerance.py +210 -0
- examples/celery/durable/workflows/hooks.py +211 -0
- examples/celery/durable/workflows/idempotency.py +112 -0
- examples/celery/durable/workflows/long_running.py +99 -0
- examples/celery/durable/workflows/retries.py +101 -0
- examples/celery/durable/workflows/schedules.py +209 -0
- examples/celery/transient/01_basic_workflow.py +91 -0
- examples/celery/transient/02_fault_tolerance.py +257 -0
- examples/celery/transient/__init__.py +20 -0
- examples/celery/transient/pyworkflow.config.yaml +25 -0
- examples/local/__init__.py +1 -0
- examples/local/durable/01_basic_workflow.py +94 -0
- examples/local/durable/02_file_storage.py +132 -0
- examples/local/durable/03_retries.py +169 -0
- examples/local/durable/04_long_running.py +119 -0
- examples/local/durable/05_event_log.py +145 -0
- examples/local/durable/06_idempotency.py +148 -0
- examples/local/durable/07_hooks.py +334 -0
- examples/local/durable/08_cancellation.py +233 -0
- examples/local/durable/09_child_workflows.py +198 -0
- examples/local/durable/10_child_workflow_patterns.py +265 -0
- examples/local/durable/11_continue_as_new.py +249 -0
- examples/local/durable/12_schedules.py +198 -0
- examples/local/durable/__init__.py +1 -0
- examples/local/transient/01_quick_tasks.py +87 -0
- examples/local/transient/02_retries.py +130 -0
- examples/local/transient/03_sleep.py +141 -0
- examples/local/transient/__init__.py +1 -0
- pyworkflow/__init__.py +256 -0
- pyworkflow/aws/__init__.py +68 -0
- pyworkflow/aws/context.py +234 -0
- pyworkflow/aws/handler.py +184 -0
- pyworkflow/aws/testing.py +310 -0
- pyworkflow/celery/__init__.py +41 -0
- pyworkflow/celery/app.py +198 -0
- pyworkflow/celery/scheduler.py +315 -0
- pyworkflow/celery/tasks.py +1746 -0
- pyworkflow/cli/__init__.py +132 -0
- pyworkflow/cli/__main__.py +6 -0
- pyworkflow/cli/commands/__init__.py +1 -0
- pyworkflow/cli/commands/hooks.py +640 -0
- pyworkflow/cli/commands/quickstart.py +495 -0
- pyworkflow/cli/commands/runs.py +773 -0
- pyworkflow/cli/commands/scheduler.py +130 -0
- pyworkflow/cli/commands/schedules.py +794 -0
- pyworkflow/cli/commands/setup.py +703 -0
- pyworkflow/cli/commands/worker.py +413 -0
- pyworkflow/cli/commands/workflows.py +1257 -0
- pyworkflow/cli/output/__init__.py +1 -0
- pyworkflow/cli/output/formatters.py +321 -0
- pyworkflow/cli/output/styles.py +121 -0
- pyworkflow/cli/utils/__init__.py +1 -0
- pyworkflow/cli/utils/async_helpers.py +30 -0
- pyworkflow/cli/utils/config.py +130 -0
- pyworkflow/cli/utils/config_generator.py +344 -0
- pyworkflow/cli/utils/discovery.py +53 -0
- pyworkflow/cli/utils/docker_manager.py +651 -0
- pyworkflow/cli/utils/interactive.py +364 -0
- pyworkflow/cli/utils/storage.py +115 -0
- pyworkflow/config.py +329 -0
- pyworkflow/context/__init__.py +63 -0
- pyworkflow/context/aws.py +230 -0
- pyworkflow/context/base.py +416 -0
- pyworkflow/context/local.py +930 -0
- pyworkflow/context/mock.py +381 -0
- pyworkflow/core/__init__.py +0 -0
- pyworkflow/core/exceptions.py +353 -0
- pyworkflow/core/registry.py +313 -0
- pyworkflow/core/scheduled.py +328 -0
- pyworkflow/core/step.py +494 -0
- pyworkflow/core/workflow.py +294 -0
- pyworkflow/discovery.py +248 -0
- pyworkflow/engine/__init__.py +0 -0
- pyworkflow/engine/events.py +879 -0
- pyworkflow/engine/executor.py +682 -0
- pyworkflow/engine/replay.py +273 -0
- pyworkflow/observability/__init__.py +19 -0
- pyworkflow/observability/logging.py +234 -0
- pyworkflow/primitives/__init__.py +33 -0
- pyworkflow/primitives/child_handle.py +174 -0
- pyworkflow/primitives/child_workflow.py +372 -0
- pyworkflow/primitives/continue_as_new.py +101 -0
- pyworkflow/primitives/define_hook.py +150 -0
- pyworkflow/primitives/hooks.py +97 -0
- pyworkflow/primitives/resume_hook.py +210 -0
- pyworkflow/primitives/schedule.py +545 -0
- pyworkflow/primitives/shield.py +96 -0
- pyworkflow/primitives/sleep.py +100 -0
- pyworkflow/runtime/__init__.py +21 -0
- pyworkflow/runtime/base.py +179 -0
- pyworkflow/runtime/celery.py +310 -0
- pyworkflow/runtime/factory.py +101 -0
- pyworkflow/runtime/local.py +706 -0
- pyworkflow/scheduler/__init__.py +9 -0
- pyworkflow/scheduler/local.py +248 -0
- pyworkflow/serialization/__init__.py +0 -0
- pyworkflow/serialization/decoder.py +146 -0
- pyworkflow/serialization/encoder.py +162 -0
- pyworkflow/storage/__init__.py +54 -0
- pyworkflow/storage/base.py +612 -0
- pyworkflow/storage/config.py +185 -0
- pyworkflow/storage/dynamodb.py +1315 -0
- pyworkflow/storage/file.py +827 -0
- pyworkflow/storage/memory.py +549 -0
- pyworkflow/storage/postgres.py +1161 -0
- pyworkflow/storage/schemas.py +486 -0
- pyworkflow/storage/sqlite.py +1136 -0
- pyworkflow/utils/__init__.py +0 -0
- pyworkflow/utils/duration.py +177 -0
- pyworkflow/utils/schedule.py +391 -0
- pyworkflow_engine-0.1.7.dist-info/METADATA +687 -0
- pyworkflow_engine-0.1.7.dist-info/RECORD +196 -0
- pyworkflow_engine-0.1.7.dist-info/WHEEL +5 -0
- pyworkflow_engine-0.1.7.dist-info/entry_points.txt +2 -0
- pyworkflow_engine-0.1.7.dist-info/licenses/LICENSE +21 -0
- pyworkflow_engine-0.1.7.dist-info/top_level.txt +5 -0
- tests/examples/__init__.py +0 -0
- tests/integration/__init__.py +0 -0
- tests/integration/test_cancellation.py +330 -0
- tests/integration/test_child_workflows.py +439 -0
- tests/integration/test_continue_as_new.py +428 -0
- tests/integration/test_dynamodb_storage.py +1146 -0
- tests/integration/test_fault_tolerance.py +369 -0
- tests/integration/test_schedule_storage.py +484 -0
- tests/unit/__init__.py +0 -0
- tests/unit/backends/__init__.py +1 -0
- tests/unit/backends/test_dynamodb_storage.py +1554 -0
- tests/unit/backends/test_postgres_storage.py +1281 -0
- tests/unit/backends/test_sqlite_storage.py +1460 -0
- tests/unit/conftest.py +41 -0
- tests/unit/test_cancellation.py +364 -0
- tests/unit/test_child_workflows.py +680 -0
- tests/unit/test_continue_as_new.py +441 -0
- tests/unit/test_event_limits.py +316 -0
- tests/unit/test_executor.py +320 -0
- tests/unit/test_fault_tolerance.py +334 -0
- tests/unit/test_hooks.py +495 -0
- tests/unit/test_registry.py +261 -0
- tests/unit/test_replay.py +420 -0
- tests/unit/test_schedule_schemas.py +285 -0
- tests/unit/test_schedule_utils.py +286 -0
- tests/unit/test_scheduled_workflow.py +274 -0
- tests/unit/test_step.py +353 -0
- tests/unit/test_workflow.py +243 -0
tests/unit/backends/test_postgres_storage.py
@@ -0,0 +1,1281 @@
"""
Unit tests for PostgreSQL storage backend.

These tests verify the PostgresStorageBackend implementation.
For integration tests with a real PostgreSQL database, see tests/integration/.
"""

from contextlib import asynccontextmanager
from datetime import UTC, datetime
from unittest.mock import AsyncMock, MagicMock, patch

import pytest

from pyworkflow.engine.events import Event, EventType
from pyworkflow.storage.schemas import (
    Hook,
    HookStatus,
    OverlapPolicy,
    RunStatus,
    Schedule,
    ScheduleSpec,
    ScheduleStatus,
    StepExecution,
    StepStatus,
    WorkflowRun,
)

# Skip all tests if asyncpg is not installed
pytest.importorskip("asyncpg")

from pyworkflow.storage.postgres import PostgresStorageBackend


@pytest.fixture
def mock_backend():
    """Create a backend with mocked pool for testing."""
    backend = PostgresStorageBackend()
    mock_pool = MagicMock()
    mock_conn = AsyncMock()

    # Make pool.acquire() work as async context manager
    @asynccontextmanager
    async def mock_acquire():
        yield mock_conn

    mock_pool.acquire = mock_acquire
    backend._pool = mock_pool
    return backend, mock_conn


class TestPostgresStorageBackendInit:
    """Test PostgresStorageBackend initialization."""

    def test_init_with_dsn(self):
        """Test initialization with DSN connection string."""
        dsn = "postgresql://user:pass@localhost:5432/db"
        backend = PostgresStorageBackend(dsn=dsn)

        assert backend.dsn == dsn
        assert backend._pool is None
        assert backend._initialized is False

    def test_init_with_individual_params(self):
        """Test initialization with individual connection parameters."""
        backend = PostgresStorageBackend(
            host="db.example.com",
            port=5433,
            user="testuser",
            password="testpass",
            database="testdb",
        )

        assert backend.dsn is None
        assert backend.host == "db.example.com"
        assert backend.port == 5433
        assert backend.user == "testuser"
        assert backend.password == "testpass"
        assert backend.database == "testdb"

    def test_init_with_pool_settings(self):
        """Test initialization with custom pool settings."""
        backend = PostgresStorageBackend(
            min_pool_size=5,
            max_pool_size=20,
        )

        assert backend.min_pool_size == 5
        assert backend.max_pool_size == 20

    def test_build_dsn_with_password(self):
        """Test DSN building with password."""
        backend = PostgresStorageBackend(
            host="localhost",
            port=5432,
            user="myuser",
            password="mypass",
            database="mydb",
        )

        dsn = backend._build_dsn()
        assert dsn == "postgresql://myuser:mypass@localhost:5432/mydb"

    def test_build_dsn_without_password(self):
        """Test DSN building without password."""
        backend = PostgresStorageBackend(
            host="localhost",
            port=5432,
            user="myuser",
            password="",
            database="mydb",
        )

        dsn = backend._build_dsn()
        assert dsn == "postgresql://myuser@localhost:5432/mydb"


class TestPostgresStorageBackendConnection:
    """Test connection management."""

    @pytest.mark.asyncio
    async def test_ensure_connected_raises_when_not_connected(self):
        """Test that _ensure_connected raises when pool is None."""
        backend = PostgresStorageBackend()

        with pytest.raises(RuntimeError, match="Database not connected"):
            backend._ensure_connected()

    @pytest.mark.asyncio
    async def test_connect_creates_pool(self):
        """Test that connect creates a connection pool."""
        backend = PostgresStorageBackend(dsn="postgresql://test@localhost/test")

        mock_pool = MagicMock()
        mock_pool.acquire = MagicMock(return_value=AsyncMock())

        async def mock_create_pool(*args, **kwargs):
            return mock_pool

        with patch("asyncpg.create_pool", side_effect=mock_create_pool) as mock_create:
            # Mock the schema initialization
            backend._initialize_schema = AsyncMock()

            await backend.connect()

        mock_create.assert_called_once()
        assert backend._pool is not None
        assert backend._initialized is True

    @pytest.mark.asyncio
    async def test_disconnect_closes_pool(self):
        """Test that disconnect closes the connection pool."""
        backend = PostgresStorageBackend()
        mock_pool = AsyncMock()
        backend._pool = mock_pool
        backend._initialized = True

        await backend.disconnect()

        mock_pool.close.assert_called_once()
        assert backend._pool is None
        assert backend._initialized is False


class TestRowConversion:
    """Test row to object conversion methods."""

    def test_row_to_workflow_run(self):
        """Test converting database row to WorkflowRun."""
        backend = PostgresStorageBackend()

        # Create a mock record that behaves like asyncpg.Record
        row = {
            "run_id": "run_123",
            "workflow_name": "test_workflow",
            "status": "running",
            "created_at": datetime(2024, 1, 1, 12, 0, 0, tzinfo=UTC),
            "updated_at": datetime(2024, 1, 1, 12, 0, 1, tzinfo=UTC),
            "started_at": datetime(2024, 1, 1, 12, 0, 0, tzinfo=UTC),
            "completed_at": None,
            "input_args": "[]",
            "input_kwargs": '{"key": "value"}',
            "result": None,
            "error": None,
            "idempotency_key": "idem_123",
            "max_duration": "1h",
            "metadata": '{"foo": "bar"}',
            "recovery_attempts": 0,
            "max_recovery_attempts": 3,
            "recover_on_worker_loss": True,
            "parent_run_id": None,
            "nesting_depth": 0,
            "continued_from_run_id": None,
            "continued_to_run_id": None,
        }

        run = backend._row_to_workflow_run(row)

        assert run.run_id == "run_123"
        assert run.workflow_name == "test_workflow"
        assert run.status == RunStatus.RUNNING
        assert run.idempotency_key == "idem_123"
        assert run.metadata == {"foo": "bar"}
        assert run.recover_on_worker_loss is True

    def test_row_to_event(self):
        """Test converting database row to Event."""
        backend = PostgresStorageBackend()

        row = {
            "event_id": "event_123",
            "run_id": "run_123",
            "sequence": 5,
            "type": "step.completed",
            "timestamp": datetime(2024, 1, 1, 12, 0, 0, tzinfo=UTC),
            "data": '{"step_id": "step_1"}',
        }

        event = backend._row_to_event(row)

        assert event.event_id == "event_123"
        assert event.run_id == "run_123"
        assert event.sequence == 5
        assert event.type == EventType.STEP_COMPLETED
        assert event.data == {"step_id": "step_1"}

    def test_row_to_step_execution(self):
        """Test converting database row to StepExecution."""
        backend = PostgresStorageBackend()

        row = {
            "step_id": "step_123",
            "run_id": "run_123",
            "step_name": "process_data",
            "status": "completed",
            "created_at": datetime(2024, 1, 1, 12, 0, 0, tzinfo=UTC),
            "started_at": datetime(2024, 1, 1, 12, 0, 1, tzinfo=UTC),
            "completed_at": datetime(2024, 1, 1, 12, 0, 5, tzinfo=UTC),
            "input_args": "[]",
            "input_kwargs": "{}",
            "result": '"success"',
            "error": None,
            "retry_count": 2,
        }

        step = backend._row_to_step_execution(row)

        assert step.step_id == "step_123"
        assert step.step_name == "process_data"
        assert step.status == StepStatus.COMPLETED
        assert step.attempt == 2

    def test_row_to_hook(self):
        """Test converting database row to Hook."""
        backend = PostgresStorageBackend()

        row = {
            "hook_id": "hook_123",
            "run_id": "run_123",
            "token": "token_abc",
            "created_at": datetime(2024, 1, 1, 12, 0, 0, tzinfo=UTC),
            "received_at": None,
            "expires_at": datetime(2024, 1, 2, 12, 0, 0, tzinfo=UTC),
            "status": "pending",
            "payload": None,
            "metadata": '{"webhook": true}',
        }

        hook = backend._row_to_hook(row)

        assert hook.hook_id == "hook_123"
        assert hook.token == "token_abc"
        assert hook.status == HookStatus.PENDING
        assert hook.metadata == {"webhook": True}

    def test_row_to_schedule(self):
        """Test converting database row to Schedule."""
        backend = PostgresStorageBackend()

        row = {
            "schedule_id": "sched_123",
            "workflow_name": "daily_report",
            "spec": '{"cron": "0 9 * * *", "timezone": "UTC"}',
            "spec_type": "cron",
            "timezone": "UTC",
            "input_args": "[]",
            "input_kwargs": "{}",
            "status": "active",
            "overlap_policy": "skip",
            "next_run_time": datetime(2024, 1, 2, 9, 0, 0, tzinfo=UTC),
            "last_run_time": datetime(2024, 1, 1, 9, 0, 0, tzinfo=UTC),
            "running_run_ids": '["run_1", "run_2"]',
            "metadata": "{}",
            "created_at": datetime(2024, 1, 1, 0, 0, 0, tzinfo=UTC),
            "updated_at": datetime(2024, 1, 1, 9, 0, 0, tzinfo=UTC),
            "paused_at": None,
            "deleted_at": None,
        }

        schedule = backend._row_to_schedule(row)

        assert schedule.schedule_id == "sched_123"
        assert schedule.workflow_name == "daily_report"
        assert schedule.spec.cron == "0 9 * * *"
        assert schedule.spec.timezone == "UTC"
        assert schedule.status == ScheduleStatus.ACTIVE
        assert schedule.overlap_policy == OverlapPolicy.SKIP
        assert schedule.running_run_ids == ["run_1", "run_2"]


class TestPostgresStorageBackendConfig:
    """Test storage configuration integration."""

    def test_storage_to_config_with_dsn(self):
        """Test serializing backend with DSN to config."""
        from pyworkflow.storage.config import storage_to_config

        backend = PostgresStorageBackend(dsn="postgresql://user:pass@host:5432/db")
        config = storage_to_config(backend)

        assert config["type"] == "postgres"
        assert config["dsn"] == "postgresql://user:pass@host:5432/db"

    def test_storage_to_config_with_params(self):
        """Test serializing backend with params to config."""
        from pyworkflow.storage.config import storage_to_config

        backend = PostgresStorageBackend(
            host="db.example.com",
            port=5433,
            user="testuser",
            password="testpass",
            database="testdb",
        )
        config = storage_to_config(backend)

        assert config["type"] == "postgres"
        assert config["host"] == "db.example.com"
        assert config["port"] == 5433
        assert config["user"] == "testuser"
        assert config["password"] == "testpass"
        assert config["database"] == "testdb"

    def test_config_to_storage_with_dsn(self):
        """Test creating backend from config with DSN."""
        from pyworkflow.storage.config import config_to_storage

        config = {"type": "postgres", "dsn": "postgresql://user:pass@host:5432/db"}
        backend = config_to_storage(config)

        assert isinstance(backend, PostgresStorageBackend)
        assert backend.dsn == "postgresql://user:pass@host:5432/db"

    def test_config_to_storage_with_params(self):
        """Test creating backend from config with params."""
        from pyworkflow.storage.config import config_to_storage

        config = {
            "type": "postgres",
            "host": "db.example.com",
            "port": 5433,
            "user": "testuser",
            "password": "testpass",
            "database": "testdb",
        }
        backend = config_to_storage(config)

        assert isinstance(backend, PostgresStorageBackend)
        assert backend.host == "db.example.com"
        assert backend.port == 5433
        assert backend.user == "testuser"
        assert backend.password == "testpass"
        assert backend.database == "testdb"


class TestWorkflowRunOperations:
    """Test workflow run CRUD operations."""

    @pytest.mark.asyncio
    async def test_create_run(self, mock_backend):
        """Test creating a workflow run."""
        backend, mock_conn = mock_backend

        run = WorkflowRun(
            run_id="run_123",
            workflow_name="test_workflow",
            status=RunStatus.PENDING,
        )

        await backend.create_run(run)

        mock_conn.execute.assert_called_once()
        call_args = mock_conn.execute.call_args
        assert "INSERT INTO workflow_runs" in call_args[0][0]
        assert call_args[0][1] == "run_123"

    @pytest.mark.asyncio
    async def test_get_run_found(self, mock_backend):
        """Test retrieving an existing workflow run."""
        backend, mock_conn = mock_backend

        mock_conn.fetchrow.return_value = {
            "run_id": "run_123",
            "workflow_name": "test_workflow",
            "status": "running",
            "created_at": datetime(2024, 1, 1, 12, 0, 0, tzinfo=UTC),
            "updated_at": datetime(2024, 1, 1, 12, 0, 1, tzinfo=UTC),
            "started_at": None,
            "completed_at": None,
            "input_args": "[]",
            "input_kwargs": "{}",
            "result": None,
            "error": None,
            "idempotency_key": None,
            "max_duration": None,
            "metadata": "{}",
            "recovery_attempts": 0,
            "max_recovery_attempts": 3,
            "recover_on_worker_loss": True,
            "parent_run_id": None,
            "nesting_depth": 0,
            "continued_from_run_id": None,
            "continued_to_run_id": None,
        }

        run = await backend.get_run("run_123")

        assert run is not None
        assert run.run_id == "run_123"
        assert run.status == RunStatus.RUNNING
        mock_conn.fetchrow.assert_called_once()

    @pytest.mark.asyncio
    async def test_get_run_not_found(self, mock_backend):
        """Test retrieving a non-existent workflow run."""
        backend, mock_conn = mock_backend
        mock_conn.fetchrow.return_value = None

        run = await backend.get_run("nonexistent")

        assert run is None

    @pytest.mark.asyncio
    async def test_get_run_by_idempotency_key(self, mock_backend):
        """Test retrieving workflow run by idempotency key."""
        backend, mock_conn = mock_backend

        mock_conn.fetchrow.return_value = {
            "run_id": "run_123",
            "workflow_name": "test_workflow",
            "status": "completed",
            "created_at": datetime(2024, 1, 1, 12, 0, 0, tzinfo=UTC),
            "updated_at": datetime(2024, 1, 1, 12, 0, 1, tzinfo=UTC),
            "started_at": None,
            "completed_at": None,
            "input_args": "[]",
            "input_kwargs": "{}",
            "result": None,
            "error": None,
            "idempotency_key": "idem_key_123",
            "max_duration": None,
            "metadata": "{}",
            "recovery_attempts": 0,
            "max_recovery_attempts": 3,
            "recover_on_worker_loss": True,
            "parent_run_id": None,
            "nesting_depth": 0,
            "continued_from_run_id": None,
            "continued_to_run_id": None,
        }

        run = await backend.get_run_by_idempotency_key("idem_key_123")

        assert run is not None
        assert run.idempotency_key == "idem_key_123"

    @pytest.mark.asyncio
    async def test_update_run_status(self, mock_backend):
        """Test updating workflow run status."""
        backend, mock_conn = mock_backend

        await backend.update_run_status("run_123", RunStatus.COMPLETED, result='"done"', error=None)

        mock_conn.execute.assert_called_once()
        call_args = mock_conn.execute.call_args
        assert "UPDATE workflow_runs" in call_args[0][0]
        assert "status" in call_args[0][0]

    @pytest.mark.asyncio
    async def test_update_run_recovery_attempts(self, mock_backend):
        """Test updating recovery attempts counter."""
        backend, mock_conn = mock_backend

        await backend.update_run_recovery_attempts("run_123", 2)

        mock_conn.execute.assert_called_once()
        call_args = mock_conn.execute.call_args
        assert "recovery_attempts" in call_args[0][0]

    @pytest.mark.asyncio
    async def test_list_runs(self, mock_backend):
        """Test listing workflow runs."""
        backend, mock_conn = mock_backend

        mock_conn.fetch.return_value = [
            {
                "run_id": "run_1",
                "workflow_name": "test_workflow",
                "status": "completed",
                "created_at": datetime(2024, 1, 1, 12, 0, 0, tzinfo=UTC),
                "updated_at": datetime(2024, 1, 1, 12, 0, 1, tzinfo=UTC),
                "started_at": None,
                "completed_at": None,
                "input_args": "[]",
                "input_kwargs": "{}",
                "result": None,
                "error": None,
                "idempotency_key": None,
                "max_duration": None,
                "metadata": "{}",
                "recovery_attempts": 0,
                "max_recovery_attempts": 3,
                "recover_on_worker_loss": True,
                "parent_run_id": None,
                "nesting_depth": 0,
                "continued_from_run_id": None,
                "continued_to_run_id": None,
            }
        ]

        runs, cursor = await backend.list_runs(limit=10)

        assert len(runs) == 1
        assert runs[0].run_id == "run_1"


class TestEventOperations:
    """Test event log operations."""

    @pytest.mark.asyncio
    async def test_record_event(self, mock_backend):
        """Test recording an event."""
        backend, mock_conn = mock_backend

        # Mock the sequence fetch
        mock_conn.fetchrow.return_value = [0]

        # Mock transaction context manager
        @asynccontextmanager
        async def mock_transaction():
            yield

        mock_conn.transaction = mock_transaction

        event = Event(
            event_id="event_123",
            run_id="run_123",
            type=EventType.WORKFLOW_STARTED,
            timestamp=datetime.now(UTC),
            data={"key": "value"},
        )

        await backend.record_event(event)

        mock_conn.execute.assert_called_once()
        call_args = mock_conn.execute.call_args
        assert "INSERT INTO events" in call_args[0][0]

    @pytest.mark.asyncio
    async def test_get_events(self, mock_backend):
        """Test retrieving events for a workflow run."""
        backend, mock_conn = mock_backend

        mock_conn.fetch.return_value = [
            {
                "event_id": "event_1",
                "run_id": "run_123",
                "sequence": 0,
                "type": "workflow.started",
                "timestamp": datetime(2024, 1, 1, 12, 0, 0, tzinfo=UTC),
                "data": "{}",
            },
            {
                "event_id": "event_2",
                "run_id": "run_123",
                "sequence": 1,
                "type": "step.completed",
                "timestamp": datetime(2024, 1, 1, 12, 0, 1, tzinfo=UTC),
                "data": '{"step_id": "step_1"}',
            },
        ]

        events = await backend.get_events("run_123")

        assert len(events) == 2
        assert events[0].type == EventType.WORKFLOW_STARTED
        assert events[1].type == EventType.STEP_COMPLETED

    @pytest.mark.asyncio
    async def test_get_latest_event(self, mock_backend):
        """Test retrieving the latest event."""
        backend, mock_conn = mock_backend

        mock_conn.fetchrow.return_value = {
            "event_id": "event_5",
            "run_id": "run_123",
            "sequence": 5,
            "type": "step.completed",
            "timestamp": datetime(2024, 1, 1, 12, 0, 5, tzinfo=UTC),
            "data": "{}",
        }

        event = await backend.get_latest_event("run_123")

        assert event is not None
        assert event.sequence == 5


class TestStepOperations:
    """Test step execution operations."""

    @pytest.mark.asyncio
    async def test_create_step(self, mock_backend):
        """Test creating a step execution record."""
        backend, mock_conn = mock_backend

        step = StepExecution(
            step_id="step_123",
            run_id="run_123",
            step_name="process_data",
            status=StepStatus.PENDING,
        )

        await backend.create_step(step)

        mock_conn.execute.assert_called_once()
        call_args = mock_conn.execute.call_args
        assert "INSERT INTO steps" in call_args[0][0]

    @pytest.mark.asyncio
    async def test_get_step_found(self, mock_backend):
        """Test retrieving an existing step."""
        backend, mock_conn = mock_backend

        mock_conn.fetchrow.return_value = {
            "step_id": "step_123",
            "run_id": "run_123",
            "step_name": "process_data",
            "status": "completed",
            "created_at": datetime(2024, 1, 1, 12, 0, 0, tzinfo=UTC),
            "started_at": datetime(2024, 1, 1, 12, 0, 1, tzinfo=UTC),
            "completed_at": datetime(2024, 1, 1, 12, 0, 5, tzinfo=UTC),
            "input_args": "[]",
            "input_kwargs": "{}",
            "result": '"success"',
            "error": None,
            "retry_count": 1,
        }

        step = await backend.get_step("step_123")

        assert step is not None
        assert step.step_id == "step_123"
        assert step.status == StepStatus.COMPLETED

    @pytest.mark.asyncio
    async def test_get_step_not_found(self, mock_backend):
        """Test retrieving a non-existent step."""
        backend, mock_conn = mock_backend
        mock_conn.fetchrow.return_value = None

        step = await backend.get_step("nonexistent")

        assert step is None

    @pytest.mark.asyncio
    async def test_update_step_status(self, mock_backend):
        """Test updating step execution status."""
        backend, mock_conn = mock_backend

        await backend.update_step_status("step_123", StepStatus.COMPLETED, result='"done"')

        mock_conn.execute.assert_called_once()
        call_args = mock_conn.execute.call_args
        assert "UPDATE steps" in call_args[0][0]

    @pytest.mark.asyncio
    async def test_list_steps(self, mock_backend):
        """Test listing steps for a workflow run."""
        backend, mock_conn = mock_backend

        mock_conn.fetch.return_value = [
            {
                "step_id": "step_1",
                "run_id": "run_123",
                "step_name": "step_one",
                "status": "completed",
                "created_at": datetime(2024, 1, 1, 12, 0, 0, tzinfo=UTC),
                "started_at": None,
                "completed_at": None,
                "input_args": "[]",
                "input_kwargs": "{}",
                "result": None,
                "error": None,
                "retry_count": 1,
            }
        ]

        steps = await backend.list_steps("run_123")

        assert len(steps) == 1
        assert steps[0].step_id == "step_1"


class TestHookOperations:
    """Test hook operations."""

    @pytest.mark.asyncio
    async def test_create_hook(self, mock_backend):
        """Test creating a hook record."""
        backend, mock_conn = mock_backend

        hook = Hook(
            hook_id="hook_123",
            run_id="run_123",
            token="token_abc",
        )

        await backend.create_hook(hook)

        mock_conn.execute.assert_called_once()
        call_args = mock_conn.execute.call_args
        assert "INSERT INTO hooks" in call_args[0][0]

    @pytest.mark.asyncio
    async def test_get_hook_found(self, mock_backend):
        """Test retrieving an existing hook."""
        backend, mock_conn = mock_backend

        mock_conn.fetchrow.return_value = {
            "hook_id": "hook_123",
            "run_id": "run_123",
            "token": "token_abc",
            "created_at": datetime(2024, 1, 1, 12, 0, 0, tzinfo=UTC),
            "received_at": None,
            "expires_at": None,
            "status": "pending",
            "payload": None,
            "metadata": "{}",
        }

        hook = await backend.get_hook("hook_123")

        assert hook is not None
        assert hook.hook_id == "hook_123"
        assert hook.status == HookStatus.PENDING

    @pytest.mark.asyncio
    async def test_get_hook_not_found(self, mock_backend):
        """Test retrieving a non-existent hook."""
        backend, mock_conn = mock_backend
        mock_conn.fetchrow.return_value = None

        hook = await backend.get_hook("nonexistent")

        assert hook is None

    @pytest.mark.asyncio
    async def test_get_hook_by_token(self, mock_backend):
        """Test retrieving a hook by token."""
        backend, mock_conn = mock_backend

        mock_conn.fetchrow.return_value = {
            "hook_id": "hook_123",
            "run_id": "run_123",
            "token": "token_abc",
            "created_at": datetime(2024, 1, 1, 12, 0, 0, tzinfo=UTC),
            "received_at": None,
            "expires_at": None,
            "status": "pending",
            "payload": None,
            "metadata": "{}",
        }

        hook = await backend.get_hook_by_token("token_abc")

        assert hook is not None
        assert hook.token == "token_abc"

    @pytest.mark.asyncio
    async def test_update_hook_status(self, mock_backend):
        """Test updating hook status."""
        backend, mock_conn = mock_backend

        await backend.update_hook_status(
            "hook_123", HookStatus.RECEIVED, payload='{"data": "test"}'
        )

        mock_conn.execute.assert_called_once()
        call_args = mock_conn.execute.call_args
        assert "UPDATE hooks" in call_args[0][0]

    @pytest.mark.asyncio
    async def test_list_hooks(self, mock_backend):
        """Test listing hooks."""
        backend, mock_conn = mock_backend

        mock_conn.fetch.return_value = [
            {
                "hook_id": "hook_1",
                "run_id": "run_123",
                "token": "token_1",
                "created_at": datetime(2024, 1, 1, 12, 0, 0, tzinfo=UTC),
                "received_at": None,
                "expires_at": None,
                "status": "pending",
                "payload": None,
                "metadata": "{}",
            }
        ]

        hooks = await backend.list_hooks(run_id="run_123")

        assert len(hooks) == 1
        assert hooks[0].hook_id == "hook_1"


class TestCancellationOperations:
    """Test cancellation flag operations."""

    @pytest.mark.asyncio
    async def test_set_cancellation_flag(self, mock_backend):
        """Test setting a cancellation flag."""
        backend, mock_conn = mock_backend

        await backend.set_cancellation_flag("run_123")

        mock_conn.execute.assert_called_once()
        call_args = mock_conn.execute.call_args
        assert "INSERT INTO cancellation_flags" in call_args[0][0]

    @pytest.mark.asyncio
    async def test_check_cancellation_flag_set(self, mock_backend):
        """Test checking a set cancellation flag."""
        backend, mock_conn = mock_backend
        mock_conn.fetchrow.return_value = [1]  # Row exists

        result = await backend.check_cancellation_flag("run_123")

        assert result is True

    @pytest.mark.asyncio
    async def test_check_cancellation_flag_not_set(self, mock_backend):
        """Test checking when cancellation flag is not set."""
        backend, mock_conn = mock_backend
        mock_conn.fetchrow.return_value = None

        result = await backend.check_cancellation_flag("run_123")

        assert result is False

    @pytest.mark.asyncio
    async def test_clear_cancellation_flag(self, mock_backend):
        """Test clearing a cancellation flag."""
        backend, mock_conn = mock_backend

        await backend.clear_cancellation_flag("run_123")

        mock_conn.execute.assert_called_once()
        call_args = mock_conn.execute.call_args
        assert "DELETE FROM cancellation_flags" in call_args[0][0]


class TestContinueAsNewOperations:
    """Test continue-as-new chain operations."""

    @pytest.mark.asyncio
    async def test_update_run_continuation(self, mock_backend):
        """Test updating continuation link."""
        backend, mock_conn = mock_backend

        await backend.update_run_continuation("run_1", "run_2")

        mock_conn.execute.assert_called_once()
        call_args = mock_conn.execute.call_args
        assert "continued_to_run_id" in call_args[0][0]

    @pytest.mark.asyncio
    async def test_get_workflow_chain(self, mock_backend):
        """Test retrieving workflow chain."""
        backend, mock_conn = mock_backend

        # First call finds the start of chain
        mock_conn.fetchrow.side_effect = [
            None,  # No continued_from_run_id (this is the start)
        ]

        # Mock get_run for chain traversal
        with patch.object(
            backend,
            "get_run",
            return_value=WorkflowRun(
                run_id="run_1",
                workflow_name="test_workflow",
                status=RunStatus.COMPLETED,
                continued_to_run_id=None,
            ),
        ):
            runs = await backend.get_workflow_chain("run_1")

        assert len(runs) == 1


class TestChildWorkflowOperations:
    """Test child workflow operations."""

    @pytest.mark.asyncio
    async def test_get_children(self, mock_backend):
        """Test retrieving child workflows."""
        backend, mock_conn = mock_backend

        mock_conn.fetch.return_value = [
            {
                "run_id": "child_1",
                "workflow_name": "child_workflow",
                "status": "completed",
                "created_at": datetime(2024, 1, 1, 12, 0, 0, tzinfo=UTC),
                "updated_at": datetime(2024, 1, 1, 12, 0, 1, tzinfo=UTC),
                "started_at": None,
                "completed_at": None,
                "input_args": "[]",
                "input_kwargs": "{}",
                "result": None,
                "error": None,
                "idempotency_key": None,
                "max_duration": None,
                "metadata": "{}",
                "recovery_attempts": 0,
                "max_recovery_attempts": 3,
                "recover_on_worker_loss": True,
                "parent_run_id": "parent_123",
                "nesting_depth": 1,
                "continued_from_run_id": None,
                "continued_to_run_id": None,
            }
        ]

        children = await backend.get_children("parent_123")

        assert len(children) == 1
        assert children[0].parent_run_id == "parent_123"

    @pytest.mark.asyncio
    async def test_get_parent_found(self, mock_backend):
        """Test retrieving parent workflow."""
        backend, mock_conn = mock_backend

        # First call gets the child run
        child_data = {
            "run_id": "child_1",
            "workflow_name": "child_workflow",
            "status": "running",
            "created_at": datetime(2024, 1, 1, 12, 0, 0, tzinfo=UTC),
            "updated_at": datetime(2024, 1, 1, 12, 0, 1, tzinfo=UTC),
            "started_at": None,
            "completed_at": None,
            "input_args": "[]",
            "input_kwargs": "{}",
            "result": None,
            "error": None,
            "idempotency_key": None,
            "max_duration": None,
            "metadata": "{}",
            "recovery_attempts": 0,
            "max_recovery_attempts": 3,
            "recover_on_worker_loss": True,
            "parent_run_id": "parent_123",
            "nesting_depth": 1,
            "continued_from_run_id": None,
            "continued_to_run_id": None,
        }

        parent_data = {
            "run_id": "parent_123",
            "workflow_name": "parent_workflow",
            "status": "running",
            "created_at": datetime(2024, 1, 1, 12, 0, 0, tzinfo=UTC),
            "updated_at": datetime(2024, 1, 1, 12, 0, 1, tzinfo=UTC),
            "started_at": None,
            "completed_at": None,
            "input_args": "[]",
            "input_kwargs": "{}",
            "result": None,
            "error": None,
            "idempotency_key": None,
            "max_duration": None,
            "metadata": "{}",
            "recovery_attempts": 0,
            "max_recovery_attempts": 3,
            "recover_on_worker_loss": True,
            "parent_run_id": None,
            "nesting_depth": 0,
            "continued_from_run_id": None,
            "continued_to_run_id": None,
        }

        mock_conn.fetchrow.side_effect = [child_data, parent_data]

        parent = await backend.get_parent("child_1")

        assert parent is not None
        assert parent.run_id == "parent_123"

    @pytest.mark.asyncio
    async def test_get_parent_not_found(self, mock_backend):
        """Test get_parent when run has no parent."""
        backend, mock_conn = mock_backend

        mock_conn.fetchrow.return_value = {
            "run_id": "run_1",
            "workflow_name": "test_workflow",
            "status": "running",
            "created_at": datetime(2024, 1, 1, 12, 0, 0, tzinfo=UTC),
            "updated_at": datetime(2024, 1, 1, 12, 0, 1, tzinfo=UTC),
            "started_at": None,
            "completed_at": None,
            "input_args": "[]",
            "input_kwargs": "{}",
            "result": None,
            "error": None,
            "idempotency_key": None,
            "max_duration": None,
            "metadata": "{}",
            "recovery_attempts": 0,
            "max_recovery_attempts": 3,
            "recover_on_worker_loss": True,
            "parent_run_id": None,
            "nesting_depth": 0,
            "continued_from_run_id": None,
            "continued_to_run_id": None,
        }

        parent = await backend.get_parent("run_1")

        assert parent is None

    @pytest.mark.asyncio
    async def test_get_nesting_depth(self, mock_backend):
        """Test getting nesting depth."""
        backend, mock_conn = mock_backend

        mock_conn.fetchrow.return_value = {
            "run_id": "run_1",
            "workflow_name": "test_workflow",
            "status": "running",
            "created_at": datetime(2024, 1, 1, 12, 0, 0, tzinfo=UTC),
            "updated_at": datetime(2024, 1, 1, 12, 0, 1, tzinfo=UTC),
            "started_at": None,
            "completed_at": None,
            "input_args": "[]",
            "input_kwargs": "{}",
            "result": None,
            "error": None,
            "idempotency_key": None,
            "max_duration": None,
            "metadata": "{}",
            "recovery_attempts": 0,
            "max_recovery_attempts": 3,
            "recover_on_worker_loss": True,
            "parent_run_id": None,
            "nesting_depth": 2,
            "continued_from_run_id": None,
            "continued_to_run_id": None,
        }

        depth = await backend.get_nesting_depth("run_1")

        assert depth == 2


class TestScheduleOperations:
    """Test schedule operations."""

    @pytest.mark.asyncio
    async def test_create_schedule(self, mock_backend):
        """Test creating a schedule."""
        backend, mock_conn = mock_backend

        schedule = Schedule(
            schedule_id="sched_123",
            workflow_name="daily_report",
            spec=ScheduleSpec(cron="0 9 * * *"),
        )

        await backend.create_schedule(schedule)

        mock_conn.execute.assert_called_once()
        call_args = mock_conn.execute.call_args
        assert "INSERT INTO schedules" in call_args[0][0]

    @pytest.mark.asyncio
    async def test_get_schedule_found(self, mock_backend):
        """Test retrieving an existing schedule."""
        backend, mock_conn = mock_backend

        mock_conn.fetchrow.return_value = {
            "schedule_id": "sched_123",
            "workflow_name": "daily_report",
            "spec": '{"cron": "0 9 * * *", "timezone": "UTC"}',
            "spec_type": "cron",
            "timezone": "UTC",
            "input_args": "[]",
            "input_kwargs": "{}",
            "status": "active",
            "overlap_policy": "skip",
            "next_run_time": datetime(2024, 1, 2, 9, 0, 0, tzinfo=UTC),
            "last_run_time": None,
            "running_run_ids": "[]",
            "metadata": "{}",
            "created_at": datetime(2024, 1, 1, 0, 0, 0, tzinfo=UTC),
            "updated_at": None,
            "paused_at": None,
            "deleted_at": None,
        }

        schedule = await backend.get_schedule("sched_123")

        assert schedule is not None
        assert schedule.schedule_id == "sched_123"
        assert schedule.spec.cron == "0 9 * * *"

    @pytest.mark.asyncio
    async def test_get_schedule_not_found(self, mock_backend):
        """Test retrieving a non-existent schedule."""
        backend, mock_conn = mock_backend
        mock_conn.fetchrow.return_value = None

        schedule = await backend.get_schedule("nonexistent")

        assert schedule is None

    @pytest.mark.asyncio
    async def test_update_schedule(self, mock_backend):
        """Test updating a schedule."""
        backend, mock_conn = mock_backend

        schedule = Schedule(
            schedule_id="sched_123",
            workflow_name="daily_report",
            spec=ScheduleSpec(cron="0 10 * * *"),
        )

        await backend.update_schedule(schedule)

        mock_conn.execute.assert_called_once()
        call_args = mock_conn.execute.call_args
        assert "UPDATE schedules" in call_args[0][0]

    @pytest.mark.asyncio
    async def test_delete_schedule(self, mock_backend):
        """Test deleting (soft delete) a schedule."""
        backend, mock_conn = mock_backend

        await backend.delete_schedule("sched_123")

        mock_conn.execute.assert_called_once()
        call_args = mock_conn.execute.call_args
        assert "UPDATE schedules" in call_args[0][0]
        assert "deleted_at" in call_args[0][0]

    @pytest.mark.asyncio
    async def test_list_schedules(self, mock_backend):
        """Test listing schedules."""
        backend, mock_conn = mock_backend

        mock_conn.fetch.return_value = [
            {
                "schedule_id": "sched_1",
                "workflow_name": "daily_report",
                "spec": '{"cron": "0 9 * * *", "timezone": "UTC"}',
                "spec_type": "cron",
                "timezone": "UTC",
                "input_args": "[]",
                "input_kwargs": "{}",
                "status": "active",
                "overlap_policy": "skip",
                "next_run_time": datetime(2024, 1, 2, 9, 0, 0, tzinfo=UTC),
                "last_run_time": None,
                "running_run_ids": "[]",
                "metadata": "{}",
                "created_at": datetime(2024, 1, 1, 0, 0, 0, tzinfo=UTC),
                "updated_at": None,
                "paused_at": None,
                "deleted_at": None,
            }
        ]

        schedules = await backend.list_schedules()

        assert len(schedules) == 1
        assert schedules[0].schedule_id == "sched_1"

    @pytest.mark.asyncio
    async def test_get_due_schedules(self, mock_backend):
        """Test getting schedules that are due to run."""
        backend, mock_conn = mock_backend

        mock_conn.fetch.return_value = [
            {
                "schedule_id": "sched_1",
                "workflow_name": "daily_report",
                "spec": '{"cron": "0 9 * * *", "timezone": "UTC"}',
                "spec_type": "cron",
                "timezone": "UTC",
                "input_args": "[]",
                "input_kwargs": "{}",
                "status": "active",
                "overlap_policy": "skip",
                "next_run_time": datetime(2024, 1, 1, 9, 0, 0, tzinfo=UTC),
                "last_run_time": None,
                "running_run_ids": "[]",
                "metadata": "{}",
                "created_at": datetime(2024, 1, 1, 0, 0, 0, tzinfo=UTC),
                "updated_at": None,
                "paused_at": None,
                "deleted_at": None,
            }
        ]

        now = datetime(2024, 1, 1, 9, 1, 0, tzinfo=UTC)
        schedules = await backend.get_due_schedules(now)

        assert len(schedules) == 1

    @pytest.mark.asyncio
    async def test_add_running_run(self, mock_backend):
        """Test adding a run_id to schedule's running_run_ids."""
        backend, mock_conn = mock_backend

        # Mock get_schedule to return a schedule
        schedule = Schedule(
            schedule_id="sched_123",
            workflow_name="daily_report",
            spec=ScheduleSpec(cron="0 9 * * *"),
            running_run_ids=["run_1"],
        )

        with (
            patch.object(backend, "get_schedule", return_value=schedule),
            patch.object(backend, "update_schedule") as mock_update,
        ):
            await backend.add_running_run("sched_123", "run_2")

        mock_update.assert_called_once()
        # Verify run_2 was added
        updated_schedule = mock_update.call_args[0][0]
        assert "run_2" in updated_schedule.running_run_ids

    @pytest.mark.asyncio
    async def test_remove_running_run(self, mock_backend):
        """Test removing a run_id from schedule's running_run_ids."""
        backend, mock_conn = mock_backend

        # Mock get_schedule to return a schedule
        schedule = Schedule(
            schedule_id="sched_123",
            workflow_name="daily_report",
            spec=ScheduleSpec(cron="0 9 * * *"),
            running_run_ids=["run_1", "run_2"],
        )

        with (
            patch.object(backend, "get_schedule", return_value=schedule),
            patch.object(backend, "update_schedule") as mock_update,
        ):
            await backend.remove_running_run("sched_123", "run_1")

        mock_update.assert_called_once()
        # Verify run_1 was removed
        updated_schedule = mock_update.call_args[0][0]
        assert "run_1" not in updated_schedule.running_run_ids
        assert "run_2" in updated_schedule.running_run_ids