pyworkflow-engine 0.1.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dashboard/backend/app/__init__.py +1 -0
- dashboard/backend/app/config.py +32 -0
- dashboard/backend/app/controllers/__init__.py +6 -0
- dashboard/backend/app/controllers/run_controller.py +86 -0
- dashboard/backend/app/controllers/workflow_controller.py +33 -0
- dashboard/backend/app/dependencies/__init__.py +5 -0
- dashboard/backend/app/dependencies/storage.py +50 -0
- dashboard/backend/app/repositories/__init__.py +6 -0
- dashboard/backend/app/repositories/run_repository.py +80 -0
- dashboard/backend/app/repositories/workflow_repository.py +27 -0
- dashboard/backend/app/rest/__init__.py +8 -0
- dashboard/backend/app/rest/v1/__init__.py +12 -0
- dashboard/backend/app/rest/v1/health.py +33 -0
- dashboard/backend/app/rest/v1/runs.py +133 -0
- dashboard/backend/app/rest/v1/workflows.py +41 -0
- dashboard/backend/app/schemas/__init__.py +23 -0
- dashboard/backend/app/schemas/common.py +16 -0
- dashboard/backend/app/schemas/event.py +24 -0
- dashboard/backend/app/schemas/hook.py +25 -0
- dashboard/backend/app/schemas/run.py +54 -0
- dashboard/backend/app/schemas/step.py +28 -0
- dashboard/backend/app/schemas/workflow.py +31 -0
- dashboard/backend/app/server.py +87 -0
- dashboard/backend/app/services/__init__.py +6 -0
- dashboard/backend/app/services/run_service.py +240 -0
- dashboard/backend/app/services/workflow_service.py +155 -0
- dashboard/backend/main.py +18 -0
- docs/concepts/cancellation.mdx +362 -0
- docs/concepts/continue-as-new.mdx +434 -0
- docs/concepts/events.mdx +266 -0
- docs/concepts/fault-tolerance.mdx +370 -0
- docs/concepts/hooks.mdx +552 -0
- docs/concepts/limitations.mdx +167 -0
- docs/concepts/schedules.mdx +775 -0
- docs/concepts/sleep.mdx +312 -0
- docs/concepts/steps.mdx +301 -0
- docs/concepts/workflows.mdx +255 -0
- docs/guides/cli.mdx +942 -0
- docs/guides/configuration.mdx +560 -0
- docs/introduction.mdx +155 -0
- docs/quickstart.mdx +279 -0
- examples/__init__.py +1 -0
- examples/celery/__init__.py +1 -0
- examples/celery/durable/docker-compose.yml +55 -0
- examples/celery/durable/pyworkflow.config.yaml +12 -0
- examples/celery/durable/workflows/__init__.py +122 -0
- examples/celery/durable/workflows/basic.py +87 -0
- examples/celery/durable/workflows/batch_processing.py +102 -0
- examples/celery/durable/workflows/cancellation.py +273 -0
- examples/celery/durable/workflows/child_workflow_patterns.py +240 -0
- examples/celery/durable/workflows/child_workflows.py +202 -0
- examples/celery/durable/workflows/continue_as_new.py +260 -0
- examples/celery/durable/workflows/fault_tolerance.py +210 -0
- examples/celery/durable/workflows/hooks.py +211 -0
- examples/celery/durable/workflows/idempotency.py +112 -0
- examples/celery/durable/workflows/long_running.py +99 -0
- examples/celery/durable/workflows/retries.py +101 -0
- examples/celery/durable/workflows/schedules.py +209 -0
- examples/celery/transient/01_basic_workflow.py +91 -0
- examples/celery/transient/02_fault_tolerance.py +257 -0
- examples/celery/transient/__init__.py +20 -0
- examples/celery/transient/pyworkflow.config.yaml +25 -0
- examples/local/__init__.py +1 -0
- examples/local/durable/01_basic_workflow.py +94 -0
- examples/local/durable/02_file_storage.py +132 -0
- examples/local/durable/03_retries.py +169 -0
- examples/local/durable/04_long_running.py +119 -0
- examples/local/durable/05_event_log.py +145 -0
- examples/local/durable/06_idempotency.py +148 -0
- examples/local/durable/07_hooks.py +334 -0
- examples/local/durable/08_cancellation.py +233 -0
- examples/local/durable/09_child_workflows.py +198 -0
- examples/local/durable/10_child_workflow_patterns.py +265 -0
- examples/local/durable/11_continue_as_new.py +249 -0
- examples/local/durable/12_schedules.py +198 -0
- examples/local/durable/__init__.py +1 -0
- examples/local/transient/01_quick_tasks.py +87 -0
- examples/local/transient/02_retries.py +130 -0
- examples/local/transient/03_sleep.py +141 -0
- examples/local/transient/__init__.py +1 -0
- pyworkflow/__init__.py +256 -0
- pyworkflow/aws/__init__.py +68 -0
- pyworkflow/aws/context.py +234 -0
- pyworkflow/aws/handler.py +184 -0
- pyworkflow/aws/testing.py +310 -0
- pyworkflow/celery/__init__.py +41 -0
- pyworkflow/celery/app.py +198 -0
- pyworkflow/celery/scheduler.py +315 -0
- pyworkflow/celery/tasks.py +1746 -0
- pyworkflow/cli/__init__.py +132 -0
- pyworkflow/cli/__main__.py +6 -0
- pyworkflow/cli/commands/__init__.py +1 -0
- pyworkflow/cli/commands/hooks.py +640 -0
- pyworkflow/cli/commands/quickstart.py +495 -0
- pyworkflow/cli/commands/runs.py +773 -0
- pyworkflow/cli/commands/scheduler.py +130 -0
- pyworkflow/cli/commands/schedules.py +794 -0
- pyworkflow/cli/commands/setup.py +703 -0
- pyworkflow/cli/commands/worker.py +413 -0
- pyworkflow/cli/commands/workflows.py +1257 -0
- pyworkflow/cli/output/__init__.py +1 -0
- pyworkflow/cli/output/formatters.py +321 -0
- pyworkflow/cli/output/styles.py +121 -0
- pyworkflow/cli/utils/__init__.py +1 -0
- pyworkflow/cli/utils/async_helpers.py +30 -0
- pyworkflow/cli/utils/config.py +130 -0
- pyworkflow/cli/utils/config_generator.py +344 -0
- pyworkflow/cli/utils/discovery.py +53 -0
- pyworkflow/cli/utils/docker_manager.py +651 -0
- pyworkflow/cli/utils/interactive.py +364 -0
- pyworkflow/cli/utils/storage.py +115 -0
- pyworkflow/config.py +329 -0
- pyworkflow/context/__init__.py +63 -0
- pyworkflow/context/aws.py +230 -0
- pyworkflow/context/base.py +416 -0
- pyworkflow/context/local.py +930 -0
- pyworkflow/context/mock.py +381 -0
- pyworkflow/core/__init__.py +0 -0
- pyworkflow/core/exceptions.py +353 -0
- pyworkflow/core/registry.py +313 -0
- pyworkflow/core/scheduled.py +328 -0
- pyworkflow/core/step.py +494 -0
- pyworkflow/core/workflow.py +294 -0
- pyworkflow/discovery.py +248 -0
- pyworkflow/engine/__init__.py +0 -0
- pyworkflow/engine/events.py +879 -0
- pyworkflow/engine/executor.py +682 -0
- pyworkflow/engine/replay.py +273 -0
- pyworkflow/observability/__init__.py +19 -0
- pyworkflow/observability/logging.py +234 -0
- pyworkflow/primitives/__init__.py +33 -0
- pyworkflow/primitives/child_handle.py +174 -0
- pyworkflow/primitives/child_workflow.py +372 -0
- pyworkflow/primitives/continue_as_new.py +101 -0
- pyworkflow/primitives/define_hook.py +150 -0
- pyworkflow/primitives/hooks.py +97 -0
- pyworkflow/primitives/resume_hook.py +210 -0
- pyworkflow/primitives/schedule.py +545 -0
- pyworkflow/primitives/shield.py +96 -0
- pyworkflow/primitives/sleep.py +100 -0
- pyworkflow/runtime/__init__.py +21 -0
- pyworkflow/runtime/base.py +179 -0
- pyworkflow/runtime/celery.py +310 -0
- pyworkflow/runtime/factory.py +101 -0
- pyworkflow/runtime/local.py +706 -0
- pyworkflow/scheduler/__init__.py +9 -0
- pyworkflow/scheduler/local.py +248 -0
- pyworkflow/serialization/__init__.py +0 -0
- pyworkflow/serialization/decoder.py +146 -0
- pyworkflow/serialization/encoder.py +162 -0
- pyworkflow/storage/__init__.py +54 -0
- pyworkflow/storage/base.py +612 -0
- pyworkflow/storage/config.py +185 -0
- pyworkflow/storage/dynamodb.py +1315 -0
- pyworkflow/storage/file.py +827 -0
- pyworkflow/storage/memory.py +549 -0
- pyworkflow/storage/postgres.py +1161 -0
- pyworkflow/storage/schemas.py +486 -0
- pyworkflow/storage/sqlite.py +1136 -0
- pyworkflow/utils/__init__.py +0 -0
- pyworkflow/utils/duration.py +177 -0
- pyworkflow/utils/schedule.py +391 -0
- pyworkflow_engine-0.1.7.dist-info/METADATA +687 -0
- pyworkflow_engine-0.1.7.dist-info/RECORD +196 -0
- pyworkflow_engine-0.1.7.dist-info/WHEEL +5 -0
- pyworkflow_engine-0.1.7.dist-info/entry_points.txt +2 -0
- pyworkflow_engine-0.1.7.dist-info/licenses/LICENSE +21 -0
- pyworkflow_engine-0.1.7.dist-info/top_level.txt +5 -0
- tests/examples/__init__.py +0 -0
- tests/integration/__init__.py +0 -0
- tests/integration/test_cancellation.py +330 -0
- tests/integration/test_child_workflows.py +439 -0
- tests/integration/test_continue_as_new.py +428 -0
- tests/integration/test_dynamodb_storage.py +1146 -0
- tests/integration/test_fault_tolerance.py +369 -0
- tests/integration/test_schedule_storage.py +484 -0
- tests/unit/__init__.py +0 -0
- tests/unit/backends/__init__.py +1 -0
- tests/unit/backends/test_dynamodb_storage.py +1554 -0
- tests/unit/backends/test_postgres_storage.py +1281 -0
- tests/unit/backends/test_sqlite_storage.py +1460 -0
- tests/unit/conftest.py +41 -0
- tests/unit/test_cancellation.py +364 -0
- tests/unit/test_child_workflows.py +680 -0
- tests/unit/test_continue_as_new.py +441 -0
- tests/unit/test_event_limits.py +316 -0
- tests/unit/test_executor.py +320 -0
- tests/unit/test_fault_tolerance.py +334 -0
- tests/unit/test_hooks.py +495 -0
- tests/unit/test_registry.py +261 -0
- tests/unit/test_replay.py +420 -0
- tests/unit/test_schedule_schemas.py +285 -0
- tests/unit/test_schedule_utils.py +286 -0
- tests/unit/test_scheduled_workflow.py +274 -0
- tests/unit/test_step.py +353 -0
- tests/unit/test_workflow.py +243 -0
|
@@ -0,0 +1,484 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Integration tests for schedule storage operations.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
import os
|
|
6
|
+
from datetime import UTC, datetime, timedelta
|
|
7
|
+
|
|
8
|
+
import pytest
|
|
9
|
+
|
|
10
|
+
from pyworkflow.storage.file import FileStorageBackend
|
|
11
|
+
from pyworkflow.storage.memory import InMemoryStorageBackend
|
|
12
|
+
from pyworkflow.storage.schemas import (
|
|
13
|
+
Schedule,
|
|
14
|
+
ScheduleSpec,
|
|
15
|
+
ScheduleStatus,
|
|
16
|
+
)
|
|
17
|
+
from pyworkflow.storage.sqlite import SQLiteStorageBackend
|
|
18
|
+
|
|
19
|
+
# Check if PostgreSQL is available (the optional postgres backend and its
# driver may not be installed in every test environment)
try:
    from pyworkflow.storage.postgres import PostgresStorageBackend

    POSTGRES_AVAILABLE = True
except ImportError:
    POSTGRES_AVAILABLE = False

# Get PostgreSQL connection info from environment.
# Override with TEST_POSTGRES_DSN; the default points at a local test database.
POSTGRES_DSN = os.environ.get(
    "TEST_POSTGRES_DSN", "postgresql://pyworkflow:pyworkflow@localhost:5432/pyworkflow_test"
)
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
@pytest.fixture
def memory_storage():
    """Create an in-memory storage backend.

    No connect/disconnect lifecycle is needed; state lives only for the
    duration of the test.
    """
    return InMemoryStorageBackend()
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
@pytest.fixture
def file_storage(tmp_path):
    """Create a file storage backend rooted at pytest's per-test tmp_path.

    Using tmp_path isolates each test run and lets pytest handle cleanup.
    """
    return FileStorageBackend(base_path=str(tmp_path))
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
@pytest.fixture
async def sqlite_storage(tmp_path):
    """Create a SQLite storage backend.

    Async generator fixture: connects before the test and disconnects
    afterwards, with the database file kept under the per-test tmp_path.
    """
    backend = SQLiteStorageBackend(db_path=str(tmp_path / "test.db"))
    await backend.connect()
    yield backend
    await backend.disconnect()
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
@pytest.fixture
async def postgres_storage():
    """Yield a connected PostgreSQL storage backend, or None when unavailable.

    Yields None (instead of skipping here) so the parametrized ``storage``
    fixture can decide whether to skip; connection failures are treated as
    "PostgreSQL not accessible" rather than test errors.

    Note: the connect attempt and the test body are kept in separate
    try blocks. Wrapping ``yield backend`` in the same ``except Exception``
    would catch exceptions raised *by the test* and then execute a second
    ``yield``, which pytest rejects ("fixture has more than one yield"),
    masking the real failure.
    """
    if not POSTGRES_AVAILABLE:
        yield None
        return

    backend = PostgresStorageBackend(dsn=POSTGRES_DSN)
    try:
        await backend.connect()
    except Exception:
        # Server missing/unreachable: report as unavailable, not as an error.
        yield None
        return

    try:
        yield backend
    finally:
        # connect() succeeded, so always release resources even if the
        # test body raised. No need to inspect private backend internals.
        await backend.disconnect()
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
def get_storage_params():
    """Get storage backend parameters based on availability.

    The three embedded backends are always exercised; PostgreSQL is added
    only when its driver is importable and explicitly enabled via the
    TEST_POSTGRES_ENABLED environment variable.
    """
    backends = ["memory", "file", "sqlite"]
    postgres_enabled = os.environ.get("TEST_POSTGRES_ENABLED", "").lower() == "true"
    if POSTGRES_AVAILABLE and postgres_enabled:
        backends.append("postgres")
    return backends
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
@pytest.fixture(params=get_storage_params())
async def storage(request, memory_storage, file_storage, sqlite_storage, postgres_storage):
    """Parametrized fixture resolving to each available storage backend.

    Every dependent test therefore runs once per backend. The postgres
    case is special: its fixture yields None when the server is not
    reachable, which we translate into a skip here.
    """
    always_available = {
        "memory": memory_storage,
        "file": file_storage,
        "sqlite": sqlite_storage,
    }
    if request.param in always_available:
        return always_available[request.param]
    if request.param == "postgres":
        if postgres_storage is None:
            pytest.skip("PostgreSQL not accessible")
        return postgres_storage
    raise ValueError(f"Unknown storage type: {request.param}")
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
class TestScheduleStorageCRUD:
    """Test basic CRUD operations for schedules.

    Each test runs once per storage backend via the parametrized
    ``storage`` fixture.
    """

    @pytest.mark.asyncio
    async def test_create_schedule(self, storage):
        """Test creating a schedule and reading it back by id."""
        spec = ScheduleSpec(cron="0 9 * * *")
        schedule = Schedule(
            schedule_id="test_schedule_1",
            workflow_name="test_workflow",
            spec=spec,
            created_at=datetime.now(UTC),
        )

        await storage.create_schedule(schedule)

        # Retrieve and verify the round-trip preserved the key fields
        retrieved = await storage.get_schedule("test_schedule_1")
        assert retrieved is not None
        assert retrieved.schedule_id == "test_schedule_1"
        assert retrieved.workflow_name == "test_workflow"
        assert retrieved.spec.cron == "0 9 * * *"

    @pytest.mark.asyncio
    async def test_get_schedule_not_found(self, storage):
        """Test getting a non-existent schedule returns None (no raise)."""
        retrieved = await storage.get_schedule("nonexistent")
        assert retrieved is None

    @pytest.mark.asyncio
    async def test_update_schedule(self, storage):
        """Test updating a schedule's status, spec, and updated_at."""
        spec = ScheduleSpec(cron="0 9 * * *")
        schedule = Schedule(
            schedule_id="update_test",
            workflow_name="test_workflow",
            spec=spec,
            status=ScheduleStatus.ACTIVE,
            created_at=datetime.now(UTC),
        )
        await storage.create_schedule(schedule)

        # Update the schedule in place, then persist
        schedule.status = ScheduleStatus.PAUSED
        schedule.updated_at = datetime.now(UTC)
        schedule.spec = ScheduleSpec(cron="0 10 * * *")
        await storage.update_schedule(schedule)

        # Verify update by re-fetching rather than trusting the local object
        retrieved = await storage.get_schedule("update_test")
        assert retrieved.status == ScheduleStatus.PAUSED
        assert retrieved.spec.cron == "0 10 * * *"
        assert retrieved.updated_at is not None

    @pytest.mark.asyncio
    async def test_delete_schedule(self, storage):
        """Test deleting a schedule (backends may soft- or hard-delete)."""
        spec = ScheduleSpec(interval="5m")
        schedule = Schedule(
            schedule_id="delete_test",
            workflow_name="test_workflow",
            spec=spec,
            created_at=datetime.now(UTC),
        )
        await storage.create_schedule(schedule)

        # Verify it exists
        assert await storage.get_schedule("delete_test") is not None

        # Delete
        await storage.delete_schedule("delete_test")

        # Verify deleted (soft delete - status should be DELETED)
        # NOTE(review): a hard-deleting backend returns None here, so only
        # the soft-delete path is asserted on.
        retrieved = await storage.get_schedule("delete_test")
        if retrieved is not None:
            # If soft delete, status should be DELETED
            assert retrieved.status == ScheduleStatus.DELETED
|
|
175
|
+
|
|
176
|
+
|
|
177
|
+
class TestScheduleStorageList:
    """Test listing schedules with various filters.

    Covers unfiltered listing, workflow-name and status filters, and
    limit/offset pagination.
    """

    @pytest.mark.asyncio
    async def test_list_all_schedules(self, storage):
        """Test listing all schedules with no filters applied."""
        now = datetime.now(UTC)

        # Create multiple schedules
        for i in range(5):
            schedule = Schedule(
                schedule_id=f"list_test_{i}",
                workflow_name=f"workflow_{i % 2}",  # 2 different workflows
                spec=ScheduleSpec(cron="0 9 * * *"),
                status=ScheduleStatus.ACTIVE if i % 2 == 0 else ScheduleStatus.PAUSED,
                created_at=now,
            )
            await storage.create_schedule(schedule)

        # List all
        schedules = await storage.list_schedules()
        assert len(schedules) == 5

    @pytest.mark.asyncio
    async def test_list_schedules_by_workflow(self, storage):
        """Test listing schedules filtered by workflow name."""
        now = datetime.now(UTC)

        # Create schedules for different workflows (2 per workflow)
        for i in range(4):
            schedule = Schedule(
                schedule_id=f"wf_filter_{i}",
                workflow_name=f"workflow_{i % 2}",
                spec=ScheduleSpec(cron="0 9 * * *"),
                created_at=now,
            )
            await storage.create_schedule(schedule)

        # Filter by workflow_0
        schedules = await storage.list_schedules(workflow_name="workflow_0")
        assert len(schedules) == 2
        for s in schedules:
            assert s.workflow_name == "workflow_0"

    @pytest.mark.asyncio
    async def test_list_schedules_by_status(self, storage):
        """Test listing schedules filtered by status."""
        now = datetime.now(UTC)

        # Create schedules with different statuses
        statuses = [
            ScheduleStatus.ACTIVE,
            ScheduleStatus.ACTIVE,
            ScheduleStatus.PAUSED,
            ScheduleStatus.DELETED,
        ]
        for i, status in enumerate(statuses):
            schedule = Schedule(
                schedule_id=f"status_filter_{i}",
                workflow_name="test_workflow",
                spec=ScheduleSpec(cron="0 9 * * *"),
                status=status,
                created_at=now,
            )
            await storage.create_schedule(schedule)

        # Filter by ACTIVE
        active = await storage.list_schedules(status=ScheduleStatus.ACTIVE)
        assert len(active) == 2

        # Filter by PAUSED
        paused = await storage.list_schedules(status=ScheduleStatus.PAUSED)
        assert len(paused) == 1

    @pytest.mark.asyncio
    async def test_list_schedules_with_limit(self, storage):
        """Test listing schedules with limit (pagination page size)."""
        now = datetime.now(UTC)

        # Create 10 schedules
        for i in range(10):
            schedule = Schedule(
                schedule_id=f"limit_test_{i}",
                workflow_name="test_workflow",
                spec=ScheduleSpec(cron="0 9 * * *"),
                created_at=now,
            )
            await storage.create_schedule(schedule)

        # List with limit
        schedules = await storage.list_schedules(limit=5)
        assert len(schedules) == 5

    @pytest.mark.asyncio
    async def test_list_schedules_with_offset(self, storage):
        """Test listing schedules with offset (skip the first N)."""
        now = datetime.now(UTC)

        # Create 5 schedules
        for i in range(5):
            schedule = Schedule(
                schedule_id=f"offset_test_{i:02d}",  # Zero-padded for ordering
                workflow_name="test_workflow",
                spec=ScheduleSpec(cron="0 9 * * *"),
                created_at=now + timedelta(seconds=i),  # Different timestamps
            )
            await storage.create_schedule(schedule)

        # List with offset: skipping 2 of 5 must leave 3
        schedules = await storage.list_schedules(offset=2, limit=10)
        assert len(schedules) == 3
|
|
288
|
+
|
|
289
|
+
|
|
290
|
+
class TestScheduleDueSchedules:
    """Tests for retrieving schedules whose next run time has arrived."""

    @pytest.mark.asyncio
    async def test_get_due_schedules(self, storage):
        """Only schedules with next_run_time at or before 'now' are due."""
        reference = datetime.now(UTC)
        five_minutes = timedelta(minutes=5)
        # Two overdue schedules and one that is not yet due.
        run_times = [
            reference - five_minutes,
            reference - five_minutes,
            reference + five_minutes,
        ]

        for idx, when in enumerate(run_times):
            await storage.create_schedule(
                Schedule(
                    schedule_id=f"due_test_{idx}",
                    workflow_name="test_workflow",
                    spec=ScheduleSpec(cron="0 9 * * *"),
                    status=ScheduleStatus.ACTIVE,
                    next_run_time=when,
                    created_at=reference,
                )
            )

        due = await storage.get_due_schedules(reference)

        # Only the two past-due schedules qualify.
        assert len(due) == 2
        assert all(item.next_run_time <= reference for item in due)

    @pytest.mark.asyncio
    async def test_get_due_schedules_excludes_paused(self, storage):
        """A paused schedule is never reported as due, even when overdue."""
        reference = datetime.now(UTC)
        overdue = reference - timedelta(minutes=5)

        def make_schedule(schedule_id, status):
            # Both schedules are overdue; only the status differs.
            return Schedule(
                schedule_id=schedule_id,
                workflow_name="test_workflow",
                spec=ScheduleSpec(cron="0 9 * * *"),
                status=status,
                next_run_time=overdue,
                created_at=reference,
            )

        await storage.create_schedule(make_schedule("due_active", ScheduleStatus.ACTIVE))
        await storage.create_schedule(make_schedule("due_paused", ScheduleStatus.PAUSED))

        due = await storage.get_due_schedules(reference)

        # Only the active schedule is returned.
        assert len(due) == 1
        assert due[0].schedule_id == "due_active"
|
|
354
|
+
|
|
355
|
+
|
|
356
|
+
class TestScheduleRunningRuns:
    """Test managing running run IDs on schedules.

    running_run_ids tracks workflow runs currently in flight for a
    schedule; add/remove must round-trip through storage.
    """

    @pytest.mark.asyncio
    async def test_add_running_run(self, storage):
        """Test adding a running run ID to a schedule."""
        now = datetime.now(UTC)
        schedule = Schedule(
            schedule_id="running_test",
            workflow_name="test_workflow",
            spec=ScheduleSpec(cron="0 9 * * *"),
            created_at=now,
        )
        await storage.create_schedule(schedule)

        # Add running run
        await storage.add_running_run("running_test", "run_123")

        # Verify by re-fetching from storage
        retrieved = await storage.get_schedule("running_test")
        assert "run_123" in retrieved.running_run_ids

    @pytest.mark.asyncio
    async def test_add_multiple_running_runs(self, storage):
        """Test adding multiple running run IDs accumulates them."""
        now = datetime.now(UTC)
        schedule = Schedule(
            schedule_id="multi_run_test",
            workflow_name="test_workflow",
            spec=ScheduleSpec(cron="0 9 * * *"),
            created_at=now,
        )
        await storage.create_schedule(schedule)

        # Add multiple runs
        await storage.add_running_run("multi_run_test", "run_1")
        await storage.add_running_run("multi_run_test", "run_2")
        await storage.add_running_run("multi_run_test", "run_3")

        # Verify all three were recorded
        retrieved = await storage.get_schedule("multi_run_test")
        assert len(retrieved.running_run_ids) == 3

    @pytest.mark.asyncio
    async def test_remove_running_run(self, storage):
        """Test removing a running run ID leaves the others intact."""
        now = datetime.now(UTC)
        schedule = Schedule(
            schedule_id="remove_run_test",
            workflow_name="test_workflow",
            spec=ScheduleSpec(cron="0 9 * * *"),
            running_run_ids=["run_1", "run_2"],
            created_at=now,
        )
        await storage.create_schedule(schedule)

        # Remove a run
        await storage.remove_running_run("remove_run_test", "run_1")

        # Verify only the removed id is gone
        retrieved = await storage.get_schedule("remove_run_test")
        assert "run_1" not in retrieved.running_run_ids
        assert "run_2" in retrieved.running_run_ids

    @pytest.mark.asyncio
    async def test_remove_nonexistent_run(self, storage):
        """Test removing a run ID that doesn't exist (should not error)."""
        now = datetime.now(UTC)
        schedule = Schedule(
            schedule_id="remove_nonexistent",
            workflow_name="test_workflow",
            spec=ScheduleSpec(cron="0 9 * * *"),
            running_run_ids=["run_1"],
            created_at=now,
        )
        await storage.create_schedule(schedule)

        # Remove non-existent run (should not raise)
        await storage.remove_running_run("remove_nonexistent", "run_999")

        # Verify original run still there
        retrieved = await storage.get_schedule("remove_nonexistent")
        assert "run_1" in retrieved.running_run_ids
|
|
439
|
+
|
|
440
|
+
|
|
441
|
+
class TestScheduleStatistics:
    """Test schedule statistics tracking."""

    @pytest.mark.asyncio
    async def test_increment_statistics(self, storage):
        """Test incrementing schedule statistics.

        Counters (total/successful/failed runs) are read-modify-written
        via update_schedule, then verified by re-fetching from storage.
        """
        # SQLite backend doesn't store statistics fields.
        # isinstance() is the idiomatic check and also covers subclasses,
        # unlike comparing __class__.__name__ to a string.
        if isinstance(storage, SQLiteStorageBackend):
            pytest.skip("SQLite backend doesn't support schedule statistics")

        now = datetime.now(UTC)
        schedule = Schedule(
            schedule_id="stats_test",
            workflow_name="test_workflow",
            spec=ScheduleSpec(cron="0 9 * * *"),
            total_runs=0,
            successful_runs=0,
            failed_runs=0,
            created_at=now,
        )
        await storage.create_schedule(schedule)

        # Simulate a successful run: fetch fresh state, bump, persist
        schedule = await storage.get_schedule("stats_test")
        schedule.total_runs += 1
        schedule.successful_runs += 1
        await storage.update_schedule(schedule)

        # Verify
        retrieved = await storage.get_schedule("stats_test")
        assert retrieved.total_runs == 1
        assert retrieved.successful_runs == 1
        assert retrieved.failed_runs == 0

        # Simulate failed run
        retrieved.total_runs += 1
        retrieved.failed_runs += 1
        await storage.update_schedule(retrieved)

        # Verify cumulative counts
        final = await storage.get_schedule("stats_test")
        assert final.total_runs == 2
        assert final.successful_runs == 1
        assert final.failed_runs == 1
|
tests/unit/__init__.py
ADDED
|
File without changes
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
"""Unit tests for storage backends."""
|