pyworkflow_engine-0.1.7-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (196)
  1. dashboard/backend/app/__init__.py +1 -0
  2. dashboard/backend/app/config.py +32 -0
  3. dashboard/backend/app/controllers/__init__.py +6 -0
  4. dashboard/backend/app/controllers/run_controller.py +86 -0
  5. dashboard/backend/app/controllers/workflow_controller.py +33 -0
  6. dashboard/backend/app/dependencies/__init__.py +5 -0
  7. dashboard/backend/app/dependencies/storage.py +50 -0
  8. dashboard/backend/app/repositories/__init__.py +6 -0
  9. dashboard/backend/app/repositories/run_repository.py +80 -0
  10. dashboard/backend/app/repositories/workflow_repository.py +27 -0
  11. dashboard/backend/app/rest/__init__.py +8 -0
  12. dashboard/backend/app/rest/v1/__init__.py +12 -0
  13. dashboard/backend/app/rest/v1/health.py +33 -0
  14. dashboard/backend/app/rest/v1/runs.py +133 -0
  15. dashboard/backend/app/rest/v1/workflows.py +41 -0
  16. dashboard/backend/app/schemas/__init__.py +23 -0
  17. dashboard/backend/app/schemas/common.py +16 -0
  18. dashboard/backend/app/schemas/event.py +24 -0
  19. dashboard/backend/app/schemas/hook.py +25 -0
  20. dashboard/backend/app/schemas/run.py +54 -0
  21. dashboard/backend/app/schemas/step.py +28 -0
  22. dashboard/backend/app/schemas/workflow.py +31 -0
  23. dashboard/backend/app/server.py +87 -0
  24. dashboard/backend/app/services/__init__.py +6 -0
  25. dashboard/backend/app/services/run_service.py +240 -0
  26. dashboard/backend/app/services/workflow_service.py +155 -0
  27. dashboard/backend/main.py +18 -0
  28. docs/concepts/cancellation.mdx +362 -0
  29. docs/concepts/continue-as-new.mdx +434 -0
  30. docs/concepts/events.mdx +266 -0
  31. docs/concepts/fault-tolerance.mdx +370 -0
  32. docs/concepts/hooks.mdx +552 -0
  33. docs/concepts/limitations.mdx +167 -0
  34. docs/concepts/schedules.mdx +775 -0
  35. docs/concepts/sleep.mdx +312 -0
  36. docs/concepts/steps.mdx +301 -0
  37. docs/concepts/workflows.mdx +255 -0
  38. docs/guides/cli.mdx +942 -0
  39. docs/guides/configuration.mdx +560 -0
  40. docs/introduction.mdx +155 -0
  41. docs/quickstart.mdx +279 -0
  42. examples/__init__.py +1 -0
  43. examples/celery/__init__.py +1 -0
  44. examples/celery/durable/docker-compose.yml +55 -0
  45. examples/celery/durable/pyworkflow.config.yaml +12 -0
  46. examples/celery/durable/workflows/__init__.py +122 -0
  47. examples/celery/durable/workflows/basic.py +87 -0
  48. examples/celery/durable/workflows/batch_processing.py +102 -0
  49. examples/celery/durable/workflows/cancellation.py +273 -0
  50. examples/celery/durable/workflows/child_workflow_patterns.py +240 -0
  51. examples/celery/durable/workflows/child_workflows.py +202 -0
  52. examples/celery/durable/workflows/continue_as_new.py +260 -0
  53. examples/celery/durable/workflows/fault_tolerance.py +210 -0
  54. examples/celery/durable/workflows/hooks.py +211 -0
  55. examples/celery/durable/workflows/idempotency.py +112 -0
  56. examples/celery/durable/workflows/long_running.py +99 -0
  57. examples/celery/durable/workflows/retries.py +101 -0
  58. examples/celery/durable/workflows/schedules.py +209 -0
  59. examples/celery/transient/01_basic_workflow.py +91 -0
  60. examples/celery/transient/02_fault_tolerance.py +257 -0
  61. examples/celery/transient/__init__.py +20 -0
  62. examples/celery/transient/pyworkflow.config.yaml +25 -0
  63. examples/local/__init__.py +1 -0
  64. examples/local/durable/01_basic_workflow.py +94 -0
  65. examples/local/durable/02_file_storage.py +132 -0
  66. examples/local/durable/03_retries.py +169 -0
  67. examples/local/durable/04_long_running.py +119 -0
  68. examples/local/durable/05_event_log.py +145 -0
  69. examples/local/durable/06_idempotency.py +148 -0
  70. examples/local/durable/07_hooks.py +334 -0
  71. examples/local/durable/08_cancellation.py +233 -0
  72. examples/local/durable/09_child_workflows.py +198 -0
  73. examples/local/durable/10_child_workflow_patterns.py +265 -0
  74. examples/local/durable/11_continue_as_new.py +249 -0
  75. examples/local/durable/12_schedules.py +198 -0
  76. examples/local/durable/__init__.py +1 -0
  77. examples/local/transient/01_quick_tasks.py +87 -0
  78. examples/local/transient/02_retries.py +130 -0
  79. examples/local/transient/03_sleep.py +141 -0
  80. examples/local/transient/__init__.py +1 -0
  81. pyworkflow/__init__.py +256 -0
  82. pyworkflow/aws/__init__.py +68 -0
  83. pyworkflow/aws/context.py +234 -0
  84. pyworkflow/aws/handler.py +184 -0
  85. pyworkflow/aws/testing.py +310 -0
  86. pyworkflow/celery/__init__.py +41 -0
  87. pyworkflow/celery/app.py +198 -0
  88. pyworkflow/celery/scheduler.py +315 -0
  89. pyworkflow/celery/tasks.py +1746 -0
  90. pyworkflow/cli/__init__.py +132 -0
  91. pyworkflow/cli/__main__.py +6 -0
  92. pyworkflow/cli/commands/__init__.py +1 -0
  93. pyworkflow/cli/commands/hooks.py +640 -0
  94. pyworkflow/cli/commands/quickstart.py +495 -0
  95. pyworkflow/cli/commands/runs.py +773 -0
  96. pyworkflow/cli/commands/scheduler.py +130 -0
  97. pyworkflow/cli/commands/schedules.py +794 -0
  98. pyworkflow/cli/commands/setup.py +703 -0
  99. pyworkflow/cli/commands/worker.py +413 -0
  100. pyworkflow/cli/commands/workflows.py +1257 -0
  101. pyworkflow/cli/output/__init__.py +1 -0
  102. pyworkflow/cli/output/formatters.py +321 -0
  103. pyworkflow/cli/output/styles.py +121 -0
  104. pyworkflow/cli/utils/__init__.py +1 -0
  105. pyworkflow/cli/utils/async_helpers.py +30 -0
  106. pyworkflow/cli/utils/config.py +130 -0
  107. pyworkflow/cli/utils/config_generator.py +344 -0
  108. pyworkflow/cli/utils/discovery.py +53 -0
  109. pyworkflow/cli/utils/docker_manager.py +651 -0
  110. pyworkflow/cli/utils/interactive.py +364 -0
  111. pyworkflow/cli/utils/storage.py +115 -0
  112. pyworkflow/config.py +329 -0
  113. pyworkflow/context/__init__.py +63 -0
  114. pyworkflow/context/aws.py +230 -0
  115. pyworkflow/context/base.py +416 -0
  116. pyworkflow/context/local.py +930 -0
  117. pyworkflow/context/mock.py +381 -0
  118. pyworkflow/core/__init__.py +0 -0
  119. pyworkflow/core/exceptions.py +353 -0
  120. pyworkflow/core/registry.py +313 -0
  121. pyworkflow/core/scheduled.py +328 -0
  122. pyworkflow/core/step.py +494 -0
  123. pyworkflow/core/workflow.py +294 -0
  124. pyworkflow/discovery.py +248 -0
  125. pyworkflow/engine/__init__.py +0 -0
  126. pyworkflow/engine/events.py +879 -0
  127. pyworkflow/engine/executor.py +682 -0
  128. pyworkflow/engine/replay.py +273 -0
  129. pyworkflow/observability/__init__.py +19 -0
  130. pyworkflow/observability/logging.py +234 -0
  131. pyworkflow/primitives/__init__.py +33 -0
  132. pyworkflow/primitives/child_handle.py +174 -0
  133. pyworkflow/primitives/child_workflow.py +372 -0
  134. pyworkflow/primitives/continue_as_new.py +101 -0
  135. pyworkflow/primitives/define_hook.py +150 -0
  136. pyworkflow/primitives/hooks.py +97 -0
  137. pyworkflow/primitives/resume_hook.py +210 -0
  138. pyworkflow/primitives/schedule.py +545 -0
  139. pyworkflow/primitives/shield.py +96 -0
  140. pyworkflow/primitives/sleep.py +100 -0
  141. pyworkflow/runtime/__init__.py +21 -0
  142. pyworkflow/runtime/base.py +179 -0
  143. pyworkflow/runtime/celery.py +310 -0
  144. pyworkflow/runtime/factory.py +101 -0
  145. pyworkflow/runtime/local.py +706 -0
  146. pyworkflow/scheduler/__init__.py +9 -0
  147. pyworkflow/scheduler/local.py +248 -0
  148. pyworkflow/serialization/__init__.py +0 -0
  149. pyworkflow/serialization/decoder.py +146 -0
  150. pyworkflow/serialization/encoder.py +162 -0
  151. pyworkflow/storage/__init__.py +54 -0
  152. pyworkflow/storage/base.py +612 -0
  153. pyworkflow/storage/config.py +185 -0
  154. pyworkflow/storage/dynamodb.py +1315 -0
  155. pyworkflow/storage/file.py +827 -0
  156. pyworkflow/storage/memory.py +549 -0
  157. pyworkflow/storage/postgres.py +1161 -0
  158. pyworkflow/storage/schemas.py +486 -0
  159. pyworkflow/storage/sqlite.py +1136 -0
  160. pyworkflow/utils/__init__.py +0 -0
  161. pyworkflow/utils/duration.py +177 -0
  162. pyworkflow/utils/schedule.py +391 -0
  163. pyworkflow_engine-0.1.7.dist-info/METADATA +687 -0
  164. pyworkflow_engine-0.1.7.dist-info/RECORD +196 -0
  165. pyworkflow_engine-0.1.7.dist-info/WHEEL +5 -0
  166. pyworkflow_engine-0.1.7.dist-info/entry_points.txt +2 -0
  167. pyworkflow_engine-0.1.7.dist-info/licenses/LICENSE +21 -0
  168. pyworkflow_engine-0.1.7.dist-info/top_level.txt +5 -0
  169. tests/examples/__init__.py +0 -0
  170. tests/integration/__init__.py +0 -0
  171. tests/integration/test_cancellation.py +330 -0
  172. tests/integration/test_child_workflows.py +439 -0
  173. tests/integration/test_continue_as_new.py +428 -0
  174. tests/integration/test_dynamodb_storage.py +1146 -0
  175. tests/integration/test_fault_tolerance.py +369 -0
  176. tests/integration/test_schedule_storage.py +484 -0
  177. tests/unit/__init__.py +0 -0
  178. tests/unit/backends/__init__.py +1 -0
  179. tests/unit/backends/test_dynamodb_storage.py +1554 -0
  180. tests/unit/backends/test_postgres_storage.py +1281 -0
  181. tests/unit/backends/test_sqlite_storage.py +1460 -0
  182. tests/unit/conftest.py +41 -0
  183. tests/unit/test_cancellation.py +364 -0
  184. tests/unit/test_child_workflows.py +680 -0
  185. tests/unit/test_continue_as_new.py +441 -0
  186. tests/unit/test_event_limits.py +316 -0
  187. tests/unit/test_executor.py +320 -0
  188. tests/unit/test_fault_tolerance.py +334 -0
  189. tests/unit/test_hooks.py +495 -0
  190. tests/unit/test_registry.py +261 -0
  191. tests/unit/test_replay.py +420 -0
  192. tests/unit/test_schedule_schemas.py +285 -0
  193. tests/unit/test_schedule_utils.py +286 -0
  194. tests/unit/test_scheduled_workflow.py +274 -0
  195. tests/unit/test_step.py +353 -0
  196. tests/unit/test_workflow.py +243 -0
@@ -0,0 +1,1554 @@
1
+ """
2
+ Unit tests for DynamoDB storage backend.
3
+
4
+ These tests verify the DynamoDBStorageBackend implementation.
5
+ For integration tests with a real DynamoDB database, see tests/integration/.
6
+ """
7
+
8
+ from contextlib import asynccontextmanager
9
+ from datetime import UTC, datetime
10
+ from unittest.mock import AsyncMock, MagicMock, patch
11
+
12
+ import pytest
13
+
14
+ from pyworkflow.engine.events import Event, EventType
15
+ from pyworkflow.storage.schemas import (
16
+ Hook,
17
+ HookStatus,
18
+ RunStatus,
19
+ Schedule,
20
+ ScheduleSpec,
21
+ ScheduleStatus,
22
+ StepExecution,
23
+ StepStatus,
24
+ WorkflowRun,
25
+ )
26
+
27
+ # Skip all tests if aiobotocore is not installed
28
+ pytest.importorskip("aiobotocore")
29
+
30
+ from pyworkflow.storage.dynamodb import DynamoDBStorageBackend
31
+
32
+
33
+ @pytest.fixture
34
+ def mock_backend():
35
+ """Create a backend with mocked client for testing."""
36
+ backend = DynamoDBStorageBackend()
37
+ mock_client = AsyncMock()
38
+
39
+ @asynccontextmanager
40
+ async def mock_get_client():
41
+ yield mock_client
42
+
43
+ backend._get_client = mock_get_client
44
+ return backend, mock_client
45
+
46
+
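The `mock_backend` fixture above swaps the backend's `_get_client` async context manager for one that yields an `AsyncMock`, so storage methods can be exercised without a real DynamoDB connection. A minimal usage sketch, assuming only names defined in this file (the test below is illustrative and not part of the package):

import pytest
from unittest.mock import AsyncMock

@pytest.mark.asyncio
async def test_sketch_get_run_with_mocked_client(mock_backend):
    # The fixture returns (backend, mock_client); any call the backend makes
    # through _get_client lands on the AsyncMock instead of a real client.
    backend, mock_client = mock_backend
    mock_client.get_item = AsyncMock(return_value={})  # simulate "item not found"

    # Mirrors test_get_run_not_found below: an empty response maps to None.
    assert await backend.get_run("missing_run") is None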
47
+ class TestDynamoDBStorageBackendInit:
48
+ """Test DynamoDB backend initialization."""
49
+
50
+ def test_init_with_defaults(self):
51
+ """Test initialization with default values."""
52
+ backend = DynamoDBStorageBackend()
53
+
54
+ assert backend.table_name == "pyworkflow"
55
+ assert backend.region == "us-east-1"
56
+ assert backend.endpoint_url is None
57
+ assert backend._initialized is False
58
+
59
+ def test_init_with_custom_params(self):
60
+ """Test initialization with custom parameters."""
61
+ backend = DynamoDBStorageBackend(
62
+ table_name="custom_table",
63
+ region="eu-west-1",
64
+ endpoint_url="http://localhost:8000",
65
+ )
66
+
67
+ assert backend.table_name == "custom_table"
68
+ assert backend.region == "eu-west-1"
69
+ assert backend.endpoint_url == "http://localhost:8000"
70
+
71
+ def test_init_with_local_endpoint(self):
72
+ """Test initialization with DynamoDB Local endpoint."""
73
+ backend = DynamoDBStorageBackend(
74
+ endpoint_url="http://localhost:8000",
75
+ )
76
+
77
+ assert backend.endpoint_url == "http://localhost:8000"
78
+
79
+
80
+ class TestDynamoDBStorageBackendConfig:
81
+ """Test configuration and serialization methods."""
82
+
83
+ def test_storage_to_config(self):
84
+ """Test serializing backend to config dict."""
85
+ from pyworkflow.storage.config import storage_to_config
86
+
87
+ backend = DynamoDBStorageBackend(
88
+ table_name="my_table",
89
+ region="us-west-2",
90
+ endpoint_url="http://localhost:8000",
91
+ )
92
+
93
+ config = storage_to_config(backend)
94
+
95
+ assert config["type"] == "dynamodb"
96
+ assert config["table_name"] == "my_table"
97
+ assert config["region"] == "us-west-2"
98
+ assert config["endpoint_url"] == "http://localhost:8000"
99
+
100
+ def test_config_to_storage(self):
101
+ """Test deserializing config dict to backend."""
102
+ from pyworkflow.storage.config import config_to_storage
103
+
104
+ config = {
105
+ "type": "dynamodb",
106
+ "table_name": "test_table",
107
+ "region": "ap-northeast-1",
108
+ "endpoint_url": "http://localhost:8000",
109
+ }
110
+
111
+ backend = config_to_storage(config)
112
+
113
+ assert isinstance(backend, DynamoDBStorageBackend)
114
+ assert backend.table_name == "test_table"
115
+ assert backend.region == "ap-northeast-1"
116
+ assert backend.endpoint_url == "http://localhost:8000"
117
+
118
+
119
+ class TestDynamoDBStorageBackendConnection:
120
+ """Test connection management."""
121
+
122
+ @pytest.mark.asyncio
123
+ async def test_connect_creates_table_if_not_exists(self):
124
+ """Test that connect creates table if it doesn't exist."""
125
+ backend = DynamoDBStorageBackend(table_name="test_table")
126
+
127
+ mock_client = AsyncMock()
128
+ from botocore.exceptions import ClientError
129
+
130
+ mock_client.describe_table = AsyncMock(
131
+ side_effect=ClientError(
132
+ {"Error": {"Code": "ResourceNotFoundException"}},
133
+ "DescribeTable",
134
+ )
135
+ )
136
+ mock_client.create_table = AsyncMock()
137
+
138
+ mock_waiter = AsyncMock()
139
+ mock_waiter.wait = AsyncMock()
140
+ mock_client.get_waiter = MagicMock(return_value=mock_waiter)
141
+
142
+ with patch.object(backend, "_get_client") as mock_get_client:
143
+ mock_context = AsyncMock()
144
+ mock_context.__aenter__ = AsyncMock(return_value=mock_client)
145
+ mock_context.__aexit__ = AsyncMock()
146
+ mock_get_client.return_value = mock_context
147
+
148
+ await backend.connect()
149
+
150
+ mock_client.create_table.assert_called_once()
151
+ call_kwargs = mock_client.create_table.call_args.kwargs
152
+
153
+ assert call_kwargs["TableName"] == "test_table"
154
+ assert call_kwargs["BillingMode"] == "PAY_PER_REQUEST"
155
+
156
+ # Verify GSIs were created
157
+ gsi_names = [gsi["IndexName"] for gsi in call_kwargs["GlobalSecondaryIndexes"]]
158
+ assert "GSI1" in gsi_names
159
+ assert "GSI2" in gsi_names
160
+ assert "GSI3" in gsi_names
161
+ assert "GSI4" in gsi_names
162
+ assert "GSI5" in gsi_names
163
+
164
+ @pytest.mark.asyncio
165
+ async def test_connect_skips_create_if_table_exists(self):
166
+ """Test that connect doesn't create table if it exists."""
167
+ backend = DynamoDBStorageBackend()
168
+
169
+ mock_client = AsyncMock()
170
+ mock_client.describe_table = AsyncMock(return_value={"Table": {}})
171
+ mock_client.create_table = AsyncMock()
172
+
173
+ with patch.object(backend, "_get_client") as mock_get_client:
174
+ mock_context = AsyncMock()
175
+ mock_context.__aenter__ = AsyncMock(return_value=mock_client)
176
+ mock_context.__aexit__ = AsyncMock()
177
+ mock_get_client.return_value = mock_context
178
+
179
+ await backend.connect()
180
+
181
+ mock_client.create_table.assert_not_called()
182
+ assert backend._initialized is True
183
+
184
+ @pytest.mark.asyncio
185
+ async def test_disconnect_sets_initialized_to_false(self):
186
+ """Test that disconnect sets _initialized to False."""
187
+ backend = DynamoDBStorageBackend()
188
+ backend._initialized = True
189
+
190
+ await backend.disconnect()
191
+
192
+ assert backend._initialized is False
193
+
194
+ @pytest.mark.asyncio
195
+ async def test_health_check_returns_true_when_healthy(self, mock_backend):
196
+ """Test health check returns True when list_runs succeeds."""
197
+ backend, mock_client = mock_backend
198
+
199
+ # Mock list_runs to return empty result
200
+ mock_client.query = AsyncMock(return_value={"Items": []})
201
+
202
+ with patch.object(backend, "list_runs", return_value=([], None)):
203
+ result = await backend.health_check()
204
+
205
+ assert result is True
206
+
207
+ @pytest.mark.asyncio
208
+ async def test_health_check_returns_false_on_error(self):
209
+ """Test health check returns False when list_runs fails."""
210
+ backend = DynamoDBStorageBackend()
211
+
212
+ with patch.object(backend, "list_runs", side_effect=Exception("Connection error")):
213
+ result = await backend.health_check()
214
+
215
+ assert result is False
216
+
217
+
218
+ class TestDynamoDBSerialization:
219
+ """Test DynamoDB value serialization/deserialization."""
220
+
221
+ @pytest.fixture
222
+ def backend(self):
223
+ """Create a DynamoDB backend instance."""
224
+ return DynamoDBStorageBackend()
225
+
226
+ def test_serialize_string(self, backend):
227
+ """Test serializing string values."""
228
+ result = backend._serialize_value("test")
229
+ assert result == {"S": "test"}
230
+
231
+ def test_serialize_int(self, backend):
232
+ """Test serializing integer values."""
233
+ result = backend._serialize_value(42)
234
+ assert result == {"N": "42"}
235
+
236
+ def test_serialize_float(self, backend):
237
+ """Test serializing float values."""
238
+ result = backend._serialize_value(3.14)
239
+ assert result == {"N": "3.14"}
240
+
241
+ def test_serialize_bool(self, backend):
242
+ """Test serializing boolean values."""
243
+ assert backend._serialize_value(True) == {"BOOL": True}
244
+ assert backend._serialize_value(False) == {"BOOL": False}
245
+
246
+ def test_serialize_none(self, backend):
247
+ """Test serializing None values."""
248
+ result = backend._serialize_value(None)
249
+ assert result == {"NULL": True}
250
+
251
+ def test_serialize_list(self, backend):
252
+ """Test serializing list values."""
253
+ result = backend._serialize_value(["a", 1, True])
254
+ assert result == {"L": [{"S": "a"}, {"N": "1"}, {"BOOL": True}]}
255
+
256
+ def test_serialize_dict(self, backend):
257
+ """Test serializing dict values."""
258
+ result = backend._serialize_value({"key": "value", "num": 42})
259
+ assert result == {"M": {"key": {"S": "value"}, "num": {"N": "42"}}}
260
+
261
+ def test_deserialize_string(self, backend):
262
+ """Test deserializing string values."""
263
+ result = backend._deserialize_value({"S": "test"})
264
+ assert result == "test"
265
+
266
+ def test_deserialize_number_int(self, backend):
267
+ """Test deserializing integer number values."""
268
+ result = backend._deserialize_value({"N": "42"})
269
+ assert result == 42
270
+
271
+ def test_deserialize_number_float(self, backend):
272
+ """Test deserializing float number values."""
273
+ result = backend._deserialize_value({"N": "3.14"})
274
+ assert result == 3.14
275
+
276
+ def test_deserialize_bool(self, backend):
277
+ """Test deserializing boolean values."""
278
+ assert backend._deserialize_value({"BOOL": True}) is True
279
+ assert backend._deserialize_value({"BOOL": False}) is False
280
+
281
+ def test_deserialize_null(self, backend):
282
+ """Test deserializing null values."""
283
+ result = backend._deserialize_value({"NULL": True})
284
+ assert result is None
285
+
286
+ def test_deserialize_list(self, backend):
287
+ """Test deserializing list values."""
288
+ result = backend._deserialize_value({"L": [{"S": "a"}, {"N": "1"}, {"BOOL": True}]})
289
+ assert result == ["a", 1, True]
290
+
291
+ def test_deserialize_dict(self, backend):
292
+ """Test deserializing dict values."""
293
+ result = backend._deserialize_value({"M": {"key": {"S": "value"}, "num": {"N": "42"}}})
294
+ assert result == {"key": "value", "num": 42}
295
+
296
+
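The assertions in TestDynamoDBSerialization pin down the DynamoDB attribute-value mapping (S, N, BOOL, NULL, L, M). A rough, self-contained sketch of a serializer consistent with those assertions, for illustration only (the actual `_serialize_value` in pyworkflow/storage/dynamodb.py may differ):

def serialize_value(value):
    # Mirrors the behaviour asserted above; bool is checked before int/float
    # because bool is a subclass of int in Python.
    if isinstance(value, bool):
        return {"BOOL": value}
    if value is None:
        return {"NULL": True}
    if isinstance(value, str):
        return {"S": value}
    if isinstance(value, (int, float)):
        return {"N": str(value)}
    if isinstance(value, list):
        return {"L": [serialize_value(v) for v in value]}
    if isinstance(value, dict):
        return {"M": {k: serialize_value(v) for k, v in value.items()}}
    raise TypeError(f"Unsupported value type: {type(value)!r}")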
297
+ class TestDynamoDBItemConversion:
298
+ """Test item-to-object conversion methods."""
299
+
300
+ @pytest.fixture
301
+ def backend(self):
302
+ """Create a DynamoDB backend instance."""
303
+ return DynamoDBStorageBackend()
304
+
305
+ def test_item_to_workflow_run(self, backend):
306
+ """Test converting DynamoDB item to WorkflowRun."""
307
+ now = datetime.now(UTC)
308
+ item = {
309
+ "run_id": "test_run",
310
+ "workflow_name": "test_workflow",
311
+ "status": "pending",
312
+ "created_at": now.isoformat(),
313
+ "updated_at": now.isoformat(),
314
+ "input_args": "[]",
315
+ "input_kwargs": "{}",
316
+ "metadata": "{}",
317
+ "recovery_attempts": 0,
318
+ "max_recovery_attempts": 3,
319
+ "recover_on_worker_loss": True,
320
+ "nesting_depth": 0,
321
+ }
322
+
323
+ run = backend._item_to_workflow_run(item)
324
+
325
+ assert run.run_id == "test_run"
326
+ assert run.workflow_name == "test_workflow"
327
+ assert run.status == RunStatus.PENDING
328
+ assert run.recovery_attempts == 0
329
+
330
+ def test_item_to_event(self, backend):
331
+ """Test converting DynamoDB item to Event."""
332
+ now = datetime.now(UTC)
333
+ item = {
334
+ "event_id": "evt_123",
335
+ "run_id": "test_run",
336
+ "sequence": 0,
337
+ "type": "workflow.started",
338
+ "timestamp": now.isoformat(),
339
+ "data": '{"key": "value"}',
340
+ }
341
+
342
+ event = backend._item_to_event(item)
343
+
344
+ assert event.event_id == "evt_123"
345
+ assert event.run_id == "test_run"
346
+ assert event.type == EventType.WORKFLOW_STARTED
347
+ assert event.data == {"key": "value"}
348
+
349
+ def test_item_to_step_execution(self, backend):
350
+ """Test converting DynamoDB item to StepExecution."""
351
+ now = datetime.now(UTC)
352
+ item = {
353
+ "step_id": "step_123",
354
+ "run_id": "test_run",
355
+ "step_name": "test_step",
356
+ "status": "completed",
357
+ "created_at": now.isoformat(),
358
+ "input_args": "[]",
359
+ "input_kwargs": "{}",
360
+ "retry_count": 0,
361
+ }
362
+
363
+ step = backend._item_to_step_execution(item)
364
+
365
+ assert step.step_id == "step_123"
366
+ assert step.run_id == "test_run"
367
+ assert step.step_name == "test_step"
368
+ assert step.status == StepStatus.COMPLETED
369
+ assert step.attempt == 1 # retry_count + 1
370
+
371
+ def test_item_to_hook(self, backend):
372
+ """Test converting DynamoDB item to Hook."""
373
+ now = datetime.now(UTC)
374
+ item = {
375
+ "hook_id": "hook_123",
376
+ "run_id": "test_run",
377
+ "token": "token_abc",
378
+ "created_at": now.isoformat(),
379
+ "status": "pending",
380
+ "metadata": "{}",
381
+ }
382
+
383
+ hook = backend._item_to_hook(item)
384
+
385
+ assert hook.hook_id == "hook_123"
386
+ assert hook.run_id == "test_run"
387
+ assert hook.token == "token_abc"
388
+ assert hook.status == HookStatus.PENDING
389
+
390
+ def test_item_to_schedule(self, backend):
391
+ """Test converting DynamoDB item to Schedule."""
392
+ now = datetime.now(UTC)
393
+ item = {
394
+ "schedule_id": "sched_123",
395
+ "workflow_name": "test_workflow",
396
+ "spec": "0 9 * * *",
397
+ "spec_type": "cron",
398
+ "timezone": "UTC",
399
+ "status": "active",
400
+ "input_args": "[]",
401
+ "input_kwargs": "{}",
402
+ "overlap_policy": "skip",
403
+ "created_at": now.isoformat(),
404
+ "running_run_ids": "[]",
405
+ }
406
+
407
+ schedule = backend._item_to_schedule(item)
408
+
409
+ assert schedule.schedule_id == "sched_123"
410
+ assert schedule.workflow_name == "test_workflow"
411
+ assert schedule.spec.cron == "0 9 * * *"
412
+ assert schedule.status == ScheduleStatus.ACTIVE
413
+
414
+
415
+ class TestDynamoDBKeyPatterns:
416
+ """Test DynamoDB key pattern generation."""
417
+
418
+ @pytest.fixture
419
+ def backend(self):
420
+ """Create a DynamoDB backend instance."""
421
+ return DynamoDBStorageBackend()
422
+
423
+ @pytest.mark.asyncio
424
+ async def test_workflow_run_key_pattern(self, backend):
425
+ """Test that workflow run uses correct key pattern."""
426
+ now = datetime.now(UTC)
427
+ run = WorkflowRun(
428
+ run_id="test_run_123",
429
+ workflow_name="test_workflow",
430
+ status=RunStatus.PENDING,
431
+ created_at=now,
432
+ updated_at=now,
433
+ input_args="[]",
434
+ input_kwargs="{}",
435
+ )
436
+
437
+ mock_client = AsyncMock()
438
+ mock_client.put_item = AsyncMock()
439
+
440
+ with patch.object(backend, "_get_client") as mock_get_client:
441
+ mock_context = AsyncMock()
442
+ mock_context.__aenter__ = AsyncMock(return_value=mock_client)
443
+ mock_context.__aexit__ = AsyncMock()
444
+ mock_get_client.return_value = mock_context
445
+
446
+ await backend.create_run(run)
447
+
448
+ call_args = mock_client.put_item.call_args
449
+ item = call_args.kwargs["Item"]
450
+
451
+ assert item["PK"]["S"] == "RUN#test_run_123"
452
+ assert item["SK"]["S"] == "#METADATA"
453
+ assert item["GSI1PK"]["S"] == "RUNS"
454
+
455
+ @pytest.mark.asyncio
456
+ async def test_event_key_pattern(self, backend):
457
+ """Test that events use correct key pattern with sequence."""
458
+ now = datetime.now(UTC)
459
+ event = Event(
460
+ event_id="evt_123",
461
+ run_id="test_run_123",
462
+ type=EventType.WORKFLOW_STARTED,
463
+ timestamp=now,
464
+ data={},
465
+ )
466
+
467
+ mock_client = AsyncMock()
468
+ mock_client.update_item = AsyncMock(return_value={"Attributes": {"seq": {"N": "1"}}})
469
+ mock_client.put_item = AsyncMock()
470
+
471
+ with patch.object(backend, "_get_client") as mock_get_client:
472
+ mock_context = AsyncMock()
473
+ mock_context.__aenter__ = AsyncMock(return_value=mock_client)
474
+ mock_context.__aexit__ = AsyncMock()
475
+ mock_get_client.return_value = mock_context
476
+
477
+ await backend.record_event(event)
478
+
479
+ put_call = mock_client.put_item.call_args
480
+ item = put_call.kwargs["Item"]
481
+
482
+ assert item["PK"]["S"] == "RUN#test_run_123"
483
+ assert item["SK"]["S"].startswith("EVENT#")
484
+
485
+
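The two key-pattern tests above imply a single-table layout: a run's metadata sits at PK "RUN#<run_id>" / SK "#METADATA" with GSI1PK "RUNS", and its events share the same partition under sort keys prefixed with "EVENT#". A small sketch of that key shape (illustrative; the exact sort-key suffix after "EVENT#" is an assumption, since the test only checks the prefix):

def run_metadata_key(run_id: str) -> dict:
    # Primary key of a run's metadata item, as asserted in
    # test_workflow_run_key_pattern.
    return {"PK": {"S": f"RUN#{run_id}"}, "SK": {"S": "#METADATA"}}

def event_key(run_id: str, sequence: int) -> dict:
    # Events live in the same partition as the run; zero-padding the sequence
    # is an assumed detail that keeps events sorted lexicographically.
    return {"PK": {"S": f"RUN#{run_id}"}, "SK": {"S": f"EVENT#{sequence:010d}"}}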
486
+ class TestWorkflowRunOperations:
487
+ """Test workflow run CRUD operations."""
488
+
489
+ @pytest.mark.asyncio
490
+ async def test_create_run(self, mock_backend):
491
+ """Test creating a workflow run."""
492
+ backend, mock_client = mock_backend
493
+ mock_client.put_item = AsyncMock()
494
+
495
+ now = datetime.now(UTC)
496
+ run = WorkflowRun(
497
+ run_id="test_run",
498
+ workflow_name="test_workflow",
499
+ status=RunStatus.PENDING,
500
+ created_at=now,
501
+ updated_at=now,
502
+ input_args="[]",
503
+ input_kwargs="{}",
504
+ )
505
+
506
+ await backend.create_run(run)
507
+
508
+ mock_client.put_item.assert_called_once()
509
+ call_kwargs = mock_client.put_item.call_args.kwargs
510
+ assert call_kwargs["TableName"] == "pyworkflow"
511
+ assert "PK" in call_kwargs["Item"]
512
+
513
+ @pytest.mark.asyncio
514
+ async def test_get_run_found(self, mock_backend):
515
+ """Test getting an existing workflow run."""
516
+ backend, mock_client = mock_backend
517
+ now = datetime.now(UTC)
518
+
519
+ mock_client.get_item = AsyncMock(
520
+ return_value={
521
+ "Item": {
522
+ "run_id": {"S": "test_run"},
523
+ "workflow_name": {"S": "test_workflow"},
524
+ "status": {"S": "pending"},
525
+ "created_at": {"S": now.isoformat()},
526
+ "updated_at": {"S": now.isoformat()},
527
+ "input_args": {"S": "[]"},
528
+ "input_kwargs": {"S": "{}"},
529
+ "metadata": {"S": "{}"},
530
+ "recovery_attempts": {"N": "0"},
531
+ "max_recovery_attempts": {"N": "3"},
532
+ "recover_on_worker_loss": {"BOOL": True},
533
+ "nesting_depth": {"N": "0"},
534
+ }
535
+ }
536
+ )
537
+
538
+ result = await backend.get_run("test_run")
539
+
540
+ assert result is not None
541
+ assert result.run_id == "test_run"
542
+ assert result.workflow_name == "test_workflow"
543
+
544
+ @pytest.mark.asyncio
545
+ async def test_get_run_not_found(self, mock_backend):
546
+ """Test getting a non-existent workflow run."""
547
+ backend, mock_client = mock_backend
548
+ mock_client.get_item = AsyncMock(return_value={})
549
+
550
+ result = await backend.get_run("nonexistent")
551
+
552
+ assert result is None
553
+
554
+ @pytest.mark.asyncio
555
+ async def test_get_run_by_idempotency_key(self, mock_backend):
556
+ """Test getting run by idempotency key uses GSI3."""
557
+ backend, mock_client = mock_backend
558
+ now = datetime.now(UTC)
559
+
560
+ mock_client.query = AsyncMock(
561
+ return_value={
562
+ "Items": [
563
+ {
564
+ "run_id": {"S": "test_run"},
565
+ "workflow_name": {"S": "test_workflow"},
566
+ "status": {"S": "pending"},
567
+ "created_at": {"S": now.isoformat()},
568
+ "updated_at": {"S": now.isoformat()},
569
+ "input_args": {"S": "[]"},
570
+ "input_kwargs": {"S": "{}"},
571
+ "metadata": {"S": "{}"},
572
+ "recovery_attempts": {"N": "0"},
573
+ "max_recovery_attempts": {"N": "3"},
574
+ "recover_on_worker_loss": {"BOOL": True},
575
+ "nesting_depth": {"N": "0"},
576
+ }
577
+ ]
578
+ }
579
+ )
580
+
581
+ result = await backend.get_run_by_idempotency_key("my_key")
582
+
583
+ call_args = mock_client.query.call_args
584
+ assert call_args.kwargs["IndexName"] == "GSI3"
585
+ assert result is not None
586
+ assert result.run_id == "test_run"
587
+
588
+ @pytest.mark.asyncio
589
+ async def test_update_run_status(self, mock_backend):
590
+ """Test updating run status."""
591
+ backend, mock_client = mock_backend
592
+ mock_client.update_item = AsyncMock()
593
+
594
+ await backend.update_run_status(
595
+ run_id="test_run",
596
+ status=RunStatus.RUNNING,
597
+ )
598
+
599
+ mock_client.update_item.assert_called_once()
600
+
601
+ @pytest.mark.asyncio
602
+ async def test_list_runs(self, mock_backend):
603
+ """Test listing workflow runs."""
604
+ backend, mock_client = mock_backend
605
+ now = datetime.now(UTC)
606
+
607
+ mock_client.query = AsyncMock(
608
+ return_value={
609
+ "Items": [
610
+ {
611
+ "run_id": {"S": "run_1"},
612
+ "workflow_name": {"S": "test"},
613
+ "status": {"S": "pending"},
614
+ "created_at": {"S": now.isoformat()},
615
+ "updated_at": {"S": now.isoformat()},
616
+ "input_args": {"S": "[]"},
617
+ "input_kwargs": {"S": "{}"},
618
+ "metadata": {"S": "{}"},
619
+ "recovery_attempts": {"N": "0"},
620
+ "max_recovery_attempts": {"N": "3"},
621
+ "recover_on_worker_loss": {"BOOL": True},
622
+ "nesting_depth": {"N": "0"},
623
+ }
624
+ ]
625
+ }
626
+ )
627
+
628
+ runs, cursor = await backend.list_runs()
629
+
630
+ assert len(runs) == 1
631
+ assert runs[0].run_id == "run_1"
632
+
633
+
634
+ class TestEventOperations:
635
+ """Test event log operations."""
636
+
637
+ @pytest.mark.asyncio
638
+ async def test_record_event(self, mock_backend):
639
+ """Test recording an event."""
640
+ backend, mock_client = mock_backend
641
+ mock_client.update_item = AsyncMock(return_value={"Attributes": {"seq": {"N": "1"}}})
642
+ mock_client.put_item = AsyncMock()
643
+
644
+ now = datetime.now(UTC)
645
+ event = Event(
646
+ event_id="evt_1",
647
+ run_id="test_run",
648
+ type=EventType.WORKFLOW_STARTED,
649
+ timestamp=now,
650
+ data={},
651
+ )
652
+
653
+ await backend.record_event(event)
654
+
655
+ mock_client.put_item.assert_called_once()
656
+
657
+ @pytest.mark.asyncio
658
+ async def test_get_events(self, mock_backend):
659
+ """Test getting events for a run."""
660
+ backend, mock_client = mock_backend
661
+ now = datetime.now(UTC)
662
+
663
+ mock_client.query = AsyncMock(
664
+ return_value={
665
+ "Items": [
666
+ {
667
+ "event_id": {"S": "evt_1"},
668
+ "run_id": {"S": "test_run"},
669
+ "sequence": {"N": "0"},
670
+ "type": {"S": "workflow.started"},
671
+ "timestamp": {"S": now.isoformat()},
672
+ "data": {"S": "{}"},
673
+ }
674
+ ]
675
+ }
676
+ )
677
+
678
+ events = await backend.get_events("test_run")
679
+
680
+ assert len(events) == 1
681
+ assert events[0].event_id == "evt_1"
682
+
683
+ @pytest.mark.asyncio
684
+ async def test_get_latest_event(self, mock_backend):
685
+ """Test getting latest event for a run."""
686
+ backend, mock_client = mock_backend
687
+ now = datetime.now(UTC)
688
+
689
+ mock_client.query = AsyncMock(
690
+ return_value={
691
+ "Items": [
692
+ {
693
+ "event_id": {"S": "evt_5"},
694
+ "run_id": {"S": "test_run"},
695
+ "sequence": {"N": "5"},
696
+ "type": {"S": "step.completed"},
697
+ "timestamp": {"S": now.isoformat()},
698
+ "data": {"S": "{}"},
699
+ }
700
+ ]
701
+ }
702
+ )
703
+
704
+ event = await backend.get_latest_event("test_run")
705
+
706
+ assert event is not None
707
+ assert event.event_id == "evt_5"
708
+
709
+
710
+ class TestStepOperations:
711
+ """Test step execution operations."""
712
+
713
+ @pytest.mark.asyncio
714
+ async def test_create_step(self, mock_backend):
715
+ """Test creating a step execution."""
716
+ backend, mock_client = mock_backend
717
+ mock_client.put_item = AsyncMock()
718
+
719
+ now = datetime.now(UTC)
720
+ step = StepExecution(
721
+ step_id="step_1",
722
+ run_id="test_run",
723
+ step_name="test_step",
724
+ status=StepStatus.RUNNING,
725
+ created_at=now,
726
+ input_args="[]",
727
+ input_kwargs="{}",
728
+ attempt=1,
729
+ )
730
+
731
+ await backend.create_step(step)
732
+
733
+ mock_client.put_item.assert_called_once()
734
+
735
+ @pytest.mark.asyncio
736
+ async def test_get_step_found(self, mock_backend):
737
+ """Test getting an existing step."""
738
+ backend, mock_client = mock_backend
739
+ now = datetime.now(UTC)
740
+
741
+ mock_client.scan = AsyncMock(
742
+ return_value={
743
+ "Items": [
744
+ {
745
+ "step_id": {"S": "step_1"},
746
+ "run_id": {"S": "test_run"},
747
+ "step_name": {"S": "test_step"},
748
+ "status": {"S": "completed"},
749
+ "created_at": {"S": now.isoformat()},
750
+ "input_args": {"S": "[]"},
751
+ "input_kwargs": {"S": "{}"},
752
+ "retry_count": {"N": "0"},
753
+ }
754
+ ]
755
+ }
756
+ )
757
+
758
+ result = await backend.get_step("step_1")
759
+
760
+ assert result is not None
761
+ assert result.step_id == "step_1"
762
+
763
+ @pytest.mark.asyncio
764
+ async def test_get_step_not_found(self, mock_backend):
765
+ """Test getting a non-existent step."""
766
+ backend, mock_client = mock_backend
767
+ mock_client.scan = AsyncMock(return_value={"Items": []})
768
+
769
+ result = await backend.get_step("nonexistent")
770
+
771
+ assert result is None
772
+
773
+ @pytest.mark.asyncio
774
+ async def test_update_step_status(self, mock_backend):
775
+ """Test updating step status."""
776
+ backend, mock_client = mock_backend
777
+ now = datetime.now(UTC)
778
+
779
+ # Mock get_step to return a step
780
+ mock_client.scan = AsyncMock(
781
+ return_value={
782
+ "Items": [
783
+ {
784
+ "step_id": {"S": "step_1"},
785
+ "run_id": {"S": "test_run"},
786
+ "step_name": {"S": "test_step"},
787
+ "status": {"S": "running"},
788
+ "created_at": {"S": now.isoformat()},
789
+ "input_args": {"S": "[]"},
790
+ "input_kwargs": {"S": "{}"},
791
+ "retry_count": {"N": "0"},
792
+ }
793
+ ]
794
+ }
795
+ )
796
+ mock_client.update_item = AsyncMock()
797
+
798
+ await backend.update_step_status(
799
+ step_id="step_1",
800
+ status="completed",
801
+ result='{"output": "success"}',
802
+ )
803
+
804
+ mock_client.update_item.assert_called_once()
805
+
806
+ @pytest.mark.asyncio
807
+ async def test_list_steps(self, mock_backend):
808
+ """Test listing steps for a run."""
809
+ backend, mock_client = mock_backend
810
+ now = datetime.now(UTC)
811
+
812
+ mock_client.query = AsyncMock(
813
+ return_value={
814
+ "Items": [
815
+ {
816
+ "step_id": {"S": "step_1"},
817
+ "run_id": {"S": "test_run"},
818
+ "step_name": {"S": "step_1"},
819
+ "status": {"S": "completed"},
820
+ "created_at": {"S": now.isoformat()},
821
+ "input_args": {"S": "[]"},
822
+ "input_kwargs": {"S": "{}"},
823
+ "retry_count": {"N": "0"},
824
+ }
825
+ ]
826
+ }
827
+ )
828
+
829
+ steps = await backend.list_steps("test_run")
830
+
831
+ assert len(steps) == 1
832
+
833
+
834
+ class TestHookOperations:
835
+ """Test hook/webhook operations."""
836
+
837
+ @pytest.mark.asyncio
838
+ async def test_create_hook(self, mock_backend):
839
+ """Test creating a hook."""
840
+ backend, mock_client = mock_backend
841
+ mock_client.put_item = AsyncMock()
842
+
843
+ now = datetime.now(UTC)
844
+ hook = Hook(
845
+ hook_id="hook_1",
846
+ run_id="test_run",
847
+ token="token_abc",
848
+ status=HookStatus.PENDING,
849
+ created_at=now,
850
+ )
851
+
852
+ await backend.create_hook(hook)
853
+
854
+ # Should be called twice (hook record + token lookup)
855
+ assert mock_client.put_item.call_count == 2
856
+
857
+ @pytest.mark.asyncio
858
+ async def test_get_hook_found(self, mock_backend):
859
+ """Test getting an existing hook."""
860
+ backend, mock_client = mock_backend
861
+ now = datetime.now(UTC)
862
+
863
+ mock_client.get_item = AsyncMock(
864
+ return_value={
865
+ "Item": {
866
+ "hook_id": {"S": "hook_1"},
867
+ "run_id": {"S": "test_run"},
868
+ "token": {"S": "token_abc"},
869
+ "status": {"S": "pending"},
870
+ "created_at": {"S": now.isoformat()},
871
+ "metadata": {"S": "{}"},
872
+ }
873
+ }
874
+ )
875
+
876
+ result = await backend.get_hook("hook_1")
877
+
878
+ assert result is not None
879
+ assert result.hook_id == "hook_1"
880
+
881
+ @pytest.mark.asyncio
882
+ async def test_get_hook_not_found(self, mock_backend):
883
+ """Test getting a non-existent hook."""
884
+ backend, mock_client = mock_backend
885
+ mock_client.get_item = AsyncMock(return_value={})
886
+
887
+ result = await backend.get_hook("nonexistent")
888
+
889
+ assert result is None
890
+
891
+ @pytest.mark.asyncio
892
+ async def test_get_hook_by_token(self, mock_backend):
893
+ """Test getting hook by token."""
894
+ backend, mock_client = mock_backend
895
+ now = datetime.now(UTC)
896
+
897
+ # First query returns the token lookup item with hook_id
898
+ mock_client.query = AsyncMock(
899
+ return_value={
900
+ "Items": [
901
+ {
902
+ "PK": {"S": "TOKEN#token_abc"},
903
+ "SK": {"S": "TOKEN#token_abc"},
904
+ "hook_id": {"S": "hook_1"},
905
+ }
906
+ ]
907
+ }
908
+ )
909
+
910
+ # Then get_item fetches the actual hook
911
+ mock_client.get_item = AsyncMock(
912
+ return_value={
913
+ "Item": {
914
+ "hook_id": {"S": "hook_1"},
915
+ "run_id": {"S": "test_run"},
916
+ "token": {"S": "token_abc"},
917
+ "status": {"S": "pending"},
918
+ "created_at": {"S": now.isoformat()},
919
+ "metadata": {"S": "{}"},
920
+ }
921
+ }
922
+ )
923
+
924
+ result = await backend.get_hook_by_token("token_abc")
925
+
926
+ assert result is not None
927
+ assert result.token == "token_abc"
928
+ mock_client.query.assert_called_once()
929
+ mock_client.get_item.assert_called_once()
930
+
931
+ @pytest.mark.asyncio
932
+ async def test_update_hook_status(self, mock_backend):
933
+ """Test updating hook status."""
934
+ backend, mock_client = mock_backend
935
+ mock_client.update_item = AsyncMock()
936
+
937
+ await backend.update_hook_status(
938
+ hook_id="hook_1",
939
+ status=HookStatus.RECEIVED,
940
+ payload='{"data": "payload"}',
941
+ )
942
+
943
+ mock_client.update_item.assert_called_once()
944
+
945
+ @pytest.mark.asyncio
946
+ async def test_list_hooks(self, mock_backend):
947
+ """Test listing hooks for a run."""
948
+ backend, mock_client = mock_backend
949
+ now = datetime.now(UTC)
950
+
951
+ mock_client.query = AsyncMock(
952
+ return_value={
953
+ "Items": [
954
+ {
955
+ "hook_id": {"S": "hook_1"},
956
+ "run_id": {"S": "test_run"},
957
+ "token": {"S": "token_abc"},
958
+ "status": {"S": "pending"},
959
+ "created_at": {"S": now.isoformat()},
960
+ "metadata": {"S": "{}"},
961
+ }
962
+ ]
963
+ }
964
+ )
965
+
966
+ hooks = await backend.list_hooks(run_id="test_run")
967
+
968
+ assert len(hooks) == 1
969
+
970
+
971
+ class TestCancellationOperations:
972
+ """Test cancellation flag operations."""
973
+
974
+ @pytest.mark.asyncio
975
+ async def test_set_cancellation_flag(self, mock_backend):
976
+ """Test setting a cancellation flag."""
977
+ backend, mock_client = mock_backend
978
+ mock_client.put_item = AsyncMock()
979
+
980
+ await backend.set_cancellation_flag("run_123")
981
+
982
+ call_args = mock_client.put_item.call_args
983
+ item = call_args.kwargs["Item"]
984
+ assert item["PK"]["S"] == "CANCEL#run_123"
985
+ assert item["SK"]["S"] == "#FLAG"
986
+
987
+ @pytest.mark.asyncio
988
+ async def test_check_cancellation_flag_set(self, mock_backend):
989
+ """Test checking cancellation flag when it exists."""
990
+ backend, mock_client = mock_backend
991
+ mock_client.get_item = AsyncMock(return_value={"Item": {"PK": {"S": "CANCEL#run_123"}}})
992
+
993
+ result = await backend.check_cancellation_flag("run_123")
994
+
995
+ assert result is True
996
+
997
+ @pytest.mark.asyncio
998
+ async def test_check_cancellation_flag_not_set(self, mock_backend):
999
+ """Test checking cancellation flag when it doesn't exist."""
1000
+ backend, mock_client = mock_backend
1001
+ mock_client.get_item = AsyncMock(return_value={})
1002
+
1003
+ result = await backend.check_cancellation_flag("run_123")
1004
+
1005
+ assert result is False
1006
+
1007
+ @pytest.mark.asyncio
1008
+ async def test_clear_cancellation_flag(self, mock_backend):
1009
+ """Test clearing a cancellation flag."""
1010
+ backend, mock_client = mock_backend
1011
+ mock_client.delete_item = AsyncMock()
1012
+
1013
+ await backend.clear_cancellation_flag("run_123")
1014
+
1015
+ call_args = mock_client.delete_item.call_args
1016
+ key = call_args.kwargs["Key"]
1017
+ assert key["PK"]["S"] == "CANCEL#run_123"
1018
+ assert key["SK"]["S"] == "#FLAG"
1019
+
1020
+
1021
+ class TestContinueAsNewOperations:
1022
+ """Test continue-as-new chain operations."""
1023
+
1024
+ @pytest.mark.asyncio
1025
+ async def test_update_run_continuation(self, mock_backend):
1026
+ """Test updating run continuation link."""
1027
+ backend, mock_client = mock_backend
1028
+ mock_client.update_item = AsyncMock()
1029
+
1030
+ await backend.update_run_continuation("run_1", "run_2")
1031
+
1032
+ mock_client.update_item.assert_called_once()
1033
+
1034
+ @pytest.mark.asyncio
1035
+ async def test_get_workflow_chain(self, mock_backend):
1036
+ """Test getting workflow chain."""
1037
+ backend, mock_client = mock_backend
1038
+ now = datetime.now(UTC)
1039
+
1040
+ # Chain: run_1 -> run_2
1041
+ # run_1: first in chain (no continued_from_run_id), has continued_to_run_id
1042
+ # run_2: second in chain, has continued_from_run_id, no continued_to_run_id
1043
+ run_1_item = {
1044
+ "Item": {
1045
+ "run_id": {"S": "run_1"},
1046
+ "workflow_name": {"S": "test"},
1047
+ "status": {"S": "completed"},
1048
+ "created_at": {"S": now.isoformat()},
1049
+ "updated_at": {"S": now.isoformat()},
1050
+ "input_args": {"S": "[]"},
1051
+ "input_kwargs": {"S": "{}"},
1052
+ "metadata": {"S": "{}"},
1053
+ "recovery_attempts": {"N": "0"},
1054
+ "max_recovery_attempts": {"N": "3"},
1055
+ "recover_on_worker_loss": {"BOOL": True},
1056
+ "nesting_depth": {"N": "0"},
1057
+ "continued_to_run_id": {"S": "run_2"},
1058
+ }
1059
+ }
1060
+ run_2_item = {
1061
+ "Item": {
1062
+ "run_id": {"S": "run_2"},
1063
+ "workflow_name": {"S": "test"},
1064
+ "status": {"S": "completed"},
1065
+ "created_at": {"S": now.isoformat()},
1066
+ "updated_at": {"S": now.isoformat()},
1067
+ "input_args": {"S": "[]"},
1068
+ "input_kwargs": {"S": "{}"},
1069
+ "metadata": {"S": "{}"},
1070
+ "recovery_attempts": {"N": "0"},
1071
+ "max_recovery_attempts": {"N": "3"},
1072
+ "recover_on_worker_loss": {"BOOL": True},
1073
+ "nesting_depth": {"N": "0"},
1074
+ "continued_from_run_id": {"S": "run_1"},
1075
+ }
1076
+ }
1077
+
1078
+ # Mock get_item to return different items based on which run is requested
1079
+ async def mock_get_item(**kwargs):
1080
+ key = kwargs.get("Key", {})
1081
+ pk = key.get("PK", {}).get("S", "")
1082
+ if "run_1" in pk:
1083
+ return run_1_item
1084
+ elif "run_2" in pk:
1085
+ return run_2_item
1086
+ return {}
1087
+
1088
+ mock_client.get_item = AsyncMock(side_effect=mock_get_item)
1089
+
1090
+ result = await backend.get_workflow_chain("run_2")
1091
+
1092
+ assert len(result) == 2
1093
+ assert result[0].run_id == "run_1"
1094
+ assert result[1].run_id == "run_2"
1095
+
1096
+
1097
+ class TestChildWorkflowOperations:
1098
+ """Test child workflow operations."""
1099
+
1100
+ @pytest.mark.asyncio
1101
+ async def test_get_children(self, mock_backend):
1102
+ """Test getting child workflows uses GSI4."""
1103
+ backend, mock_client = mock_backend
1104
+ now = datetime.now(UTC)
1105
+
1106
+ mock_client.query = AsyncMock(
1107
+ return_value={
1108
+ "Items": [
1109
+ {
1110
+ "run_id": {"S": "child_1"},
1111
+ "workflow_name": {"S": "child_workflow"},
1112
+ "status": {"S": "completed"},
1113
+ "created_at": {"S": now.isoformat()},
1114
+ "updated_at": {"S": now.isoformat()},
1115
+ "input_args": {"S": "[]"},
1116
+ "input_kwargs": {"S": "{}"},
1117
+ "metadata": {"S": "{}"},
1118
+ "recovery_attempts": {"N": "0"},
1119
+ "max_recovery_attempts": {"N": "3"},
1120
+ "recover_on_worker_loss": {"BOOL": True},
1121
+ "nesting_depth": {"N": "1"},
1122
+ "parent_run_id": {"S": "parent_123"},
1123
+ }
1124
+ ]
1125
+ }
1126
+ )
1127
+
1128
+ result = await backend.get_children("parent_123")
1129
+
1130
+ call_args = mock_client.query.call_args
1131
+ assert call_args.kwargs["IndexName"] == "GSI4"
1132
+ assert len(result) == 1
1133
+ assert result[0].run_id == "child_1"
1134
+
1135
+ @pytest.mark.asyncio
1136
+ async def test_get_parent_found(self, mock_backend):
1137
+ """Test getting parent workflow."""
1138
+ backend, mock_client = mock_backend
1139
+ now = datetime.now(UTC)
1140
+
1141
+ # First call returns child run
1142
+ child_response = {
1143
+ "Item": {
1144
+ "run_id": {"S": "child_1"},
1145
+ "workflow_name": {"S": "child"},
1146
+ "status": {"S": "running"},
1147
+ "created_at": {"S": now.isoformat()},
1148
+ "updated_at": {"S": now.isoformat()},
1149
+ "input_args": {"S": "[]"},
1150
+ "input_kwargs": {"S": "{}"},
1151
+ "metadata": {"S": "{}"},
1152
+ "recovery_attempts": {"N": "0"},
1153
+ "max_recovery_attempts": {"N": "3"},
1154
+ "recover_on_worker_loss": {"BOOL": True},
1155
+ "nesting_depth": {"N": "1"},
1156
+ "parent_run_id": {"S": "parent_1"},
1157
+ }
1158
+ }
1159
+
1160
+ # Second call returns parent run
1161
+ parent_response = {
1162
+ "Item": {
1163
+ "run_id": {"S": "parent_1"},
1164
+ "workflow_name": {"S": "parent"},
1165
+ "status": {"S": "running"},
1166
+ "created_at": {"S": now.isoformat()},
1167
+ "updated_at": {"S": now.isoformat()},
1168
+ "input_args": {"S": "[]"},
1169
+ "input_kwargs": {"S": "{}"},
1170
+ "metadata": {"S": "{}"},
1171
+ "recovery_attempts": {"N": "0"},
1172
+ "max_recovery_attempts": {"N": "3"},
1173
+ "recover_on_worker_loss": {"BOOL": True},
1174
+ "nesting_depth": {"N": "0"},
1175
+ }
1176
+ }
1177
+
1178
+ mock_client.get_item = AsyncMock(side_effect=[child_response, parent_response])
1179
+
1180
+ result = await backend.get_parent("child_1")
1181
+
1182
+ assert result is not None
1183
+ assert result.run_id == "parent_1"
1184
+
1185
+ @pytest.mark.asyncio
1186
+ async def test_get_parent_not_found(self, mock_backend):
1187
+ """Test getting parent when no parent exists."""
1188
+ backend, mock_client = mock_backend
1189
+ now = datetime.now(UTC)
1190
+
1191
+ mock_client.get_item = AsyncMock(
1192
+ return_value={
1193
+ "Item": {
1194
+ "run_id": {"S": "root_run"},
1195
+ "workflow_name": {"S": "test"},
1196
+ "status": {"S": "running"},
1197
+ "created_at": {"S": now.isoformat()},
1198
+ "updated_at": {"S": now.isoformat()},
1199
+ "input_args": {"S": "[]"},
1200
+ "input_kwargs": {"S": "{}"},
1201
+ "metadata": {"S": "{}"},
1202
+ "recovery_attempts": {"N": "0"},
1203
+ "max_recovery_attempts": {"N": "3"},
1204
+ "recover_on_worker_loss": {"BOOL": True},
1205
+ "nesting_depth": {"N": "0"},
1206
+ }
1207
+ }
1208
+ )
1209
+
1210
+ result = await backend.get_parent("root_run")
1211
+
1212
+ assert result is None
1213
+
1214
+ @pytest.mark.asyncio
1215
+ async def test_get_nesting_depth(self, mock_backend):
1216
+ """Test getting nesting depth."""
1217
+ backend, mock_client = mock_backend
1218
+ now = datetime.now(UTC)
1219
+
1220
+ mock_client.get_item = AsyncMock(
1221
+ return_value={
1222
+ "Item": {
1223
+ "run_id": {"S": "child_run"},
1224
+ "workflow_name": {"S": "test"},
1225
+ "status": {"S": "running"},
1226
+ "created_at": {"S": now.isoformat()},
1227
+ "updated_at": {"S": now.isoformat()},
1228
+ "input_args": {"S": "[]"},
1229
+ "input_kwargs": {"S": "{}"},
1230
+ "metadata": {"S": "{}"},
1231
+ "recovery_attempts": {"N": "0"},
1232
+ "max_recovery_attempts": {"N": "3"},
1233
+ "recover_on_worker_loss": {"BOOL": True},
1234
+ "nesting_depth": {"N": "2"},
1235
+ }
1236
+ }
1237
+ )
1238
+
1239
+ result = await backend.get_nesting_depth("child_run")
1240
+
1241
+ assert result == 2
1242
+
1243
+
1244
+ class TestScheduleOperations:
1245
+ """Test schedule CRUD operations."""
1246
+
1247
+ @pytest.mark.asyncio
1248
+ async def test_create_schedule(self, mock_backend):
1249
+ """Test creating a schedule."""
1250
+ backend, mock_client = mock_backend
1251
+ mock_client.put_item = AsyncMock()
1252
+
1253
+ now = datetime.now(UTC)
1254
+ schedule = Schedule(
1255
+ schedule_id="sched_1",
1256
+ workflow_name="test_workflow",
1257
+ spec=ScheduleSpec(cron="0 9 * * *"),
1258
+ status=ScheduleStatus.ACTIVE,
1259
+ created_at=now,
1260
+ )
1261
+
1262
+ await backend.create_schedule(schedule)
1263
+
1264
+ mock_client.put_item.assert_called_once()
1265
+
1266
+ @pytest.mark.asyncio
1267
+ async def test_get_schedule_found(self, mock_backend):
1268
+ """Test getting an existing schedule."""
1269
+ backend, mock_client = mock_backend
1270
+ now = datetime.now(UTC)
1271
+
1272
+ mock_client.get_item = AsyncMock(
1273
+ return_value={
1274
+ "Item": {
1275
+ "schedule_id": {"S": "sched_1"},
1276
+ "workflow_name": {"S": "test_workflow"},
1277
+ "spec": {"S": "0 9 * * *"},
1278
+ "spec_type": {"S": "cron"},
1279
+ "timezone": {"S": "UTC"},
1280
+ "status": {"S": "active"},
1281
+ "input_args": {"S": "[]"},
1282
+ "input_kwargs": {"S": "{}"},
1283
+ "overlap_policy": {"S": "skip"},
1284
+ "created_at": {"S": now.isoformat()},
1285
+ "running_run_ids": {"S": "[]"},
1286
+ }
1287
+ }
1288
+ )
1289
+
1290
+ result = await backend.get_schedule("sched_1")
1291
+
1292
+ assert result is not None
1293
+ assert result.schedule_id == "sched_1"
1294
+
1295
+ @pytest.mark.asyncio
1296
+ async def test_get_schedule_not_found(self, mock_backend):
1297
+ """Test getting a non-existent schedule."""
1298
+ backend, mock_client = mock_backend
1299
+ mock_client.get_item = AsyncMock(return_value={})
1300
+
1301
+ result = await backend.get_schedule("nonexistent")
1302
+
1303
+ assert result is None
1304
+
1305
+ @pytest.mark.asyncio
1306
+ async def test_update_schedule(self, mock_backend):
1307
+ """Test updating a schedule."""
1308
+ backend, mock_client = mock_backend
1309
+ mock_client.put_item = AsyncMock()
1310
+
1311
+ now = datetime.now(UTC)
1312
+ schedule = Schedule(
1313
+ schedule_id="sched_1",
1314
+ workflow_name="test_workflow",
1315
+ spec=ScheduleSpec(cron="0 10 * * *"),
1316
+ status=ScheduleStatus.PAUSED,
1317
+ created_at=now,
1318
+ )
1319
+
1320
+ await backend.update_schedule(schedule)
1321
+
1322
+ mock_client.put_item.assert_called_once()
1323
+
1324
+ @pytest.mark.asyncio
1325
+ async def test_delete_schedule(self, mock_backend):
1326
+ """Test deleting a schedule."""
1327
+ backend, mock_client = mock_backend
1328
+ now = datetime.now(UTC)
1329
+
1330
+ mock_client.get_item = AsyncMock(
1331
+ return_value={
1332
+ "Item": {
1333
+ "schedule_id": {"S": "sched_1"},
1334
+ "workflow_name": {"S": "test"},
1335
+ "spec": {"S": "0 9 * * *"},
1336
+ "spec_type": {"S": "cron"},
1337
+ "timezone": {"S": "UTC"},
1338
+ "status": {"S": "active"},
1339
+ "input_args": {"S": "[]"},
1340
+ "input_kwargs": {"S": "{}"},
1341
+ "overlap_policy": {"S": "skip"},
1342
+ "created_at": {"S": now.isoformat()},
1343
+ "running_run_ids": {"S": "[]"},
1344
+ }
1345
+ }
1346
+ )
1347
+ mock_client.put_item = AsyncMock()
1348
+
1349
+ await backend.delete_schedule("sched_1")
1350
+
1351
+ mock_client.put_item.assert_called_once()
1352
+
1353
+ @pytest.mark.asyncio
1354
+ async def test_list_schedules(self, mock_backend):
1355
+ """Test listing schedules."""
1356
+ backend, mock_client = mock_backend
1357
+ now = datetime.now(UTC)
1358
+
1359
+ mock_client.query = AsyncMock(
1360
+ return_value={
1361
+ "Items": [
1362
+ {
1363
+ "schedule_id": {"S": "sched_1"},
1364
+ "workflow_name": {"S": "test"},
1365
+ "spec": {"S": "0 9 * * *"},
1366
+ "spec_type": {"S": "cron"},
1367
+ "timezone": {"S": "UTC"},
1368
+ "status": {"S": "active"},
1369
+ "input_args": {"S": "[]"},
1370
+ "input_kwargs": {"S": "{}"},
1371
+ "overlap_policy": {"S": "skip"},
1372
+ "created_at": {"S": now.isoformat()},
1373
+ "running_run_ids": {"S": "[]"},
1374
+ }
1375
+ ]
1376
+ }
1377
+ )
1378
+
1379
+ schedules = await backend.list_schedules()
1380
+
1381
+ assert len(schedules) == 1
1382
+
1383
+ @pytest.mark.asyncio
1384
+ async def test_get_due_schedules(self, mock_backend):
1385
+ """Test getting due schedules uses GSI5."""
1386
+ backend, mock_client = mock_backend
1387
+ now = datetime.now(UTC)
1388
+
1389
+ mock_client.query = AsyncMock(
1390
+ return_value={
1391
+ "Items": [
1392
+ {
1393
+ "schedule_id": {"S": "sched_1"},
1394
+ "workflow_name": {"S": "test"},
1395
+ "spec": {"S": "0 9 * * *"},
1396
+ "spec_type": {"S": "cron"},
1397
+ "timezone": {"S": "UTC"},
1398
+ "status": {"S": "active"},
1399
+ "input_args": {"S": "[]"},
1400
+ "input_kwargs": {"S": "{}"},
1401
+ "overlap_policy": {"S": "skip"},
1402
+ "created_at": {"S": now.isoformat()},
1403
+ "next_run_time": {"S": now.isoformat()},
1404
+ "running_run_ids": {"S": "[]"},
1405
+ }
1406
+ ]
1407
+ }
1408
+ )
1409
+
1410
+ result = await backend.get_due_schedules(now)
1411
+
1412
+ call_args = mock_client.query.call_args
1413
+ assert call_args.kwargs["IndexName"] == "GSI5"
1414
+ assert call_args.kwargs["ExpressionAttributeValues"][":pk"]["S"] == "ACTIVE_SCHEDULES"
1415
+ assert len(result) == 1
1416
+
1417
+ @pytest.mark.asyncio
1418
+ async def test_add_running_run(self, mock_backend):
1419
+ """Test adding a running run to schedule."""
1420
+ backend, mock_client = mock_backend
1421
+ now = datetime.now(UTC)
1422
+
1423
+ mock_client.get_item = AsyncMock(
1424
+ return_value={
1425
+ "Item": {
1426
+ "schedule_id": {"S": "sched_1"},
1427
+ "workflow_name": {"S": "test"},
1428
+ "spec": {"S": "0 9 * * *"},
1429
+ "spec_type": {"S": "cron"},
1430
+ "timezone": {"S": "UTC"},
1431
+ "status": {"S": "active"},
1432
+ "input_args": {"S": "[]"},
1433
+ "input_kwargs": {"S": "{}"},
1434
+ "overlap_policy": {"S": "skip"},
1435
+ "created_at": {"S": now.isoformat()},
1436
+ "running_run_ids": {"S": "[]"},
1437
+ }
1438
+ }
1439
+ )
1440
+ mock_client.put_item = AsyncMock()
1441
+
1442
+ await backend.add_running_run("sched_1", "run_1")
1443
+
1444
+ mock_client.put_item.assert_called_once()
1445
+
1446
+ @pytest.mark.asyncio
1447
+ async def test_remove_running_run(self, mock_backend):
1448
+ """Test removing a running run from schedule."""
1449
+ backend, mock_client = mock_backend
1450
+ now = datetime.now(UTC)
1451
+
1452
+ mock_client.get_item = AsyncMock(
1453
+ return_value={
1454
+ "Item": {
1455
+ "schedule_id": {"S": "sched_1"},
1456
+ "workflow_name": {"S": "test"},
1457
+ "spec": {"S": "0 9 * * *"},
1458
+ "spec_type": {"S": "cron"},
1459
+ "timezone": {"S": "UTC"},
1460
+ "status": {"S": "active"},
1461
+ "input_args": {"S": "[]"},
1462
+ "input_kwargs": {"S": "{}"},
1463
+ "overlap_policy": {"S": "skip"},
1464
+ "created_at": {"S": now.isoformat()},
1465
+ "running_run_ids": {"S": '["run_1", "run_2"]'},
1466
+ }
1467
+ }
1468
+ )
1469
+ mock_client.put_item = AsyncMock()
1470
+
1471
+ await backend.remove_running_run("sched_1", "run_1")
1472
+
1473
+ mock_client.put_item.assert_called_once()
1474
+
1475
+
1476
+ class TestDynamoDBGSIQueries:
1477
+ """Test GSI-based queries."""
1478
+
1479
+ @pytest.fixture
1480
+ def backend(self):
1481
+ """Create a DynamoDB backend instance."""
1482
+ return DynamoDBStorageBackend()
1483
+
1484
+ @pytest.mark.asyncio
1485
+ async def test_gsi1_runs_by_status(self, backend):
1486
+ """Test that list_runs uses GSI1 for status queries."""
1487
+ mock_client = AsyncMock()
1488
+ mock_client.query = AsyncMock(return_value={"Items": []})
1489
+
1490
+ with patch.object(backend, "_get_client") as mock_get_client:
1491
+ mock_context = AsyncMock()
1492
+ mock_context.__aenter__ = AsyncMock(return_value=mock_client)
1493
+ mock_context.__aexit__ = AsyncMock()
1494
+ mock_get_client.return_value = mock_context
1495
+
1496
+ await backend.list_runs(status=RunStatus.RUNNING)
1497
+
1498
+ call_args = mock_client.query.call_args
1499
+ assert call_args.kwargs["IndexName"] == "GSI1"
1500
+
1501
+ @pytest.mark.asyncio
1502
+ async def test_gsi3_idempotency_key(self, backend):
1503
+ """Test that get_run_by_idempotency_key uses GSI3."""
1504
+ mock_client = AsyncMock()
1505
+ mock_client.query = AsyncMock(return_value={"Items": []})
1506
+
1507
+ with patch.object(backend, "_get_client") as mock_get_client:
1508
+ mock_context = AsyncMock()
1509
+ mock_context.__aenter__ = AsyncMock(return_value=mock_client)
1510
+ mock_context.__aexit__ = AsyncMock()
1511
+ mock_get_client.return_value = mock_context
1512
+
1513
+ await backend.get_run_by_idempotency_key("test_key")
1514
+
1515
+ call_args = mock_client.query.call_args
1516
+ assert call_args.kwargs["IndexName"] == "GSI3"
1517
+ assert "IDEMPOTENCY#test_key" in str(call_args.kwargs["ExpressionAttributeValues"])
1518
+
1519
+ @pytest.mark.asyncio
1520
+ async def test_gsi4_children(self, backend):
1521
+ """Test that get_children uses GSI4."""
1522
+ mock_client = AsyncMock()
1523
+ mock_client.query = AsyncMock(return_value={"Items": []})
1524
+
1525
+ with patch.object(backend, "_get_client") as mock_get_client:
1526
+ mock_context = AsyncMock()
1527
+ mock_context.__aenter__ = AsyncMock(return_value=mock_client)
1528
+ mock_context.__aexit__ = AsyncMock()
1529
+ mock_get_client.return_value = mock_context
1530
+
1531
+ await backend.get_children("parent_123")
1532
+
1533
+ call_args = mock_client.query.call_args
1534
+ assert call_args.kwargs["IndexName"] == "GSI4"
1535
+ assert "PARENT#parent_123" in str(call_args.kwargs["ExpressionAttributeValues"])
1536
+
1537
+ @pytest.mark.asyncio
1538
+ async def test_gsi5_due_schedules(self, backend):
1539
+ """Test that get_due_schedules uses GSI5."""
1540
+ now = datetime.now(UTC)
1541
+ mock_client = AsyncMock()
1542
+ mock_client.query = AsyncMock(return_value={"Items": []})
1543
+
1544
+ with patch.object(backend, "_get_client") as mock_get_client:
1545
+ mock_context = AsyncMock()
1546
+ mock_context.__aenter__ = AsyncMock(return_value=mock_client)
1547
+ mock_context.__aexit__ = AsyncMock()
1548
+ mock_get_client.return_value = mock_context
1549
+
1550
+ await backend.get_due_schedules(now)
1551
+
1552
+ call_args = mock_client.query.call_args
1553
+ assert call_args.kwargs["IndexName"] == "GSI5"
1554
+ assert "ACTIVE_SCHEDULES" in str(call_args.kwargs["ExpressionAttributeValues"])