planar 0.9.3__py3-none-any.whl → 0.11.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (76)
  1. planar/ai/agent.py +2 -1
  2. planar/ai/agent_base.py +24 -5
  3. planar/ai/state.py +17 -0
  4. planar/app.py +18 -1
  5. planar/data/connection.py +108 -0
  6. planar/data/dataset.py +11 -104
  7. planar/data/utils.py +89 -0
  8. planar/db/alembic/env.py +25 -1
  9. planar/files/storage/azure_blob.py +1 -1
  10. planar/registry_items.py +2 -0
  11. planar/routers/dataset_router.py +213 -0
  12. planar/routers/info.py +79 -36
  13. planar/routers/models.py +1 -0
  14. planar/routers/workflow.py +2 -0
  15. planar/scaffold_templates/pyproject.toml.j2 +1 -1
  16. planar/security/authorization.py +31 -3
  17. planar/security/default_policies.cedar +25 -0
  18. planar/testing/fixtures.py +34 -1
  19. planar/testing/planar_test_client.py +1 -1
  20. planar/workflows/decorators.py +2 -1
  21. planar/workflows/wrappers.py +1 -0
  22. {planar-0.9.3.dist-info → planar-0.11.0.dist-info}/METADATA +9 -1
  23. {planar-0.9.3.dist-info → planar-0.11.0.dist-info}/RECORD +25 -72
  24. {planar-0.9.3.dist-info → planar-0.11.0.dist-info}/WHEEL +1 -1
  25. planar/ai/test_agent_serialization.py +0 -229
  26. planar/ai/test_agent_tool_step_display.py +0 -78
  27. planar/data/test_dataset.py +0 -354
  28. planar/files/storage/test_azure_blob.py +0 -435
  29. planar/files/storage/test_local_directory.py +0 -162
  30. planar/files/storage/test_s3.py +0 -299
  31. planar/files/test_files.py +0 -282
  32. planar/human/test_human.py +0 -385
  33. planar/logging/test_formatter.py +0 -327
  34. planar/modeling/mixins/test_auditable.py +0 -97
  35. planar/modeling/mixins/test_timestamp.py +0 -134
  36. planar/modeling/mixins/test_uuid_primary_key.py +0 -52
  37. planar/routers/test_agents_router.py +0 -174
  38. planar/routers/test_files_router.py +0 -49
  39. planar/routers/test_object_config_router.py +0 -367
  40. planar/routers/test_routes_security.py +0 -168
  41. planar/routers/test_rule_router.py +0 -470
  42. planar/routers/test_workflow_router.py +0 -539
  43. planar/rules/test_data/account_dormancy_management.json +0 -223
  44. planar/rules/test_data/airline_loyalty_points_calculator.json +0 -262
  45. planar/rules/test_data/applicant_risk_assessment.json +0 -435
  46. planar/rules/test_data/booking_fraud_detection.json +0 -407
  47. planar/rules/test_data/cellular_data_rollover_system.json +0 -258
  48. planar/rules/test_data/clinical_trial_eligibility_screener.json +0 -437
  49. planar/rules/test_data/customer_lifetime_value.json +0 -143
  50. planar/rules/test_data/import_duties_calculator.json +0 -289
  51. planar/rules/test_data/insurance_prior_authorization.json +0 -443
  52. planar/rules/test_data/online_check_in_eligibility_system.json +0 -254
  53. planar/rules/test_data/order_consolidation_system.json +0 -375
  54. planar/rules/test_data/portfolio_risk_monitor.json +0 -471
  55. planar/rules/test_data/supply_chain_risk.json +0 -253
  56. planar/rules/test_data/warehouse_cross_docking.json +0 -237
  57. planar/rules/test_rules.py +0 -1494
  58. planar/security/tests/test_auth_middleware.py +0 -162
  59. planar/security/tests/test_authorization_context.py +0 -78
  60. planar/security/tests/test_cedar_basics.py +0 -41
  61. planar/security/tests/test_cedar_policies.py +0 -158
  62. planar/security/tests/test_jwt_principal_context.py +0 -179
  63. planar/test_app.py +0 -142
  64. planar/test_cli.py +0 -394
  65. planar/test_config.py +0 -515
  66. planar/test_object_config.py +0 -527
  67. planar/test_object_registry.py +0 -14
  68. planar/test_sqlalchemy.py +0 -193
  69. planar/test_utils.py +0 -105
  70. planar/testing/test_memory_storage.py +0 -143
  71. planar/workflows/test_concurrency_detection.py +0 -120
  72. planar/workflows/test_lock_timeout.py +0 -140
  73. planar/workflows/test_serialization.py +0 -1203
  74. planar/workflows/test_suspend_deserialization.py +0 -231
  75. planar/workflows/test_workflow.py +0 -2005
  76. {planar-0.9.3.dist-info → planar-0.11.0.dist-info}/entry_points.txt +0 -0
@@ -1,2005 +0,0 @@
1
- import asyncio
2
- import gc
3
- import json
4
- from collections import defaultdict
5
- from datetime import datetime, timedelta
6
- from decimal import Decimal
7
- from uuid import UUID
8
-
9
- import pytest
10
- from freezegun import freeze_time
11
- from pydantic import BaseModel
12
- from sqlmodel import col, select
13
- from sqlmodel.ext.asyncio.session import AsyncSession
14
-
15
- from planar.session import get_session
16
- from planar.testing.workflow_observer import WorkflowObserver
17
- from planar.utils import one_or_raise, utc_now
18
- from planar.workflows.contrib import message
19
- from planar.workflows.decorators import (
20
- __AS_STEP_CACHE,
21
- __is_workflow_step,
22
- as_step,
23
- step,
24
- workflow,
25
- )
26
- from planar.workflows.exceptions import NonDeterministicStepCallError
27
- from planar.workflows.execution import execute, lock_and_execute
28
- from planar.workflows.models import (
29
- StepStatus,
30
- StepType,
31
- Workflow,
32
- WorkflowStatus,
33
- WorkflowStep,
34
- )
35
- from planar.workflows.notifications import Notification, workflow_notification_context
36
- from planar.workflows.orchestrator import WorkflowOrchestrator
37
- from planar.workflows.step_core import (
38
- Suspend,
39
- suspend,
40
- )
41
- from planar.workflows.step_testing_utils import (
42
- get_step_ancestors,
43
- get_step_children,
44
- get_step_descendants,
45
- get_step_parent,
46
- )
47
-
48
-
49
- # =============================================================================
50
- # Test 1 – Basic Workflow Lifecycle
51
- # =============================================================================
52
- async def test_workflow_lifecycle(session: AsyncSession):
53
- @workflow()
54
- async def sample_workflow():
55
- return "success"
56
-
57
- wf = await sample_workflow.start()
58
- await execute(wf)
59
- updated_wf = await session.get(Workflow, wf.id)
60
- assert updated_wf is not None
61
- assert updated_wf.status == WorkflowStatus.SUCCEEDED
62
- assert updated_wf.result == "success"
63
-
64
-
65
- # =============================================================================
66
- # Test 2 – Session Context Is Set
67
- # =============================================================================
68
- async def test_session_context_is_set(session: AsyncSession):
69
- @workflow()
70
- async def session_workflow():
71
- s = get_session()
72
- # Ensure that the session returned is the one we set from the fixture.
73
- assert s is session
74
- return "success"
75
-
76
- wf = await session_workflow.start()
77
- await execute(wf)
78
- updated_wf = await session.get(Workflow, wf.id)
79
- assert updated_wf
80
- assert updated_wf.status == WorkflowStatus.SUCCEEDED
81
- assert updated_wf.result == "success"
82
-
83
-
84
- # =============================================================================
85
- # Test 3 – Step Execution and Tracking
86
- # =============================================================================
87
- async def test_step_execution(session: AsyncSession):
88
- @step()
89
- async def step1():
90
- return "step1_result"
91
-
92
- @step()
93
- async def step2():
94
- return "step2_result"
95
-
96
- @workflow()
97
- async def multistep_workflow():
98
- await step1()
99
- await step2()
100
- return "done"
101
-
102
- wf = await multistep_workflow.start()
103
- await execute(wf)
104
-
105
- steps = (
106
- await session.exec(
107
- select(WorkflowStep).where(WorkflowStep.workflow_id == wf.id)
108
- )
109
- ).all()
110
- assert len(steps) == 2
111
- fnames = {s.function_name.split(".")[-1] for s in steps}
112
- assert "step1" in fnames
113
- assert "step2" in fnames
114
- for s in steps:
115
- assert s.status == StepStatus.SUCCEEDED
116
- assert s.workflow_id == wf.id
117
-
118
-
119
- # =============================================================================
120
- # Test 4 – Step Error Handling
121
- # =============================================================================
122
- async def test_step_error_handling(session: AsyncSession):
123
- @step()
124
- async def failing_step():
125
- raise ValueError("Intentional failure")
126
-
127
- @workflow()
128
- async def error_workflow():
129
- await failing_step()
130
- return "done"
131
-
132
- wf = await error_workflow.start()
133
- with pytest.raises(ValueError, match="Intentional failure"):
134
- await execute(wf)
135
-
136
- updated_wf = await session.get(Workflow, wf.id)
137
- assert updated_wf
138
- assert updated_wf.status == WorkflowStatus.FAILED
139
- step_entry = (
140
- await session.exec(
141
- select(WorkflowStep).where(WorkflowStep.workflow_id == wf.id)
142
- )
143
- ).one()
144
- assert step_entry.error is not None
145
- assert "Intentional failure" in step_entry.error["message"]
146
-
147
-
148
- # =============================================================================
149
- # Test 5 – Workflow Resumption (Retry a Failing Step)
150
- # =============================================================================
151
- async def test_workflow_resumption(session: AsyncSession):
152
- should_fail = True
153
-
154
- @step(max_retries=1)
155
- async def dynamic_step():
156
- nonlocal should_fail
157
- if should_fail:
158
- raise RuntimeError("Temporary failure")
159
- return "done"
160
-
161
- @workflow()
162
- async def resumable_workflow():
163
- return await dynamic_step()
164
-
165
- wf = await resumable_workflow.start()
166
- # First execution should suspend (i.e. return a Suspend object) because of failure.
167
- result1 = await execute(wf)
168
- assert isinstance(result1, Suspend)
169
- updated_wf = await session.get(Workflow, wf.id)
170
- assert updated_wf
171
- assert updated_wf.status == WorkflowStatus.PENDING
172
-
173
- # Fix the error and resume.
174
- should_fail = False
175
- result2 = await execute(wf)
176
- updated_wf = await session.get(Workflow, wf.id)
177
- assert updated_wf
178
- assert updated_wf.status == WorkflowStatus.SUCCEEDED
179
- assert result2 == "done"
180
-
181
-
182
- # =============================================================================
183
- # Test 6 – Input Data Persistence
184
- # =============================================================================
185
- async def test_input_data_persistence(session: AsyncSession):
186
- @workflow()
187
- async def data_workflow(a: int, b: int):
188
- return a + b
189
-
190
- wf = await data_workflow.start(10, 20)
191
- await execute(wf)
192
- updated_wf = await session.get(Workflow, wf.id)
193
- assert updated_wf
194
- assert updated_wf.args == [10, 20]
195
- assert updated_wf.kwargs == {}
196
- assert updated_wf.result == 30
197
-
198
-
199
- # =============================================================================
200
- # Test 7 – Completed Workflow Resumption
201
- # =============================================================================
202
- async def test_completed_workflow_resumption(session: AsyncSession):
203
- @workflow()
204
- async def completed_workflow():
205
- return "final_result"
206
-
207
- wf = await completed_workflow.start()
208
- result1 = await execute(wf)
209
- result2 = await execute(wf)
210
- assert result1 == "final_result"
211
- assert result2 == "final_result"
212
-
213
-
214
- # =============================================================================
215
- # Test 8 – Step Idempotency
216
- # =============================================================================
217
- async def test_step_idempotency(session: AsyncSession):
218
- execution_count = 0
219
-
220
- @step()
221
- async def idempotent_step():
222
- nonlocal execution_count
223
- execution_count += 1
224
- return "idempotent"
225
-
226
- @workflow()
227
- async def idempotent_workflow():
228
- await idempotent_step()
229
- return "done"
230
-
231
- wf = await idempotent_workflow.start()
232
- await execute(wf)
233
- # On resumption the step should not run again.
234
- await execute(wf)
235
- assert execution_count == 1
236
-
237
-
238
- # =============================================================================
239
- # Test 9 – Error Traceback Storage (Adjusted)
240
- # =============================================================================
241
- async def test_error_traceback_storage(session: AsyncSession):
242
- @step()
243
- async def error_step():
244
- raise ValueError("Error with traceback")
245
-
246
- @workflow()
247
- async def traceback_workflow():
248
- await error_step()
249
- return "done"
250
-
251
- wf = await traceback_workflow.start()
252
- with pytest.raises(ValueError, match="Error with traceback"):
253
- await execute(wf)
254
-
255
- step_entry = (
256
- await session.exec(
257
- select(WorkflowStep).where(WorkflowStep.workflow_id == wf.id)
258
- )
259
- ).one()
260
- assert step_entry.error is not None
261
- # The new engine does not store full tracebacks, so we check only the error message.
262
- assert "Error with traceback" in step_entry.error["message"]
263
-
264
-
265
- # =============================================================================
266
- # Test 10 – Empty Workflow (No Steps)
267
- # =============================================================================
268
- async def test_empty_workflow(session: AsyncSession):
269
- @workflow()
270
- async def empty_workflow():
271
- return "direct_result"
272
-
273
- wf = await empty_workflow.start()
274
- await execute(wf)
275
- updated_wf = await session.get(Workflow, wf.id)
276
- assert updated_wf
277
- assert updated_wf.status == WorkflowStatus.SUCCEEDED
278
- assert updated_wf.result == "direct_result"
279
- # Verify no DurableStep records were created.
280
- step_entry = (
281
- await session.exec(
282
- select(WorkflowStep).where(WorkflowStep.workflow_id == wf.id)
283
- )
284
- ).first()
285
- assert step_entry is None
286
-
287
-
288
- # =============================================================================
289
- # Test 11 – Complex Workflow with Retries and Data Persistence
290
- # =============================================================================
291
- async def test_complex_workflow_with_retries_and_data_persistence(
292
- session: AsyncSession,
293
- ):
294
- step1_attempts = 0
295
- step2_attempts = 0
296
- step3_attempts = 0
297
-
298
- @step(max_retries=1)
299
- async def step1(input_val: int) -> int:
300
- nonlocal step1_attempts
301
- step1_attempts += 1
302
- if step1_attempts == 1:
303
- raise RuntimeError("Step 1 temporary failure")
304
- return input_val + 10
305
-
306
- @step(max_retries=1)
307
- async def step2(input_val: int) -> int:
308
- nonlocal step2_attempts
309
- step2_attempts += 1
310
- if step2_attempts == 1:
311
- raise RuntimeError("Step 2 temporary failure")
312
- return input_val * 2
313
-
314
- @step(max_retries=1)
315
- async def step3(input_val: int) -> int:
316
- nonlocal step3_attempts
317
- step3_attempts += 1
318
- if step3_attempts == 1:
319
- raise RuntimeError("Step 3 temporary failure")
320
- return input_val - 5
321
-
322
- @workflow()
323
- async def chained_workflow(initial_input: int) -> int:
324
- r1 = await step1(initial_input)
325
- r2 = await step2(r1)
326
- r3 = await step3(r2)
327
- return r3
328
-
329
- wf = await chained_workflow.start(5)
330
- # First run: step1 fails → workflow suspended.
331
- await execute(wf)
332
- updated_wf = await session.get(Workflow, wf.id)
333
- assert updated_wf
334
- assert updated_wf.status == WorkflowStatus.PENDING
335
- step1_entry = (
336
- await session.exec(
337
- select(WorkflowStep)
338
- .where(WorkflowStep.workflow_id == wf.id)
339
- .where(col(WorkflowStep.function_name).like("%step1%"))
340
- )
341
- ).one()
342
- assert step1_entry.status == StepStatus.FAILED
343
- assert step1_attempts == 1
344
-
345
- # Second run: step1 succeeds, step2 fails.
346
- await execute(wf)
347
- updated_wf = await session.get(Workflow, wf.id)
348
- step1_entry = (
349
- await session.exec(
350
- select(WorkflowStep)
351
- .where(WorkflowStep.workflow_id == wf.id)
352
- .where(col(WorkflowStep.function_name).like("%step1%"))
353
- )
354
- ).one()
355
- assert step1_entry.status == StepStatus.SUCCEEDED
356
- assert step1_entry.result == 15 # 5 + 10
357
- assert step1_attempts == 2
358
- step2_entry = (
359
- await session.exec(
360
- select(WorkflowStep)
361
- .where(WorkflowStep.workflow_id == wf.id)
362
- .where(col(WorkflowStep.function_name).like("%step2%"))
363
- )
364
- ).one()
365
- assert step2_entry.status == StepStatus.FAILED
366
- assert step2_attempts == 1
367
-
368
- # Third run: step2 succeeds, step3 fails.
369
- await execute(wf)
370
- updated_wf = await session.get(Workflow, wf.id)
371
- step2_entry = (
372
- await session.exec(
373
- select(WorkflowStep)
374
- .where(WorkflowStep.workflow_id == wf.id)
375
- .where(col(WorkflowStep.function_name).like("%step2%"))
376
- )
377
- ).one()
378
- assert step2_entry.status == StepStatus.SUCCEEDED
379
- assert step2_entry.result == 30 # 15 * 2
380
- assert step2_attempts == 2
381
- step3_entry = (
382
- await session.exec(
383
- select(WorkflowStep)
384
- .where(WorkflowStep.workflow_id == wf.id)
385
- .where(col(WorkflowStep.function_name).like("%step3%"))
386
- )
387
- ).one()
388
- assert step3_entry.status == StepStatus.FAILED
389
- assert step3_attempts == 1
390
-
391
- # Fourth run: step3 succeeds → final result.
392
- final_result = await execute(wf)
393
- updated_wf = await session.get(Workflow, wf.id)
394
- assert updated_wf
395
- assert updated_wf.status == WorkflowStatus.SUCCEEDED
396
- assert final_result == 25 # 30 - 5
397
- assert updated_wf.result == 25
398
- step3_entry = (
399
- await session.exec(
400
- select(WorkflowStep)
401
- .where(WorkflowStep.workflow_id == wf.id)
402
- .where(col(WorkflowStep.function_name).like("%step3%"))
403
- )
404
- ).one()
405
- assert step3_entry.status == StepStatus.SUCCEEDED
406
- assert step3_entry.result == 25
407
- assert step3_attempts == 2
408
-
409
- # Verify workflow input data persistence.
410
- assert updated_wf.args == [5]
411
- assert updated_wf.kwargs == {}
412
-
413
-
414
- # =============================================================================
415
- # Test 12 – Step Retries
416
- # =============================================================================
417
- async def test_step_retries(session: AsyncSession):
418
- retry_limit = 3
419
- attempt_count = 0
420
-
421
- @step(max_retries=retry_limit)
422
- async def retry_step():
423
- nonlocal attempt_count
424
- attempt_count += 1
425
- raise RuntimeError("Temporary failure")
426
-
427
- @workflow()
428
- async def retry_workflow():
429
- await retry_step()
430
- return "done"
431
-
432
- wf = await retry_workflow.start()
433
-
434
- # Attempt 1
435
- await execute(wf)
436
- updated_wf = await session.get(Workflow, wf.id)
437
- assert updated_wf
438
- assert updated_wf.status == WorkflowStatus.PENDING
439
- step_entry = (
440
- await session.exec(
441
- select(WorkflowStep).where(WorkflowStep.workflow_id == wf.id)
442
- )
443
- ).one()
444
- assert step_entry.retry_count == 0
445
- assert attempt_count == 1
446
-
447
- # Attempt 2
448
- await execute(wf)
449
- step_entry = (
450
- await session.exec(
451
- select(WorkflowStep).where(WorkflowStep.workflow_id == wf.id)
452
- )
453
- ).one()
454
- assert step_entry.retry_count == 1
455
- assert attempt_count == 2
456
-
457
- # Attempt 3
458
- await execute(wf)
459
- step_entry = (
460
- await session.exec(
461
- select(WorkflowStep).where(WorkflowStep.workflow_id == wf.id)
462
- )
463
- ).one()
464
- assert step_entry.retry_count == 2
465
- assert attempt_count == 3
466
-
467
- # Attempt 4 – exceed retries so that execution raises.
468
- with pytest.raises(RuntimeError, match="Temporary failure"):
469
- await execute(wf)
470
- step_entry = (
471
- await session.exec(
472
- select(WorkflowStep).where(WorkflowStep.workflow_id == wf.id)
473
- )
474
- ).one()
475
- assert step_entry.retry_count == 3
476
- assert attempt_count == 4
477
-
478
- # Further execution should not increment attempts.
479
- with pytest.raises(RuntimeError, match="Temporary failure"):
480
- await execute(wf)
481
- step_entry = (
482
- await session.exec(
483
- select(WorkflowStep).where(WorkflowStep.workflow_id == wf.id)
484
- )
485
- ).one()
486
- assert step_entry.retry_count == 3
487
- assert attempt_count == 4
488
-
489
-
490
- # =============================================================================
491
- # Test 13 – Looped Step Execution
492
- # =============================================================================
493
- async def test_looped_step_execution(session: AsyncSession):
494
- loop_count = 3
495
-
496
- @step()
497
- async def say_hello_step():
498
- return "hello"
499
-
500
- @workflow()
501
- async def looped_workflow(count: int):
502
- for _ in range(count):
503
- await say_hello_step()
504
- return "done"
505
-
506
- wf = await looped_workflow.start(loop_count)
507
- await execute(wf)
508
-
509
- steps = (
510
- await session.exec(
511
- select(WorkflowStep)
512
- .where(WorkflowStep.workflow_id == wf.id)
513
- .order_by(col(WorkflowStep.step_id))
514
- )
515
- ).all()
516
- assert len(steps) == loop_count
517
- for i, s in enumerate(steps, start=1):
518
- assert s.function_name.split(".")[-1] == "say_hello_step"
519
- assert s.step_id == i
520
- assert s.status == StepStatus.SUCCEEDED
521
-
522
-
523
- # =============================================================================
524
- # Test 14 – Basic Sleep Functionality
525
- # =============================================================================
526
- async def test_basic_sleep_functionality(session: AsyncSession):
527
- with freeze_time("2024-01-01 00:00:00") as frozen_time:
528
-
529
- @workflow()
530
- async def sleeping_workflow():
531
- await suspend(interval=timedelta(seconds=10))
532
- return "awake"
533
-
534
- wf = await sleeping_workflow.start()
535
- result = await execute(wf)
536
- updated_wf = await session.get(Workflow, wf.id)
537
- assert updated_wf
538
- # The suspend step should have returned a Suspend object.
539
- assert isinstance(result, Suspend)
540
- assert updated_wf.status == WorkflowStatus.PENDING
541
- expected_wakeup = datetime(2024, 1, 1, 0, 0, 10)
542
- assert updated_wf.wakeup_at == expected_wakeup
543
-
544
- # Check that the suspend step record has function_name 'suspend'
545
- sleep_step = (
546
- await session.exec(
547
- select(WorkflowStep).where(WorkflowStep.workflow_id == wf.id)
548
- )
549
- ).one()
550
- assert sleep_step.function_name.split(".")[-1] == "suspend"
551
-
552
- # Move time forward and resume.
553
- frozen_time.move_to("2024-01-01 00:00:11")
554
- final_result = await execute(wf)
555
- assert final_result == "awake"
556
- assert updated_wf.status == WorkflowStatus.SUCCEEDED
557
-
558
-
559
- # =============================================================================
560
- # Test 15 – Worker Skips Sleeping Workflows
561
- # =============================================================================
562
- async def test_worker_skips_sleeping_workflows(session: AsyncSession):
563
- @workflow()
564
- async def sleeping_workflow():
565
- await suspend(interval=timedelta(minutes=5))
566
- return "done"
567
-
568
- wf = await sleeping_workflow.start()
569
- # Execute once to suspend.
570
- result = await execute(wf)
571
- assert isinstance(result, Suspend)
572
-
573
- # Simulate the worker’s query for ready workflows.
574
- ready_wfs = (
575
- await session.exec(
576
- select(Workflow)
577
- .where(Workflow.status == WorkflowStatus.PENDING)
578
- .where(col(Workflow.wakeup_at) <= utc_now())
579
- )
580
- ).all()
581
- # At 12:00 the wakeup time (12:05) is in the future.
582
- assert len(ready_wfs) == 0
583
-
584
- result = await execute(wf)
585
- assert result == "done"
586
-
587
-
588
- # =============================================================================
589
- # Test 16 – Multiple Sleep Steps
590
- # =============================================================================
591
- async def test_multiple_sleep_steps(session: AsyncSession):
592
- @workflow()
593
- async def multi_sleep_workflow():
594
- await suspend(interval=timedelta(seconds=2))
595
- await suspend(interval=timedelta(seconds=4))
596
- return 42
597
-
598
- start_date = utc_now()
599
- wf = await multi_sleep_workflow.start()
600
- assert wf
601
- # First run: suspend for 10 seconds.
602
- result = await execute(wf)
603
- assert isinstance(result, Suspend)
604
- await session.refresh(wf)
605
- assert wf.wakeup_at
606
- assert (wf.wakeup_at - start_date) >= timedelta(seconds=2)
607
- assert (wf.wakeup_at - start_date) <= timedelta(seconds=3)
608
-
609
- # Move time forward and resume.
610
- await asyncio.sleep(2)
611
- result = await execute(wf)
612
- assert isinstance(result, Suspend)
613
- await session.refresh(wf)
614
- assert (wf.wakeup_at - start_date) >= timedelta(seconds=6)
615
- assert (wf.wakeup_at - start_date) <= timedelta(seconds=7)
616
-
617
- # Verify that two suspend steps were recorded.
618
- sleep_steps = (
619
- await session.exec(
620
- select(WorkflowStep)
621
- .where(WorkflowStep.workflow_id == wf.id)
622
- .order_by(col(WorkflowStep.step_id))
623
- )
624
- ).all()
625
- assert len(sleep_steps) == 2
626
- assert [s.step_id for s in sleep_steps] == [1, 2]
627
-
628
- # Final execution after second sleep.
629
- await asyncio.sleep(4.5)
630
- final_result = await execute(wf)
631
- assert final_result == 42
632
-
633
-
634
- # =============================================================================
635
- # Test 17 – Looped Execution with Step Dependencies
636
- # =============================================================================
637
- async def test_looped_execution_with_step_dependencies(session: AsyncSession):
638
- step_attempts = defaultdict(int)
639
- expected_results = []
640
-
641
- @step(max_retries=1)
642
- async def process_step(input_val: int) -> int:
643
- step_attempts[input_val] += 1
644
- if step_attempts[input_val] == 1:
645
- raise RuntimeError(f"Temporary failure for input {input_val}")
646
- return input_val + 5
647
-
648
- @workflow()
649
- async def looped_dependency_workflow(initial: int) -> int:
650
- nonlocal expected_results
651
- expected_results = []
652
- current = initial
653
- for _ in range(3):
654
- current = await process_step(current)
655
- expected_results.append(current)
656
- return current
657
-
658
- wf = await looped_dependency_workflow.start(10)
659
- # Run through several execution attempts until the workflow finishes.
660
- for _ in range(6):
661
- try:
662
- await execute(wf)
663
- except Exception:
664
- pass
665
-
666
- updated_wf = await session.get(Workflow, wf.id)
667
- assert updated_wf
668
- assert updated_wf.status == WorkflowStatus.SUCCEEDED
669
- # 10 → 15 → 20 → 25
670
- assert updated_wf.result == 25
671
-
672
- steps = (
673
- await session.exec(
674
- select(WorkflowStep)
675
- .where(WorkflowStep.workflow_id == wf.id)
676
- .order_by(col(WorkflowStep.step_id))
677
- )
678
- ).all()
679
- assert len(steps) == 3
680
- assert all("process_step" in s.function_name for s in steps)
681
- assert [s.result for s in steps] == [15, 20, 25]
682
-
683
- # Each step should have retried exactly once.
684
- for s in steps:
685
- assert s.retry_count == 1
686
-
687
- # Verify that error messages were recorded on failed attempts.
688
- step_errors = (
689
- await session.exec(
690
- select(WorkflowStep.error)
691
- .where(WorkflowStep.workflow_id == wf.id)
692
- .where(WorkflowStep.status == StepStatus.SUCCEEDED)
693
- )
694
- ).all()
695
- for err in step_errors:
696
- if err:
697
- assert "Temporary failure" in err["message"]
698
-
699
- assert expected_results == [15, 20, 25]
700
-
701
-
702
- async def test_handling_step_errors(session: AsyncSession):
703
- @step(max_retries=0)
704
- async def step1():
705
- raise ValueError("Step 1 error")
706
-
707
- @workflow()
708
- async def step_try_catch_workflow():
709
- try:
710
- await step1()
711
- except ValueError:
712
- # Suspend the workflow in the except block
713
- await suspend(interval=timedelta(seconds=5))
714
- return "handled"
715
- return "done"
716
-
717
- # Start the workflow
718
- wf = await step_try_catch_workflow.start()
719
-
720
- # First execution: should raise ValueError in step1, catch it, call
721
- # sleep(...) -> suspended
722
- result = await execute(wf)
723
- # Expect a Suspend object because the workflow is waiting
724
- assert isinstance(result, Suspend)
725
-
726
- updated_wf = await session.get(Workflow, wf.id)
727
- assert updated_wf is not None
728
- assert updated_wf.status == WorkflowStatus.PENDING
729
- assert updated_wf.wakeup_at is not None
730
-
731
- # Verify that two step records were created:
732
- steps = (
733
- await session.exec(
734
- select(WorkflowStep).where(WorkflowStep.workflow_id == wf.id)
735
- )
736
- ).all()
737
- assert len(steps) == 2
738
- # The first step (step1) failed
739
- assert steps[0].status == StepStatus.FAILED
740
- assert steps[0].result is None
741
-
742
- # --- Second execution: after wakeup time
743
- final_result = await execute(wf)
744
- updated_wf = await session.get(Workflow, wf.id)
745
- assert updated_wf
746
- # Now the workflow should resume and finish, returning "handled"
747
- assert final_result == "handled"
748
- assert updated_wf.status == WorkflowStatus.SUCCEEDED
749
- assert updated_wf.result == "handled"
750
-
751
- # Finally, verify the step records remain as expected.
752
- steps = (
753
- await session.exec(
754
- select(WorkflowStep).where(WorkflowStep.workflow_id == wf.id)
755
- )
756
- ).all()
757
- assert len(steps) == 2
758
- assert steps[0].status == StepStatus.FAILED
759
- assert steps[0].result is None
760
- assert steps[1].status == StepStatus.SUCCEEDED
761
- assert steps[1].error is None
762
-
763
-
764
- async def test_exceute_properly_intercepts_coroutine(session: AsyncSession):
765
- async def shell(cmd: str):
766
- proc = await asyncio.create_subprocess_shell(
767
- cmd, stdout=asyncio.subprocess.PIPE
768
- )
769
- stdout, _ = await proc.communicate()
770
- return stdout.decode().strip()
771
-
772
- @step()
773
- async def step1():
774
- echo_output = await non_step1()
775
- assert echo_output == "echoing 20"
776
- count = int(echo_output.split()[-1])
777
- for _ in range(10):
778
- await asyncio.sleep(0.01)
779
- count += 1
780
- return count
781
-
782
- async def non_step1():
783
- count = 0
784
- for _ in range(10):
785
- await asyncio.sleep(0.01)
786
- count += 1
787
- return await step2(count)
788
-
789
- @step()
790
- async def step2(count: int):
791
- return await non_step2(count)
792
-
793
- async def non_step2(count: int):
794
- for _ in range(count):
795
- await asyncio.sleep(0.01)
796
- count += 1
797
- return await shell(f"echo echoing {count}")
798
-
799
- @step()
800
- async def step3(count: int):
801
- for _ in range(10):
802
- await asyncio.sleep(0.01)
803
- count += 1
804
- return count
805
-
806
- @workflow()
807
- async def nested_step_and_non_step_calls():
808
- count = await step1()
809
- count = await step3(count)
810
- return count
811
-
812
- wf = await nested_step_and_non_step_calls.start()
813
- await execute(wf)
814
- assert wf.status == WorkflowStatus.SUCCEEDED
815
- assert wf.result == 40
816
-
817
- steps = (
818
- await session.exec(
819
- select(WorkflowStep)
820
- .where(WorkflowStep.workflow_id == wf.id)
821
- .order_by(col(WorkflowStep.step_id))
822
- )
823
- ).all()
824
- for s in steps:
825
- s.function_name = s.function_name.split(".")[-1]
826
-
827
- assert all(s.status == StepStatus.SUCCEEDED for s in steps)
828
- assert tuple(s.function_name.split(".")[-1] for s in steps) == (
829
- "step1",
830
- "step2",
831
- "step3",
832
- )
833
- assert tuple(s.result for s in steps) == (30, "echoing 20", 40)
834
-
835
-
836
- async def test_sub_workflows(session: AsyncSession):
837
- @step()
838
- async def step1(n: int) -> Decimal:
839
- await suspend(interval=timedelta(seconds=0.1))
840
- return Decimal(1 + n)
841
-
842
- @step()
843
- async def step2(n: int) -> Decimal:
844
- await suspend(interval=timedelta(seconds=0.1))
845
- return Decimal(2 + n)
846
-
847
- @step()
848
- async def step3(n: int) -> Decimal:
849
- await suspend(interval=timedelta(seconds=0.1))
850
- return Decimal(3 + n)
851
-
852
- @workflow()
853
- async def workflow1(n: int) -> Decimal:
854
- return await step1(n)
855
-
856
- @workflow()
857
- async def workflow2(n: int) -> Decimal:
858
- return await step2(n)
859
-
860
- @workflow()
861
- async def workflow3(n: int) -> Decimal:
862
- return await step3(n)
863
-
864
- @workflow()
865
- async def call_sub_workflows() -> Decimal:
866
- w1 = await workflow1(1)
867
- w2 = await workflow2(2)
868
- w3 = await workflow3(3)
869
- assert w1 == Decimal(2)
870
- assert w2 == Decimal(4)
871
- assert w3 == Decimal(6)
872
- return w1 + w2 + w3
873
-
874
- async with WorkflowOrchestrator.ensure_started() as orchestrator:
875
- wf = await call_sub_workflows.start()
876
- result = await orchestrator.wait_for_completion(wf.id)
877
-
878
- await session.refresh(wf)
879
- assert wf.status == WorkflowStatus.SUCCEEDED
880
- assert result == Decimal(12)
881
-
882
- all_workflows = []
883
- workflows = (
884
- await session.exec(select(Workflow).order_by(col(Workflow.created_at)))
885
- ).all()
886
- for w in workflows:
887
- steps = (
888
- await session.exec(
889
- select(WorkflowStep)
890
- .where(col(WorkflowStep.workflow_id) == w.id)
891
- .order_by(col(WorkflowStep.step_id))
892
- )
893
- ).all()
894
- all_workflows.append(
895
- {
896
- "status": w.status,
897
- "function_name": w.function_name.split(".")[-1],
898
- "steps": [
899
- {
900
- "step_id": s.step_id,
901
- "step_status": s.status,
902
- "function_name": s.function_name.split(".")[-1],
903
- }
904
- for s in steps
905
- ],
906
- }
907
- )
908
-
909
- assert all_workflows == [
910
- {
911
- "status": WorkflowStatus.SUCCEEDED,
912
- "function_name": "call_sub_workflows",
913
- "steps": [
914
- {
915
- "step_id": 1,
916
- "step_status": StepStatus.SUCCEEDED,
917
- "function_name": "start_workflow_step",
918
- },
919
- {
920
- "step_id": 2,
921
- "step_status": StepStatus.SUCCEEDED,
922
- "function_name": "start_workflow_step",
923
- },
924
- {
925
- "step_id": 3,
926
- "step_status": StepStatus.SUCCEEDED,
927
- "function_name": "start_workflow_step",
928
- },
929
- ],
930
- },
931
- {
932
- "status": WorkflowStatus.SUCCEEDED,
933
- "function_name": "workflow1",
934
- "steps": [
935
- {
936
- "step_id": 1,
937
- "step_status": StepStatus.SUCCEEDED,
938
- "function_name": "step1",
939
- },
940
- {
941
- "step_id": 2,
942
- "step_status": StepStatus.SUCCEEDED,
943
- "function_name": "suspend",
944
- },
945
- ],
946
- },
947
- {
948
- "status": WorkflowStatus.SUCCEEDED,
949
- "function_name": "workflow2",
950
- "steps": [
951
- {
952
- "step_id": 1,
953
- "step_status": StepStatus.SUCCEEDED,
954
- "function_name": "step2",
955
- },
956
- {
957
- "step_id": 2,
958
- "step_status": StepStatus.SUCCEEDED,
959
- "function_name": "suspend",
960
- },
961
- ],
962
- },
963
- {
964
- "status": WorkflowStatus.SUCCEEDED,
965
- "function_name": "workflow3",
966
- "steps": [
967
- {
968
- "step_id": 1,
969
- "step_status": StepStatus.SUCCEEDED,
970
- "function_name": "step3",
971
- },
972
- {
973
- "step_id": 2,
974
- "step_status": StepStatus.SUCCEEDED,
975
- "function_name": "suspend",
976
- },
977
- ],
978
- },
979
- ]
980
-
981
-
982
- @pytest.mark.xfail(reason="Not supported for now")
983
- async def test_sub_workflows_concurrent_execution(session: AsyncSession):
984
- @step()
985
- async def step1(n: int):
986
- await suspend(interval=timedelta(seconds=0.1))
987
- return 1 + n
988
-
989
- @step()
990
- async def step2(n: int):
991
- await suspend(interval=timedelta(seconds=0.1))
992
- return 2 + n
993
-
994
- @step()
995
- async def step3(n: int):
996
- await suspend(interval=timedelta(seconds=0.1))
997
- return 3 + n
998
-
999
- @workflow()
1000
- async def workflow1(n: int):
1001
- return await step1(n)
1002
-
1003
- @workflow()
1004
- async def workflow2(n: int):
1005
- return await step2(n)
1006
-
1007
- @workflow()
1008
- async def workflow3(n: int):
1009
- return await step3(n)
1010
-
1011
- @workflow()
1012
- async def concurrent_call_sub_workflows():
1013
- w1, w2, w3 = await asyncio.gather(workflow1(1), workflow2(2), workflow3(3))
1014
- return w1 + w2 + w3
1015
-
1016
- async with WorkflowOrchestrator.ensure_started() as orchestrator:
1017
- wf = await concurrent_call_sub_workflows.start()
1018
- await orchestrator.wait_for_completion(wf.id)
1019
-
1020
- await session.refresh(wf)
1021
- assert wf.status == WorkflowStatus.SUCCEEDED
1022
- assert wf.result == 12
1023
-
1024
- all_workflows = []
1025
- workflows = (
1026
- await session.exec(select(Workflow).order_by(col(Workflow.created_at)))
1027
- ).all()
1028
- for w in workflows:
1029
- steps = (
1030
- await session.exec(
1031
- select(WorkflowStep)
1032
- .where(col(WorkflowStep.workflow_id) == w.id)
1033
- .order_by(col(WorkflowStep.step_id))
1034
- )
1035
- ).all()
1036
- all_workflows.append(
1037
- {
1038
- "status": w.status,
1039
- "function_name": w.function_name.split(".")[-1],
1040
- "steps": [
1041
- {
1042
- "step_id": s.step_id,
1043
- "step_status": s.status,
1044
- "function_name": s.function_name.split(".")[-1],
1045
- }
1046
- for s in steps
1047
- ],
1048
- "result": w.result,
1049
- }
1050
- )
1051
-
1052
- assert all_workflows == [
1053
- {
1054
- "status": WorkflowStatus.SUCCEEDED,
1055
- "function_name": "concurrent_call_sub_workflows",
1056
- "steps": [
1057
- {
1058
- "step_id": 1,
1059
- "step_status": StepStatus.SUCCEEDED,
1060
- "function_name": "start_workflow_step",
1061
- },
1062
- {
1063
- "step_id": 2,
1064
- "step_status": StepStatus.SUCCEEDED,
1065
- "function_name": "start_workflow_step",
1066
- },
1067
- {
1068
- "step_id": 3,
1069
- "step_status": StepStatus.SUCCEEDED,
1070
- "function_name": "start_workflow_step",
1071
- },
1072
- ],
1073
- "result": 12,
1074
- },
1075
- {
1076
- "status": WorkflowStatus.SUCCEEDED,
1077
- "function_name": "workflow1",
1078
- "steps": [
1079
- {
1080
- "step_id": 1,
1081
- "step_status": StepStatus.SUCCEEDED,
1082
- "function_name": "step1",
1083
- },
1084
- {
1085
- "step_id": 2,
1086
- "step_status": StepStatus.SUCCEEDED,
1087
- "function_name": "suspend",
1088
- },
1089
- ],
1090
- "result": 2,
1091
- },
1092
- {
1093
- "status": WorkflowStatus.SUCCEEDED,
1094
- "function_name": "workflow2",
1095
- "steps": [
1096
- {
1097
- "step_id": 1,
1098
- "step_status": StepStatus.SUCCEEDED,
1099
- "function_name": "step2",
1100
- },
1101
- {
1102
- "step_id": 2,
1103
- "step_status": StepStatus.SUCCEEDED,
1104
- "function_name": "suspend",
1105
- },
1106
- ],
1107
- "result": 4,
1108
- },
1109
- {
1110
- "status": WorkflowStatus.SUCCEEDED,
1111
- "function_name": "workflow3",
1112
- "steps": [
1113
- {
1114
- "step_id": 1,
1115
- "step_status": StepStatus.SUCCEEDED,
1116
- "function_name": "step3",
1117
- },
1118
- {
1119
- "step_id": 2,
1120
- "step_status": StepStatus.SUCCEEDED,
1121
- "function_name": "suspend",
1122
- },
1123
- ],
1124
- "result": 6,
1125
- },
1126
- ]
1127
-
1128
-
1129
- @pytest.mark.xfail(reason="Not supported for now")
1130
- async def test_step_can_be_scheduled_as_tasks(session: AsyncSession):
1131
- @step()
1132
- async def step1():
1133
- s2, s3, s4 = await asyncio.gather(step2(), step3(), step4())
1134
- return s2 + s3 + s4
1135
-
1136
- @step()
1137
- async def step2():
1138
- await suspend(interval=timedelta(seconds=0.1))
1139
- return 2
1140
-
1141
- @step()
1142
- async def step3():
1143
- await suspend(interval=timedelta(seconds=0.1))
1144
- return 3
1145
-
1146
- @step()
1147
- async def step4():
1148
- await suspend(interval=timedelta(seconds=0.1))
1149
- return 4
1150
-
1151
- @workflow()
1152
- async def execute_steps_in_parallel():
1153
- return await step1()
1154
-
1155
- async with WorkflowOrchestrator.ensure_started() as orchestrator:
1156
- wf = await execute_steps_in_parallel.start()
1157
- await orchestrator.wait_for_completion(wf.id)
1158
-
1159
- await session.refresh(wf)
1160
- assert wf.status == WorkflowStatus.SUCCEEDED
1161
- assert wf.result == 9
1162
-
1163
- all_workflows = []
1164
- workflows = (
1165
- await session.exec(select(Workflow).order_by(col(Workflow.created_at)))
1166
- ).all()
1167
- for w in workflows:
1168
- steps = (
1169
- await session.exec(
1170
- select(WorkflowStep)
1171
- .where(col(WorkflowStep.workflow_id) == w.id)
1172
- .order_by(col(WorkflowStep.step_id))
1173
- )
1174
- ).all()
1175
- all_workflows.append(
1176
- {
1177
- "status": w.status,
1178
- "function_name": w.function_name.split(".")[-1],
1179
- "steps": [
1180
- {
1181
- "step_id": s.step_id,
1182
- "step_status": s.status,
1183
- "function_name": s.function_name.split(".")[-1],
1184
- }
1185
- for s in steps
1186
- ],
1187
- "result": w.result,
1188
- }
1189
- )
1190
-
1191
- assert all_workflows == [
1192
- {
1193
- "status": WorkflowStatus.SUCCEEDED,
1194
- "function_name": "execute_steps_in_parallel",
1195
- "steps": [
1196
- {
1197
- "step_id": 1,
1198
- "step_status": StepStatus.SUCCEEDED,
1199
- "function_name": "step1",
1200
- },
1201
- {
1202
- "step_id": 2,
1203
- "step_status": StepStatus.SUCCEEDED,
1204
- "function_name": "start_workflow_step",
1205
- },
1206
- {
1207
- "step_id": 3,
1208
- "step_status": StepStatus.SUCCEEDED,
1209
- "function_name": "start_workflow_step",
1210
- },
1211
- {
1212
- "step_id": 4,
1213
- "step_status": StepStatus.SUCCEEDED,
1214
- "function_name": "start_workflow_step",
1215
- },
1216
- ],
1217
- "result": 9,
1218
- },
1219
- {
1220
- "status": WorkflowStatus.SUCCEEDED,
1221
- "function_name": "auto_workflow",
1222
- "steps": [
1223
- {
1224
- "step_id": 1,
1225
- "step_status": StepStatus.SUCCEEDED,
1226
- "function_name": "step2",
1227
- },
1228
- {
1229
- "step_id": 2,
1230
- "step_status": StepStatus.SUCCEEDED,
1231
- "function_name": "suspend",
1232
- },
1233
- ],
1234
- "result": 2,
1235
- },
1236
- {
1237
- "status": WorkflowStatus.SUCCEEDED,
1238
- "function_name": "auto_workflow",
1239
- "steps": [
1240
- {
1241
- "step_id": 1,
1242
- "step_status": StepStatus.SUCCEEDED,
1243
- "function_name": "step3",
1244
- },
1245
- {
1246
- "step_id": 2,
1247
- "step_status": StepStatus.SUCCEEDED,
1248
- "function_name": "suspend",
1249
- },
1250
- ],
1251
- "result": 3,
1252
- },
1253
- {
1254
- "status": WorkflowStatus.SUCCEEDED,
1255
- "function_name": "auto_workflow",
1256
- "steps": [
1257
- {
1258
- "step_id": 1,
1259
- "step_status": StepStatus.SUCCEEDED,
1260
- "function_name": "step4",
1261
- },
1262
- {
1263
- "step_id": 2,
1264
- "step_status": StepStatus.SUCCEEDED,
1265
- "function_name": "suspend",
1266
- },
1267
- ],
1268
- "result": 4,
1269
- },
1270
- ]
1271
-
1272
-
1273
- async def test_nested_workflow_started_from_nested_step_failed(session: AsyncSession):
1274
- @step()
1275
- async def update_inbound_document_with_classification(
1276
- item_id: str, classification: str
1277
- ) -> bool:
1278
- await asyncio.sleep(0.1)
1279
- raise Exception(f"some issue with {item_id}/{classification}")
1280
-
1281
- @workflow()
1282
- async def classify_inbound_document(item_id: str, attachment_id: str):
1283
- await update_inbound_document_with_classification(item_id, "classified")
1284
-
1285
- @step()
1286
- async def upload_documents_from_email(limit: int) -> list[str]:
1287
- await asyncio.sleep(0.1)
1288
- return [
1289
- json.dumps({"item_id": "doc 1", "attachment_id": "attachment 1"}),
1290
- ]
1291
-
1292
- @step()
1293
- async def start_classify_inbound_document_workflow(
1294
- inbound_document_with_attachment: str,
1295
- ):
1296
- obj = json.loads(inbound_document_with_attachment)
1297
- await classify_inbound_document(obj["item_id"], obj["attachment_id"])
1298
-
1299
- @workflow()
1300
- async def email_documents_uploader(limit: int = 10) -> list[str]:
1301
- inbound_documents_with_attachments = await upload_documents_from_email(limit)
1302
- for doc in inbound_documents_with_attachments:
1303
- await start_classify_inbound_document_workflow(doc)
1304
- return inbound_documents_with_attachments
1305
-
1306
- wf = await email_documents_uploader.start()
1307
- async with WorkflowOrchestrator.ensure_started(poll_interval=1) as orchestrator:
1308
- with pytest.raises(Exception, match="some issue with doc 1/classified"):
1309
- await orchestrator.wait_for_completion(wf.id)
1310
-
1311
- all_workflows = []
1312
- workflows = (
1313
- await session.exec(select(Workflow).order_by(col(Workflow.created_at)))
1314
- ).all()
1315
- for w in workflows:
1316
- steps = (
1317
- await session.exec(
1318
- select(WorkflowStep)
1319
- .where(col(WorkflowStep.workflow_id) == w.id)
1320
- .order_by(col(WorkflowStep.step_id))
1321
- )
1322
- ).all()
1323
- all_workflows.append(
1324
- {
1325
- "status": w.status,
1326
- "function_name": w.function_name.split(".")[-1],
1327
- "steps": [
1328
- {
1329
- "step_id": s.step_id,
1330
- "step_status": s.status,
1331
- "function_name": s.function_name.split(".")[-1],
1332
- }
1333
- for s in steps
1334
- ],
1335
- }
1336
- )
1337
-
1338
- assert all_workflows == [
1339
- {
1340
- "status": WorkflowStatus.FAILED,
1341
- "function_name": "email_documents_uploader",
1342
- "steps": [
1343
- {
1344
- "step_id": 1,
1345
- "step_status": StepStatus.SUCCEEDED,
1346
- "function_name": "upload_documents_from_email",
1347
- },
1348
- {
1349
- "step_id": 2,
1350
- "step_status": StepStatus.FAILED,
1351
- "function_name": "start_classify_inbound_document_workflow",
1352
- },
1353
- {
1354
- "step_id": 3,
1355
- "step_status": StepStatus.SUCCEEDED,
1356
- "function_name": "start_workflow_step",
1357
- },
1358
- ],
1359
- },
1360
- {
1361
- "status": WorkflowStatus.FAILED,
1362
- "function_name": "classify_inbound_document",
1363
- "steps": [
1364
- {
1365
- "function_name": "update_inbound_document_with_classification",
1366
- "step_id": 1,
1367
- "step_status": StepStatus.FAILED,
1368
- }
1369
- ],
1370
- },
1371
- ]
1372
-
1373
-
1374
- # =============================================================================
1375
- # Tests for Non-Deterministic Step Call Detection
1376
- # =============================================================================
1377
- async def test_non_deterministic_step_detection_args(session: AsyncSession):
1378
- # Track whether we're in first or second execution attempt
1379
- is_first_execution = [True]
1380
-
1381
- class ConfigModel(BaseModel):
1382
- name: str
1383
- value: int
1384
- nested: dict[str, str]
1385
-
1386
- @step(max_retries=1)
1387
- async def failing_step_with_model(config: ConfigModel) -> str:
1388
- # First execution will always fail
1389
- if is_first_execution[0]:
1390
- is_first_execution[0] = False
1391
- raise RuntimeError("First attempt fails deliberately")
1392
-
1393
- # Return something (won't matter for the test)
1394
- return f"Processed {config.name} with value {config.value}"
1395
-
1396
- @workflow()
1397
- async def model_workflow() -> str:
1398
- # First execution will use this config
1399
- config = ConfigModel(name="test-config", value=42, nested={"key": "original"})
1400
-
1401
- # On retry, we'll modify the config in a non-deterministic way
1402
- if not is_first_execution[0]:
1403
- # This change should be detected as non-deterministic
1404
- config = ConfigModel(
1405
- name="test-config",
1406
- value=42,
1407
- nested={"key": "modified"}, # Change in nested field
1408
- )
1409
-
1410
- return await failing_step_with_model(config)
1411
-
1412
- # Start and execute the workflow
1413
- wf = await model_workflow.start()
1414
-
1415
- # First execution will fail but set up for retry
1416
- await execute(wf)
1417
-
1418
- # Verify the workflow is in pending state with a failed step
1419
- updated_wf = await session.get(Workflow, wf.id)
1420
- assert updated_wf
1421
- assert updated_wf.status == WorkflowStatus.PENDING
1422
-
1423
- # Find the step record
1424
- s = (
1425
- await session.exec(
1426
- select(WorkflowStep).where(WorkflowStep.workflow_id == wf.id)
1427
- )
1428
- ).one()
1429
- assert s.status == StepStatus.FAILED
1430
- assert s.retry_count == 0
1431
-
1432
- # Second execution should fail with NonDeterministicStepCallError
1433
- # because the nested field was changed
1434
- with pytest.raises(
1435
- NonDeterministicStepCallError,
1436
- match="Non-deterministic step call detected at step ID 1. Previous args",
1437
- ) as excinfo:
1438
- await execute(wf)
1439
-
1440
- # Verify error message contains information about the non-deterministic input
1441
- err_msg = str(excinfo.value)
1442
- assert "Non-deterministic step call detected" in err_msg
1443
- assert "nested" in err_msg or "original" in err_msg or "modified" in err_msg
1444
-
1445
-
1446
- async def test_non_deterministic_step_detection_kwargs(session: AsyncSession):
1447
- # Track whether we're in first or second execution attempt
1448
- is_first_execution = [True]
1449
-
1450
- class ConfigModel(BaseModel):
1451
- name: str
1452
- value: int
1453
- options: dict[str, bool]
1454
-
1455
- @step(max_retries=1)
1456
- async def failing_step_with_kwargs(
1457
- basic_value: int, *, config: ConfigModel, flag: bool = False
1458
- ) -> str:
1459
- # First execution will always fail
1460
- if is_first_execution[0]:
1461
- is_first_execution[0] = False
1462
- raise RuntimeError("First attempt fails deliberately")
1463
-
1464
- # Return something (won't matter for the test)
1465
- return f"Processed with {basic_value} and {config.name}"
1466
-
1467
- @workflow()
1468
- async def kwargs_workflow() -> str:
1469
- # First execution will use these values
1470
- basic_value = 100
1471
- config = ConfigModel(
1472
- name="config-1", value=42, options={"debug": True, "verbose": False}
1473
- )
1474
- flag = False
1475
-
1476
- # On retry, we'll modify the kwargs in a non-deterministic way
1477
- if not is_first_execution[0]:
1478
- # This kwargs change should be detected as non-deterministic
1479
- flag = True # Changed from False to True
1480
-
1481
- return await failing_step_with_kwargs(basic_value, config=config, flag=flag)
1482
-
1483
- # Start and execute the workflow
1484
- wf = await kwargs_workflow.start()
1485
-
1486
- # First execution will fail but set up for retry
1487
- await execute(wf)
1488
-
1489
- # Verify the workflow is in pending state with a failed step
1490
- updated_wf = await session.get(Workflow, wf.id)
1491
- assert updated_wf
1492
- assert updated_wf.status == WorkflowStatus.PENDING
1493
-
1494
- # Find the step record
1495
- s = (
1496
- await session.exec(
1497
- select(WorkflowStep).where(WorkflowStep.workflow_id == wf.id)
1498
- )
1499
- ).one()
1500
- assert s.status == StepStatus.FAILED
1501
- assert s.retry_count == 0
1502
-
1503
- # Second execution should fail with NonDeterministicStepCallError
1504
- # because the flag kwarg was changed
1505
- with pytest.raises(
1506
- NonDeterministicStepCallError,
1507
- match="Non-deterministic step call detected at step ID 1. Previous kwargs",
1508
- ) as excinfo:
1509
- await execute(wf)
1510
-
1511
- # Verify error message contains information about the non-deterministic input
1512
- err_msg = str(excinfo.value)
1513
- assert "Non-deterministic step call detected" in err_msg
1514
- assert "flag" in err_msg
1515
-
1516
-
1517
- async def test_non_deterministic_step_detection_function(session: AsyncSession):
1518
- # Track whether we're in first or second execution attempt
1519
- is_first_execution = [True]
1520
-
1521
- @step(max_retries=1)
1522
- async def first_step(value: int) -> int:
1523
- is_first_execution[0] = False
1524
- raise RuntimeError("First step fails deliberately")
1525
-
1526
- @step()
1527
- async def second_step(value: int) -> int:
1528
- return value * 2
1529
-
1530
- @workflow()
1531
- async def different_step_workflow() -> int:
1532
- initial_value = 5
1533
-
1534
- # On first execution, call first_step
1535
- if is_first_execution[0]:
1536
- return await first_step(initial_value)
1537
- else:
1538
- # On retry, call a completely different step
1539
- # This should be detected as non-deterministic
1540
- return await second_step(initial_value)
1541
-
1542
- # Start and execute the workflow
1543
- wf = await different_step_workflow.start()
1544
-
1545
- # First execution will fail but set up for retry
1546
- await execute(wf)
1547
-
1548
- # Verify the workflow is in pending state with a failed step
1549
- updated_wf = await session.get(Workflow, wf.id)
1550
- assert updated_wf
1551
- assert updated_wf.status == WorkflowStatus.PENDING
1552
-
1553
- # Find the step record
1554
- s = (
1555
- await session.exec(
1556
- select(WorkflowStep).where(WorkflowStep.workflow_id == wf.id)
1557
- )
1558
- ).one()
1559
- assert s.status == StepStatus.FAILED
1560
- assert s.retry_count == 0
1561
-
1562
- # Second execution should fail with NonDeterministicStepCallError
1563
- # because we're calling a completely different step
1564
- with pytest.raises(
1565
- NonDeterministicStepCallError,
1566
- match="Non-deterministic step call detected at step ID 1. Previous function name",
1567
- ) as excinfo:
1568
- await execute(wf)
1569
-
1570
- # Verify error message contains information about the non-deterministic function call
1571
- err_msg = str(excinfo.value)
1572
- assert "Non-deterministic step call detected" in err_msg
1573
- assert "first_step" in err_msg and "second_step" in err_msg
1574
-
1575
-
1576
- async def test_task_cancellation(session: AsyncSession):
1577
- @step()
1578
- async def handled_cancellation_step():
1579
- try:
1580
- asyncio.create_task(canceller(asyncio.current_task()))
1581
- await asyncio.sleep(10)
1582
- return "completed"
1583
- except asyncio.CancelledError:
1584
- return "cancelled"
1585
-
1586
- @step()
1587
- async def unhandled_cancellation_step():
1588
- asyncio.create_task(canceller(asyncio.current_task()))
1589
- await asyncio.sleep(10)
1590
- return "completed2"
1591
-
1592
- @workflow()
1593
- async def cancellation_workflow():
1594
- result = await handled_cancellation_step()
1595
- try:
1596
- return await unhandled_cancellation_step()
1597
- except asyncio.CancelledError:
1598
- return f'first step result: "{result}". second step cancelled'
1599
-
1600
- async def canceller(task: asyncio.Task | None):
1601
- assert task
1602
- task.cancel()
1603
-
1604
- async with WorkflowOrchestrator.ensure_started() as orchestrator:
1605
- wf = await cancellation_workflow.start()
1606
- await orchestrator.wait_for_completion(wf.id)
1607
-
1608
- await session.refresh(wf)
1609
- steps = (
1610
- await session.exec(
1611
- select(WorkflowStep).where(WorkflowStep.workflow_id == wf.id)
1612
- )
1613
- ).all()
1614
- assert len(steps) == 2
1615
- assert steps[0].status == StepStatus.SUCCEEDED
1616
- assert steps[0].result == "cancelled"
1617
- assert steps[1].status == StepStatus.FAILED
1618
- assert steps[1].error
1619
- assert steps[1].error["type"] == "CancelledError"
1620
- assert wf.status == WorkflowStatus.SUCCEEDED
1621
- assert wf.result == 'first step result: "cancelled". second step cancelled'
1622
-
1623
-
1624
- async def test_as_step_helper(session: AsyncSession):
1625
- # Force garbage collection to ensure any non-referenced cached functions are removed
1626
- gc.collect()
1627
-
1628
- # Store the initial cache count since our assertions will be based on it
1629
- initial_cache_count = len(__AS_STEP_CACHE)
1630
-
1631
- # Create a regular coroutine function (not a step)
1632
- async def regular_function(value: int) -> int:
1633
- return value * 2
1634
-
1635
- # Verify it's not already a step
1636
- assert not __is_workflow_step(regular_function)
1637
-
1638
- # Convert it to a step
1639
- step_function = as_step(regular_function, step_type=StepType.COMPUTE)
1640
-
1641
- # Verify it's now recognized as a step
1642
- assert __is_workflow_step(step_function)
1643
-
1644
- # Calling as_step again should return the same cached step function
1645
- step_function_again = as_step(regular_function, step_type=StepType.COMPUTE)
1646
- assert step_function is step_function_again
1647
-
1648
- # Create a workflow that uses the step
1649
- @workflow()
1650
- async def as_step_workflow(input_value: int) -> int:
1651
- result = await step_function(input_value)
1652
- return result
1653
-
1654
- # Execute the workflow
1655
- wf = await as_step_workflow.start(5)
1656
- result = await execute(wf)
1657
-
1658
- # Verify the workflow completed successfully
1659
- updated_wf = await session.get(Workflow, wf.id)
1660
- assert updated_wf
1661
- assert updated_wf.status == WorkflowStatus.SUCCEEDED
1662
- assert result == 10 # 5 * 2
1663
-
1664
- # Verify a step was created and executed
1665
- steps = (
1666
- await session.exec(
1667
- select(WorkflowStep).where(WorkflowStep.workflow_id == wf.id)
1668
- )
1669
- ).all()
1670
- assert len(steps) == 1
1671
- assert steps[0].status == StepStatus.SUCCEEDED
1672
- assert steps[0].result == 10
1673
-
1674
- # Test with an already-decorated step function
1675
- @step()
1676
- async def already_step_function(value: int) -> int:
1677
- return value + 10
1678
-
1679
- # as_step should return the original function if it's already a step
1680
- same_step = as_step(already_step_function, step_type=StepType.COMPUTE)
1681
- assert same_step is already_step_function
1682
-
1683
- # Create and execute a workflow using the already-step function
1684
- @workflow()
1685
- async def existing_step_workflow(input_value: int) -> int:
1686
- result = await already_step_function(input_value)
1687
- return result
1688
-
1689
- wf2 = await existing_step_workflow.start(7)
1690
- result2 = await execute(wf2)
1691
-
1692
- # Verify workflow execution
1693
- updated_wf2 = await session.get(Workflow, wf2.id)
1694
- assert updated_wf2
1695
- assert updated_wf2.status == WorkflowStatus.SUCCEEDED
1696
- assert result2 == 17 # 7 + 10
1697
-
1698
- # We should have 1 entry in the cache at this point.
1699
- assert len(__AS_STEP_CACHE) == initial_cache_count + 1
1700
-
1701
- # Test that WeakKeyDictionary prevents memory leaks
1702
- def create_temp_function():
1703
- # Create a function that will go out of scope
1704
- async def temp_function(x: int) -> int:
1705
- return x * 3
1706
-
1707
- # Apply as_step to the function
1708
- as_step(temp_function, step_type=StepType.COMPUTE)
1709
-
1710
- return temp_function
1711
-
1712
- # Create a temporary function which will have `as_step` applied on it
1713
- temp_function = create_temp_function()
1714
-
1715
- # Verify that the new function is in the cache
1716
- assert len(__AS_STEP_CACHE) == initial_cache_count + 2
1717
-
1718
- # Clear the reference to the function
1719
- temp_function = None
1720
- assert temp_function is None # use the variable to make linter happy
1721
-
1722
- # Force garbage collection
1723
- gc.collect()
1724
-
1725
- # Verify the weak reference is now None (object was garbage collected)
1726
- assert len(__AS_STEP_CACHE) == initial_cache_count + 1
1727
-
1728
-
1729
- async def test_workflow_notifications(session: AsyncSession):
1730
- """Test that all workflow notifications are delivered correctly."""
1731
- # Create a WorkflowObserver to capture notifications
1732
- observer = WorkflowObserver()
1733
- exec_count = 0
1734
-
1735
- @step(max_retries=1)
1736
- async def some_step():
1737
- nonlocal exec_count
1738
- if exec_count == 0:
1739
- exec_count += 1
1740
- raise Exception("First execution")
1741
- return "success"
1742
-
1743
- @workflow()
1744
- async def notification_workflow():
1745
- await some_step()
1746
- return "done"
1747
-
1748
- async def wait_notifications(workflow_id: UUID):
1749
- # First execution fails
1750
- await observer.wait(Notification.WORKFLOW_STARTED, workflow_id)
1751
- await observer.wait(Notification.WORKFLOW_RESUMED, workflow_id)
1752
- await observer.wait(Notification.STEP_RUNNING, workflow_id)
1753
- await observer.wait(Notification.STEP_FAILED, workflow_id)
1754
- await observer.wait(Notification.WORKFLOW_SUSPENDED, workflow_id)
1755
-
1756
- # # Second execution succeeds
1757
- await observer.wait(Notification.WORKFLOW_RESUMED, workflow_id)
1758
- await observer.wait(Notification.STEP_RUNNING, workflow_id)
1759
- await observer.wait(Notification.STEP_SUCCEEDED, workflow_id)
1760
- await observer.wait(Notification.WORKFLOW_SUCCEEDED, workflow_id)
1761
-
1762
- async with workflow_notification_context(observer.on_workflow_notification):
1763
- wf = await notification_workflow.start()
1764
- wait_task = asyncio.create_task(wait_notifications(wf.id))
1765
- # execution 1
1766
- await lock_and_execute(wf)
1767
- # execution 2
1768
- await lock_and_execute(wf)
1769
-
1770
- # Verify we received all notifications by simply waiting the task
1771
- await wait_task
1772
-
1773
-
1774
- # =============================================================================
1775
- # Test for Step Hierarchy Implementation
1776
- # =============================================================================
1777
- async def test_step_hierarchy_implementation(session: AsyncSession):
1778
- """Test that the step hierarchy is correctly implemented with parent-child relationships."""
1779
-
1780
- @step()
1781
- async def parent_step():
1782
- return await child_step()
1783
-
1784
- @step()
1785
- async def child_step():
1786
- return await grandchild_step()
1787
-
1788
- @step()
1789
- async def grandchild_step():
1790
- return "done"
1791
-
1792
- @workflow()
1793
- async def hierarchy_workflow():
1794
- return await parent_step()
1795
-
1796
- # Run the workflow
1797
- wf = await hierarchy_workflow.start()
1798
- await lock_and_execute(wf)
1799
-
1800
- # Get all steps for this workflow
1801
- steps = (
1802
- await session.exec(
1803
- select(WorkflowStep)
1804
- .where(WorkflowStep.workflow_id == wf.id)
1805
- .order_by(col(WorkflowStep.step_id))
1806
- )
1807
- ).all()
1808
-
1809
- # We should have 3 steps
1810
- assert len(steps) == 3
1811
-
1812
- # Verify step types
1813
- assert steps[0].function_name.split(".")[-1] == "parent_step"
1814
- assert steps[1].function_name.split(".")[-1] == "child_step"
1815
- assert steps[2].function_name.split(".")[-1] == "grandchild_step"
1816
-
1817
- parent_step_id = steps[0].step_id
1818
- descendant_step_ids = [steps[1].step_id, steps[2].step_id]
1819
-
1820
- for descendant_step_id in descendant_step_ids:
1821
- assert parent_step_id < descendant_step_id
1822
-
1823
- # Verify parent-child relationships
1824
- assert steps[0].parent_step_id is None # Parent has no parent
1825
- assert steps[1].parent_step_id == steps[0].step_id # Child's parent is parent
1826
- assert steps[2].parent_step_id == steps[1].step_id # Grandchild's parent is child
1827
-
1828
- # Verify hierarchy utility functions
1829
-
1830
- # 1. Get parent
1831
- parent = await get_step_parent(steps[2]) # Get parent of grandchild
1832
- assert parent is not None
1833
- assert parent.step_id == steps[1].step_id
1834
- assert parent.function_name == steps[1].function_name
1835
-
1836
- # 2. Get children
1837
- children = await get_step_children(steps[0]) # Get children of parent
1838
- assert len(children) == 1
1839
- assert children[0].step_id == steps[1].step_id
1840
-
1841
- # 3. Get descendants
1842
- descendants = await get_step_descendants(steps[0]) # Get all descendants of parent
1843
- assert len(descendants) == 2
1844
- descendant_ids = sorted([d.step_id for d in descendants])
1845
- assert descendant_ids == [steps[1].step_id, steps[2].step_id]
1846
-
1847
- # Get ancestors of grandchild
1848
- ancestors = await get_step_ancestors(steps[2])
1849
- assert len(ancestors) == 2
1850
- assert (
1851
- ancestors[0].step_id == steps[1].step_id
1852
- ) # First ancestor is the immediate parent
1853
- assert (
1854
- ancestors[1].step_id == steps[0].step_id
1855
- ) # Second ancestor is the grandparent
1856
-
1857
-
1858
- async def test_basic_step_parent_child(session: AsyncSession):
1859
- """Basic test of parent-child relationship."""
1860
-
1861
- @step()
1862
- async def parent():
1863
- return await child()
1864
-
1865
- @step()
1866
- async def child():
1867
- return "done"
1868
-
1869
- @workflow()
1870
- async def parent_child_workflow():
1871
- return await parent()
1872
-
1873
- wf = await parent_child_workflow.start()
1874
- await lock_and_execute(wf)
1875
-
1876
- steps = (
1877
- await session.exec(
1878
- select(WorkflowStep)
1879
- .where(WorkflowStep.workflow_id == wf.id)
1880
- .order_by(col(WorkflowStep.step_id))
1881
- )
1882
- ).all()
1883
-
1884
- assert len(steps) == 2
1885
- parent_step = steps[0]
1886
- child_step = steps[1]
1887
-
1888
- assert parent_step.step_id < child_step.step_id
1889
-
1890
- # Test parent-child relationship
1891
- assert child_step.parent_step_id == parent_step.step_id
1892
- assert parent_step.parent_step_id is None
1893
-
1894
-
1895
- async def test_child_workflow_called_as_function_has_parent_id(session: AsyncSession):
1896
- @workflow()
1897
- async def child_workflow():
1898
- return "child_result"
1899
-
1900
- @workflow()
1901
- async def parent_workflow():
1902
- # Call child workflow as async function - this should set parent_id
1903
- result = await child_workflow()
1904
- return f"parent got: {result}"
1905
-
1906
- # Start parent workflow
1907
- parent_wf = await parent_workflow.start()
1908
-
1909
- async with WorkflowOrchestrator.ensure_started() as orchestrator:
1910
- await orchestrator.wait_for_completion(parent_wf.id)
1911
-
1912
- # Verify parent workflow completed successfully
1913
- await session.refresh(parent_wf)
1914
- assert parent_wf.status == WorkflowStatus.SUCCEEDED
1915
- assert parent_wf.result == "parent got: child_result"
1916
-
1917
- # Get all workflows and find the child workflow
1918
- all_workflows = (
1919
- await session.exec(select(Workflow).order_by(col(Workflow.created_at)))
1920
- ).all()
1921
-
1922
- assert len(all_workflows) == 2
1923
- child_wf = next(wf for wf in all_workflows if wf.id != parent_wf.id)
1924
-
1925
- # Verify child workflow has parent_id set to parent workflow
1926
- assert child_wf.parent_id == parent_wf.id
1927
- assert child_wf.status == WorkflowStatus.SUCCEEDED
1928
- assert child_wf.result == "child_result"
1929
-
1930
-
1931
- async def test_child_workflow_called_as_start_step(session: AsyncSession):
1932
- child_workflow_id = None
1933
-
1934
- @workflow()
1935
- async def child_workflow():
1936
- return "child_result"
1937
-
1938
- @workflow()
1939
- async def parent_workflow():
1940
- nonlocal child_workflow_id
1941
- # Call child workflow using start_step - this should NOT set parent_id
1942
- child_workflow_id = await child_workflow.start_step()
1943
- return f"started child: {child_workflow_id}"
1944
-
1945
- # Start parent workflow
1946
- parent_wf = await parent_workflow.start()
1947
-
1948
- async with WorkflowOrchestrator.ensure_started() as orchestrator:
1949
- await orchestrator.wait_for_completion(parent_wf.id)
1950
- assert child_workflow_id
1951
- await orchestrator.wait_for_completion(child_workflow_id)
1952
-
1953
- # Verify parent workflow completed successfully
1954
- await session.refresh(parent_wf)
1955
- assert parent_wf.status == WorkflowStatus.SUCCEEDED
1956
-
1957
- # Get all workflows and find the child workflow
1958
- all_workflows = (
1959
- await session.exec(select(Workflow).order_by(col(Workflow.created_at)))
1960
- ).all()
1961
-
1962
- assert len(all_workflows) == 2
1963
- child_wf = next(wf for wf in all_workflows if wf.id != parent_wf.id)
1964
-
1965
- # Verify child workflow has NO parent_id set
1966
- assert child_wf.parent_id is None
1967
- assert child_wf.status == WorkflowStatus.SUCCEEDED
1968
- assert child_wf.result == "child_result"
1969
-
1970
-
1971
- # =============================================================================
1972
- # Test for message steps
1973
- # =============================================================================
1974
- class Example(BaseModel):
1975
- id: int
1976
- msg: str
1977
-
1978
-
1979
- @pytest.mark.parametrize("input", ["hello", Example(id=1, msg="hello")])
1980
- async def test_message(session: AsyncSession, input: str | BaseModel):
1981
- @workflow()
1982
- async def msg_workflow(msg: str | BaseModel):
1983
- await message(msg)
1984
-
1985
- async with WorkflowOrchestrator.ensure_started() as orchestrator:
1986
- wf = await msg_workflow.start(input)
1987
- await orchestrator.wait_for_completion(wf.id)
1988
-
1989
- await session.refresh(wf)
1990
- steps = (
1991
- await session.exec(
1992
- select(WorkflowStep).where(WorkflowStep.workflow_id == wf.id)
1993
- )
1994
- ).all()
1995
-
1996
- step = one_or_raise(steps)
1997
- # We recorded a single `WorkflowStep` of type `MESSAGE` to the DB.
1998
- assert step.status is StepStatus.SUCCEEDED
1999
- assert step.step_type is StepType.MESSAGE
2000
- if isinstance(input, str):
2001
- assert step.args == [input]
2002
- else:
2003
- assert step.args == [input.model_dump()]
2004
- assert not step.kwargs
2005
- assert step.result is None