pyworkflow-engine 0.1.18__tar.gz → 0.1.19__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/CLAUDE.md +2 -2
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/PKG-INFO +1 -1
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/docs/concepts/cancellation.mdx +5 -3
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/celery/durable/workflows/cancellation.py +2 -2
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/celery/durable/workflows/continue_as_new.py +3 -3
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/local/durable/11_continue_as_new.py +3 -3
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyproject.toml +1 -1
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/__init__.py +1 -1
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/aws/context.py +1 -1
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/celery/tasks.py +3 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/context/base.py +5 -2
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/context/local.py +30 -6
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/context/mock.py +1 -1
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/context/step_context.py +73 -3
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/core/exceptions.py +1 -1
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/core/step.py +2 -2
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/core/workflow.py +3 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/engine/executor.py +7 -6
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/primitives/child_workflow.py +1 -1
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/primitives/continue_as_new.py +5 -5
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/primitives/shield.py +1 -1
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/integration/test_cancellation.py +85 -1
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/integration/test_child_workflows.py +5 -2
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/integration/test_continue_as_new.py +5 -5
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/unit/test_cancellation.py +218 -14
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/unit/test_continue_as_new.py +24 -16
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/DISTRIBUTED.md +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/LICENSE +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/MANIFEST.in +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/README.md +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/RELEASING.md +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/docs/concepts/continue-as-new.mdx +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/docs/concepts/events.mdx +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/docs/concepts/fault-tolerance.mdx +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/docs/concepts/hooks.mdx +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/docs/concepts/limitations.mdx +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/docs/concepts/schedules.mdx +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/docs/concepts/sleep.mdx +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/docs/concepts/step-context.mdx +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/docs/concepts/steps.mdx +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/docs/concepts/workflows.mdx +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/docs/guides/brokers.mdx +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/docs/guides/cli.mdx +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/docs/guides/configuration.mdx +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/docs/introduction.mdx +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/docs/quickstart.mdx +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/__init__.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/celery/__init__.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/celery/durable/docker-compose.yml +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/celery/durable/pyworkflow.config.yaml +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/celery/durable/workflows/__init__.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/celery/durable/workflows/basic.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/celery/durable/workflows/batch_processing.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/celery/durable/workflows/child_workflow_patterns.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/celery/durable/workflows/child_workflows.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/celery/durable/workflows/fault_tolerance.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/celery/durable/workflows/hooks.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/celery/durable/workflows/idempotency.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/celery/durable/workflows/long_running.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/celery/durable/workflows/retries.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/celery/durable/workflows/schedules.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/celery/durable/workflows/step_context.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/celery/transient/01_basic_workflow.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/celery/transient/02_fault_tolerance.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/celery/transient/__init__.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/celery/transient/pyworkflow.config.yaml +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/local/__init__.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/local/durable/01_basic_workflow.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/local/durable/02_file_storage.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/local/durable/03_retries.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/local/durable/04_long_running.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/local/durable/05_event_log.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/local/durable/06_idempotency.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/local/durable/07_hooks.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/local/durable/08_cancellation.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/local/durable/09_child_workflows.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/local/durable/10_child_workflow_patterns.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/local/durable/12_schedules.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/local/durable/13_step_context.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/local/durable/__init__.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/local/transient/01_quick_tasks.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/local/transient/02_retries.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/local/transient/03_sleep.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/local/transient/__init__.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/aws/__init__.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/aws/handler.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/aws/testing.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/celery/__init__.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/celery/app.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/celery/loop.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/celery/scheduler.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/celery/singleton.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/cli/__init__.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/cli/__main__.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/cli/commands/__init__.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/cli/commands/hooks.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/cli/commands/quickstart.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/cli/commands/runs.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/cli/commands/scheduler.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/cli/commands/schedules.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/cli/commands/setup.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/cli/commands/worker.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/cli/commands/workflows.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/cli/output/__init__.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/cli/output/formatters.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/cli/output/styles.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/cli/utils/__init__.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/cli/utils/async_helpers.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/cli/utils/config.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/cli/utils/config_generator.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/cli/utils/discovery.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/cli/utils/docker_manager.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/cli/utils/interactive.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/cli/utils/storage.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/config.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/context/__init__.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/context/aws.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/core/__init__.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/core/registry.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/core/scheduled.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/core/validation.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/discovery.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/engine/__init__.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/engine/events.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/engine/replay.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/observability/__init__.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/observability/logging.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/primitives/__init__.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/primitives/child_handle.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/primitives/define_hook.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/primitives/hooks.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/primitives/resume_hook.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/primitives/schedule.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/primitives/sleep.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/runtime/__init__.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/runtime/base.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/runtime/celery.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/runtime/factory.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/runtime/local.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/scheduler/__init__.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/scheduler/local.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/serialization/__init__.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/serialization/decoder.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/serialization/encoder.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/storage/__init__.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/storage/base.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/storage/cassandra.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/storage/config.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/storage/dynamodb.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/storage/file.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/storage/memory.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/storage/mysql.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/storage/postgres.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/storage/schemas.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/storage/sqlite.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/utils/__init__.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/utils/duration.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/utils/schedule.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow_engine.egg-info/SOURCES.txt +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/setup.cfg +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/integration/__init__.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/integration/test_cassandra_storage.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/integration/test_dynamodb_storage.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/integration/test_fault_tolerance.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/integration/test_schedule_storage.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/integration/test_singleton.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/integration/test_workflow_suspended.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/unit/__init__.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/unit/backends/__init__.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/unit/backends/test_cassandra_storage.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/unit/backends/test_dynamodb_storage.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/unit/backends/test_postgres_storage.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/unit/backends/test_sqlite_storage.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/unit/conftest.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/unit/test_child_workflows.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/unit/test_cli_worker.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/unit/test_event_limits.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/unit/test_executor.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/unit/test_fault_tolerance.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/unit/test_hooks.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/unit/test_registry.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/unit/test_replay.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/unit/test_schedule_schemas.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/unit/test_schedule_utils.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/unit/test_scheduled_workflow.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/unit/test_singleton.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/unit/test_step.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/unit/test_step_context.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/unit/test_validation.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/unit/test_workflow.py +0 -0
- {pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/unit/test_workflow_suspended.py +0 -0
@@ -360,14 +360,14 @@ Cancellation is checked at these points:
 **Important Limitation:**

 Cancellation does NOT interrupt a step that is already executing. If a step function takes a long time (e.g., a 10-minute API call), cancellation will only be detected after the step completes. This is by design to avoid leaving operations in an inconsistent state.

-For long-running operations that need to be cancellable mid-execution, the step should periodically call `ctx.check_cancellation()` to cooperatively check for cancellation:
+For long-running operations that need to be cancellable mid-execution, the step should periodically call `await ctx.check_cancellation()` to cooperatively check for cancellation. This async method checks both the in-memory flag and the storage backend's cancellation flag (in durable mode), enabling detection of external cancellation requests:

 ```python
 @step()
 async def long_running_step():
     ctx = get_context()
     for chunk in process_large_dataset():
-        ctx.check_cancellation()  # Cooperative cancellation check
+        await ctx.check_cancellation()  # Cooperative cancellation check
         await process_chunk(chunk)
     return result
 ```
@@ -119,7 +119,7 @@ async def process_large_dataset(dataset_id: str):
     results = []
     for chunk in dataset.chunks():
         # Check for cancellation periodically
-        ctx.check_cancellation()
+        await ctx.check_cancellation()

         result = await process_chunk(chunk)
         results.append(result)
@@ -127,6 +127,8 @@ async def process_large_dataset(dataset_id: str):
     return results
 ```

+The `check_cancellation()` method is async because in durable mode it queries the storage backend's cancellation flag, enabling detection of external cancellation requests (e.g., from `cancel_workflow()`). It checks the in-memory flag first as a fast path, then falls back to storage if needed.
+
 This allows the step to respond to cancellation requests between chunks rather than waiting until the entire dataset is processed.

 ## Handling Cancellation
@@ -167,7 +169,7 @@ async with shield():
 ```

 While inside a `shield()` block:
-- `ctx.check_cancellation()` will not raise `CancellationError`
+- `await ctx.check_cancellation()` will not raise `CancellationError`
 - The cancellation request is preserved
 - Cancellation will take effect after exiting the shield

@@ -286,7 +288,7 @@ $ pyworkflow runs logs run_abc123 --filter cancellation
 ctx = get_context()
 for i, item in enumerate(items):
     if i % 100 == 0:  # Check every 100 items
-        ctx.check_cancellation()
+        await ctx.check_cancellation()
     await process_item(item)
 ```
 </Accordion>
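As a standalone illustration of the cooperative pattern these docs describe (plain asyncio only; the `asyncio.Event` and the local `CancellationError` stand in for pyworkflow's cancellation flag and exception, so this is a sketch, not the library's API), note how the check interval bounds cancellation latency: the loop can only stop at the iterations where it actually checks.

```python
import asyncio

# Stand-ins for pyworkflow's cancellation flag and exception; in the real
# engine the flag lives on the WorkflowContext / storage backend.
cancel_requested = asyncio.Event()


class CancellationError(Exception):
    pass


async def check_cancellation() -> None:
    # Cooperative check: only raises at the points where it is called.
    if cancel_requested.is_set():
        raise CancellationError("cancelled between work items")


async def process_items(n_items: int, check_every: int = 10) -> int:
    processed = 0
    for i in range(n_items):
        if i % check_every == 0:  # check interval bounds cancellation latency
            await check_cancellation()
        await asyncio.sleep(0.001)  # simulate one unit of work
        processed += 1
    return processed


async def main() -> None:
    task = asyncio.create_task(process_items(1000))
    await asyncio.sleep(0.05)
    cancel_requested.set()  # external cancellation request
    try:
        await task
    except CancellationError:
        print("worker stopped cooperatively, well before finishing 1000 items")


asyncio.run(main())
```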
@@ -189,7 +189,7 @@ async def cancel_demo_long_running_step(items: list) -> list:
     Example of cooperative cancellation within a long-running step.

     Since cancellation doesn't interrupt steps mid-execution, use
-    ctx.check_cancellation() for responsive cancellation in long loops.
+    await ctx.check_cancellation() for responsive cancellation in long loops.
     """
     ctx = get_context()
     results = []
@@ -197,7 +197,7 @@ async def cancel_demo_long_running_step(items: list) -> list:
     for i, item in enumerate(items):
         # Check for cancellation periodically
         if i % 10 == 0:
-            ctx.check_cancellation()  # Raises CancellationError if cancelled
+            await ctx.check_cancellation()  # Raises CancellationError if cancelled

         # Process item
         await asyncio.sleep(0.1)
@@ -101,7 +101,7 @@ async def batch_processor(offset: int = 0, batch_size: int = 10) -> str:
     print(f" [Batch] Processed {len(items)} items")

     # Continue with next batch
-    continue_as_new(offset=offset + batch_size, batch_size=batch_size)
+    await continue_as_new(offset=offset + batch_size, batch_size=batch_size)


 # --- Message Consumer Workflow ---
@@ -133,7 +133,7 @@ async def message_consumer(cursor: str | None = None, messages_processed: int =
     print(f" [Consumer] Handled {count} messages (total: {total})")

     # Continue with new cursor
-    continue_as_new(cursor=new_cursor, messages_processed=total)
+    await continue_as_new(cursor=new_cursor, messages_processed=total)


 # --- Recurring Task Workflow ---
@@ -159,7 +159,7 @@ async def recurring_report(iteration: int = 1, max_iterations: int = 3) -> str:
         return f"All {max_iterations} reports generated!"

     # Continue with next iteration
-    continue_as_new(iteration=iteration + 1, max_iterations=max_iterations)
+    await continue_as_new(iteration=iteration + 1, max_iterations=max_iterations)


 async def run_examples():
{pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/examples/local/durable/11_continue_as_new.py
RENAMED
@@ -86,7 +86,7 @@ async def batch_processor(offset: int = 0, batch_size: int = 10) -> str:
     print(" [Workflow] Batch complete. Continuing with next batch...")

     # Continue with next batch - fresh event history!
-    continue_as_new(offset=offset + batch_size, batch_size=batch_size)
+    await continue_as_new(offset=offset + batch_size, batch_size=batch_size)


 # --- Example 2: Polling Workflow ---
@@ -116,7 +116,7 @@ async def polling_workflow(cursor: str | None = None, poll_count: int = 0) -> st

     # Continue polling with new cursor
     print(f" [Workflow] Continuing with new cursor: {new_cursor}")
-    continue_as_new(cursor=new_cursor, poll_count=poll_count + 1)
+    await continue_as_new(cursor=new_cursor, poll_count=poll_count + 1)


 # --- Example 3: Counter Workflow (Simple Demo) ---
@@ -132,7 +132,7 @@ async def countdown_workflow(count: int) -> str:
         return "Countdown complete!"

     # Continue with decremented count
-    continue_as_new(count=count - 1)
+    await continue_as_new(count=count - 1)


 async def example_batch_processing(storage):
@@ -7,7 +7,7 @@ packages = [{include = "pyworkflow"}]

 [project]
 name = "pyworkflow-engine"
-version = "0.1.18"
+version = "0.1.19"
 description = "A Python implementation of durable, event-sourced workflows inspired by Vercel Workflow"
 readme = "README.md"
 requires-python = ">=3.11"
@@ -209,7 +209,7 @@ class AWSWorkflowContext(WorkflowContext):
         """Request cancellation (AWS manages this internally)."""
         logger.warning("Cancellation not supported in AWS context")

-    def check_cancellation(self) -> None:
+    async def check_cancellation(self) -> None:
         """Check cancellation (AWS manages this internally)."""
         pass  # AWS handles this

@@ -207,6 +207,9 @@ def execute_step_task(
     context_class = _resolve_context_class(context_class_name)
     if context_class is not None:
         step_ctx = context_class.from_dict(context_data)
+        # Inject cancellation metadata so check_cancellation() works on workers
+        object.__setattr__(step_ctx, "_cancellation_run_id", run_id)
+        object.__setattr__(step_ctx, "_cancellation_storage", storage)
         step_context_token = _set_step_context_internal(step_ctx)
     # Set readonly mode to prevent mutation in steps
     readonly_token = _set_step_context_readonly(True)
@@ -239,12 +239,15 @@ class WorkflowContext(ABC):
         ...

     @abstractmethod
-    def check_cancellation(self) -> None:
+    async def check_cancellation(self) -> None:
         """
         Check for cancellation and raise if requested.

         This should be called at interruptible points (before steps,
-        during sleeps, etc.) to allow graceful cancellation.
+        during sleeps, etc.) to allow graceful cancellation. In durable
+        mode, this also checks the storage backend's cancellation flag
+        to detect external cancellation requests (e.g., from
+        ``cancel_workflow()``).

         Raises:
             CancellationError: If cancellation was requested and not blocked
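The base-class change above turns the check into an abstract coroutine. A minimal sketch of that shape (toy classes, not pyworkflow's real `WorkflowContext` hierarchy) shows what implementers and call sites now have to do:

```python
from abc import ABC, abstractmethod


class CancellationError(Exception):
    """Stand-in for pyworkflow.core.exceptions.CancellationError."""


class ContextBase(ABC):
    """Toy analogue of WorkflowContext: the check is now an async method."""

    @abstractmethod
    async def check_cancellation(self) -> None:
        """Raise CancellationError if cancellation was requested."""
        ...


class InMemoryContext(ContextBase):
    def __init__(self) -> None:
        self._cancellation_requested = False

    def request_cancellation(self) -> None:
        self._cancellation_requested = True

    async def check_cancellation(self) -> None:
        # An async signature lets real implementations await a storage lookup here.
        if self._cancellation_requested:
            raise CancellationError("cancellation requested")


async def caller(ctx: ContextBase) -> None:
    # Every call site changes from ctx.check_cancellation() to:
    await ctx.check_cancellation()
```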
@@ -744,7 +744,7 @@ class LocalContext(WorkflowContext):
             return

         # Check for cancellation before sleeping
-        self.check_cancellation()
+        await self.check_cancellation()

         # Durable mode - suspend workflow
         sleep_id = self._generate_sleep_id(duration_seconds)
@@ -913,7 +913,7 @@ class LocalContext(WorkflowContext):
         )

         # Check for cancellation before waiting for hook
-        self.check_cancellation()
+        await self.check_cancellation()

         # Generate deterministic hook_id
         self._step_counter += 1
@@ -1038,19 +1038,22 @@ class LocalContext(WorkflowContext):
             reason=reason,
         )

-    def check_cancellation(self) -> None:
+    async def check_cancellation(self) -> None:
         """
         Check for cancellation and raise if requested.

         This should be called at interruptible points (before steps,
-        during sleeps, etc.) to allow graceful cancellation.
+        during sleeps, etc.) to allow graceful cancellation. In durable
+        mode, this also queries the storage backend to detect external
+        cancellation requests (e.g., from ``cancel_workflow()``).

         Raises:
             CancellationError: If cancellation was requested and not blocked
         """
-
-        from pyworkflow.core.exceptions import CancellationError
+        from pyworkflow.core.exceptions import CancellationError

+        # Fast path: in-memory flag
+        if self._cancellation_requested and not self._cancellation_blocked:
             logger.info(
                 "Cancellation check triggered - raising CancellationError",
                 run_id=self._run_id,
@@ -1061,6 +1064,27 @@ class LocalContext(WorkflowContext):
                 reason=self._cancellation_reason,
             )

+        # Storage check: detect external cancellation (durable mode only)
+        if not self._cancellation_blocked and self._durable and self._storage is not None:
+            try:
+                if await self._storage.check_cancellation_flag(self._run_id):
+                    self._cancellation_requested = True
+                    logger.info(
+                        "Cancellation detected via storage flag - raising CancellationError",
+                        run_id=self._run_id,
+                    )
+                    raise CancellationError(
+                        message="Workflow was cancelled: detected via storage flag",
+                        reason=self._cancellation_reason,
+                    )
+            except CancellationError:
+                raise
+            except Exception as e:
+                logger.warning(
+                    f"Failed to check cancellation flag in storage: {e}",
+                    run_id=self._run_id,
+                )
+
     @property
     def cancellation_blocked(self) -> bool:
         """
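Restating the LocalContext logic above in isolation may make the layering easier to see: the in-memory flag is the cheap fast path, the awaited storage lookup is what makes an external `cancel_workflow()` visible, the shield flag suppresses the raise, and storage errors are deliberately non-fatal. This is a hedged sketch built on a toy `FlagStore` protocol, not pyworkflow's `StorageBackend`:

```python
from typing import Protocol


class CancellationError(Exception):
    """Stand-in for pyworkflow's exception."""


class FlagStore(Protocol):
    """Toy analogue of the storage backend's cancellation-flag API."""

    async def check_cancellation_flag(self, run_id: str) -> bool: ...


async def two_tier_check(
    run_id: str,
    local_flag: bool,
    blocked: bool,
    durable: bool,
    storage: FlagStore | None,
) -> None:
    """Raise if cancelled: memory first (cheap), then storage (authoritative)."""
    if blocked:
        return  # inside shield(): cancellation is deferred, never raised here
    if local_flag:
        raise CancellationError("cancelled (in-memory flag)")
    if durable and storage is not None:
        try:
            if await storage.check_cancellation_flag(run_id):
                raise CancellationError("cancelled (storage flag)")
        except CancellationError:
            raise
        except Exception:
            # A storage hiccup should not crash the workflow; skip this check.
            pass
```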
@@ -322,7 +322,7 @@ class MockContext(WorkflowContext):
         self._cancellation_requested = True
         self._cancellation_reason = reason

-    def check_cancellation(self) -> None:
+    async def check_cancellation(self) -> None:
         """Check if cancellation was requested and raise if not blocked."""
         from pyworkflow.core.exceptions import CancellationError

@@ -47,11 +47,17 @@ Usage:
         return {"valid": True}
 """

+from __future__ import annotations
+
 from contextvars import ContextVar, Token
-from typing import Any, Self
+from typing import TYPE_CHECKING, Any, Self

+from loguru import logger
 from pydantic import BaseModel, ConfigDict

+if TYPE_CHECKING:
+    from pyworkflow.storage.base import StorageBackend
+

 class StepContext(BaseModel):
     """
@@ -80,6 +86,61 @@ class StepContext(BaseModel):

     model_config = ConfigDict(frozen=True, extra="forbid")

+    # Private attributes injected by the framework (not serialized).
+    # These enable check_cancellation() to work even when WorkflowContext
+    # is not available (e.g., on Celery workers, inside LangGraph tools).
+    _cancellation_run_id: str | None = None
+    _cancellation_storage: StorageBackend | None = None
+
+    async def check_cancellation(self) -> None:
+        """
+        Check for cancellation and raise CancellationError if requested.
+
+        This works in all execution contexts:
+        - If a WorkflowContext is available, delegates to it (checks both
+          in-memory flag and storage).
+        - Otherwise, checks the storage cancellation flag directly using
+          the run_id and storage injected by the framework.
+
+        This is especially useful for long-running operations inside steps
+        or tool adapters where WorkflowContext may not be available (e.g.,
+        on Celery workers, inside LangGraph tool execution).
+
+        Raises:
+            CancellationError: If cancellation was requested
+
+        Example:
+            @step()
+            async def long_running_step():
+                ctx = get_step_context()
+                for chunk in chunks:
+                    await ctx.check_cancellation()
+                    await process(chunk)
+        """
+        from pyworkflow.context import get_context, has_context
+        from pyworkflow.core.exceptions import CancellationError
+
+        # Fast path: delegate to WorkflowContext if available
+        if has_context():
+            await get_context().check_cancellation()
+            return
+
+        # Fallback: check storage flag directly
+        if self._cancellation_run_id is not None and self._cancellation_storage is not None:
+            try:
+                if await self._cancellation_storage.check_cancellation_flag(
+                    self._cancellation_run_id
+                ):
+                    raise CancellationError(
+                        message="Workflow was cancelled: detected via storage flag",
+                    )
+            except CancellationError:
+                raise
+            except Exception as e:
+                logger.warning(
+                    f"Failed to check cancellation flag in storage: {e}",
+                )
+
     def with_updates(self: Self, **kwargs: Any) -> Self:
         """
         Create a new context with updated values.
@@ -203,12 +264,21 @@ async def set_step_context(ctx: StepContext) -> None:
     if not isinstance(ctx, StepContext):
         raise TypeError(f"Expected StepContext instance, got {type(ctx).__name__}")

+    # Inject cancellation metadata from WorkflowContext if available.
+    # This enables check_cancellation() to work even when WorkflowContext
+    # is not accessible (e.g., on Celery workers, inside LangGraph tools).
+    from pyworkflow.context import get_context, has_context
+
+    if has_context():
+        workflow_ctx = get_context()
+        object.__setattr__(ctx, "_cancellation_run_id", workflow_ctx.run_id)
+        if workflow_ctx.is_durable and workflow_ctx.storage is not None:
+            object.__setattr__(ctx, "_cancellation_storage", workflow_ctx.storage)
+
     # Set the context in the contextvar
     _step_context.set(ctx)

     # Persist to storage if we're in a durable workflow
-    from pyworkflow.context import get_context, has_context
-
     if has_context():
         workflow_ctx = get_context()
         if workflow_ctx.is_durable and workflow_ctx.storage is not None:
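Both this hunk and the Celery/executor hunks attach cancellation metadata with `object.__setattr__` because `StepContext` is frozen (`model_config = ConfigDict(frozen=True, ...)`). A small standalone sketch using a frozen dataclass (an illustrative stand-in, not pyworkflow's Pydantic model) shows why plain assignment is rejected while `object.__setattr__` still works:

```python
from dataclasses import dataclass, field


@dataclass(frozen=True)
class FrozenCtx:
    # Illustrative stand-in for a frozen context model; pyworkflow's
    # StepContext is a frozen Pydantic model, not a dataclass.
    step_id: str
    _cancellation_run_id: str | None = field(default=None, repr=False)


ctx = FrozenCtx(step_id="step-1")

try:
    ctx._cancellation_run_id = "run-123"  # type: ignore[misc]
except Exception as exc:  # FrozenInstanceError: frozen classes block __setattr__
    print(f"normal assignment rejected: {type(exc).__name__}")

# Frozen-ness is enforced in __setattr__, so the framework can still inject
# non-serialized metadata by going straight to object.__setattr__:
object.__setattr__(ctx, "_cancellation_run_id", "run-123")
print(ctx._cancellation_run_id)  # -> run-123
```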
@@ -47,7 +47,7 @@ class CancellationError(WorkflowError):
     Note:
         CancellationError is raised at checkpoint boundaries (before steps,
         sleeps, hooks), not during step execution. Long-running steps can
-        call ``ctx.check_cancellation()`` for cooperative cancellation.
+        call ``await ctx.check_cancellation()`` for cooperative cancellation.

     Example:
         @workflow
@@ -110,7 +110,7 @@ def step(
             ctx = get_context()

             # Check for cancellation before executing step
-            ctx.check_cancellation()
+            await ctx.check_cancellation()

             # Transient mode: execute directly without event sourcing
             # Retries are still supported via direct execution
@@ -243,7 +243,7 @@ def step(
             )

             # Check for cancellation before executing step
-            ctx.check_cancellation()
+            await ctx.check_cancellation()

             # Validate parameters before execution
             validate_step_parameters(func, args, kwargs, step_name)
@@ -233,6 +233,9 @@ async def execute_workflow_with_context(
         context_data = await storage.get_run_context(run_id)
         if context_data:
             step_ctx = context_class.from_dict(context_data)
+            # Inject cancellation metadata so check_cancellation() works
+            object.__setattr__(step_ctx, "_cancellation_run_id", run_id)
+            object.__setattr__(step_ctx, "_cancellation_storage", storage)
             step_context_token = _set_step_context_internal(step_ctx)

     try:
@@ -566,7 +566,7 @@ async def cancel_workflow(
     Cancellation does NOT interrupt a step that is already executing.
     If a step takes a long time, cancellation will only be detected after
     the step completes. For long-running steps that need mid-execution
-    cancellation, call ``ctx.check_cancellation()`` periodically within
+    cancellation, call ``await ctx.check_cancellation()`` periodically within
     the step function.

     Args:
@@ -649,9 +649,14 @@ async def cancel_workflow(
         current_status=run.status.value,
     )

+    # Always set the cancellation flag in storage so that distributed
+    # components (Celery workers, LangGraph tools, StepContext.check_cancellation())
+    # can detect the cancellation regardless of workflow status.
+    await storage.set_cancellation_flag(run_id)
+
     # Handle based on current status
     if run.status == RunStatus.SUSPENDED:
-        # For suspended workflows, update status to CANCELLED immediately
+        # For suspended workflows, also update status to CANCELLED immediately
         # The workflow will see cancellation when it tries to resume
         cancelled_event = create_workflow_cancelled_event(
             run_id=run_id,
@@ -667,10 +672,6 @@ async def cancel_workflow(
         )

     elif run.status in {RunStatus.RUNNING, RunStatus.PENDING}:
-        # For running/pending workflows, set cancellation flag
-        # The workflow will detect this at the next check point
-        await storage.set_cancellation_flag(run_id)
-
         logger.info(
             "Cancellation flag set for running workflow",
             run_id=run_id,
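The change above always writes the cancellation flag to storage, so any process that can reach the storage backend can observe it. A hedged sketch of that coordination pattern, using a toy in-memory flag store standing in for pyworkflow's storage backend (not the real `StorageBackend` class), shows the cancelling side and the worker side communicating only through the shared flag:

```python
import asyncio


class CancellationError(Exception):
    pass


class InMemoryFlagStore:
    """Toy shared store; in pyworkflow this role is played by the storage backend."""

    def __init__(self) -> None:
        self._flags: set[str] = set()

    async def set_cancellation_flag(self, run_id: str) -> None:
        self._flags.add(run_id)

    async def check_cancellation_flag(self, run_id: str) -> bool:
        return run_id in self._flags


async def worker(store: InMemoryFlagStore, run_id: str) -> None:
    # A worker only needs run_id + storage access to honor cancellation.
    for _ in range(1_000):
        if await store.check_cancellation_flag(run_id):
            raise CancellationError(f"{run_id} cancelled externally")
        await asyncio.sleep(0.01)  # simulate one unit of work


async def main() -> None:
    store = InMemoryFlagStore()
    task = asyncio.create_task(worker(store, "run_abc123"))
    await asyncio.sleep(0.05)
    await store.set_cancellation_flag("run_abc123")  # the cancel_workflow() side
    try:
        await task
    except CancellationError as exc:
        print(exc)


asyncio.run(main())
```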
{pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/pyworkflow/primitives/continue_as_new.py
RENAMED
@@ -14,7 +14,7 @@ from pyworkflow.context import get_context, has_context
 from pyworkflow.core.exceptions import ContinueAsNewSignal


-def continue_as_new(*args: Any, **kwargs: Any) -> NoReturn:
+async def continue_as_new(*args: Any, **kwargs: Any) -> NoReturn:
     """
     Complete current workflow and start a new execution with fresh event history.

@@ -48,7 +48,7 @@ def continue_as_new(*args: Any, **kwargs: Any) -> NoReturn:

             # Continue with new cursor if more items
             if new_cursor:
-                continue_as_new(cursor=new_cursor)
+                await continue_as_new(cursor=new_cursor)

             return "done"

@@ -59,7 +59,7 @@ def continue_as_new(*args: Any, **kwargs: Any) -> NoReturn:

             # Continue with next day
             next_date = get_next_date(date)
-            continue_as_new(date=next_date)
+            await continue_as_new(date=next_date)

         @workflow
         async def batch_processor(offset: int = 0, batch_size: int = 100):
@@ -69,7 +69,7 @@ def continue_as_new(*args: Any, **kwargs: Any) -> NoReturn:
             for item in items:
                 await process_item(item)
             # Continue with next batch
-            continue_as_new(offset=offset + batch_size, batch_size=batch_size)
+            await continue_as_new(offset=offset + batch_size, batch_size=batch_size)

             return f"Processed {offset} items total"
     """
@@ -88,7 +88,7 @@ def continue_as_new(*args: Any, **kwargs: Any) -> NoReturn:
     ctx = get_context()

     # Check for cancellation - don't continue if cancelled
-    ctx.check_cancellation()
+    await ctx.check_cancellation()

     logger.info(
         "Workflow continuing as new execution",
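`continue_as_new` is annotated `NoReturn` and, per the import in the first hunk, signals via `ContinueAsNewSignal`; making it async lets it await the cancellation check before raising. The following toy sketch (its own signal class and driver loop, not pyworkflow's engine) shows the raise-and-restart control flow and why code after `await continue_as_new(...)` never runs:

```python
import asyncio
from typing import Any, NoReturn


class ContinueAsNewSignal(Exception):
    """Toy analogue: carries the arguments for the next execution."""

    def __init__(self, **kwargs: Any) -> None:
        super().__init__("continue as new")
        self.kwargs = kwargs


async def continue_as_new(**kwargs: Any) -> NoReturn:
    # NoReturn is accurate: the call always raises, so any code after an
    # `await continue_as_new(...)` line is unreachable.
    raise ContinueAsNewSignal(**kwargs)


async def countdown_workflow(count: int) -> str:
    if count <= 0:
        return "Countdown complete!"
    await continue_as_new(count=count - 1)


async def run_with_restarts(count: int) -> str:
    # Toy driver: each signal ends the current execution and starts a fresh
    # one with the new arguments (in pyworkflow, with a fresh event history).
    kwargs: dict[str, Any] = {"count": count}
    while True:
        try:
            return await countdown_workflow(**kwargs)
        except ContinueAsNewSignal as sig:
            kwargs = sig.kwargs


print(asyncio.run(run_with_restarts(3)))  # -> Countdown complete!
```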
@@ -15,7 +15,7 @@ Note:
     Cancellation does NOT interrupt a step mid-execution. If a step takes
     a long time, cancellation will only be detected after it completes.
     For cooperative cancellation within long-running steps, call
-    ``ctx.check_cancellation()`` periodically.
+    ``await ctx.check_cancellation()`` periodically.

 Example:
     @workflow
{pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/integration/test_cancellation.py
RENAMED
@@ -319,7 +319,7 @@ class TestCancellationErrorHandling:
                 durable=False,
             )
             ctx.request_cancellation()
-            ctx.check_cancellation()
+            await ctx.check_cancellation()
         except CancellationError:
             cleanup_called = True
             raise
@@ -328,3 +328,87 @@ class TestCancellationErrorHandling:
         await workflow_with_cleanup()

         assert cleanup_called is True
+
+
+class TestCooperativeCancellation:
+    """Test cooperative cancellation for long-running steps via storage flag."""
+
+    @pytest.mark.asyncio
+    async def test_long_running_step_detects_storage_cancellation(self):
+        """Test that a step using await ctx.check_cancellation() detects external cancellation."""
+        from pyworkflow.context import LocalContext, reset_context, set_context
+
+        storage = InMemoryStorageBackend()
+
+        # Create a workflow run
+        run = WorkflowRun(
+            run_id="coop_cancel_run",
+            workflow_name="test_workflow",
+            status=RunStatus.RUNNING,
+        )
+        await storage.create_run(run)
+
+        ctx = LocalContext(
+            run_id="coop_cancel_run",
+            workflow_name="test_workflow",
+            storage=storage,
+            durable=True,
+        )
+        token = set_context(ctx)
+
+        items_processed = 0
+
+        try:
+            # Set cancellation flag externally (simulating cancel_workflow())
+            await storage.set_cancellation_flag("coop_cancel_run")
+
+            # Simulate a long-running step with cooperative cancellation
+            with pytest.raises(CancellationError):
+                for _i in range(100):
+                    await ctx.check_cancellation()
+                    items_processed += 1
+                    await asyncio.sleep(0.001)
+
+            # Should have been cancelled on the first iteration
+            assert items_processed == 0
+        finally:
+            reset_context(token)
+
+    @pytest.mark.asyncio
+    async def test_cooperative_cancellation_mid_loop(self):
+        """Test cancellation detected mid-loop after external flag is set."""
+        from pyworkflow.context import LocalContext, reset_context, set_context
+
+        storage = InMemoryStorageBackend()
+
+        run = WorkflowRun(
+            run_id="coop_mid_run",
+            workflow_name="test_workflow",
+            status=RunStatus.RUNNING,
+        )
+        await storage.create_run(run)
+
+        ctx = LocalContext(
+            run_id="coop_mid_run",
+            workflow_name="test_workflow",
+            storage=storage,
+            durable=True,
+        )
+        token = set_context(ctx)
+
+        items_processed = 0
+
+        try:
+            with pytest.raises(CancellationError):
+                for i in range(100):
+                    # Set cancellation after processing 5 items
+                    if i == 5:
+                        await storage.set_cancellation_flag("coop_mid_run")
+
+                    await ctx.check_cancellation()
+                    items_processed += 1
+
+            # Should have processed exactly 5 items before cancellation
+            assert items_processed == 5
+        finally:
+            reset_context(token)
{pyworkflow_engine-0.1.18 → pyworkflow_engine-0.1.19}/tests/integration/test_child_workflows.py
RENAMED
@@ -290,10 +290,13 @@ class TestBasicChildWorkflow:
         # Wait for child to complete
         await asyncio.sleep(0.5)

-        # Check children
+        # Check children - the child gets cancelled because the parent completed
+        # and _handle_parent_completion_local cancels non-terminal children.
+        # With cooperative cancellation, the child detects the storage flag
+        # and raises CancellationError before completing.
         children = await storage.get_children(run_id)
         assert len(children) == 1
-        assert children[0].status
+        assert children[0].status in (RunStatus.CANCELLED, RunStatus.FAILED)

     @pytest.mark.asyncio
     async def test_child_workflow_handle_result(self, setup_storage):