pyworkflow-engine 0.1.15__tar.gz → 0.1.16__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/PKG-INFO +1 -1
- pyworkflow_engine-0.1.16/docs/guides/brokers.mdx +367 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/docs/guides/cli.mdx +21 -1
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/docs/guides/configuration.mdx +24 -4
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/docs/quickstart.mdx +2 -2
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyproject.toml +1 -1
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/__init__.py +1 -1
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/celery/app.py +128 -9
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/celery/singleton.py +91 -6
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/cli/commands/worker.py +94 -2
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow_engine.egg-info/SOURCES.txt +2 -0
- pyworkflow_engine-0.1.16/tests/unit/test_cli_worker.py +268 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/unit/test_singleton.py +140 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/CLAUDE.md +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/DISTRIBUTED.md +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/LICENSE +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/MANIFEST.in +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/README.md +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/RELEASING.md +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/docs/concepts/cancellation.mdx +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/docs/concepts/continue-as-new.mdx +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/docs/concepts/events.mdx +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/docs/concepts/fault-tolerance.mdx +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/docs/concepts/hooks.mdx +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/docs/concepts/limitations.mdx +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/docs/concepts/schedules.mdx +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/docs/concepts/sleep.mdx +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/docs/concepts/step-context.mdx +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/docs/concepts/steps.mdx +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/docs/concepts/workflows.mdx +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/docs/introduction.mdx +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/__init__.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/celery/__init__.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/celery/durable/docker-compose.yml +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/celery/durable/pyworkflow.config.yaml +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/celery/durable/workflows/__init__.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/celery/durable/workflows/basic.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/celery/durable/workflows/batch_processing.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/celery/durable/workflows/cancellation.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/celery/durable/workflows/child_workflow_patterns.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/celery/durable/workflows/child_workflows.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/celery/durable/workflows/continue_as_new.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/celery/durable/workflows/fault_tolerance.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/celery/durable/workflows/hooks.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/celery/durable/workflows/idempotency.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/celery/durable/workflows/long_running.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/celery/durable/workflows/retries.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/celery/durable/workflows/schedules.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/celery/durable/workflows/step_context.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/celery/transient/01_basic_workflow.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/celery/transient/02_fault_tolerance.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/celery/transient/__init__.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/celery/transient/pyworkflow.config.yaml +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/local/__init__.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/local/durable/01_basic_workflow.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/local/durable/02_file_storage.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/local/durable/03_retries.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/local/durable/04_long_running.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/local/durable/05_event_log.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/local/durable/06_idempotency.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/local/durable/07_hooks.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/local/durable/08_cancellation.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/local/durable/09_child_workflows.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/local/durable/10_child_workflow_patterns.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/local/durable/11_continue_as_new.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/local/durable/12_schedules.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/local/durable/13_step_context.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/local/durable/__init__.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/local/transient/01_quick_tasks.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/local/transient/02_retries.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/local/transient/03_sleep.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/examples/local/transient/__init__.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/aws/__init__.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/aws/context.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/aws/handler.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/aws/testing.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/celery/__init__.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/celery/loop.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/celery/scheduler.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/celery/tasks.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/cli/__init__.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/cli/__main__.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/cli/commands/__init__.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/cli/commands/hooks.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/cli/commands/quickstart.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/cli/commands/runs.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/cli/commands/scheduler.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/cli/commands/schedules.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/cli/commands/setup.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/cli/commands/workflows.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/cli/output/__init__.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/cli/output/formatters.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/cli/output/styles.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/cli/utils/__init__.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/cli/utils/async_helpers.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/cli/utils/config.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/cli/utils/config_generator.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/cli/utils/discovery.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/cli/utils/docker_manager.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/cli/utils/interactive.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/cli/utils/storage.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/config.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/context/__init__.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/context/aws.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/context/base.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/context/local.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/context/mock.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/context/step_context.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/core/__init__.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/core/exceptions.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/core/registry.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/core/scheduled.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/core/step.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/core/validation.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/core/workflow.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/discovery.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/engine/__init__.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/engine/events.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/engine/executor.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/engine/replay.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/observability/__init__.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/observability/logging.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/primitives/__init__.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/primitives/child_handle.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/primitives/child_workflow.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/primitives/continue_as_new.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/primitives/define_hook.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/primitives/hooks.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/primitives/resume_hook.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/primitives/schedule.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/primitives/shield.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/primitives/sleep.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/runtime/__init__.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/runtime/base.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/runtime/celery.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/runtime/factory.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/runtime/local.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/scheduler/__init__.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/scheduler/local.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/serialization/__init__.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/serialization/decoder.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/serialization/encoder.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/storage/__init__.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/storage/base.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/storage/cassandra.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/storage/config.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/storage/dynamodb.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/storage/file.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/storage/memory.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/storage/mysql.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/storage/postgres.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/storage/schemas.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/storage/sqlite.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/utils/__init__.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/utils/duration.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/utils/schedule.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/setup.cfg +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/integration/__init__.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/integration/test_cancellation.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/integration/test_cassandra_storage.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/integration/test_child_workflows.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/integration/test_continue_as_new.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/integration/test_dynamodb_storage.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/integration/test_fault_tolerance.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/integration/test_schedule_storage.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/integration/test_singleton.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/integration/test_workflow_suspended.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/unit/__init__.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/unit/backends/__init__.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/unit/backends/test_cassandra_storage.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/unit/backends/test_dynamodb_storage.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/unit/backends/test_postgres_storage.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/unit/backends/test_sqlite_storage.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/unit/conftest.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/unit/test_cancellation.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/unit/test_child_workflows.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/unit/test_continue_as_new.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/unit/test_event_limits.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/unit/test_executor.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/unit/test_fault_tolerance.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/unit/test_hooks.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/unit/test_registry.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/unit/test_replay.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/unit/test_schedule_schemas.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/unit/test_schedule_utils.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/unit/test_scheduled_workflow.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/unit/test_step.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/unit/test_step_context.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/unit/test_validation.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/unit/test_workflow.py +0 -0
- {pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/tests/unit/test_workflow_suspended.py +0 -0
pyworkflow_engine-0.1.16/docs/guides/brokers.mdx (new file)
@@ -0,0 +1,367 @@
+---
+title: 'Celery Brokers'
+description: 'Configure message brokers for distributed workflow execution with Celery runtime'
+---
+
+## Overview
+
+When using the **Celery runtime** (`runtime: celery`), PyWorkflow requires a **message broker** to
+transport messages between your application and workers, and optionally a **result backend** to
+store task results.
+
+<Note>
+This guide only applies when using the Celery runtime for distributed execution. The local runtime
+(`runtime: local`) runs workflows in-process and does not require a broker.
+</Note>
+
+PyWorkflow supports several broker configurations:
+
+<CardGroup cols={2}>
+<Card title="Redis" icon="database">
+Simple, fast, and recommended for most deployments
+</Card>
+<Card title="Redis Sentinel" icon="shield-halved">
+High-availability Redis with automatic failover
+</Card>
+</CardGroup>
+
+---
+
+## Redis
+
+Redis is the recommended broker for most PyWorkflow deployments. It's simple to set up, provides
+excellent performance, and supports both broker and result backend functionality.
+
+### Basic Configuration
+
+<Tabs>
+<Tab title="Config File">
+```yaml
+# pyworkflow.config.yaml
+runtime: celery
+
+celery:
+  broker: redis://localhost:6379/0
+  result_backend: redis://localhost:6379/1
+```
+</Tab>
+<Tab title="Environment Variables">
+```bash
+export PYWORKFLOW_CELERY_BROKER=redis://localhost:6379/0
+export PYWORKFLOW_CELERY_RESULT_BACKEND=redis://localhost:6379/1
+```
+</Tab>
+<Tab title="Programmatic">
+```python
+import pyworkflow
+
+pyworkflow.configure(
+    default_runtime="celery",
+    celery_broker="redis://localhost:6379/0",
+)
+```
+</Tab>
+</Tabs>
+
+### URL Format
+
+```
+redis://[[username:]password@]host[:port][/database]
+```
+
+| Component | Description | Default |
+|-----------|-------------|---------|
+| `username` | Redis username (Redis 6+ ACL) | None |
+| `password` | Redis password | None |
+| `host` | Redis server hostname | `localhost` |
+| `port` | Redis server port | `6379` |
+| `database` | Redis database number | `0` |
+
+### Examples
+
+```yaml
+# Local Redis (default)
+celery:
+  broker: redis://localhost:6379/0
+
+# Redis with password
+celery:
+  broker: redis://:mypassword@localhost:6379/0
+
+# Redis with username and password (Redis 6+ ACL)
+celery:
+  broker: redis://myuser:mypassword@localhost:6379/0
+
+# Remote Redis
+celery:
+  broker: redis://redis.example.com:6379/0
+
+# Different databases for broker and backend
+celery:
+  broker: redis://localhost:6379/0
+  result_backend: redis://localhost:6379/1
+```
+
+### TLS/SSL Connection
+
+For encrypted connections, use the `rediss://` scheme:
+
+```yaml
+celery:
+  broker: rediss://redis.example.com:6379/0
+  result_backend: rediss://redis.example.com:6379/1
+```
+
+<Note>
+When using `rediss://`, ensure your Redis server is configured with TLS certificates.
+</Note>
+
+---
+
+## Redis Sentinel
+
+[Redis Sentinel](https://redis.io/docs/management/sentinel/) provides high availability for Redis
+through automatic failover. When a master node fails, Sentinel automatically promotes a replica
+to master, ensuring your workflows continue processing with minimal interruption.
+
+### When to Use Sentinel
+
+Use Redis Sentinel when you need:
+
+- **High availability**: Automatic failover when master fails
+- **Monitoring**: Constant health checks on Redis instances
+- **Notification**: Alerts when Redis instances change state
+- **Configuration provider**: Clients discover current master automatically
+
+### Basic Configuration
+
+<Tabs>
+<Tab title="Config File">
+```yaml
+# pyworkflow.config.yaml
+runtime: celery
+
+celery:
+  broker: sentinel://sentinel1:26379,sentinel2:26379,sentinel3:26379/0
+  result_backend: sentinel://sentinel1:26379,sentinel2:26379,sentinel3:26379/1
+  sentinel_master: mymaster
+```
+</Tab>
+<Tab title="Environment Variables">
+```bash
+export PYWORKFLOW_CELERY_BROKER=sentinel://sentinel1:26379,sentinel2:26379,sentinel3:26379/0
+export PYWORKFLOW_CELERY_RESULT_BACKEND=sentinel://sentinel1:26379,sentinel2:26379,sentinel3:26379/1
+export PYWORKFLOW_CELERY_SENTINEL_MASTER=mymaster
+```
+</Tab>
+<Tab title="CLI Arguments">
+```bash
+pyworkflow worker run --sentinel-master mymaster
+```
+</Tab>
+</Tabs>
+
+### URL Format
+
+```
+sentinel://[[password@]host1[:port1],host2[:port2],...]/database
+```
+
+| Component | Description | Default |
+|-----------|-------------|---------|
+| `password` | Sentinel password | None |
+| `host1,host2,...` | Comma-separated Sentinel hosts | Required |
+| `port` | Sentinel port for each host | `26379` |
+| `database` | Redis database number | `0` |
+
+### Configuration Options
+
+| Option | Environment Variable | CLI Flag | Default | Description |
+|--------|---------------------|----------|---------|-------------|
+| `sentinel_master` | `PYWORKFLOW_CELERY_SENTINEL_MASTER` | `--sentinel-master` | `mymaster` | Name of the Sentinel master group |
+
+### Examples
+
+```yaml
+# Basic Sentinel setup
+celery:
+  broker: sentinel://sentinel1:26379,sentinel2:26379,sentinel3:26379/0
+  result_backend: sentinel://sentinel1:26379,sentinel2:26379,sentinel3:26379/1
+  sentinel_master: mymaster
+
+# Sentinel with password
+celery:
+  broker: sentinel://sentinelpassword@sentinel1:26379,sentinel2:26379/0
+  sentinel_master: mymaster
+
+# Sentinel with custom ports
+celery:
+  broker: sentinel://sentinel1:26380,sentinel2:26381,sentinel3:26382/0
+  sentinel_master: redis-primary
+
+# Single Sentinel (not recommended for production)
+celery:
+  broker: sentinel://sentinel1:26379/0
+  sentinel_master: mymaster
+```
+
+### TLS/SSL with Sentinel
+
+For encrypted connections to Sentinel, use the `sentinel+ssl://` scheme:
+
+```yaml
+celery:
+  broker: sentinel+ssl://sentinel1:26379,sentinel2:26379,sentinel3:26379/0
+  result_backend: sentinel+ssl://sentinel1:26379,sentinel2:26379,sentinel3:26379/1
+  sentinel_master: mymaster
+```
+
+### Sentinel Architecture
+
+A typical Sentinel deployment consists of:
+
+```
+(Architecture diagram: Sentinel 1, Sentinel 2, and Sentinel 3 form a monitoring
+quorum over a Redis master; writes go to the master, which replicates to
+Replica 1, Replica 2, and Replica 3.)
+```
+
+<Warning>
+Always deploy at least **3 Sentinel instances** in production. Sentinel uses quorum-based
+decision making, and a single Sentinel cannot reliably detect failures.
+</Warning>
+
+---
+
+## Configuration Reference
+
+### Environment Variables
+
+| Variable | Description | Example |
+|----------|-------------|---------|
+| `PYWORKFLOW_CELERY_BROKER` | Celery broker URL | `redis://localhost:6379/0` |
+| `PYWORKFLOW_CELERY_RESULT_BACKEND` | Celery result backend URL | `redis://localhost:6379/1` |
+| `PYWORKFLOW_CELERY_SENTINEL_MASTER` | Sentinel master name | `mymaster` |
+
+### CLI Options
+
+| Option | Description |
+|--------|-------------|
+| `--sentinel-master` | Redis Sentinel master name (required for `sentinel://` URLs) |
+
+See the [CLI Guide](/guides/cli#worker-run) for all available worker options including
+autoscaling, task limits, and arbitrary Celery argument passthrough.
+
+### Config File Options
+
+```yaml
+celery:
+  # Broker URL (required)
+  broker: redis://localhost:6379/0
+
+  # Result backend URL (optional, defaults to broker)
+  result_backend: redis://localhost:6379/1
+
+  # Sentinel master name (required for sentinel:// URLs)
+  sentinel_master: mymaster
+```
+
+---
+
+## Choosing a Broker
+
+| Requirement | Recommended Broker |
+|-------------|-------------------|
+| Development/Testing | Redis (single instance) |
+| Production (simple) | Redis (single instance or managed service) |
+| Production (HA required) | Redis Sentinel |
+| Cloud deployment | Managed Redis (AWS ElastiCache, Azure Cache, GCP Memorystore) |
+
+<Tip>
+Most cloud providers offer managed Redis services with built-in high availability.
+These are often easier to operate than self-managed Sentinel clusters.
+</Tip>
+
+---
+
+## Troubleshooting
+
+### Connection Refused
+
+```
+Error: Connection refused (redis://localhost:6379/0)
+```
+
+**Solution**: Ensure Redis is running and accessible:
+
+```bash
+# Check if Redis is running
+redis-cli ping
+
+# Start Redis (if using Docker)
+docker run -d -p 6379:6379 redis:7-alpine
+```
+
+### Sentinel Master Not Found
+
+```
+Error: No master found for 'mymaster'
+```
+
+**Solutions**:
+1. Verify the master name matches your Sentinel configuration
+2. Check that Sentinel instances are running and healthy
+3. Ensure network connectivity between your application and Sentinel
+
+```bash
+# Check Sentinel status
+redis-cli -p 26379 SENTINEL master mymaster
+```
+
+### Authentication Failed
+
+```
+Error: NOAUTH Authentication required
+```
+
+**Solution**: Include password in the URL:
+
+```yaml
+celery:
+  broker: redis://:yourpassword@localhost:6379/0
+```
+
+---
+
+## Next Steps
+
+<CardGroup cols={2}>
+<Card title="CLI Guide" icon="terminal" href="/guides/cli">
+Learn CLI commands for worker management
+</Card>
+<Card title="Configuration" icon="gear" href="/guides/configuration">
+Complete configuration reference
+</Card>
+<Card title="Fault Tolerance" icon="shield" href="/concepts/fault-tolerance">
+Learn about automatic recovery
+</Card>
+<Card title="Workflows" icon="diagram-project" href="/concepts/workflows">
+Learn workflow concepts
+</Card>
+</CardGroup>
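The new guide relies on Sentinel's master-discovery mechanism without showing it outside of Celery. As a quick connectivity check before wiring these URLs into `pyworkflow.config.yaml`, the same discovery can be exercised directly with redis-py. This is only a sketch; the `sentinel1`-`sentinel3` hosts and the `mymaster` service name are the placeholder values from the examples above, not required names.

```python
# Sketch (not part of the package): confirm the Sentinel quorum can resolve the
# master that PyWorkflow's Celery broker URL will point at. Hostnames and the
# "mymaster" service name are the placeholders used in the guide above.
from redis.sentinel import Sentinel

sentinel = Sentinel(
    [("sentinel1", 26379), ("sentinel2", 26379), ("sentinel3", 26379)],
    socket_timeout=0.5,
)

# Ask the quorum which node is currently the master for the "mymaster" service.
host, port = sentinel.discover_master("mymaster")
print(f"current master: {host}:{port}")

# Obtain a client bound to the master; redis-py re-resolves it after a failover.
master = sentinel.master_for("mymaster", db=0, socket_timeout=0.5)
master.ping()
```

If `discover_master` raises `MasterNotFoundError`, that corresponds to the "No master found" case in the Troubleshooting section above.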
{pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/docs/guides/cli.mdx
@@ -416,10 +416,15 @@ pyworkflow worker run
 | `--workflow` | Only process workflow orchestration tasks |
 | `--step` | Only process step execution tasks |
 | `--schedule` | Only process scheduled resumption tasks |
-| `--concurrency N` | Number of worker processes (default:
+| `--concurrency N` | Number of worker processes (default: 1) |
 | `--loglevel LEVEL` | Log level: `debug`, `info`, `warning`, `error` |
 | `--hostname NAME` | Custom worker hostname |
 | `--beat` | Also start Celery Beat scheduler |
+| `--autoscale MIN,MAX` | Enable worker autoscaling (e.g., `2,10`) |
+| `--max-tasks-per-child N` | Replace worker child after N tasks |
+| `--prefetch-multiplier N` | Task prefetch count per worker process |
+| `--time-limit SECONDS` | Hard time limit for tasks |
+| `--soft-time-limit SECONDS` | Soft time limit for tasks |
 
 <Tabs>
 <Tab title="All Queues (Default)">
@@ -440,6 +445,21 @@ pyworkflow worker run
 pyworkflow worker run --schedule
 ```
 </Tab>
+<Tab title="Advanced Celery Options">
+```bash
+# Enable autoscaling (min 2, max 10 workers)
+pyworkflow worker run --step --autoscale 2,10
+
+# Set task limits
+pyworkflow worker run --max-tasks-per-child 100 --time-limit 300
+
+# Pass arbitrary Celery arguments after --
+pyworkflow worker run -- --max-memory-per-child=200000
+
+# Combine PyWorkflow options with Celery passthrough
+pyworkflow worker run --step --autoscale 2,8 -- --max-memory-per-child=150000
+```
+</Tab>
 </Tabs>
 
 <Tip>
{pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/docs/guides/configuration.mdx
@@ -542,19 +542,39 @@ print(f"Celery Broker: {config.celery_broker}")
 
 ---
 
+## Worker Configuration
+
+When running Celery workers, you can configure worker behavior through CLI options.
+See the [CLI Guide](/guides/cli#worker-run) for all available options including:
+
+- **Autoscaling**: Automatically scale worker processes based on load
+- **Task limits**: Control tasks per child and prefetch multiplier
+- **Time limits**: Set hard and soft time limits for tasks
+- **Celery passthrough**: Forward arbitrary arguments to Celery
+
+```bash
+# Example: Production worker with autoscaling and task limits
+pyworkflow worker run --step --autoscale 2,10 --max-tasks-per-child 100
+
+# Example: Pass arbitrary Celery options
+pyworkflow worker run -- --max-memory-per-child=200000
+```
+
+---
+
 ## Next Steps
 
 <CardGroup cols={2}>
 <Card title="CLI Guide" icon="terminal" href="/guides/cli">
 Learn CLI commands and options.
 </Card>
-<Card title="Celery
-Configure distributed execution
+<Card title="Celery Brokers" icon="server" href="/guides/brokers">
+Configure message brokers for distributed execution.
 </Card>
 <Card title="Storage Backends" icon="database" href="/concepts/storage">
 Choose the right storage backend.
 </Card>
-<Card title="
-
+<Card title="Fault Tolerance" icon="shield" href="/concepts/fault-tolerance">
+Learn about automatic recovery.
 </Card>
 </CardGroup>
{pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/docs/quickstart.mdx
@@ -51,7 +51,7 @@ Project Created!
 Next steps:
 
 1. Start a worker:
-$ pyworkflow worker
+$ pyworkflow worker run
 
 2. Run a workflow:
 $ pyworkflow workflows run process_order \
@@ -117,7 +117,7 @@ If you prefer to set up manually or need more control:
 EOF
 
 # 3. Start Celery worker
-pyworkflow worker
+pyworkflow worker run
 ```
 </Tab>
 </Tabs>
{pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyproject.toml
@@ -7,7 +7,7 @@ packages = [{include = "pyworkflow"}]
 
 [project]
 name = "pyworkflow-engine"
-version = "0.1.15"
+version = "0.1.16"
 description = "A Python implementation of durable, event-sourced workflows inspired by Vercel Workflow"
 readme = "README.md"
 requires-python = ">=3.11"
{pyworkflow_engine-0.1.15 → pyworkflow_engine-0.1.16}/pyworkflow/celery/app.py
@@ -13,6 +13,7 @@ garbage collector and Celery's saferepr module. It does not affect functionality
 """
 
 import os
+from typing import Any
 
 from celery import Celery
 from celery.signals import worker_init, worker_process_init, worker_shutdown
@@ -38,6 +39,74 @@ def _configure_worker_logging() -> None:
     _logging_configured = True
 
 
+def is_sentinel_url(url: str) -> bool:
+    """Check if URL uses sentinel:// or sentinel+ssl:// protocol."""
+    return url.startswith("sentinel://") or url.startswith("sentinel+ssl://")
+
+
+def parse_sentinel_url(url: str) -> tuple[list[tuple[str, int]], int, str | None]:
+    """
+    Parse a sentinel:// URL into sentinel hosts, database number, and password.
+
+    Format: sentinel://[password@]host1:port1,host2:port2/db_number
+
+    Args:
+        url: Sentinel URL (sentinel:// or sentinel+ssl://)
+
+    Returns:
+        Tuple of ([(host, port), ...], db_number, password or None)
+
+    Examples:
+        >>> parse_sentinel_url("sentinel://host1:26379,host2:26379/0")
+        ([('host1', 26379), ('host2', 26379)], 0, None)
+
+        >>> parse_sentinel_url("sentinel://mypassword@host1:26379/0")
+        ([('host1', 26379)], 0, 'mypassword')
+    """
+    # Remove protocol prefix
+    if url.startswith("sentinel+ssl://"):
+        url_without_protocol = url[len("sentinel+ssl://") :]
+    elif url.startswith("sentinel://"):
+        url_without_protocol = url[len("sentinel://") :]
+    else:
+        raise ValueError(f"Invalid sentinel URL: {url}")
+
+    # Extract password if present (password@hosts)
+    password: str | None = None
+    if "@" in url_without_protocol:
+        password, url_without_protocol = url_without_protocol.split("@", 1)
+
+    # Extract database number from path
+    db_number = 0
+    if "/" in url_without_protocol:
+        hosts_part, db_part = url_without_protocol.rsplit("/", 1)
+        # Handle query params in db part
+        if "?" in db_part:
+            db_part = db_part.split("?")[0]
+        if db_part:
+            db_number = int(db_part)
+    else:
+        hosts_part = url_without_protocol
+        # Handle query params
+        if "?" in hosts_part:
+            hosts_part = hosts_part.split("?")[0]
+
+    # Parse hosts
+    sentinels: list[tuple[str, int]] = []
+    for host_port in hosts_part.split(","):
+        host_port = host_port.strip()
+        if not host_port:
+            continue
+        if ":" in host_port:
+            host, port_str = host_port.rsplit(":", 1)
+            sentinels.append((host, int(port_str)))
+        else:
+            # Default Sentinel port
+            sentinels.append((host_port, 26379))
+
+    return sentinels, db_number, password
+
+
 def discover_workflows(modules: list[str] | None = None) -> None:
     """
     Discover and import workflow modules to register workflows with Celery workers.
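Because the docstring above fixes the exact return shape of `parse_sentinel_url`, its behavior is easy to pin down independently of the tests shipped in this release. A minimal pytest sketch derived only from the documented format; the import path matches `pyworkflow/celery/app.py` above, and the test names are made up:

```python
# Illustrative tests (not from the package) based on the documented behavior of
# parse_sentinel_url and is_sentinel_url.
import pytest

from pyworkflow.celery.app import is_sentinel_url, parse_sentinel_url


def test_multiple_hosts_and_db() -> None:
    hosts, db, password = parse_sentinel_url("sentinel://host1:26379,host2:26379/0")
    assert hosts == [("host1", 26379), ("host2", 26379)]
    assert db == 0
    assert password is None


def test_password_and_default_port() -> None:
    # A host without an explicit port falls back to the default Sentinel port.
    hosts, db, password = parse_sentinel_url("sentinel+ssl://secret@host1/2")
    assert hosts == [("host1", 26379)]
    assert db == 2
    assert password == "secret"


def test_rejects_non_sentinel_urls() -> None:
    assert not is_sentinel_url("redis://localhost:6379/0")
    with pytest.raises(ValueError):
        parse_sentinel_url("redis://localhost:6379/0")
```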
@@ -79,6 +148,9 @@ def create_celery_app(
     broker_url: str | None = None,
     result_backend: str | None = None,
     app_name: str = "pyworkflow",
+    sentinel_master_name: str | None = None,
+    broker_transport_options: dict[str, Any] | None = None,
+    result_backend_transport_options: dict[str, Any] | None = None,
 ) -> Celery:
     """
     Create and configure a Celery application for PyWorkflow.
@@ -87,6 +159,9 @@ def create_celery_app(
         broker_url: Celery broker URL. Priority: parameter > PYWORKFLOW_CELERY_BROKER env var > redis://localhost:6379/0
         result_backend: Result backend URL. Priority: parameter > PYWORKFLOW_CELERY_RESULT_BACKEND env var > redis://localhost:6379/1
         app_name: Application name
+        sentinel_master_name: Redis Sentinel master name. Priority: parameter > PYWORKFLOW_CELERY_SENTINEL_MASTER env var > "mymaster"
+        broker_transport_options: Additional transport options for the broker (merged with defaults)
+        result_backend_transport_options: Additional transport options for the result backend (merged with defaults)
 
     Returns:
         Configured Celery application
@@ -94,6 +169,7 @@ def create_celery_app(
     Environment Variables:
         PYWORKFLOW_CELERY_BROKER: Celery broker URL (used if broker_url param not provided)
         PYWORKFLOW_CELERY_RESULT_BACKEND: Result backend URL (used if result_backend param not provided)
+        PYWORKFLOW_CELERY_SENTINEL_MASTER: Sentinel master name (used if sentinel_master_name param not provided)
 
     Examples:
         # Default configuration (uses env vars if set, otherwise localhost Redis)
@@ -110,6 +186,13 @@ def create_celery_app(
             broker_url="amqp://guest:guest@rabbitmq:5672//",
             result_backend="redis://localhost:6379/1"
         )
+
+        # Redis Sentinel for high availability
+        app = create_celery_app(
+            broker_url="sentinel://sentinel1:26379,sentinel2:26379,sentinel3:26379/0",
+            result_backend="sentinel://sentinel1:26379,sentinel2:26379,sentinel3:26379/1",
+            sentinel_master_name="mymaster"
+        )
     """
     # Priority: parameter > environment variable > hardcoded default
     broker_url = broker_url or os.getenv("PYWORKFLOW_CELERY_BROKER") or "redis://localhost:6379/0"
@@ -119,6 +202,45 @@ def create_celery_app(
         or "redis://localhost:6379/1"
     )
 
+    # Detect broker and backend types
+    is_sentinel_broker = is_sentinel_url(broker_url)
+    is_sentinel_backend = is_sentinel_url(result_backend)
+    is_redis_broker = broker_url.startswith("redis://") or broker_url.startswith("rediss://")
+
+    # Get Sentinel master name from param, env, or default
+    master_name = (
+        sentinel_master_name or os.getenv("PYWORKFLOW_CELERY_SENTINEL_MASTER") or "mymaster"
+    )
+
+    # Build transport options for broker
+    if is_sentinel_broker:
+        sentinel_broker_opts: dict[str, Any] = {"master_name": master_name}
+        # Merge with user options (user takes precedence)
+        final_broker_opts: dict[str, Any] = {
+            "visibility_timeout": 3600,
+            **sentinel_broker_opts,
+            **(broker_transport_options or {}),
+        }
+    else:
+        final_broker_opts = {
+            "visibility_timeout": 3600,
+            **(broker_transport_options or {}),
+        }
+
+    # Build transport options for result backend
+    if is_sentinel_backend:
+        sentinel_backend_opts: dict[str, Any] = {"master_name": master_name}
+        final_backend_opts: dict[str, Any] = {
+            "visibility_timeout": 3600,
+            **sentinel_backend_opts,
+            **(result_backend_transport_options or {}),
+        }
+    else:
+        final_backend_opts = {
+            "visibility_timeout": 3600,
+            **(result_backend_transport_options or {}),
+        }
+
     app = Celery(
         app_name,
         broker=broker_url,
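Note the merge order in the transport-option dicts above: defaults first, then the injected `master_name`, then any caller-supplied options, so caller values win. A hedged sketch of relying on that precedence; the 7200-second visibility timeout and the `socket_timeout` value are illustrative only.

```python
# Sketch (not from the package): caller-supplied transport options override the
# defaults that create_celery_app injects. Values here are illustrative only.
from pyworkflow.celery.app import create_celery_app

app = create_celery_app(
    broker_url="sentinel://sentinel1:26379,sentinel2:26379,sentinel3:26379/0",
    result_backend="sentinel://sentinel1:26379,sentinel2:26379,sentinel3:26379/1",
    sentinel_master_name="mymaster",
    # Listed last in the merge, so this replaces the 3600s default.
    broker_transport_options={"visibility_timeout": 7200, "socket_timeout": 5},
)

assert app.conf.broker_transport_options["visibility_timeout"] == 7200
assert app.conf.broker_transport_options["master_name"] == "mymaster"
```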
@@ -138,12 +260,8 @@ def create_celery_app(
         enable_utc=True,
         # Broker transport options - prevent task redelivery
         # See: https://github.com/celery/celery/issues/5935
-        broker_transport_options={
-            "visibility_timeout": 3600,
-        },
-        result_backend_transport_options={
-            "visibility_timeout": 3600,
-        },
+        broker_transport_options=final_broker_opts,
+        result_backend_transport_options=final_backend_opts,
         # Task routing
         task_default_queue="pyworkflow.default",
         task_default_exchange="pyworkflow",
@@ -194,12 +312,13 @@ def create_celery_app(
         worker_task_log_format="[%(asctime)s: %(levelname)s/%(processName)s] [%(task_name)s(%(task_id)s)] %(message)s",
     )
 
-    # Configure singleton locking for Redis brokers
+    # Configure singleton locking for Redis or Sentinel brokers
     # This enables distributed locking to prevent duplicate task execution
-    is_redis_broker
-    if is_redis_broker:
+    if is_redis_broker or is_sentinel_broker:
        app.conf.update(
             singleton_backend_url=broker_url,
+            singleton_backend_is_sentinel=is_sentinel_broker,
+            singleton_sentinel_master=master_name if is_sentinel_broker else None,
             singleton_key_prefix="pyworkflow:lock:",
             singleton_lock_expiry=3600,  # 1 hour TTL (safety net)
         )
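The `singleton_*` values are stored as ordinary Celery settings, so downstream code can read them back from `app.conf`; how the updated `pyworkflow/celery/singleton.py` actually consumes them is not shown in the hunks above. A small sketch of that lookup, assuming only what this hunk sets:

```python
# Sketch (not from the package): the singleton-lock settings land in app.conf
# whenever the broker is Redis or Sentinel. Consumption by singleton.py is not
# shown in this diff; this only demonstrates that the keys are readable.
from pyworkflow.celery.app import create_celery_app

app = create_celery_app(
    broker_url="sentinel://sentinel1:26379,sentinel2:26379/0",
    sentinel_master_name="mymaster",
)

print(app.conf.get("singleton_backend_url"))          # the sentinel:// broker URL
print(app.conf.get("singleton_backend_is_sentinel"))  # True
print(app.conf.get("singleton_sentinel_master"))      # "mymaster"
print(app.conf.get("singleton_key_prefix"))           # "pyworkflow:lock:"
print(app.conf.get("singleton_lock_expiry"))          # 3600
```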