pyworkflow-engine 0.1.19.tar.gz → 0.1.21.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (191)
  1. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/CLAUDE.md +4 -4
  2. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/DISTRIBUTED.md +5 -5
  3. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/PKG-INFO +1 -1
  4. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/docs/guides/cli.mdx +1 -2
  5. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyproject.toml +1 -1
  6. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/__init__.py +1 -1
  7. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/celery/tasks.py +17 -12
  8. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/cli/commands/worker.py +2 -13
  9. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/core/step.py +20 -0
  10. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/storage/base.py +29 -0
  11. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/storage/cassandra.py +25 -0
  12. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/storage/dynamodb.py +31 -0
  13. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/storage/file.py +28 -0
  14. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/storage/memory.py +14 -0
  15. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/storage/mysql.py +20 -0
  16. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/storage/postgres.py +24 -0
  17. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/storage/sqlite.py +20 -0
  18. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/unit/test_cli_worker.py +0 -9
  19. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/LICENSE +0 -0
  20. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/MANIFEST.in +0 -0
  21. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/README.md +0 -0
  22. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/RELEASING.md +0 -0
  23. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/docs/concepts/cancellation.mdx +0 -0
  24. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/docs/concepts/continue-as-new.mdx +0 -0
  25. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/docs/concepts/events.mdx +0 -0
  26. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/docs/concepts/fault-tolerance.mdx +0 -0
  27. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/docs/concepts/hooks.mdx +0 -0
  28. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/docs/concepts/limitations.mdx +0 -0
  29. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/docs/concepts/schedules.mdx +0 -0
  30. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/docs/concepts/sleep.mdx +0 -0
  31. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/docs/concepts/step-context.mdx +0 -0
  32. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/docs/concepts/steps.mdx +0 -0
  33. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/docs/concepts/workflows.mdx +0 -0
  34. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/docs/guides/brokers.mdx +0 -0
  35. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/docs/guides/configuration.mdx +0 -0
  36. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/docs/introduction.mdx +0 -0
  37. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/docs/quickstart.mdx +0 -0
  38. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/__init__.py +0 -0
  39. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/celery/__init__.py +0 -0
  40. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/celery/durable/docker-compose.yml +0 -0
  41. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/celery/durable/pyworkflow.config.yaml +0 -0
  42. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/celery/durable/workflows/__init__.py +0 -0
  43. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/celery/durable/workflows/basic.py +0 -0
  44. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/celery/durable/workflows/batch_processing.py +0 -0
  45. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/celery/durable/workflows/cancellation.py +0 -0
  46. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/celery/durable/workflows/child_workflow_patterns.py +0 -0
  47. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/celery/durable/workflows/child_workflows.py +0 -0
  48. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/celery/durable/workflows/continue_as_new.py +0 -0
  49. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/celery/durable/workflows/fault_tolerance.py +0 -0
  50. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/celery/durable/workflows/hooks.py +0 -0
  51. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/celery/durable/workflows/idempotency.py +0 -0
  52. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/celery/durable/workflows/long_running.py +0 -0
  53. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/celery/durable/workflows/retries.py +0 -0
  54. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/celery/durable/workflows/schedules.py +0 -0
  55. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/celery/durable/workflows/step_context.py +0 -0
  56. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/celery/transient/01_basic_workflow.py +0 -0
  57. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/celery/transient/02_fault_tolerance.py +0 -0
  58. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/celery/transient/__init__.py +0 -0
  59. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/celery/transient/pyworkflow.config.yaml +0 -0
  60. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/local/__init__.py +0 -0
  61. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/local/durable/01_basic_workflow.py +0 -0
  62. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/local/durable/02_file_storage.py +0 -0
  63. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/local/durable/03_retries.py +0 -0
  64. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/local/durable/04_long_running.py +0 -0
  65. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/local/durable/05_event_log.py +0 -0
  66. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/local/durable/06_idempotency.py +0 -0
  67. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/local/durable/07_hooks.py +0 -0
  68. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/local/durable/08_cancellation.py +0 -0
  69. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/local/durable/09_child_workflows.py +0 -0
  70. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/local/durable/10_child_workflow_patterns.py +0 -0
  71. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/local/durable/11_continue_as_new.py +0 -0
  72. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/local/durable/12_schedules.py +0 -0
  73. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/local/durable/13_step_context.py +0 -0
  74. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/local/durable/__init__.py +0 -0
  75. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/local/transient/01_quick_tasks.py +0 -0
  76. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/local/transient/02_retries.py +0 -0
  77. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/local/transient/03_sleep.py +0 -0
  78. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/examples/local/transient/__init__.py +0 -0
  79. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/aws/__init__.py +0 -0
  80. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/aws/context.py +0 -0
  81. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/aws/handler.py +0 -0
  82. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/aws/testing.py +0 -0
  83. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/celery/__init__.py +0 -0
  84. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/celery/app.py +0 -0
  85. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/celery/loop.py +0 -0
  86. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/celery/scheduler.py +0 -0
  87. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/celery/singleton.py +0 -0
  88. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/cli/__init__.py +0 -0
  89. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/cli/__main__.py +0 -0
  90. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/cli/commands/__init__.py +0 -0
  91. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/cli/commands/hooks.py +0 -0
  92. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/cli/commands/quickstart.py +0 -0
  93. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/cli/commands/runs.py +0 -0
  94. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/cli/commands/scheduler.py +0 -0
  95. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/cli/commands/schedules.py +0 -0
  96. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/cli/commands/setup.py +0 -0
  97. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/cli/commands/workflows.py +0 -0
  98. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/cli/output/__init__.py +0 -0
  99. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/cli/output/formatters.py +0 -0
  100. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/cli/output/styles.py +0 -0
  101. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/cli/utils/__init__.py +0 -0
  102. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/cli/utils/async_helpers.py +0 -0
  103. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/cli/utils/config.py +0 -0
  104. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/cli/utils/config_generator.py +0 -0
  105. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/cli/utils/discovery.py +0 -0
  106. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/cli/utils/docker_manager.py +0 -0
  107. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/cli/utils/interactive.py +0 -0
  108. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/cli/utils/storage.py +0 -0
  109. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/config.py +0 -0
  110. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/context/__init__.py +0 -0
  111. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/context/aws.py +0 -0
  112. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/context/base.py +0 -0
  113. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/context/local.py +0 -0
  114. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/context/mock.py +0 -0
  115. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/context/step_context.py +0 -0
  116. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/core/__init__.py +0 -0
  117. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/core/exceptions.py +0 -0
  118. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/core/registry.py +0 -0
  119. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/core/scheduled.py +0 -0
  120. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/core/validation.py +0 -0
  121. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/core/workflow.py +0 -0
  122. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/discovery.py +0 -0
  123. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/engine/__init__.py +0 -0
  124. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/engine/events.py +0 -0
  125. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/engine/executor.py +0 -0
  126. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/engine/replay.py +0 -0
  127. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/observability/__init__.py +0 -0
  128. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/observability/logging.py +0 -0
  129. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/primitives/__init__.py +0 -0
  130. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/primitives/child_handle.py +0 -0
  131. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/primitives/child_workflow.py +0 -0
  132. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/primitives/continue_as_new.py +0 -0
  133. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/primitives/define_hook.py +0 -0
  134. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/primitives/hooks.py +0 -0
  135. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/primitives/resume_hook.py +0 -0
  136. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/primitives/schedule.py +0 -0
  137. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/primitives/shield.py +0 -0
  138. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/primitives/sleep.py +0 -0
  139. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/runtime/__init__.py +0 -0
  140. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/runtime/base.py +0 -0
  141. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/runtime/celery.py +0 -0
  142. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/runtime/factory.py +0 -0
  143. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/runtime/local.py +0 -0
  144. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/scheduler/__init__.py +0 -0
  145. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/scheduler/local.py +0 -0
  146. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/serialization/__init__.py +0 -0
  147. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/serialization/decoder.py +0 -0
  148. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/serialization/encoder.py +0 -0
  149. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/storage/__init__.py +0 -0
  150. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/storage/config.py +0 -0
  151. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/storage/schemas.py +0 -0
  152. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/utils/__init__.py +0 -0
  153. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/utils/duration.py +0 -0
  154. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow/utils/schedule.py +0 -0
  155. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/pyworkflow_engine.egg-info/SOURCES.txt +0 -0
  156. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/setup.cfg +0 -0
  157. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/integration/__init__.py +0 -0
  158. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/integration/test_cancellation.py +0 -0
  159. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/integration/test_cassandra_storage.py +0 -0
  160. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/integration/test_child_workflows.py +0 -0
  161. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/integration/test_continue_as_new.py +0 -0
  162. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/integration/test_dynamodb_storage.py +0 -0
  163. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/integration/test_fault_tolerance.py +0 -0
  164. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/integration/test_schedule_storage.py +0 -0
  165. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/integration/test_singleton.py +0 -0
  166. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/integration/test_workflow_suspended.py +0 -0
  167. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/unit/__init__.py +0 -0
  168. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/unit/backends/__init__.py +0 -0
  169. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/unit/backends/test_cassandra_storage.py +0 -0
  170. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/unit/backends/test_dynamodb_storage.py +0 -0
  171. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/unit/backends/test_postgres_storage.py +0 -0
  172. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/unit/backends/test_sqlite_storage.py +0 -0
  173. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/unit/conftest.py +0 -0
  174. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/unit/test_cancellation.py +0 -0
  175. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/unit/test_child_workflows.py +0 -0
  176. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/unit/test_continue_as_new.py +0 -0
  177. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/unit/test_event_limits.py +0 -0
  178. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/unit/test_executor.py +0 -0
  179. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/unit/test_fault_tolerance.py +0 -0
  180. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/unit/test_hooks.py +0 -0
  181. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/unit/test_registry.py +0 -0
  182. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/unit/test_replay.py +0 -0
  183. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/unit/test_schedule_schemas.py +0 -0
  184. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/unit/test_schedule_utils.py +0 -0
  185. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/unit/test_scheduled_workflow.py +0 -0
  186. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/unit/test_singleton.py +0 -0
  187. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/unit/test_step.py +0 -0
  188. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/unit/test_step_context.py +0 -0
  189. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/unit/test_validation.py +0 -0
  190. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/unit/test_workflow.py +0 -0
  191. {pyworkflow_engine-0.1.19 → pyworkflow_engine-0.1.21}/tests/unit/test_workflow_suspended.py +0 -0
CLAUDE.md
@@ -401,7 +401,7 @@ task_routes = {
 celery -A pyworkflow.celery.tasks worker -Q workflows -n workflow@%h

 # Step worker (scalable)
-celery -A pyworkflow.celery.tasks worker -Q steps -n step@%h --concurrency=4
+celery -A pyworkflow.celery.tasks worker -Q steps -n step@%h --autoscale=10,2
 ```

 ### Logging with Loguru
@@ -604,9 +604,9 @@ result["modified"] = True
 - **Production (medium)**: Redis (fast, in-memory)
 - **Production (large)**: PostgreSQL (scalable, full SQL)

-### Celery Concurrency
-- Workflow workers: Low concurrency (lightweight orchestration)
-- Step workers: High concurrency (actual work)
+### Celery Autoscaling
+- Workflow workers: Low autoscale range (lightweight orchestration), e.g. `--autoscale=4,1`
+- Step workers: High autoscale range (actual work), e.g. `--autoscale=10,2`
 - Scale step workers horizontally as needed

 ## References
DISTRIBUTED.md
@@ -70,13 +70,13 @@ docker run -d -p 6379:6379 redis:7-alpine
 celery -A pyworkflow.celery.app worker \
   --loglevel=info \
   --queues=pyworkflow.steps \
-  --concurrency=4
+  --autoscale=10,2

 # Terminal 2: Start worker for workflow orchestration
 celery -A pyworkflow.celery.app worker \
   --loglevel=info \
   --queues=pyworkflow.workflows \
-  --concurrency=2
+  --autoscale=4,1

 # Terminal 3: Start Celery Beat for scheduled tasks (sleep resumption)
 celery -A pyworkflow.celery.app beat --loglevel=info
@@ -273,13 +273,13 @@ celery-exporter --broker-url=redis://localhost:6379/0

 ```bash
 # CPU-bound tasks
-celery -A pyworkflow.celery.app worker --concurrency=8 --pool=prefork
+celery -A pyworkflow.celery.app worker --autoscale=8,2 --pool=prefork

 # I/O-bound tasks
-celery -A pyworkflow.celery.app worker --concurrency=100 --pool=gevent
+celery -A pyworkflow.celery.app worker --autoscale=100,10 --pool=gevent

 # Mixed workload
-celery -A pyworkflow.celery.app worker --concurrency=16 --pool=eventlet
+celery -A pyworkflow.celery.app worker --autoscale=16,4 --pool=eventlet
 ```

 ### Queue Priorities
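Note that Celery reads `--autoscale=10,2` as a maximum of 10 and a minimum of 2 pool processes, growing and shrinking with load. As a minimal illustration (not part of this diff), the same step worker could be started programmatically; the `app` attribute name in `pyworkflow.celery.app` is an assumption:

```python
# Illustrative sketch only: start a step worker with autoscaling from Python
# instead of the CLI. Assumes pyworkflow.celery.app exposes its Celery
# instance as `app` (attribute name not shown in this diff).
from pyworkflow.celery.app import app

app.worker_main(
    argv=[
        "worker",
        "--loglevel=INFO",
        "--queues=pyworkflow.steps",
        "--autoscale=10,2",  # Celery treats this as max=10, min=2 processes
        "--pool=prefork",
    ]
)
```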
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pyworkflow-engine
-Version: 0.1.19
+Version: 0.1.21
 Summary: A Python implementation of durable, event-sourced workflows inspired by Vercel Workflow
 Author: PyWorkflow Contributors
 License: MIT
docs/guides/cli.mdx
@@ -416,7 +416,6 @@ pyworkflow worker run
 | `--workflow` | Only process workflow orchestration tasks |
 | `--step` | Only process step execution tasks |
 | `--schedule` | Only process scheduled resumption tasks |
-| `--concurrency N` | Number of worker processes (default: 1) |
 | `--loglevel LEVEL` | Log level: `debug`, `info`, `warning`, `error` |
 | `--hostname NAME` | Custom worker hostname |
 | `--beat` | Also start Celery Beat scheduler |
@@ -439,7 +438,7 @@ pyworkflow worker run
 pyworkflow worker run --workflow

 # Terminal 2: Step execution (scale this for heavy work)
-pyworkflow worker run --step --concurrency 4
+pyworkflow worker run --step --autoscale 10,2

 # Terminal 3: Scheduled tasks
 pyworkflow worker run --schedule
pyproject.toml
@@ -7,7 +7,7 @@ packages = [{include = "pyworkflow"}]

 [project]
 name = "pyworkflow-engine"
-version = "0.1.19"
+version = "0.1.21"
 description = "A Python implementation of durable, event-sourced workflows inspired by Vercel Workflow"
 readme = "README.md"
 requires-python = ">=3.11"
pyworkflow/__init__.py
@@ -29,7 +29,7 @@ Quick Start:
     >>> run_id = await start(my_workflow, "Alice")
 """

-__version__ = "0.1.19"
+__version__ = "0.1.21"

 # Configuration
 from pyworkflow.config import (
pyworkflow/celery/tasks.py
@@ -1154,8 +1154,14 @@ async def _recover_workflow_on_worker(
         recovery_attempt=run.recovery_attempts,
     )

-    # Update status to RUNNING (from RUNNING or INTERRUPTED)
-    await storage.update_run_status(run_id=run_id, status=RunStatus.RUNNING)
+    # Atomically claim the run for recovery.
+    # The run may be in INTERRUPTED or RUNNING state after a worker crash.
+    # Try INTERRUPTED -> RUNNING first (most common recovery path).
+    # If the run is already RUNNING, just set it to RUNNING (idempotent).
+    claimed = await storage.try_claim_run(run_id, RunStatus.INTERRUPTED, RunStatus.RUNNING)
+    if not claimed:
+        # May already be RUNNING from a previous partial recovery - update status directly
+        await storage.update_run_status(run_id=run_id, status=RunStatus.RUNNING)

     # Load event log for replay
     events = await storage.get_events(run_id)
@@ -2048,14 +2054,14 @@ async def _resume_workflow_on_worker(
         )
         return None

-    # Prevent duplicate resume execution
+    # Atomically claim the run: SUSPENDED -> RUNNING
     # Multiple resume tasks can be scheduled for the same workflow (e.g., race
-    # condition between step completion and suspension handler). Only proceed
-    # if the workflow is actually SUSPENDED. If status is RUNNING, another
-    # resume task got there first.
-    if run.status != RunStatus.SUSPENDED:
+    # condition between step completion and suspension handler). Only one
+    # succeeds; duplicates see the claim fail and return.
+    claimed = await storage.try_claim_run(run_id, RunStatus.SUSPENDED, RunStatus.RUNNING)
+    if not claimed:
         logger.info(
-            f"Workflow status is {run.status.value}, not SUSPENDED - skipping duplicate resume",
+            "Workflow status is not SUSPENDED (already claimed) - skipping duplicate resume",
             run_id=run_id,
             workflow_name=run.workflow_name,
         )
@@ -2075,6 +2081,8 @@
             workflow_name=run.workflow_name,
             triggered_by_hook_id=triggered_by_hook_id,
         )
+        # Revert status back to SUSPENDED since we won't actually resume
+        await storage.update_run_status(run_id=run_id, status=RunStatus.SUSPENDED)
         return None

     # Check for cancellation flag
@@ -2084,7 +2092,7 @@
         f"Resuming workflow execution on worker: {run.workflow_name}",
         run_id=run_id,
         workflow_name=run.workflow_name,
-        current_status=run.status.value,
+        current_status="running",
         cancellation_requested=cancellation_requested,
     )

@@ -2103,9 +2111,6 @@
     args = deserialize_args(run.input_args)
     kwargs = deserialize_kwargs(run.input_kwargs)

-    # Update status to running
-    await storage.update_run_status(run_id=run_id, status=RunStatus.RUNNING)
-
    # Execute workflow with event replay
    try:
        result = await execute_workflow_with_context(
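The resume path above relies on `try_claim_run` being a single compare-and-swap: when two resume tasks race, exactly one wins the SUSPENDED → RUNNING transition and the other skips. A self-contained sketch of that semantics follows; `ToyStore` is illustrative only, and the real backends implement the claim against their own storage, as the hunks below show.

```python
# Self-contained illustration of the compare-and-swap claim used above.
import asyncio


class ToyStore:
    """Toy stand-in for a storage backend; not part of pyworkflow."""

    def __init__(self) -> None:
        self._status: dict[str, str] = {"run-123": "SUSPENDED"}
        self._lock = asyncio.Lock()

    async def try_claim_run(self, run_id: str, from_status: str, to_status: str) -> bool:
        # Check-and-set under a lock: only one caller sees the expected status.
        async with self._lock:
            if self._status.get(run_id) != from_status:
                return False
            self._status[run_id] = to_status
            return True


async def resume(store: ToyStore, worker: str) -> None:
    if await store.try_claim_run("run-123", "SUSPENDED", "RUNNING"):
        print(f"{worker}: claimed the run, resuming")
    else:
        print(f"{worker}: claim failed, skipping duplicate resume")


async def main() -> None:
    store = ToyStore()
    # Both workers try to resume the same run; only one claim succeeds.
    await asyncio.gather(resume(store, "worker-a"), resume(store, "worker-b"))


asyncio.run(main())
```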
pyworkflow/cli/commands/worker.py
@@ -45,13 +45,6 @@ def worker() -> None:
     is_flag=True,
     help="Only process scheduled resumption tasks (pyworkflow.schedules queue)",
 )
-@click.option(
-    "--concurrency",
-    "-c",
-    type=int,
-    default=1,
-    help="Number of worker processes (default: 1)",
-)
 @click.option(
     "--loglevel",
     "-l",
@@ -116,7 +109,6 @@ def run_worker(
     queue_workflow: bool,
     queue_step: bool,
     queue_schedule: bool,
-    concurrency: int | None,
     loglevel: str,
     hostname: str | None,
     beat: bool,
@@ -144,9 +136,6 @@ def run_worker(
     # Start a workflow orchestration worker only
     pyworkflow worker run --workflow

-    # Start a step execution worker (for heavy computation)
-    pyworkflow worker run --step --concurrency 4
-
     # Start a schedule worker (for sleep resumption)
     pyworkflow worker run --schedule

@@ -229,7 +218,8 @@ def run_worker(
     if broker_url.startswith("sentinel://") or broker_url.startswith("sentinel+ssl://"):
         print_info(f"Sentinel master: {sentinel_master_name or 'mymaster'}")
     print_info(f"Queues: {', '.join(queues)}")
-    print_info(f"Concurrency: {concurrency}")
+    if autoscale:
+        print_info(f"Autoscale: {autoscale} (min,max)")
     print_info(f"Pool: {pool}")
     if extra_args:
         print_info(f"Extra args: {' '.join(extra_args)}")
@@ -283,7 +273,6 @@ def run_worker(
     worker_args = [
         "worker",
         f"--loglevel={loglevel.upper()}",
-        f"--concurrency={concurrency}",  # Always set (default: 1)
         f"--pool={pool}",  # Always set (default: prefork)
     ]

pyworkflow/core/step.py
@@ -595,6 +595,7 @@ async def _dispatch_step_to_celery(
     """
     from pyworkflow.celery.tasks import execute_step_task
     from pyworkflow.core.exceptions import SuspensionSignal
+    from pyworkflow.engine.events import EventType

     logger.info(
         f"Dispatching step to Celery worker: {step_name}",
@@ -602,6 +603,25 @@
         step_id=step_id,
     )

+    # Defense-in-depth: check if STEP_STARTED was already recorded for this step.
+    # This guards against duplicate dispatch when two resume tasks race and both
+    # replay past the same step. If already started, re-suspend to wait.
+    events = await ctx.storage.get_events(ctx.run_id)
+    already_started = any(
+        evt.type == EventType.STEP_STARTED and evt.data.get("step_id") == step_id for evt in events
+    )
+    if already_started:
+        logger.info(
+            f"Step {step_name} already has STEP_STARTED event, re-suspending",
+            run_id=ctx.run_id,
+            step_id=step_id,
+        )
+        raise SuspensionSignal(
+            reason=f"step_dispatch:{step_id}",
+            step_id=step_id,
+            step_name=step_name,
+        )
+
     # Validate event limits before recording step event
     await ctx.validate_event_limits()

pyworkflow/storage/base.py
@@ -358,6 +358,35 @@ class StorageBackend(ABC):
         """
         pass

+    # Atomic Status Transition
+
+    async def try_claim_run(
+        self, run_id: str, from_status: RunStatus, to_status: RunStatus
+    ) -> bool:
+        """
+        Atomically transition run status if the current status matches.
+
+        This is a compare-and-swap operation: the status is only updated
+        if the current status equals `from_status`. Returns True if the
+        transition was applied, False if the current status did not match
+        (meaning another task already claimed this run).
+
+        Args:
+            run_id: Workflow run identifier
+            from_status: Expected current status
+            to_status: New status to set
+
+        Returns:
+            True if the transition succeeded, False otherwise
+        """
+        # Default implementation using get_run + update_run_status.
+        # Backends should override with truly atomic implementations.
+        run = await self.get_run(run_id)
+        if not run or run.status != from_status:
+            return False
+        await self.update_run_status(run_id, to_status)
+        return True
+
     # Cancellation Flag Operations

     @abstractmethod
pyworkflow/storage/cassandra.py
@@ -1209,6 +1209,31 @@ class CassandraStorageBackend(StorageBackend):
         # Apply offset and limit
         return hooks[offset : offset + limit]

+    # Atomic Status Transition
+
+    async def try_claim_run(
+        self, run_id: str, from_status: RunStatus, to_status: RunStatus
+    ) -> bool:
+        """Atomically transition run status using lightweight transaction (IF)."""
+        session = self._ensure_connected()
+
+        result = session.execute(
+            SimpleStatement(
+                """
+                UPDATE workflow_runs
+                SET status = %s, updated_at = %s
+                WHERE run_id = %s
+                IF status = %s
+                """,
+                consistency_level=ConsistencyLevel.SERIAL,
+            ),
+            (to_status.value, datetime.now(UTC), run_id, from_status.value),
+        )
+
+        # Cassandra LWT returns [applied] column
+        row = result.one()
+        return row is not None and row[0] is True
+
     # Cancellation Flag Operations

     async def set_cancellation_flag(self, run_id: str) -> None:
pyworkflow/storage/dynamodb.py
@@ -889,6 +889,37 @@ class DynamoDBStorageBackend(StorageBackend):

         return [self._item_to_hook(self._item_to_dict(item)) for item in items]

+    # Atomic Status Transition
+
+    async def try_claim_run(
+        self, run_id: str, from_status: RunStatus, to_status: RunStatus
+    ) -> bool:
+        """Atomically transition run status using conditional update."""
+        async with self._get_client() as client:
+            try:
+                now = datetime.now(UTC).isoformat()
+                await client.update_item(
+                    TableName=self.table_name,
+                    Key={
+                        "PK": {"S": f"RUN#{run_id}"},
+                        "SK": {"S": "#METADATA"},
+                    },
+                    UpdateExpression="SET #status = :new_status, updated_at = :now, GSI1SK = :gsi1sk",
+                    ConditionExpression="#status = :expected_status",
+                    ExpressionAttributeNames={"#status": "status"},
+                    ExpressionAttributeValues={
+                        ":new_status": {"S": to_status.value},
+                        ":expected_status": {"S": from_status.value},
+                        ":now": {"S": now},
+                        ":gsi1sk": {"S": f"{to_status.value}#{now}"},
+                    },
+                )
+                return True
+            except ClientError as e:
+                if e.response["Error"]["Code"] == "ConditionalCheckFailedException":
+                    return False
+                raise
+
     # Cancellation Flag Operations

     async def set_cancellation_flag(self, run_id: str) -> None:
pyworkflow/storage/file.py
@@ -592,6 +592,34 @@ class FileStorageBackend(StorageBackend):
         hook_data_list = await asyncio.to_thread(_list)
         return [Hook.from_dict(data) for data in hook_data_list]

+    # Atomic Status Transition
+
+    async def try_claim_run(
+        self, run_id: str, from_status: RunStatus, to_status: RunStatus
+    ) -> bool:
+        """Atomically transition run status using file lock."""
+        run_file = self.runs_dir / f"{run_id}.json"
+
+        if not run_file.exists():
+            return False
+
+        lock_file = self.locks_dir / f"{run_id}.lock"
+        lock = FileLock(str(lock_file))
+
+        def _try_claim() -> bool:
+            with lock:
+                if not run_file.exists():
+                    return False
+                data = json.loads(run_file.read_text())
+                if data.get("status") != from_status.value:
+                    return False
+                data["status"] = to_status.value
+                data["updated_at"] = datetime.now(UTC).isoformat()
+                run_file.write_text(json.dumps(data, indent=2))
+                return True
+
+        return await asyncio.to_thread(_try_claim)
+
     # Cancellation Flag Operations

     async def set_cancellation_flag(self, run_id: str) -> None:
pyworkflow/storage/memory.py
@@ -366,6 +366,20 @@ class InMemoryStorageBackend(StorageBackend):
         # Apply pagination
         return hooks[offset : offset + limit]

+    # Atomic Status Transition
+
+    async def try_claim_run(
+        self, run_id: str, from_status: RunStatus, to_status: RunStatus
+    ) -> bool:
+        """Atomically transition run status using lock-protected check-and-set."""
+        with self._lock:
+            run = self._runs.get(run_id)
+            if not run or run.status != from_status:
+                return False
+            run.status = to_status
+            run.updated_at = datetime.now(UTC)
+            return True
+
     # Cancellation Flag Operations

     async def set_cancellation_flag(self, run_id: str) -> None:
pyworkflow/storage/mysql.py
@@ -767,6 +767,26 @@ class MySQLStorageBackend(StorageBackend):

         return [self._row_to_hook(row) for row in rows]

+    # Atomic Status Transition
+
+    async def try_claim_run(
+        self, run_id: str, from_status: RunStatus, to_status: RunStatus
+    ) -> bool:
+        """Atomically transition run status using conditional UPDATE."""
+        pool = self._ensure_connected()
+
+        async with pool.acquire() as conn, conn.cursor() as cur:
+            await cur.execute(
+                """
+                UPDATE workflow_runs
+                SET status = %s, updated_at = %s
+                WHERE run_id = %s AND status = %s
+                """,
+                (to_status.value, datetime.now(UTC), run_id, from_status.value),
+            )
+
+        return cur.rowcount > 0
+
     # Cancellation Flag Operations

     async def set_cancellation_flag(self, run_id: str) -> None:
pyworkflow/storage/postgres.py
@@ -862,6 +862,30 @@ class PostgresStorageBackend(StorageBackend):

         return [self._row_to_hook(row) for row in rows]

+    # Atomic Status Transition
+
+    async def try_claim_run(
+        self, run_id: str, from_status: RunStatus, to_status: RunStatus
+    ) -> bool:
+        """Atomically transition run status using conditional UPDATE."""
+        pool = await self._get_pool()
+
+        async with pool.acquire() as conn:
+            result = await conn.execute(
+                """
+                UPDATE workflow_runs
+                SET status = $1, updated_at = $2
+                WHERE run_id = $3 AND status = $4
+                """,
+                to_status.value,
+                datetime.now(UTC),
+                run_id,
+                from_status.value,
+            )
+
+        # asyncpg returns 'UPDATE N' where N is rows affected
+        return result == "UPDATE 1"
+
     # Cancellation Flag Operations

     async def set_cancellation_flag(self, run_id: str) -> None:
pyworkflow/storage/sqlite.py
@@ -750,6 +750,26 @@ class SQLiteStorageBackend(StorageBackend):

         return [self._row_to_hook(row) for row in rows]

+    # Atomic Status Transition
+
+    async def try_claim_run(
+        self, run_id: str, from_status: RunStatus, to_status: RunStatus
+    ) -> bool:
+        """Atomically transition run status using conditional UPDATE."""
+        db = self._ensure_connected()
+
+        cursor = await db.execute(
+            """
+            UPDATE workflow_runs
+            SET status = ?, updated_at = ?
+            WHERE run_id = ? AND status = ?
+            """,
+            (to_status.value, datetime.now(UTC).isoformat(), run_id, from_status.value),
+        )
+        await db.commit()
+
+        return cursor.rowcount > 0
+
     # Cancellation Flag Operations

     async def set_cancellation_flag(self, run_id: str) -> None:
tests/unit/test_cli_worker.py
@@ -199,15 +199,6 @@ class TestRunWorkerCommand:
         # Check extra args
         assert "--max-memory-per-child=150000" in args

-    def test_concurrency_option(self, runner, mock_celery_app, mock_discovery, mock_list_functions):
-        """--concurrency option is passed to Celery."""
-        runner.invoke(worker, ["run", "--concurrency", "4"], obj={"config": {}, "module": None})
-
-        mock_celery_app.worker_main.assert_called_once()
-        args = mock_celery_app.worker_main.call_args[1]["argv"]
-
-        assert "--concurrency=4" in args
-
     def test_loglevel_option(self, runner, mock_celery_app, mock_discovery, mock_list_functions):
         """--loglevel option is passed to Celery in uppercase."""
         runner.invoke(worker, ["run", "--loglevel", "debug"], obj={"config": {}, "module": None})