pyworkflow-engine 0.1.7__py3-none-any.whl

This diff shows the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their public registries.
Files changed (196)
  1. dashboard/backend/app/__init__.py +1 -0
  2. dashboard/backend/app/config.py +32 -0
  3. dashboard/backend/app/controllers/__init__.py +6 -0
  4. dashboard/backend/app/controllers/run_controller.py +86 -0
  5. dashboard/backend/app/controllers/workflow_controller.py +33 -0
  6. dashboard/backend/app/dependencies/__init__.py +5 -0
  7. dashboard/backend/app/dependencies/storage.py +50 -0
  8. dashboard/backend/app/repositories/__init__.py +6 -0
  9. dashboard/backend/app/repositories/run_repository.py +80 -0
  10. dashboard/backend/app/repositories/workflow_repository.py +27 -0
  11. dashboard/backend/app/rest/__init__.py +8 -0
  12. dashboard/backend/app/rest/v1/__init__.py +12 -0
  13. dashboard/backend/app/rest/v1/health.py +33 -0
  14. dashboard/backend/app/rest/v1/runs.py +133 -0
  15. dashboard/backend/app/rest/v1/workflows.py +41 -0
  16. dashboard/backend/app/schemas/__init__.py +23 -0
  17. dashboard/backend/app/schemas/common.py +16 -0
  18. dashboard/backend/app/schemas/event.py +24 -0
  19. dashboard/backend/app/schemas/hook.py +25 -0
  20. dashboard/backend/app/schemas/run.py +54 -0
  21. dashboard/backend/app/schemas/step.py +28 -0
  22. dashboard/backend/app/schemas/workflow.py +31 -0
  23. dashboard/backend/app/server.py +87 -0
  24. dashboard/backend/app/services/__init__.py +6 -0
  25. dashboard/backend/app/services/run_service.py +240 -0
  26. dashboard/backend/app/services/workflow_service.py +155 -0
  27. dashboard/backend/main.py +18 -0
  28. docs/concepts/cancellation.mdx +362 -0
  29. docs/concepts/continue-as-new.mdx +434 -0
  30. docs/concepts/events.mdx +266 -0
  31. docs/concepts/fault-tolerance.mdx +370 -0
  32. docs/concepts/hooks.mdx +552 -0
  33. docs/concepts/limitations.mdx +167 -0
  34. docs/concepts/schedules.mdx +775 -0
  35. docs/concepts/sleep.mdx +312 -0
  36. docs/concepts/steps.mdx +301 -0
  37. docs/concepts/workflows.mdx +255 -0
  38. docs/guides/cli.mdx +942 -0
  39. docs/guides/configuration.mdx +560 -0
  40. docs/introduction.mdx +155 -0
  41. docs/quickstart.mdx +279 -0
  42. examples/__init__.py +1 -0
  43. examples/celery/__init__.py +1 -0
  44. examples/celery/durable/docker-compose.yml +55 -0
  45. examples/celery/durable/pyworkflow.config.yaml +12 -0
  46. examples/celery/durable/workflows/__init__.py +122 -0
  47. examples/celery/durable/workflows/basic.py +87 -0
  48. examples/celery/durable/workflows/batch_processing.py +102 -0
  49. examples/celery/durable/workflows/cancellation.py +273 -0
  50. examples/celery/durable/workflows/child_workflow_patterns.py +240 -0
  51. examples/celery/durable/workflows/child_workflows.py +202 -0
  52. examples/celery/durable/workflows/continue_as_new.py +260 -0
  53. examples/celery/durable/workflows/fault_tolerance.py +210 -0
  54. examples/celery/durable/workflows/hooks.py +211 -0
  55. examples/celery/durable/workflows/idempotency.py +112 -0
  56. examples/celery/durable/workflows/long_running.py +99 -0
  57. examples/celery/durable/workflows/retries.py +101 -0
  58. examples/celery/durable/workflows/schedules.py +209 -0
  59. examples/celery/transient/01_basic_workflow.py +91 -0
  60. examples/celery/transient/02_fault_tolerance.py +257 -0
  61. examples/celery/transient/__init__.py +20 -0
  62. examples/celery/transient/pyworkflow.config.yaml +25 -0
  63. examples/local/__init__.py +1 -0
  64. examples/local/durable/01_basic_workflow.py +94 -0
  65. examples/local/durable/02_file_storage.py +132 -0
  66. examples/local/durable/03_retries.py +169 -0
  67. examples/local/durable/04_long_running.py +119 -0
  68. examples/local/durable/05_event_log.py +145 -0
  69. examples/local/durable/06_idempotency.py +148 -0
  70. examples/local/durable/07_hooks.py +334 -0
  71. examples/local/durable/08_cancellation.py +233 -0
  72. examples/local/durable/09_child_workflows.py +198 -0
  73. examples/local/durable/10_child_workflow_patterns.py +265 -0
  74. examples/local/durable/11_continue_as_new.py +249 -0
  75. examples/local/durable/12_schedules.py +198 -0
  76. examples/local/durable/__init__.py +1 -0
  77. examples/local/transient/01_quick_tasks.py +87 -0
  78. examples/local/transient/02_retries.py +130 -0
  79. examples/local/transient/03_sleep.py +141 -0
  80. examples/local/transient/__init__.py +1 -0
  81. pyworkflow/__init__.py +256 -0
  82. pyworkflow/aws/__init__.py +68 -0
  83. pyworkflow/aws/context.py +234 -0
  84. pyworkflow/aws/handler.py +184 -0
  85. pyworkflow/aws/testing.py +310 -0
  86. pyworkflow/celery/__init__.py +41 -0
  87. pyworkflow/celery/app.py +198 -0
  88. pyworkflow/celery/scheduler.py +315 -0
  89. pyworkflow/celery/tasks.py +1746 -0
  90. pyworkflow/cli/__init__.py +132 -0
  91. pyworkflow/cli/__main__.py +6 -0
  92. pyworkflow/cli/commands/__init__.py +1 -0
  93. pyworkflow/cli/commands/hooks.py +640 -0
  94. pyworkflow/cli/commands/quickstart.py +495 -0
  95. pyworkflow/cli/commands/runs.py +773 -0
  96. pyworkflow/cli/commands/scheduler.py +130 -0
  97. pyworkflow/cli/commands/schedules.py +794 -0
  98. pyworkflow/cli/commands/setup.py +703 -0
  99. pyworkflow/cli/commands/worker.py +413 -0
  100. pyworkflow/cli/commands/workflows.py +1257 -0
  101. pyworkflow/cli/output/__init__.py +1 -0
  102. pyworkflow/cli/output/formatters.py +321 -0
  103. pyworkflow/cli/output/styles.py +121 -0
  104. pyworkflow/cli/utils/__init__.py +1 -0
  105. pyworkflow/cli/utils/async_helpers.py +30 -0
  106. pyworkflow/cli/utils/config.py +130 -0
  107. pyworkflow/cli/utils/config_generator.py +344 -0
  108. pyworkflow/cli/utils/discovery.py +53 -0
  109. pyworkflow/cli/utils/docker_manager.py +651 -0
  110. pyworkflow/cli/utils/interactive.py +364 -0
  111. pyworkflow/cli/utils/storage.py +115 -0
  112. pyworkflow/config.py +329 -0
  113. pyworkflow/context/__init__.py +63 -0
  114. pyworkflow/context/aws.py +230 -0
  115. pyworkflow/context/base.py +416 -0
  116. pyworkflow/context/local.py +930 -0
  117. pyworkflow/context/mock.py +381 -0
  118. pyworkflow/core/__init__.py +0 -0
  119. pyworkflow/core/exceptions.py +353 -0
  120. pyworkflow/core/registry.py +313 -0
  121. pyworkflow/core/scheduled.py +328 -0
  122. pyworkflow/core/step.py +494 -0
  123. pyworkflow/core/workflow.py +294 -0
  124. pyworkflow/discovery.py +248 -0
  125. pyworkflow/engine/__init__.py +0 -0
  126. pyworkflow/engine/events.py +879 -0
  127. pyworkflow/engine/executor.py +682 -0
  128. pyworkflow/engine/replay.py +273 -0
  129. pyworkflow/observability/__init__.py +19 -0
  130. pyworkflow/observability/logging.py +234 -0
  131. pyworkflow/primitives/__init__.py +33 -0
  132. pyworkflow/primitives/child_handle.py +174 -0
  133. pyworkflow/primitives/child_workflow.py +372 -0
  134. pyworkflow/primitives/continue_as_new.py +101 -0
  135. pyworkflow/primitives/define_hook.py +150 -0
  136. pyworkflow/primitives/hooks.py +97 -0
  137. pyworkflow/primitives/resume_hook.py +210 -0
  138. pyworkflow/primitives/schedule.py +545 -0
  139. pyworkflow/primitives/shield.py +96 -0
  140. pyworkflow/primitives/sleep.py +100 -0
  141. pyworkflow/runtime/__init__.py +21 -0
  142. pyworkflow/runtime/base.py +179 -0
  143. pyworkflow/runtime/celery.py +310 -0
  144. pyworkflow/runtime/factory.py +101 -0
  145. pyworkflow/runtime/local.py +706 -0
  146. pyworkflow/scheduler/__init__.py +9 -0
  147. pyworkflow/scheduler/local.py +248 -0
  148. pyworkflow/serialization/__init__.py +0 -0
  149. pyworkflow/serialization/decoder.py +146 -0
  150. pyworkflow/serialization/encoder.py +162 -0
  151. pyworkflow/storage/__init__.py +54 -0
  152. pyworkflow/storage/base.py +612 -0
  153. pyworkflow/storage/config.py +185 -0
  154. pyworkflow/storage/dynamodb.py +1315 -0
  155. pyworkflow/storage/file.py +827 -0
  156. pyworkflow/storage/memory.py +549 -0
  157. pyworkflow/storage/postgres.py +1161 -0
  158. pyworkflow/storage/schemas.py +486 -0
  159. pyworkflow/storage/sqlite.py +1136 -0
  160. pyworkflow/utils/__init__.py +0 -0
  161. pyworkflow/utils/duration.py +177 -0
  162. pyworkflow/utils/schedule.py +391 -0
  163. pyworkflow_engine-0.1.7.dist-info/METADATA +687 -0
  164. pyworkflow_engine-0.1.7.dist-info/RECORD +196 -0
  165. pyworkflow_engine-0.1.7.dist-info/WHEEL +5 -0
  166. pyworkflow_engine-0.1.7.dist-info/entry_points.txt +2 -0
  167. pyworkflow_engine-0.1.7.dist-info/licenses/LICENSE +21 -0
  168. pyworkflow_engine-0.1.7.dist-info/top_level.txt +5 -0
  169. tests/examples/__init__.py +0 -0
  170. tests/integration/__init__.py +0 -0
  171. tests/integration/test_cancellation.py +330 -0
  172. tests/integration/test_child_workflows.py +439 -0
  173. tests/integration/test_continue_as_new.py +428 -0
  174. tests/integration/test_dynamodb_storage.py +1146 -0
  175. tests/integration/test_fault_tolerance.py +369 -0
  176. tests/integration/test_schedule_storage.py +484 -0
  177. tests/unit/__init__.py +0 -0
  178. tests/unit/backends/__init__.py +1 -0
  179. tests/unit/backends/test_dynamodb_storage.py +1554 -0
  180. tests/unit/backends/test_postgres_storage.py +1281 -0
  181. tests/unit/backends/test_sqlite_storage.py +1460 -0
  182. tests/unit/conftest.py +41 -0
  183. tests/unit/test_cancellation.py +364 -0
  184. tests/unit/test_child_workflows.py +680 -0
  185. tests/unit/test_continue_as_new.py +441 -0
  186. tests/unit/test_event_limits.py +316 -0
  187. tests/unit/test_executor.py +320 -0
  188. tests/unit/test_fault_tolerance.py +334 -0
  189. tests/unit/test_hooks.py +495 -0
  190. tests/unit/test_registry.py +261 -0
  191. tests/unit/test_replay.py +420 -0
  192. tests/unit/test_schedule_schemas.py +285 -0
  193. tests/unit/test_schedule_utils.py +286 -0
  194. tests/unit/test_scheduled_workflow.py +274 -0
  195. tests/unit/test_step.py +353 -0
  196. tests/unit/test_workflow.py +243 -0
@@ -0,0 +1,413 @@
+"""Worker management commands for Celery runtime."""
+
+import os
+
+import click
+
+from pyworkflow.cli.output.formatters import (
+    format_json,
+    format_table,
+    print_error,
+    print_info,
+    print_success,
+    print_warning,
+)
+
+
+@click.group(name="worker")
+def worker() -> None:
+    """Manage Celery workers for workflow execution."""
+    pass
+
+
+@worker.command(name="run")
+@click.option(
+    "--workflow",
+    "queue_workflow",
+    is_flag=True,
+    help="Only process workflow orchestration tasks (pyworkflow.workflows queue)",
+)
+@click.option(
+    "--step",
+    "queue_step",
+    is_flag=True,
+    help="Only process step execution tasks (pyworkflow.steps queue)",
+)
+@click.option(
+    "--schedule",
+    "queue_schedule",
+    is_flag=True,
+    help="Only process scheduled resumption tasks (pyworkflow.schedules queue)",
+)
+@click.option(
+    "--concurrency",
+    "-c",
+    type=int,
+    default=None,
+    help="Number of worker processes (default: auto-detect)",
+)
+@click.option(
+    "--loglevel",
+    "-l",
+    type=click.Choice(["debug", "info", "warning", "error"], case_sensitive=False),
+    default="info",
+    help="Log level for the worker (default: info)",
+)
+@click.option(
+    "--hostname",
+    "-n",
+    default=None,
+    help="Worker hostname (default: auto-generated)",
+)
+@click.option(
+    "--beat",
+    is_flag=True,
+    help="Also start Celery Beat scheduler for periodic tasks",
+)
+@click.option(
+    "--pool",
+    type=click.Choice(["prefork", "solo", "eventlet", "gevent"], case_sensitive=False),
+    default=None,
+    help="Worker pool type. Use 'solo' for debugging with breakpoints",
+)
+@click.pass_context
+def run_worker(
+    ctx: click.Context,
+    queue_workflow: bool,
+    queue_step: bool,
+    queue_schedule: bool,
+    concurrency: int | None,
+    loglevel: str,
+    hostname: str | None,
+    beat: bool,
+    pool: str | None,
+) -> None:
+    """
+    Start a Celery worker for processing workflows.
+
+    By default, processes all queues. Use --workflow, --step, or --schedule
+    flags to limit to specific queue types.
+
+    Examples:
+
+        # Start a worker processing all queues
+        pyworkflow worker run
+
+        # Start a workflow orchestration worker only
+        pyworkflow worker run --workflow
+
+        # Start a step execution worker (for heavy computation)
+        pyworkflow worker run --step --concurrency 4
+
+        # Start a schedule worker (for sleep resumption)
+        pyworkflow worker run --schedule
+
+        # Start with beat scheduler
+        pyworkflow worker run --beat
+
+        # Start with custom log level
+        pyworkflow worker run --loglevel debug
+    """
+    # Get config from CLI context (TOML config)
+    config = ctx.obj.get("config", {})
+    module = ctx.obj.get("module")
+
+    # Also try to load YAML config if it exists
+    from pyworkflow.cli.utils.discovery import _load_yaml_config
+
+    yaml_config = _load_yaml_config()
+    if yaml_config:
+        # Merge YAML config (lower priority) with TOML config (higher priority)
+        merged_config = {**yaml_config, **config}
+        # For nested dicts like 'celery', merge them too
+        if "celery" in yaml_config and "celery" not in config:
+            merged_config["celery"] = yaml_config["celery"]
+        config = merged_config
+
+    # Determine queues to process
+    queues = []
+    if queue_workflow:
+        queues.append("pyworkflow.workflows")
+    if queue_step:
+        queues.append("pyworkflow.steps")
+    if queue_schedule:
+        queues.append("pyworkflow.schedules")
+
+    # If no specific queue selected, process all
+    if not queues:
+        queues = [
+            "pyworkflow.default",
+            "pyworkflow.workflows",
+            "pyworkflow.steps",
+            "pyworkflow.schedules",
+        ]
+
+    # Get broker config from config file or environment
+    celery_config = config.get("celery", {})
+    broker_url = celery_config.get(
+        "broker",
+        os.getenv("PYWORKFLOW_CELERY_BROKER", "redis://localhost:6379/0"),
+    )
+    result_backend = celery_config.get(
+        "result_backend",
+        os.getenv("PYWORKFLOW_CELERY_RESULT_BACKEND", "redis://localhost:6379/1"),
+    )
+
+    print_info("Starting Celery worker...")
+    print_info(f"Broker: {broker_url}")
+    print_info(f"Queues: {', '.join(queues)}")
+
+    if concurrency:
+        print_info(f"Concurrency: {concurrency}")
+
+    if pool:
+        print_info(f"Pool: {pool}")
+
+    try:
+        # Discover workflows using CLI discovery (reads from --module, env var, or YAML config)
+        from pyworkflow.cli.utils.discovery import discover_workflows
+
+        discover_workflows(module, config)
+
+        # Import and configure Celery app (after discovery so workflows are registered)
+        from pyworkflow.celery.app import create_celery_app
+
+        # Create or get Celery app with configured broker
+        app = create_celery_app(
+            broker_url=broker_url,
+            result_backend=result_backend,
+        )
+
+        # Log discovered workflows and steps
+        from pyworkflow import list_steps, list_workflows
+
+        workflows = list_workflows()
+        steps = list_steps()
+
+        if workflows:
+            print_info(f"Registered {len(workflows)} workflow(s):")
+            for name in sorted(workflows.keys()):
+                print_info(f" - {name}")
+        else:
+            print_warning("No workflows registered!")
+            print_warning("Specify workflows using one of:")
+            print_warning(" 1. --module flag: pyworkflow --module myapp.workflows worker run")
+            print_warning(
+                " 2. Environment: PYWORKFLOW_DISCOVER=myapp.workflows pyworkflow worker run"
+            )
+            print_warning(
+                " 3. Config file: Create pyworkflow.config.yaml with 'module: myapp.workflows'"
+            )
+            print_info("")
+
+        if steps:
+            print_info(f"Registered {len(steps)} step(s):")
+            for name in sorted(steps.keys()):
+                print_info(f" - {name}")
+
+        print_info("")
+
+        # Configure worker arguments
+        worker_args = [
+            "worker",
+            f"--loglevel={loglevel.upper()}",
+            f"--queues={','.join(queues)}",
+        ]
+
+        if concurrency:
+            worker_args.append(f"--concurrency={concurrency}")
+
+        if hostname:
+            worker_args.append(f"--hostname={hostname}")
+
+        if beat:
+            worker_args.append("--beat")
+            worker_args.append("--scheduler=pyworkflow.celery.scheduler:PyWorkflowScheduler")
+
+        if pool:
+            worker_args.append(f"--pool={pool}")
+
+        print_success("Worker starting...")
+        print_info("Press Ctrl+C to stop")
+        print_info("")
+
+        # Start the worker using Celery's programmatic API
+        app.worker_main(argv=worker_args)
+
+    except ImportError as e:
+        print_error(f"Failed to import Celery: {e}")
+        print_error("Make sure Celery is installed: pip install celery[redis]")
+        raise click.Abort()
+
+    except KeyboardInterrupt:
+        print_info("\nWorker stopped")
+
+    except Exception as e:
+        print_error(f"Worker failed: {e}")
+        if ctx.obj.get("verbose"):
+            raise
+        raise click.Abort()
+
+
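`run_worker` pulls `config`, `module`, and (in the error path) `verbose` out of `ctx.obj`, which is populated by the top-level `pyworkflow` CLI group rather than by this module. A minimal sketch of how such a parent group could seed the context, assuming hypothetical top-level `--module` and `--verbose` options and an empty default config:

```python
# Sketch only: the real group lives elsewhere in pyworkflow.cli and may differ.
import click


@click.group()
@click.option("--module", default=None, help="Dotted module path to discover workflows from.")
@click.option("--verbose", is_flag=True, help="Re-raise errors with full tracebacks.")
@click.pass_context
def cli(ctx: click.Context, module: str | None, verbose: bool) -> None:
    ctx.ensure_object(dict)       # make ctx.obj a dict shared with subcommands
    ctx.obj["config"] = {}        # e.g. values parsed from a TOML config file
    ctx.obj["module"] = module    # consumed by `worker run` for workflow discovery
    ctx.obj["verbose"] = verbose  # controls whether exceptions are re-raised


# cli.add_command(worker) would then expose `worker run`, `status`, `list`, and `queues`.
```
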
+@worker.command(name="status")
+@click.pass_context
+def worker_status(ctx: click.Context) -> None:
+    """
+    Show status of active Celery workers.
+
+    Examples:
+
+        pyworkflow worker status
+    """
+    config = ctx.obj.get("config", {})
+    output = ctx.obj.get("output", "table")
+
+    # Get broker config
+    celery_config = config.get("celery", {})
+    broker_url = celery_config.get(
+        "broker",
+        os.getenv("PYWORKFLOW_CELERY_BROKER", "redis://localhost:6379/0"),
+    )
+
+    try:
+        from pyworkflow.celery.app import create_celery_app
+
+        app = create_celery_app(broker_url=broker_url)
+
+        # Get active workers
+        inspect = app.control.inspect()
+        active = inspect.active()
+        stats = inspect.stats()
+        ping = inspect.ping()
+
+        if not ping:
+            print_warning("No active workers found")
+            print_info("\nStart a worker with: pyworkflow worker run")
+            return
+
+        workers = []
+        for worker_name, worker_stats in (stats or {}).items():
+            worker_info = {
+                "name": worker_name,
+                "status": "online" if worker_name in (ping or {}) else "offline",
+                "concurrency": worker_stats.get("pool", {}).get("max-concurrency", "N/A"),
+                "processed": worker_stats.get("total", {}).get("pyworkflow.start_workflow", 0)
+                + worker_stats.get("total", {}).get("pyworkflow.execute_step", 0)
+                + worker_stats.get("total", {}).get("pyworkflow.resume_workflow", 0),
+            }
+
+            # Get active tasks count
+            if active and worker_name in active:
+                worker_info["active_tasks"] = len(active[worker_name])
+            else:
+                worker_info["active_tasks"] = 0
+
+            workers.append(worker_info)
+
+        if output == "json":
+            format_json(workers)
+        elif output == "plain":
+            for w in workers:
+                print(f"{w['name']}: {w['status']}")
+        else:
+            table_data = [
+                {
+                    "Worker": w["name"],
+                    "Status": w["status"],
+                    "Concurrency": str(w["concurrency"]),
+                    "Active Tasks": str(w["active_tasks"]),
+                    "Processed": str(w["processed"]),
+                }
+                for w in workers
+            ]
+            format_table(
+                table_data,
+                ["Worker", "Status", "Concurrency", "Active Tasks", "Processed"],
+                title="Celery Workers",
+            )
+
+    except ImportError as e:
+        print_error(f"Failed to import Celery: {e}")
+        raise click.Abort()
+
+    except Exception as e:
+        print_error(f"Failed to get worker status: {e}")
+        print_info("Make sure the broker is running and accessible")
+        if ctx.obj.get("verbose"):
+            raise
+        raise click.Abort()
+
+
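The `status` command is a thin wrapper around Celery's worker-inspection API. The same data can be pulled from a Python shell; a minimal sketch, assuming the default Redis broker used above and at least one running worker:

```python
# Sketch only: mirrors the inspect calls made by `pyworkflow worker status`.
from pyworkflow.celery.app import create_celery_app

app = create_celery_app(broker_url="redis://localhost:6379/0")
inspect = app.control.inspect(timeout=2.0)

ping = inspect.ping() or {}      # e.g. {"celery@host": {"ok": "pong"}}
stats = inspect.stats() or {}    # per-worker stats, including pool "max-concurrency"
active = inspect.active() or {}  # tasks currently executing on each worker

for name in ping:
    pool = stats.get(name, {}).get("pool", {}).get("max-concurrency", "N/A")
    print(f"{name}: online, concurrency={pool}, active tasks={len(active.get(name, []))}")
```
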
+@worker.command(name="list")
+@click.pass_context
+def list_workers(ctx: click.Context) -> None:
+    """
+    List all registered Celery workers.
+
+    Examples:
+
+        pyworkflow worker list
+    """
+    # This is an alias for status with simplified output
+    ctx.invoke(worker_status)
+
+
+@worker.command(name="queues")
+@click.pass_context
+def list_queues(ctx: click.Context) -> None:
+    """
+    Show available task queues and their configuration.
+
+    Examples:
+
+        pyworkflow worker queues
+    """
+    output = ctx.obj.get("output", "table")
+
+    queues = [
+        {
+            "name": "pyworkflow.default",
+            "purpose": "General tasks",
+            "routing_key": "workflow.#",
+        },
+        {
+            "name": "pyworkflow.workflows",
+            "purpose": "Workflow orchestration",
+            "routing_key": "workflow.workflow.#",
+        },
+        {
+            "name": "pyworkflow.steps",
+            "purpose": "Step execution (heavy work)",
+            "routing_key": "workflow.step.#",
+        },
+        {
+            "name": "pyworkflow.schedules",
+            "purpose": "Sleep resumption scheduling",
+            "routing_key": "workflow.schedule.#",
+        },
+    ]
+
+    if output == "json":
+        format_json(queues)
+    elif output == "plain":
+        for q in queues:
+            print(q["name"])
+    else:
+        table_data = [
+            {
+                "Queue": q["name"],
+                "Purpose": q["purpose"],
+                "Routing Key": q["routing_key"],
+            }
+            for q in queues
+        ]
+        format_table(
+            table_data,
+            ["Queue", "Purpose", "Routing Key"],
+            title="Task Queues",
+        )
+
+    print_info("\nUsage:")
+    print_info(" pyworkflow worker run --workflow # Process workflow queue only")
+    print_info(" pyworkflow worker run --step # Process step queue only")
+    print_info(" pyworkflow worker run --schedule # Process schedule queue only")