pyworkflow_engine-0.1.7-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (196)
  1. dashboard/backend/app/__init__.py +1 -0
  2. dashboard/backend/app/config.py +32 -0
  3. dashboard/backend/app/controllers/__init__.py +6 -0
  4. dashboard/backend/app/controllers/run_controller.py +86 -0
  5. dashboard/backend/app/controllers/workflow_controller.py +33 -0
  6. dashboard/backend/app/dependencies/__init__.py +5 -0
  7. dashboard/backend/app/dependencies/storage.py +50 -0
  8. dashboard/backend/app/repositories/__init__.py +6 -0
  9. dashboard/backend/app/repositories/run_repository.py +80 -0
  10. dashboard/backend/app/repositories/workflow_repository.py +27 -0
  11. dashboard/backend/app/rest/__init__.py +8 -0
  12. dashboard/backend/app/rest/v1/__init__.py +12 -0
  13. dashboard/backend/app/rest/v1/health.py +33 -0
  14. dashboard/backend/app/rest/v1/runs.py +133 -0
  15. dashboard/backend/app/rest/v1/workflows.py +41 -0
  16. dashboard/backend/app/schemas/__init__.py +23 -0
  17. dashboard/backend/app/schemas/common.py +16 -0
  18. dashboard/backend/app/schemas/event.py +24 -0
  19. dashboard/backend/app/schemas/hook.py +25 -0
  20. dashboard/backend/app/schemas/run.py +54 -0
  21. dashboard/backend/app/schemas/step.py +28 -0
  22. dashboard/backend/app/schemas/workflow.py +31 -0
  23. dashboard/backend/app/server.py +87 -0
  24. dashboard/backend/app/services/__init__.py +6 -0
  25. dashboard/backend/app/services/run_service.py +240 -0
  26. dashboard/backend/app/services/workflow_service.py +155 -0
  27. dashboard/backend/main.py +18 -0
  28. docs/concepts/cancellation.mdx +362 -0
  29. docs/concepts/continue-as-new.mdx +434 -0
  30. docs/concepts/events.mdx +266 -0
  31. docs/concepts/fault-tolerance.mdx +370 -0
  32. docs/concepts/hooks.mdx +552 -0
  33. docs/concepts/limitations.mdx +167 -0
  34. docs/concepts/schedules.mdx +775 -0
  35. docs/concepts/sleep.mdx +312 -0
  36. docs/concepts/steps.mdx +301 -0
  37. docs/concepts/workflows.mdx +255 -0
  38. docs/guides/cli.mdx +942 -0
  39. docs/guides/configuration.mdx +560 -0
  40. docs/introduction.mdx +155 -0
  41. docs/quickstart.mdx +279 -0
  42. examples/__init__.py +1 -0
  43. examples/celery/__init__.py +1 -0
  44. examples/celery/durable/docker-compose.yml +55 -0
  45. examples/celery/durable/pyworkflow.config.yaml +12 -0
  46. examples/celery/durable/workflows/__init__.py +122 -0
  47. examples/celery/durable/workflows/basic.py +87 -0
  48. examples/celery/durable/workflows/batch_processing.py +102 -0
  49. examples/celery/durable/workflows/cancellation.py +273 -0
  50. examples/celery/durable/workflows/child_workflow_patterns.py +240 -0
  51. examples/celery/durable/workflows/child_workflows.py +202 -0
  52. examples/celery/durable/workflows/continue_as_new.py +260 -0
  53. examples/celery/durable/workflows/fault_tolerance.py +210 -0
  54. examples/celery/durable/workflows/hooks.py +211 -0
  55. examples/celery/durable/workflows/idempotency.py +112 -0
  56. examples/celery/durable/workflows/long_running.py +99 -0
  57. examples/celery/durable/workflows/retries.py +101 -0
  58. examples/celery/durable/workflows/schedules.py +209 -0
  59. examples/celery/transient/01_basic_workflow.py +91 -0
  60. examples/celery/transient/02_fault_tolerance.py +257 -0
  61. examples/celery/transient/__init__.py +20 -0
  62. examples/celery/transient/pyworkflow.config.yaml +25 -0
  63. examples/local/__init__.py +1 -0
  64. examples/local/durable/01_basic_workflow.py +94 -0
  65. examples/local/durable/02_file_storage.py +132 -0
  66. examples/local/durable/03_retries.py +169 -0
  67. examples/local/durable/04_long_running.py +119 -0
  68. examples/local/durable/05_event_log.py +145 -0
  69. examples/local/durable/06_idempotency.py +148 -0
  70. examples/local/durable/07_hooks.py +334 -0
  71. examples/local/durable/08_cancellation.py +233 -0
  72. examples/local/durable/09_child_workflows.py +198 -0
  73. examples/local/durable/10_child_workflow_patterns.py +265 -0
  74. examples/local/durable/11_continue_as_new.py +249 -0
  75. examples/local/durable/12_schedules.py +198 -0
  76. examples/local/durable/__init__.py +1 -0
  77. examples/local/transient/01_quick_tasks.py +87 -0
  78. examples/local/transient/02_retries.py +130 -0
  79. examples/local/transient/03_sleep.py +141 -0
  80. examples/local/transient/__init__.py +1 -0
  81. pyworkflow/__init__.py +256 -0
  82. pyworkflow/aws/__init__.py +68 -0
  83. pyworkflow/aws/context.py +234 -0
  84. pyworkflow/aws/handler.py +184 -0
  85. pyworkflow/aws/testing.py +310 -0
  86. pyworkflow/celery/__init__.py +41 -0
  87. pyworkflow/celery/app.py +198 -0
  88. pyworkflow/celery/scheduler.py +315 -0
  89. pyworkflow/celery/tasks.py +1746 -0
  90. pyworkflow/cli/__init__.py +132 -0
  91. pyworkflow/cli/__main__.py +6 -0
  92. pyworkflow/cli/commands/__init__.py +1 -0
  93. pyworkflow/cli/commands/hooks.py +640 -0
  94. pyworkflow/cli/commands/quickstart.py +495 -0
  95. pyworkflow/cli/commands/runs.py +773 -0
  96. pyworkflow/cli/commands/scheduler.py +130 -0
  97. pyworkflow/cli/commands/schedules.py +794 -0
  98. pyworkflow/cli/commands/setup.py +703 -0
  99. pyworkflow/cli/commands/worker.py +413 -0
  100. pyworkflow/cli/commands/workflows.py +1257 -0
  101. pyworkflow/cli/output/__init__.py +1 -0
  102. pyworkflow/cli/output/formatters.py +321 -0
  103. pyworkflow/cli/output/styles.py +121 -0
  104. pyworkflow/cli/utils/__init__.py +1 -0
  105. pyworkflow/cli/utils/async_helpers.py +30 -0
  106. pyworkflow/cli/utils/config.py +130 -0
  107. pyworkflow/cli/utils/config_generator.py +344 -0
  108. pyworkflow/cli/utils/discovery.py +53 -0
  109. pyworkflow/cli/utils/docker_manager.py +651 -0
  110. pyworkflow/cli/utils/interactive.py +364 -0
  111. pyworkflow/cli/utils/storage.py +115 -0
  112. pyworkflow/config.py +329 -0
  113. pyworkflow/context/__init__.py +63 -0
  114. pyworkflow/context/aws.py +230 -0
  115. pyworkflow/context/base.py +416 -0
  116. pyworkflow/context/local.py +930 -0
  117. pyworkflow/context/mock.py +381 -0
  118. pyworkflow/core/__init__.py +0 -0
  119. pyworkflow/core/exceptions.py +353 -0
  120. pyworkflow/core/registry.py +313 -0
  121. pyworkflow/core/scheduled.py +328 -0
  122. pyworkflow/core/step.py +494 -0
  123. pyworkflow/core/workflow.py +294 -0
  124. pyworkflow/discovery.py +248 -0
  125. pyworkflow/engine/__init__.py +0 -0
  126. pyworkflow/engine/events.py +879 -0
  127. pyworkflow/engine/executor.py +682 -0
  128. pyworkflow/engine/replay.py +273 -0
  129. pyworkflow/observability/__init__.py +19 -0
  130. pyworkflow/observability/logging.py +234 -0
  131. pyworkflow/primitives/__init__.py +33 -0
  132. pyworkflow/primitives/child_handle.py +174 -0
  133. pyworkflow/primitives/child_workflow.py +372 -0
  134. pyworkflow/primitives/continue_as_new.py +101 -0
  135. pyworkflow/primitives/define_hook.py +150 -0
  136. pyworkflow/primitives/hooks.py +97 -0
  137. pyworkflow/primitives/resume_hook.py +210 -0
  138. pyworkflow/primitives/schedule.py +545 -0
  139. pyworkflow/primitives/shield.py +96 -0
  140. pyworkflow/primitives/sleep.py +100 -0
  141. pyworkflow/runtime/__init__.py +21 -0
  142. pyworkflow/runtime/base.py +179 -0
  143. pyworkflow/runtime/celery.py +310 -0
  144. pyworkflow/runtime/factory.py +101 -0
  145. pyworkflow/runtime/local.py +706 -0
  146. pyworkflow/scheduler/__init__.py +9 -0
  147. pyworkflow/scheduler/local.py +248 -0
  148. pyworkflow/serialization/__init__.py +0 -0
  149. pyworkflow/serialization/decoder.py +146 -0
  150. pyworkflow/serialization/encoder.py +162 -0
  151. pyworkflow/storage/__init__.py +54 -0
  152. pyworkflow/storage/base.py +612 -0
  153. pyworkflow/storage/config.py +185 -0
  154. pyworkflow/storage/dynamodb.py +1315 -0
  155. pyworkflow/storage/file.py +827 -0
  156. pyworkflow/storage/memory.py +549 -0
  157. pyworkflow/storage/postgres.py +1161 -0
  158. pyworkflow/storage/schemas.py +486 -0
  159. pyworkflow/storage/sqlite.py +1136 -0
  160. pyworkflow/utils/__init__.py +0 -0
  161. pyworkflow/utils/duration.py +177 -0
  162. pyworkflow/utils/schedule.py +391 -0
  163. pyworkflow_engine-0.1.7.dist-info/METADATA +687 -0
  164. pyworkflow_engine-0.1.7.dist-info/RECORD +196 -0
  165. pyworkflow_engine-0.1.7.dist-info/WHEEL +5 -0
  166. pyworkflow_engine-0.1.7.dist-info/entry_points.txt +2 -0
  167. pyworkflow_engine-0.1.7.dist-info/licenses/LICENSE +21 -0
  168. pyworkflow_engine-0.1.7.dist-info/top_level.txt +5 -0
  169. tests/examples/__init__.py +0 -0
  170. tests/integration/__init__.py +0 -0
  171. tests/integration/test_cancellation.py +330 -0
  172. tests/integration/test_child_workflows.py +439 -0
  173. tests/integration/test_continue_as_new.py +428 -0
  174. tests/integration/test_dynamodb_storage.py +1146 -0
  175. tests/integration/test_fault_tolerance.py +369 -0
  176. tests/integration/test_schedule_storage.py +484 -0
  177. tests/unit/__init__.py +0 -0
  178. tests/unit/backends/__init__.py +1 -0
  179. tests/unit/backends/test_dynamodb_storage.py +1554 -0
  180. tests/unit/backends/test_postgres_storage.py +1281 -0
  181. tests/unit/backends/test_sqlite_storage.py +1460 -0
  182. tests/unit/conftest.py +41 -0
  183. tests/unit/test_cancellation.py +364 -0
  184. tests/unit/test_child_workflows.py +680 -0
  185. tests/unit/test_continue_as_new.py +441 -0
  186. tests/unit/test_event_limits.py +316 -0
  187. tests/unit/test_executor.py +320 -0
  188. tests/unit/test_fault_tolerance.py +334 -0
  189. tests/unit/test_hooks.py +495 -0
  190. tests/unit/test_registry.py +261 -0
  191. tests/unit/test_replay.py +420 -0
  192. tests/unit/test_schedule_schemas.py +285 -0
  193. tests/unit/test_schedule_utils.py +286 -0
  194. tests/unit/test_scheduled_workflow.py +274 -0
  195. tests/unit/test_step.py +353 -0
  196. tests/unit/test_workflow.py +243 -0
pyworkflow/cli/commands/setup.py
@@ -0,0 +1,703 @@
+"""Interactive setup command for PyWorkflow."""
+
+import sys
+from pathlib import Path
+
+import click
+
+from pyworkflow.cli.output.formatters import (
+    print_error,
+    print_info,
+    print_success,
+    print_warning,
+)
+from pyworkflow.cli.utils.config_generator import (
+    display_config_summary,
+    find_yaml_config,
+    generate_yaml_config,
+    load_yaml_config,
+    write_yaml_config,
+)
+from pyworkflow.cli.utils.docker_manager import (
+    check_docker_available,
+    check_service_health,
+    generate_docker_compose_content,
+    generate_postgres_docker_compose_content,
+    run_docker_command,
+    write_docker_compose,
+)
+from pyworkflow.cli.utils.interactive import (
+    confirm,
+    filepath,
+    input_text,
+    select,
+    validate_module_path,
+)
+
+
+def _flatten_yaml_config(nested_config: dict) -> dict:
+    """
+    Convert nested YAML config to flat format expected by setup internals.
+
+    Nested format (from YAML):
+        {
+            "module": "workflows",
+            "runtime": "celery",
+            "storage": {"type": "sqlite", "base_path": "..."},
+            "celery": {"broker": "...", "result_backend": "..."}
+        }
+
+    Flat format (for setup):
+        {
+            "module": "workflows",
+            "runtime": "celery",
+            "storage_type": "sqlite",
+            "storage_path": "...",
+            "broker_url": "...",
+            "result_backend": "..."
+        }
+    """
+    storage = nested_config.get("storage", {})
+    celery = nested_config.get("celery", {})
+
+    return {
+        "module": nested_config.get("module"),
+        "runtime": nested_config.get("runtime", "celery"),
+        "storage_type": storage.get("type", "file"),
+        "storage_path": storage.get("base_path") or storage.get("path"),
+        "broker_url": celery.get("broker", "redis://localhost:6379/0"),
+        "result_backend": celery.get("result_backend", "redis://localhost:6379/1"),
+    }
+
+
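For reference, a minimal sketch of what the flattening above yields at a call site; the nested input mirrors the docstring example, and the concrete values are illustrative only:

    # Illustrative nested config, shaped like what load_yaml_config() returns (values assumed).
    nested = {
        "module": "workflows",
        "runtime": "celery",
        "storage": {"type": "sqlite", "base_path": "./pyworkflow_data/pyworkflow.db"},
        "celery": {"broker": "redis://localhost:6379/0", "result_backend": "redis://localhost:6379/1"},
    }
    flat = _flatten_yaml_config(nested)
    # flat == {
    #     "module": "workflows",
    #     "runtime": "celery",
    #     "storage_type": "sqlite",
    #     "storage_path": "./pyworkflow_data/pyworkflow.db",
    #     "broker_url": "redis://localhost:6379/0",
    #     "result_backend": "redis://localhost:6379/1",
    # }
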
+@click.command(name="setup")
+@click.option(
+    "--non-interactive",
+    is_flag=True,
+    help="Run without prompts (use defaults)",
+)
+@click.option(
+    "--skip-docker",
+    is_flag=True,
+    help="Skip Docker infrastructure setup",
+)
+@click.option(
+    "--module",
+    help="Workflow module path (e.g., myapp.workflows)",
+)
+@click.option(
+    "--storage",
+    type=click.Choice(["file", "memory", "sqlite", "postgres", "dynamodb"], case_sensitive=False),
+    help="Storage backend type",
+)
+@click.option(
+    "--storage-path",
+    help="Storage path for file/sqlite backends",
+)
+@click.pass_context
+def setup(
+    ctx: click.Context,
+    non_interactive: bool,
+    skip_docker: bool,
+    module: str | None,
+    storage: str | None,
+    storage_path: str | None,
+) -> None:
+    """
+    Interactive setup for PyWorkflow environment.
+
+    This command will:
+    1. Detect or create pyworkflow.config.yaml
+    2. Generate docker-compose.yml and Dockerfiles
+    3. Start Redis and Dashboard services via Docker
+    4. Validate the complete setup
+
+    Examples:
+
+        # Interactive setup (recommended)
+        $ pyworkflow setup
+
+        # Non-interactive with defaults
+        $ pyworkflow setup --non-interactive
+
+        # Skip Docker setup
+        $ pyworkflow setup --skip-docker
+
+        # Specify options directly
+        $ pyworkflow setup --module myapp.workflows --storage sqlite
+    """
+    try:
+        _run_setup(
+            ctx=ctx,
+            non_interactive=non_interactive,
+            skip_docker=skip_docker,
+            module_override=module,
+            storage_override=storage,
+            storage_path_override=storage_path,
+        )
+    except click.Abort:
+        print_warning("\nSetup cancelled by user")
+        sys.exit(1)
+    except Exception as e:
+        print_error(f"\nSetup failed: {str(e)}")
+        if ctx.obj.get("verbose"):
+            raise
+        sys.exit(1)
+
+
+def _run_setup(
+    ctx: click.Context,
+    non_interactive: bool,
+    skip_docker: bool,
+    module_override: str | None,
+    storage_override: str | None,
+    storage_path_override: str | None,
+) -> None:
+    """Main setup workflow."""
+    # 1. Welcome & Banner
+    _print_welcome()
+
+    # 2. Pre-flight checks
+    docker_available, docker_error = check_docker_available()
+    if not docker_available:
+        print_warning(f"Docker: {docker_error}")
+        if not skip_docker:
+            if non_interactive:
+                print_info("Continuing without Docker (--non-interactive mode)")
+                skip_docker = True
+            else:
+                if not confirm("Continue without Docker?", default=False):
+                    print_info("\nPlease install Docker and try again:")
+                    print_info(" https://docs.docker.com/get-docker/")
+                    raise click.Abort()
+                skip_docker = True
+
+    # 3. Detect existing config
+    config_path = Path.cwd() / "pyworkflow.config.yaml"
+    config_data = None
+
+    existing_config = find_yaml_config()
+    if existing_config and not non_interactive:
+        print_info(f"\nFound existing config: {existing_config}")
+
+        choice = select(
+            "What would you like to do?",
+            choices=[
+                {"name": "Use existing configuration", "value": "use"},
+                {"name": "View configuration first", "value": "view"},
+                {"name": "Create new configuration", "value": "new"},
+            ],
+        )
+
+        if choice == "use":
+            config_data = _flatten_yaml_config(load_yaml_config(existing_config))
+            print_success("Using existing configuration")
+
+        elif choice == "view":
+            # Display config
+            print_info("\nCurrent configuration:")
+            print_info("-" * 50)
+            with open(existing_config) as f:
+                for line in f:
+                    print_info(f" {line.rstrip()}")
+            print_info("-" * 50)
+
+            if confirm("\nUse this configuration?"):
+                config_data = _flatten_yaml_config(load_yaml_config(existing_config))
+
+    # 4. Interactive configuration (if needed)
+    if not config_data:
+        config_data = _run_interactive_configuration(
+            non_interactive=non_interactive,
+            module_override=module_override,
+            storage_override=storage_override,
+            storage_path_override=storage_path_override,
+        )
+
+    # 5. Display summary
+    print_info("")
+    # Convert flat config_data to nested structure for display
+    display_config = {
+        "module": config_data.get("module"),
+        "runtime": config_data["runtime"],
+        "storage": {
+            "type": config_data["storage_type"],
+            "base_path": config_data.get("storage_path"),
+        },
+        "celery": {
+            "broker": config_data["broker_url"],
+            "result_backend": config_data["result_backend"],
+        },
+    }
+    for line in display_config_summary(display_config):
+        print_info(line)
+
+    if not non_interactive:
+        if not confirm("\nProceed with this configuration?"):
+            print_warning("Setup cancelled")
+            raise click.Abort()
+
+    # 6. Write configuration file
+    print_info("\nGenerating configuration...")
+    yaml_content = generate_yaml_config(
+        module=config_data.get("module"),
+        runtime=config_data["runtime"],
+        storage_type=config_data["storage_type"],
+        storage_path=config_data.get("storage_path"),
+        broker_url=config_data["broker_url"],
+        result_backend=config_data["result_backend"],
+        postgres_host=config_data.get("postgres_host"),
+        postgres_port=config_data.get("postgres_port"),
+        postgres_user=config_data.get("postgres_user"),
+        postgres_password=config_data.get("postgres_password"),
+        postgres_database=config_data.get("postgres_database"),
+        dynamodb_table_name=config_data.get("dynamodb_table_name"),
+        dynamodb_region=config_data.get("dynamodb_region"),
+        dynamodb_endpoint_url=config_data.get("dynamodb_endpoint_url"),
+    )
+
+    config_file_path = write_yaml_config(yaml_content, config_path, backup=True)
+    print_success(f"Configuration saved: {config_file_path}")
+
+    # 7. Docker setup (if enabled)
+    dashboard_available = False
+    if not skip_docker:
+        dashboard_available = _setup_docker_infrastructure(
+            config_data=config_data,
+            non_interactive=non_interactive,
+        )
+
+    # 8. Final validation
+    _validate_setup(config_data, skip_docker)
+
+    # 9. Show next steps
+    _show_next_steps(config_data, skip_docker, dashboard_available)
+
+
+def _print_welcome() -> None:
+    """Print welcome banner."""
+    print_info("")
+    print_info("=" * 60)
+    print_info(" PyWorkflow Interactive Setup")
+    print_info("=" * 60)
+    print_info("")
+
+
+def _check_sqlite_available() -> bool:
+    """
+    Check if SQLite is available in the Python build.
+
+    Returns:
+        True if SQLite is available, False otherwise
+    """
+    try:
+        import sqlite3  # noqa: F401
+
+        return True
+    except ImportError:
+        return False
+
+
+def _check_postgres_available() -> bool:
+    """
+    Check if asyncpg is installed for PostgreSQL support.
+
+    Returns:
+        True if asyncpg is available, False otherwise
+    """
+    try:
+        import asyncpg  # noqa: F401
+
+        return True
+    except ImportError:
+        return False
+
+
+def _run_interactive_configuration(
+    non_interactive: bool,
+    module_override: str | None,
+    storage_override: str | None,
+    storage_path_override: str | None,
+) -> dict[str, str]:
+    """Run interactive configuration prompts."""
+    print_info("Let's configure PyWorkflow for your project...\n")
+
+    config_data: dict[str, str] = {}
+
+    # Module (optional)
+    if module_override:
+        config_data["module"] = module_override
+    elif not non_interactive:
+        if confirm("Do you want to specify a workflow module now?", default=False):
+            module = input_text(
+                "Workflow module path (e.g., myapp.workflows):",
+                default="",
+                validate=validate_module_path,
+            )
+            if module:
+                config_data["module"] = module
+
+    # Runtime (currently only Celery)
+    config_data["runtime"] = "celery"
+    print_info("✓ Runtime: Celery (distributed workers)")
+
+    # Broker (currently only Redis)
+    config_data["broker_url"] = "redis://localhost:6379/0"
+    config_data["result_backend"] = "redis://localhost:6379/1"
+    print_info("✓ Broker: Redis (will be started via Docker)")
+
+    # Check if SQLite and PostgreSQL are available
+    sqlite_available = _check_sqlite_available()
+    postgres_available = _check_postgres_available()
+
+    # Storage backend
+    if storage_override:
+        storage_type = storage_override.lower()
+        # Validate if sqlite was requested but not available
+        if storage_type == "sqlite" and not sqlite_available:
+            print_error("\nSQLite storage backend is not available!")
+            print_info("\nYour Python installation was built without SQLite support.")
+            print_info("To fix this, install SQLite development libraries and rebuild Python:")
+            print_info("")
+            print_info(" # On Ubuntu/Debian:")
+            print_info(" sudo apt-get install libsqlite3-dev")
+            print_info("")
+            print_info(" # Then rebuild Python:")
+            print_info(" pyenv uninstall 3.13.5")
+            print_info(" pyenv install 3.13.5")
+            print_info("")
+            print_info("Or choose a different storage backend: --storage file")
+            raise click.Abort()
+        # Validate if postgres was requested but not available
+        if storage_type == "postgres" and not postgres_available:
+            print_error("\nPostgreSQL storage backend is not available!")
+            print_info("\nasyncpg package is not installed.")
+            print_info("To fix this, install asyncpg:")
+            print_info("")
+            print_info(" pip install asyncpg")
+            print_info("")
+            print_info("Or choose a different storage backend: --storage sqlite")
+            raise click.Abort()
+    elif non_interactive:
+        if sqlite_available:
+            storage_type = "sqlite"
+        else:
+            print_error("\nSQLite storage backend is not available!")
+            print_info("\nYour Python installation was built without SQLite support.")
+            print_info("To fix this, install SQLite development libraries and rebuild Python:")
+            print_info("")
+            print_info(" # On Ubuntu/Debian:")
+            print_info(" sudo apt-get install libsqlite3-dev")
+            print_info("")
+            print_info(" # Then rebuild Python:")
+            print_info(" pyenv uninstall 3.13.5")
+            print_info(" pyenv install 3.13.5")
+            print_info("")
+            print_info("To use setup in non-interactive mode, specify: --storage file")
+            raise click.Abort()
+    else:
+        print_info("")
+        # Build choices based on available backends
+        choices = []
+        if sqlite_available:
+            choices.append(
+                {"name": "SQLite - Single file database (recommended)", "value": "sqlite"}
+            )
+        if postgres_available:
+            choices.append(
+                {"name": "PostgreSQL - Scalable production database", "value": "postgres"}
+            )
+        choices.extend(
+            [
+                {
+                    "name": "File - JSON files on disk"
+                    + (" (recommended)" if not sqlite_available else ""),
+                    "value": "file",
+                },
+                {"name": "Memory - In-memory only (dev/testing)", "value": "memory"},
+                {"name": "DynamoDB - AWS serverless storage (cloud)", "value": "dynamodb"},
+            ]
+        )
+
+        if not sqlite_available:
+            print_warning("\nNote: SQLite is not available in your Python build")
+            print_info("To enable SQLite, install libsqlite3-dev and rebuild Python")
+            print_info("")
+
+        if not postgres_available:
+            print_info("Note: PostgreSQL backend available after: pip install asyncpg")
+            print_info("")
+
+        storage_type = select(
+            "Choose storage backend:",
+            choices=choices,
+        )
+
+    config_data["storage_type"] = storage_type
+
+    # Storage path (for file/sqlite)
+    if storage_type in ["file", "sqlite"]:
+        if storage_path_override:
+            final_storage_path = storage_path_override
+        elif non_interactive:
+            final_storage_path = (
+                "./pyworkflow_data/pyworkflow.db"
+                if storage_type == "sqlite"
+                else "./pyworkflow_data"
+            )
+        else:
+            default_path = (
+                "./pyworkflow_data/pyworkflow.db"
+                if storage_type == "sqlite"
+                else "./pyworkflow_data"
+            )
+            final_storage_path = filepath(
+                "Storage path:",
+                default=default_path,
+                only_directories=(storage_type == "file"),
+            )
+
+        config_data["storage_path"] = final_storage_path
+
+    # PostgreSQL connection (for postgres backend)
+    if storage_type == "postgres":
+        if non_interactive:
+            # Use default connection settings for non-interactive mode
+            config_data["postgres_host"] = "localhost"
+            config_data["postgres_port"] = "5432"
+            config_data["postgres_user"] = "pyworkflow"
+            config_data["postgres_password"] = "pyworkflow"
+            config_data["postgres_database"] = "pyworkflow"
+        else:
+            print_info("\nConfigure PostgreSQL connection:")
+            config_data["postgres_host"] = input_text(
+                "PostgreSQL host:",
+                default="localhost",
+            )
+            config_data["postgres_port"] = input_text(
+                "PostgreSQL port:",
+                default="5432",
+            )
+            config_data["postgres_database"] = input_text(
+                "Database name:",
+                default="pyworkflow",
+            )
+            config_data["postgres_user"] = input_text(
+                "Database user:",
+                default="pyworkflow",
+            )
+            config_data["postgres_password"] = input_text(
+                "Database password:",
+                default="pyworkflow",
+            )
+
+    # DynamoDB configuration
+    elif storage_type == "dynamodb":
+        if non_interactive:
+            config_data["dynamodb_table_name"] = "pyworkflow"
+            config_data["dynamodb_region"] = "us-east-1"
+        else:
+            table_name = input_text(
+                "DynamoDB table name:",
+                default="pyworkflow",
+            )
+            config_data["dynamodb_table_name"] = table_name
+
+            region = input_text(
+                "AWS region:",
+                default="us-east-1",
+            )
+            config_data["dynamodb_region"] = region
+
+            # Optional local endpoint for development
+            if confirm("Use local DynamoDB endpoint (for development)?", default=False):
+                endpoint = input_text(
+                    "Local endpoint URL:",
+                    default="http://localhost:8000",
+                )
+                config_data["dynamodb_endpoint_url"] = endpoint
+
+    return config_data
+
+
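As a reference point, this is the flat dict the prompt flow above produces under --non-interactive with SQLite importable; a sketch derived from the defaults in the code, not captured output:

    # _run_interactive_configuration(non_interactive=True, module_override=None,
    #                                storage_override=None, storage_path_override=None)
    {
        "runtime": "celery",
        "broker_url": "redis://localhost:6379/0",
        "result_backend": "redis://localhost:6379/1",
        "storage_type": "sqlite",
        "storage_path": "./pyworkflow_data/pyworkflow.db",
    }
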
+def _setup_docker_infrastructure(
+    config_data: dict[str, str],
+    non_interactive: bool,
+) -> bool:
+    """Set up Docker infrastructure.
+
+    Returns:
+        True if dashboard is available, False otherwise
+    """
+    print_info("\nSetting up Docker infrastructure...")
+
+    # Generate docker-compose.yml based on storage type
+    print_info(" Generating docker-compose.yml...")
+    storage_type = config_data["storage_type"]
+
+    if storage_type == "postgres":
+        compose_content = generate_postgres_docker_compose_content(
+            postgres_host="postgres",
+            postgres_port=int(config_data.get("postgres_port", "5432")),
+            postgres_user=config_data.get("postgres_user", "pyworkflow"),
+            postgres_password=config_data.get("postgres_password", "pyworkflow"),
+            postgres_database=config_data.get("postgres_database", "pyworkflow"),
+        )
+    else:
+        compose_content = generate_docker_compose_content(
+            storage_type=storage_type,
+            storage_path=config_data.get("storage_path"),
+        )
+
+    compose_path = Path.cwd() / "docker-compose.yml"
+    write_docker_compose(compose_content, compose_path)
+    print_success(f" Created: {compose_path}")
+
+    # Pull images
+    print_info("\n Pulling Docker images...")
+    print_info("")
+    pull_success, output = run_docker_command(
+        ["pull"],
+        compose_file=compose_path,
+        stream_output=True,
+    )
+
+    dashboard_available = pull_success
+    if not pull_success:
+        print_warning("\n Failed to pull dashboard images")
+        print_info(" Continuing with Redis setup only...")
+        print_info(" You can still use PyWorkflow without the dashboard.")
+    else:
+        print_success("\n Images pulled successfully")
+
+    # Start services
+    print_info("\n Starting services...")
+    print_info("")
+
+    # Include postgres in services to start if using postgres storage
+    services_to_start = ["redis"]
+    if storage_type == "postgres":
+        services_to_start.insert(0, "postgres")
+    if dashboard_available:
+        services_to_start.extend(["dashboard-backend", "dashboard-frontend"])
+
+    success, output = run_docker_command(
+        ["up", "-d"] + services_to_start,
+        compose_file=compose_path,
+        stream_output=True,
+    )
+
+    if not success:
+        print_error("\n Failed to start services")
+        print_info("\n Troubleshooting:")
+        ports_in_use = "6379, 8585, 5173"
+        if storage_type == "postgres":
+            postgres_port = config_data.get("postgres_port", "5432")
+            ports_in_use = f"{postgres_port}, {ports_in_use}"
+        print_info(f" • Check if ports {ports_in_use} are already in use")
+        print_info(" • View logs: docker compose logs")
+        print_info(" • Try: docker compose down && docker compose up -d")
+        return False
+
+    print_success("\n Services started")
+
+    # Health checks
+    print_info("\n Checking service health...")
+    health_checks = {
+        "Redis": {"type": "tcp", "host": "localhost", "port": 6379},
+    }
+
+    # Add PostgreSQL health check if using postgres storage
+    if storage_type == "postgres":
+        pg_port = int(config_data.get("postgres_port", "5432"))
+        health_checks["PostgreSQL"] = {"type": "tcp", "host": "localhost", "port": pg_port}
+
+    # Only check dashboard health if it was started
+    if dashboard_available:
+        health_checks["Dashboard Backend"] = {
+            "type": "http",
+            "url": "http://localhost:8585/api/v1/health",
+        }
+        health_checks["Dashboard Frontend"] = {"type": "http", "url": "http://localhost:5173"}
+
+    health_results = check_service_health(health_checks)
+
+    for service_name, healthy in health_results.items():
+        if healthy:
+            print_success(f" {service_name}: Ready")
+        else:
+            print_warning(f" {service_name}: Not responding (may still be starting)")
+
+    return dashboard_available
+
+
+def _validate_setup(config_data: dict[str, str], skip_docker: bool) -> None:
+    """Validate the setup."""
+    print_info("\nValidating setup...")
+
+    checks_passed = True
+
+    # Check config file exists
+    config_path = Path.cwd() / "pyworkflow.config.yaml"
+    if config_path.exists():
+        print_success(" Configuration file: OK")
+    else:
+        print_error(" Configuration file: Missing")
+        checks_passed = False
+
+    # Check docker compose file (if docker enabled)
+    if not skip_docker:
+        compose_path = Path.cwd() / "docker-compose.yml"
+        if compose_path.exists():
+            print_success(" Docker Compose file: OK")
+        else:
+            print_warning(" Docker Compose file: Missing")
+
+    if checks_passed:
+        print_success("\nValidation passed!")
+    else:
+        print_warning("\nValidation completed with warnings")
+
+
+def _show_next_steps(
+    config_data: dict[str, str], skip_docker: bool, dashboard_available: bool = False
+) -> None:
+    """Display next steps to the user."""
+    print_info("\n" + "=" * 60)
+    print_success("Setup Complete!")
+    print_info("=" * 60)
+
+    if not skip_docker:
+        print_info("\nServices running:")
+        if config_data.get("storage_type") == "postgres":
+            postgres_port = config_data.get("postgres_port", "5432")
+            print_info(f" • PostgreSQL: localhost:{postgres_port}")
+        print_info(" • Redis: redis://localhost:6379")
+        if dashboard_available:
+            print_info(" • Dashboard: http://localhost:5173")
+            print_info(" • Dashboard API: http://localhost:8585/docs")
+
+    print_info("\nNext steps:")
+    print_info("")
+    print_info(" 1. Start a Celery worker:")
+    print_info(" $ pyworkflow worker run")
+    print_info("")
+    print_info(" 2. Run a workflow:")
+    print_info(" $ pyworkflow workflows run <workflow_name>")
+
+    if not skip_docker and dashboard_available:
+        print_info("")
+        print_info(" 3. View the dashboard:")
+        print_info(" Open http://localhost:5173 in your browser")
+
+    if not config_data.get("module"):
+        print_info("")
+        print_warning(" Note: No workflow module configured yet")
+        print_info(" Add 'module: your.workflows' to pyworkflow.config.yaml")
+
+    if not skip_docker:
+        print_info("")
+        print_info("To stop services:")
+        print_info(" $ docker compose down")
+
+    print_info("")
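
A minimal sketch of exercising this command end to end with Click's test runner, using the module path shown in the file list above; the Docker step is skipped and the empty context object stands in for whatever the parent CLI group normally supplies:

    from click.testing import CliRunner

    from pyworkflow.cli.commands.setup import setup

    runner = CliRunner()
    # Use a throwaway directory so pyworkflow.config.yaml is written there, not into the project.
    with runner.isolated_filesystem():
        result = runner.invoke(
            setup,
            ["--non-interactive", "--skip-docker", "--storage", "file"],
            obj={},  # setup() only reads ctx.obj.get("verbose") in its error handler
        )
    print(result.exit_code)
    print(result.output)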