pyworkflow-engine 0.1.24__py3-none-any.whl → 0.1.25__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pyworkflow/__init__.py CHANGED
@@ -29,7 +29,7 @@ Quick Start:
     >>> run_id = await start(my_workflow, "Alice")
     """
 
-__version__ = "0.1.24"
+__version__ = "0.1.25"
 
 # Configuration
 from pyworkflow.config import (
pyworkflow/celery/app.py CHANGED
@@ -202,11 +202,16 @@ def create_celery_app(
     """
     # Priority: parameter > environment variable > hardcoded default
     broker_url = broker_url or os.getenv("PYWORKFLOW_CELERY_BROKER") or "redis://localhost:6379/0"
-    result_backend = (
-        result_backend
-        or os.getenv("PYWORKFLOW_CELERY_RESULT_BACKEND")
-        or "redis://localhost:6379/1"
-    )
+
+    # Result backend defaults to None (disabled) unless explicitly set
+    if result_backend is None and "PYWORKFLOW_CELERY_RESULT_BACKEND" not in os.environ:
+        result_backend = None  # Disabled by default
+    else:
+        result_backend = (
+            result_backend
+            or os.getenv("PYWORKFLOW_CELERY_RESULT_BACKEND")
+            or "redis://localhost:6379/1"
+        )
 
     # Worker memory limits (KB) - prevents memory leaks from accumulating
     # Priority: parameter > env var > None (no limit by default)
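
The hunk above makes the Celery result backend opt-in: unless the caller passes `result_backend` or sets `PYWORKFLOW_CELERY_RESULT_BACKEND`, it stays `None` and result storage is disabled. A minimal usage sketch of the new behavior (it assumes `create_celery_app`'s remaining parameters are optional, which the fallbacks in this hunk suggest):

```python
import os

from pyworkflow.celery.app import create_celery_app

# Default: broker falls back to redis://localhost:6379/0,
# result backend stays None (disabled)
app = create_celery_app()

# Opt back in explicitly via the parameter...
app = create_celery_app(result_backend="redis://localhost:6379/1")

# ...or via the environment. Note that an *empty* value still selects the
# else-branch, where the falsy string falls through to the old default
# redis://localhost:6379/1.
os.environ["PYWORKFLOW_CELERY_RESULT_BACKEND"] = ""
app = create_celery_app()
```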
@@ -218,7 +223,7 @@ def create_celery_app(
 
     # Detect broker and backend types
     is_sentinel_broker = is_sentinel_url(broker_url)
-    is_sentinel_backend = is_sentinel_url(result_backend)
+    is_sentinel_backend = is_sentinel_url(result_backend) if result_backend else False
     is_redis_broker = broker_url.startswith("redis://") or broker_url.startswith("rediss://")
 
     # Get Sentinel master name from param, env, or default
@@ -264,25 +269,24 @@ def create_celery_app(
         ],
     )
 
-    # Configure Celery
-    app.conf.update(
+    # Build configuration dict
+    config_dict = {
         # Task execution settings
-        task_serializer="json",
-        result_serializer="json",
-        accept_content=["json"],
-        timezone="UTC",
-        enable_utc=True,
+        "task_serializer": "json",
+        "result_serializer": "json",
+        "accept_content": ["json"],
+        "timezone": "UTC",
+        "enable_utc": True,
         # Broker transport options - prevent task redelivery
         # See: https://github.com/celery/celery/issues/5935
-        broker_transport_options=final_broker_opts,
-        result_backend_transport_options=final_backend_opts,
+        "broker_transport_options": final_broker_opts,
         # Task routing
-        task_default_queue="pyworkflow.default",
-        task_default_exchange="pyworkflow",
-        task_default_exchange_type="topic",
-        task_default_routing_key="workflow.default",
+        "task_default_queue": "pyworkflow.default",
+        "task_default_exchange": "pyworkflow",
+        "task_default_exchange_type": "topic",
+        "task_default_routing_key": "workflow.default",
         # Task queues
-        task_queues=(
+        "task_queues": (
             Queue(
                 "pyworkflow.default",
                 Exchange("pyworkflow", type="topic"),
@@ -304,42 +308,54 @@ def create_celery_app(
                 routing_key="workflow.schedule.#",
             ),
         ),
-        # Result backend settings
-        result_expires=3600,  # 1 hour
-        result_persistent=True,
         # Task execution
-        task_acks_late=True,
-        task_reject_on_worker_lost=True,
-        worker_prefetch_multiplier=1,  # Fair task distribution
+        "task_acks_late": True,
+        "task_reject_on_worker_lost": True,
+        "worker_prefetch_multiplier": 1,  # Fair task distribution
         # Retry settings
-        task_autoretry_for=(),
-        task_retry_backoff=True,
-        task_retry_backoff_max=600,  # 10 minutes max
-        task_retry_jitter=True,
+        "task_autoretry_for": (),
+        "task_retry_backoff": True,
+        "task_retry_backoff_max": 600,  # 10 minutes max
+        "task_retry_jitter": True,
         # Monitoring
-        worker_send_task_events=True,
-        task_send_sent_event=True,
+        "worker_send_task_events": True,
+        "task_send_sent_event": True,
         # Beat scheduler (for sleep resumption)
-        beat_schedule={},
+        "beat_schedule": {},
         # Logging
-        worker_log_format="[%(asctime)s: %(levelname)s/%(processName)s] %(message)s",
-        worker_task_log_format="[%(asctime)s: %(levelname)s/%(processName)s] [%(task_name)s(%(task_id)s)] %(message)s",
+        "worker_log_format": "[%(asctime)s: %(levelname)s/%(processName)s] %(message)s",
+        "worker_task_log_format": "[%(asctime)s: %(levelname)s/%(processName)s] [%(task_name)s(%(task_id)s)] %(message)s",
         # Worker memory management - prevents memory leaks from accumulating
         # When set, workers are recycled after exceeding these limits
-        worker_max_memory_per_child=max_memory,  # KB, None = no limit
-        worker_max_tasks_per_child=max_tasks,  # None = no limit
-    )
+        "worker_max_memory_per_child": max_memory,  # KB, None = no limit
+        "worker_max_tasks_per_child": max_tasks,  # None = no limit
+    }
+
+    # Only add result backend settings if enabled
+    if result_backend is not None:
+        config_dict.update(
+            {
+                "result_backend_transport_options": final_backend_opts,
+                "result_expires": 3600,  # 1 hour
+                "result_persistent": True,
+            }
+        )
+
+    # Configure Celery
+    app.conf.update(config_dict)
 
     # Configure singleton locking for Redis or Sentinel brokers
     # This enables distributed locking to prevent duplicate task execution
+    # Uses broker URL since result backend may be disabled
     if is_redis_broker or is_sentinel_broker:
-        app.conf.update(
-            singleton_backend_url=broker_url,
-            singleton_backend_is_sentinel=is_sentinel_broker,
-            singleton_sentinel_master=master_name if is_sentinel_broker else None,
-            singleton_key_prefix="pyworkflow:lock:",
-            singleton_lock_expiry=3600,  # 1 hour TTL (safety net)
-        )
+        singleton_config = {
+            "singleton_backend_url": broker_url,  # Use broker, not result backend
+            "singleton_backend_is_sentinel": is_sentinel_broker,
+            "singleton_sentinel_master": master_name if is_sentinel_broker else None,
+            "singleton_key_prefix": "pyworkflow:lock:",
+            "singleton_lock_expiry": 3600,  # 1 hour TTL (safety net)
+        }
+        app.conf.update(singleton_config)
 
     # Note: Logging is configured via Celery signals (worker_init, worker_process_init)
     # to ensure proper initialization AFTER process forking.
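
The singleton settings now key the distributed lock off the broker URL, since a result backend can no longer be assumed. As an illustration of the locking pattern these settings describe (a generic redis-py sketch, not pyworkflow's actual `celery/singleton.py`; only the `pyworkflow:lock:` prefix and the 3600s TTL are taken from the config above):

```python
import uuid

import redis

r = redis.Redis.from_url("redis://localhost:6379/0")  # broker URL, not a result backend

def acquire_lock(task_key: str, ttl: int = 3600) -> str | None:
    """Try to take the singleton lock; return a token if we got it."""
    token = uuid.uuid4().hex
    # SET ... NX EX: succeeds only if the key is absent; the TTL is the
    # safety net if the holder dies without releasing
    if r.set(f"pyworkflow:lock:{task_key}", token, nx=True, ex=ttl):
        return token
    return None

def release_lock(task_key: str, token: str) -> None:
    # Compare-and-delete in Lua, so we never delete a lock someone else
    # re-acquired after our TTL expired
    script = """
    if redis.call('get', KEYS[1]) == ARGV[1] then
        return redis.call('del', KEYS[1])
    end
    return 0
    """
    r.eval(script, 1, f"pyworkflow:lock:{task_key}", token)
```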
pyworkflow/storage/postgres.py CHANGED
@@ -16,6 +16,7 @@ the pool is automatically recreated when a loop change is detected.
 import asyncio
 import contextlib
 import json
+import os
 from datetime import UTC, datetime
 from typing import Any
 
@@ -163,10 +164,10 @@ class PostgresStorageBackend(StorageBackend):
         user: str = "pyworkflow",
         password: str = "",
         database: str = "pyworkflow",
-        min_pool_size: int = 1,
-        max_pool_size: int = 10,
-        max_inactive_connection_lifetime: float = 1800.0,
-        command_timeout: float | None = 60.0,
+        min_pool_size: int | None = None,
+        max_pool_size: int | None = None,
+        max_inactive_connection_lifetime: float | None = None,
+        command_timeout: float | None = None,
     ):
         """
         Initialize PostgreSQL storage backend.
@@ -178,11 +179,13 @@ class PostgresStorageBackend(StorageBackend):
             user: Database user (used if dsn not provided)
             password: Database password (used if dsn not provided)
             database: Database name (used if dsn not provided)
-            min_pool_size: Minimum connections in pool
-            max_pool_size: Maximum connections in pool
+            min_pool_size: Minimum connections in pool (defaults to env var PYWORKFLOW_POSTGRES_MIN_POOL_SIZE or 1)
+            max_pool_size: Maximum connections in pool (defaults to env var PYWORKFLOW_POSTGRES_MAX_POOL_SIZE or 10)
             max_inactive_connection_lifetime: How long (seconds) an idle connection can
-                stay in the pool before being closed. Default 1800s (30 min).
-            command_timeout: Default timeout (seconds) for queries. None for no timeout. Default 60s.
+                stay in the pool before being closed.
+                Defaults to env var PYWORKFLOW_POSTGRES_MAX_INACTIVE_LIFETIME or 1800s (30 min).
+            command_timeout: Default timeout (seconds) for queries. None for no timeout.
+                Defaults to env var PYWORKFLOW_POSTGRES_COMMAND_TIMEOUT or 60s.
         """
         self.dsn = dsn
         self.host = host
@@ -190,10 +193,27 @@ class PostgresStorageBackend(StorageBackend):
         self.user = user
         self.password = password
         self.database = database
-        self.min_pool_size = min_pool_size
-        self.max_pool_size = max_pool_size
-        self.max_inactive_connection_lifetime = max_inactive_connection_lifetime
-        self.command_timeout = command_timeout
+
+        # Read from env vars if not provided
+        self.min_pool_size = min_pool_size or int(
+            os.getenv("PYWORKFLOW_POSTGRES_MIN_POOL_SIZE", "1")
+        )
+        self.max_pool_size = max_pool_size or int(
+            os.getenv("PYWORKFLOW_POSTGRES_MAX_POOL_SIZE", "10")
+        )
+        self.max_inactive_connection_lifetime = max_inactive_connection_lifetime or float(
+            os.getenv("PYWORKFLOW_POSTGRES_MAX_INACTIVE_LIFETIME", "1800.0")
+        )
+        self.command_timeout = (
+            command_timeout
+            if command_timeout is not None
+            else (
+                float(os.getenv("PYWORKFLOW_POSTGRES_COMMAND_TIMEOUT", "60.0"))
+                if os.getenv("PYWORKFLOW_POSTGRES_COMMAND_TIMEOUT")
+                else 60.0
+            )
+        )
+
         self._pool: asyncpg.Pool | None = None
         self._pool_loop_id: int | None = None  # Track which loop the pool was created on
         self._initialized = False
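
With pool tuning moved behind environment variables, deployments can size the asyncpg pool without code changes. A usage sketch grounded in the names above (the DSN is illustrative; note that the `or` fallbacks treat `0` as unset, and that `command_timeout=None` now selects the env/60s default, so a `None` argument can no longer disable the timeout):

```python
import os

from pyworkflow.storage.postgres import PostgresStorageBackend

# Tune the pool per deployment via the environment
os.environ["PYWORKFLOW_POSTGRES_MIN_POOL_SIZE"] = "2"
os.environ["PYWORKFLOW_POSTGRES_MAX_POOL_SIZE"] = "20"
os.environ["PYWORKFLOW_POSTGRES_MAX_INACTIVE_LIFETIME"] = "600.0"
os.environ["PYWORKFLOW_POSTGRES_COMMAND_TIMEOUT"] = "30.0"

backend = PostgresStorageBackend(dsn="postgresql://pyworkflow@localhost/pyworkflow")
assert backend.max_pool_size == 20

# Explicit arguments still win over the environment...
backend = PostgresStorageBackend(max_pool_size=5)
assert backend.max_pool_size == 5

# ...except falsy ones: `or` treats 0 as "not provided"
backend = PostgresStorageBackend(min_pool_size=0)
assert backend.min_pool_size == 2  # fell through to the env value
```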
pyworkflow_engine-0.1.25.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pyworkflow-engine
-Version: 0.1.24
+Version: 0.1.25
 Summary: A Python implementation of durable, event-sourced workflows inspired by Vercel Workflow
 Author: PyWorkflow Contributors
 License: MIT
pyworkflow_engine-0.1.25.dist-info/RECORD CHANGED
@@ -1,4 +1,4 @@
-pyworkflow/__init__.py,sha256=ySBxZ69-klgc95RAB5PIB2p1g155-5YtVpXph7N5kME,6281
+pyworkflow/__init__.py,sha256=ZCCbsFr23DIq_ygRvn6rs6mXykW1uvo9Plwd3dMaoxg,6281
 pyworkflow/config.py,sha256=pKwPrpCwBJiDpB-MIjM0U7GW1TFmQFO341pihL5-vTM,14455
 pyworkflow/discovery.py,sha256=snW3l4nvY3Nc067TGlwtn_qdzTU9ybN7YPr8FbvY8iM,8066
 pyworkflow/aws/__init__.py,sha256=Ak_xHcR9LTRX-CwcS0XecYmzrXZw4EM3V9aKBBDEmIk,1741
@@ -6,7 +6,7 @@ pyworkflow/aws/context.py,sha256=C_wBr_YRUYT4IHyaAGPdiqRPNbH8mDLkoMpVtQVIg28,822
 pyworkflow/aws/handler.py,sha256=0SnQuIfQVD99QKMCRFPtrsrV_l1LYKFkzPIRx_2UkSI,5849
 pyworkflow/aws/testing.py,sha256=WrRk9wjbycM-UyHFQWNnA83UE9IrYnhfT38WrbxQT2U,8844
 pyworkflow/celery/__init__.py,sha256=FywVyqnT8AYz9cXkr-wel7_-N7dHFsPNASEPMFESf4Q,1179
-pyworkflow/celery/app.py,sha256=MXmD5N8DCrX9YpdccOWPy2ob7k4vg9r0ix1kDgDuR3w,15753
+pyworkflow/celery/app.py,sha256=PBlawOq349IZWJNQyzSC59LBttaUTinVaDPuf8uItS0,16469
 pyworkflow/celery/loop.py,sha256=mu8cIfMJYgHAoGCN_DdDoNoXK3QHzHpLmrPCyFDQYIY,3016
 pyworkflow/celery/scheduler.py,sha256=Ms4rqRpdpMiLM8l4y3DK-Divunj9afYuUaGGoNQe7P4,11288
 pyworkflow/celery/singleton.py,sha256=9gdVHzqFjShZ9OJOJlJNABUg9oqnl6ITGROtomcOtsg,16070
@@ -80,7 +80,7 @@ pyworkflow/storage/dynamodb.py,sha256=hER4rOc0GWh0-yk0YiFrxZkiHJ6DKEOpvZQ9GCEx8Q
 pyworkflow/storage/file.py,sha256=Ag4K5Rt-8YaENZLx-HnN77EAfNc37WqkymmaPlVJbYg,32665
 pyworkflow/storage/memory.py,sha256=HiuGU9jgzKwNGrF98YHdHYrzQTNUu96VyL5HVVeII7A,22179
 pyworkflow/storage/mysql.py,sha256=lS9dZHmtPL14cY1lrzDFilFKmsjMD_b0NnYJfiPDFKQ,52537
-pyworkflow/storage/postgres.py,sha256=Rg6h9syMgjw32-lxbNFroSxD9lBvv-WjaO4uZnglvOo,54056
+pyworkflow/storage/postgres.py,sha256=mup7nk4u14YnrMUUv6hA9-hW-C1Y3iAsH2fAaVss4Ok,54978
 pyworkflow/storage/schemas.py,sha256=o1ntTYNgQQ5YVuXtPCShtENEsndVjdrXclWrkCgkitg,18002
 pyworkflow/storage/sqlite.py,sha256=EJ8n66WfhZnfFLKcNdUOoOCswdftttoH6T31spS78Qo,47667
 pyworkflow/storage/migrations/__init__.py,sha256=R8b-VoVzpYJ6A-Z5FDNbrbOO-2Nnt4NrcAS8Wo08VPo,425
@@ -88,9 +88,9 @@ pyworkflow/storage/migrations/base.py,sha256=Thq38E8BDpC7EmBlwpfm727Zyz4TVmADQ1k
 pyworkflow/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pyworkflow/utils/duration.py,sha256=C-itmiSQQlplw7j6XB679hLF9xYGnyCwm7twO88OF8U,3978
 pyworkflow/utils/schedule.py,sha256=dO_MkGFyfwZpb0LDlW6BGyZzlPuQIA6dc6j9nk9lc4Y,10691
-pyworkflow_engine-0.1.24.dist-info/licenses/LICENSE,sha256=Y49RCTZ5ayn_yzBcRxnyIFdcMCyuYm150aty_FIznfY,1080
-pyworkflow_engine-0.1.24.dist-info/METADATA,sha256=OoG1XNfV_RIwP4cL0QV66cbFKKwv6QqQNl_Mt6FwTgk,19628
-pyworkflow_engine-0.1.24.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
-pyworkflow_engine-0.1.24.dist-info/entry_points.txt,sha256=3IGAfuylnS39U0YX0pxnjrj54kB4iT_bNYrmsiDB-dE,51
-pyworkflow_engine-0.1.24.dist-info/top_level.txt,sha256=FLTv9pQmLDBXrQdLOhTMIS3njFibliMsQEfumqmdzBE,11
-pyworkflow_engine-0.1.24.dist-info/RECORD,,
+pyworkflow_engine-0.1.25.dist-info/licenses/LICENSE,sha256=Y49RCTZ5ayn_yzBcRxnyIFdcMCyuYm150aty_FIznfY,1080
+pyworkflow_engine-0.1.25.dist-info/METADATA,sha256=AaPlIVbbji6szHA7Z5C2KhVSGmVAGAEKtRov0XEkSV4,19628
+pyworkflow_engine-0.1.25.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+pyworkflow_engine-0.1.25.dist-info/entry_points.txt,sha256=3IGAfuylnS39U0YX0pxnjrj54kB4iT_bNYrmsiDB-dE,51
+pyworkflow_engine-0.1.25.dist-info/top_level.txt,sha256=FLTv9pQmLDBXrQdLOhTMIS3njFibliMsQEfumqmdzBE,11
+pyworkflow_engine-0.1.25.dist-info/RECORD,,
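
For reference, the `sha256=` values in RECORD are SHA-256 digests encoded as urlsafe base64 with the `=` padding stripped (per PEP 376 as used by wheels), followed by the file size in bytes. A small sketch of how one of these entries is derived:

```python
import base64
import hashlib

def record_entry(path: str, data: bytes) -> str:
    # urlsafe-base64 SHA-256 digest, '=' padding removed, then byte size
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=")
    return f"{path},sha256={digest.decode()},{len(data)}"

with open("pyworkflow/__init__.py", "rb") as f:
    print(record_entry("pyworkflow/__init__.py", f.read()))
# -> pyworkflow/__init__.py,sha256=ZCCbsFr23DIq_ygRvn6rs6mXykW1uvo9Plwd3dMaoxg,6281
```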