pyworkflow-engine 0.1.10__py3-none-any.whl → 0.1.12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
--- a/pyworkflow/storage/config.py
+++ b/pyworkflow/storage/config.py
@@ -5,12 +5,42 @@ This module provides functions to serialize storage backends to configuration
 dicts and recreate storage backends from configuration dicts. This is used
 for passing storage configuration to Celery tasks and other cross-process
 communication.
+
+Storage backends are cached per-process to reuse connection pools and avoid
+connection exhaustion (e.g., "too many clients" errors with PostgreSQL).
+
+Note: For async backends (postgres, mysql), the backends handle event loop
+changes internally by detecting loop mismatches and recreating the pool.
 """
 
+import contextlib
+import hashlib
+import json
 from typing import Any
 
 from pyworkflow.storage.base import StorageBackend
 
+# Module-level cache for storage backends (per-worker singleton pattern)
+# Key: hash of config dict, Value: tuple of (StorageBackend, reserved for future use)
+_storage_cache: dict[str, tuple[StorageBackend, None]] = {}
+
+
+def _config_to_cache_key(config: dict[str, Any] | None) -> str:
+    """
+    Create a cache key from config dict.
+
+    Args:
+        config: Configuration dict
+
+    Returns:
+        Cache key string (MD5 hash of serialized config)
+    """
+    if config is None:
+        return "default"
+    # Sort keys for consistent hashing
+    serialized = json.dumps(config, sort_keys=True)
+    return hashlib.md5(serialized.encode()).hexdigest()
+
 
 def storage_to_config(storage: StorageBackend | None) -> dict[str, Any] | None:
     """
@@ -106,14 +136,20 @@ def storage_to_config(storage: StorageBackend | None) -> dict[str, Any] | None:
 
 def config_to_storage(config: dict[str, Any] | None = None) -> StorageBackend:
     """
-    Create storage backend from configuration dict.
+    Create or return cached storage backend from configuration dict.
+
+    Storage backends are cached per-process to reuse connection pools.
+    This prevents connection exhaustion with pooled backends like PostgreSQL.
+
+    For async backends (postgres, mysql), the backend handles event loop
+    changes internally by detecting loop mismatches and recreating the pool.
 
     Args:
         config: Configuration dict with 'type' and backend-specific params.
            If None, returns default FileStorageBackend.
 
     Returns:
-        Storage backend instance
+        Storage backend instance (may be cached)
 
     Raises:
        ValueError: If storage type is unknown
@@ -124,6 +160,25 @@ def config_to_storage(config: dict[str, Any] | None = None) -> StorageBackend:
     >>> isinstance(storage, FileStorageBackend)
     True
     """
+    cache_key = _config_to_cache_key(config)
+
+    if cache_key in _storage_cache:
+        cached_storage, _ = _storage_cache[cache_key]
+        return cached_storage
+
+    # Create new instance
+    storage = _create_storage_backend(config)
+    _storage_cache[cache_key] = (storage, None)
+    return storage
+
+
+def _create_storage_backend(config: dict[str, Any] | None) -> StorageBackend:
+    """
+    Internal function that creates a new storage backend instance.
+
+    This should not be called directly - use config_to_storage() instead
+    to benefit from caching.
+    """
     if not config:
         from pyworkflow.storage.file import FileStorageBackend
 
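
Taken together, this makes config_to_storage() a per-process memoized factory. A hedged usage sketch; the "file" type string and "base_path" key are illustrative assumptions, not a documented schema, and clear_storage_cache() is the helper added in the next hunk:

    from pyworkflow.storage.config import clear_storage_cache, config_to_storage

    cfg = {"type": "file", "base_path": "/tmp/workflows"}  # hypothetical params
    s1 = config_to_storage(cfg)
    s2 = config_to_storage(cfg)   # cache hit: same instance, same connection pool
    assert s1 is s2

    clear_storage_cache()         # e.g., between tests
    assert config_to_storage(cfg) is not s1
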
@@ -252,3 +307,27 @@ def config_to_storage(config: dict[str, Any] | None = None) -> StorageBackend:
 
     else:
         raise ValueError(f"Unknown storage type: {storage_type}")
+
+
+async def disconnect_all_cached() -> None:
+    """
+    Disconnect all cached storage backends.
+
+    Call this on worker shutdown to properly close connection pools.
+    This is automatically called by the Celery worker_shutdown signal handler.
+    """
+    for storage, _ in _storage_cache.values():
+        if hasattr(storage, "disconnect"):
+            with contextlib.suppress(Exception):
+                await storage.disconnect()
+    _storage_cache.clear()
+
+
+def clear_storage_cache() -> None:
+    """
+    Clear the storage cache without disconnecting.
+
+    Primarily used for testing to ensure fresh instances.
+    For production cleanup, use disconnect_all_cached() instead.
+    """
+    _storage_cache.clear()
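
The docstring above says disconnect_all_cached() is invoked from the Celery worker_shutdown signal handler; per the RECORD changes below, that wiring lives in pyworkflow/celery/app.py, which also changed in this release. A minimal sketch of what such a handler can look like, not the actual app.py code:

    import asyncio

    from celery.signals import worker_shutdown

    from pyworkflow.storage.config import disconnect_all_cached


    @worker_shutdown.connect
    def _close_storage_pools(**kwargs):
        # Signal handlers are synchronous; run the async cleanup on a fresh loop.
        asyncio.run(disconnect_all_cached())
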
--- a/pyworkflow/storage/postgres.py
+++ b/pyworkflow/storage/postgres.py
@@ -7,8 +7,14 @@ This backend stores workflow data in a PostgreSQL database, suitable for:
 - High-availability requirements
 
 Provides ACID guarantees, connection pooling, and efficient querying with SQL indexes.
+
+Note: The connection pool is bound to a specific event loop. When running in
+environments where each task creates a new event loop (e.g., Celery prefork),
+the pool is automatically recreated when a loop change is detected.
 """
 
+import asyncio
+import contextlib
 import json
 from datetime import UTC, datetime
 from typing import Any
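
The "new event loop per task" situation is easy to reproduce: every asyncio.run() call creates a fresh loop, which is exactly why a pool captured under one loop cannot be awaited under the next. A standalone illustration:

    import asyncio

    loops = []

    async def capture() -> None:
        loops.append(asyncio.get_running_loop())

    asyncio.run(capture())
    asyncio.run(capture())  # each run() creates a brand-new loop
    # Two distinct loop objects: futures and pools bound to the first are
    # unusable on the second, hence the loop-id tracking added below.
    assert loops[0] is not loops[1]
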
@@ -72,6 +78,7 @@ class PostgresStorageBackend(StorageBackend):
         self.min_pool_size = min_pool_size
         self.max_pool_size = max_pool_size
         self._pool: asyncpg.Pool | None = None
+        self._pool_loop_id: int | None = None  # Track which loop the pool was created on
         self._initialized = False
 
     def _build_dsn(self) -> str:
@@ -83,13 +90,29 @@ class PostgresStorageBackend(StorageBackend):
         return f"postgresql://{self.user}@{self.host}:{self.port}/{self.database}"
 
     async def connect(self) -> None:
-        """Initialize connection pool and create tables if needed."""
+        """Initialize connection pool and create tables if needed.
+
+        The pool is bound to the current event loop. If the loop has changed
+        since the pool was created (e.g., in Celery prefork workers), the old
+        pool is closed and a new one is created.
+        """
+        current_loop_id = id(asyncio.get_running_loop())
+
+        # Check if we need to recreate the pool due to loop change
+        if self._pool is not None and self._pool_loop_id != current_loop_id:
+            # Loop changed - the old pool is invalid, close it
+            with contextlib.suppress(Exception):
+                self._pool.terminate()  # Use terminate() instead of close() to avoid awaiting on wrong loop
+            self._pool = None
+            self._initialized = False
+
         if self._pool is None:
             self._pool = await asyncpg.create_pool(
                 dsn=self.dsn or self._build_dsn(),
                 min_size=self.min_pool_size,
                 max_size=self.max_pool_size,
             )
+            self._pool_loop_id = current_loop_id
 
         if not self._initialized:
             await self._initialize_schema()
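
The terminate()-over-close() choice above matters: asyncpg's Pool.close() is a coroutine that drains connections gracefully and must run on the pool's own loop, while Pool.terminate() is a plain synchronous call that closes connections abruptly, so it is the only safe option once the original loop is gone. A small standalone demonstration (assumes a reachable local PostgreSQL; the DSN is a placeholder):

    import asyncio

    import asyncpg

    async def make_pool() -> asyncpg.Pool:
        return await asyncpg.create_pool(dsn="postgresql://localhost/workflows")

    pool = asyncio.run(make_pool())  # the loop that owned this pool is now closed
    pool.terminate()                 # sync and abrupt: safe even after the loop is gone
    # asyncio.run(pool.close())      # graceful close would need the original loop
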
@@ -100,6 +123,7 @@ class PostgresStorageBackend(StorageBackend):
         if self._pool:
             await self._pool.close()
             self._pool = None
+            self._pool_loop_id = None
         self._initialized = False
 
     async def _initialize_schema(self) -> None:
@@ -107,7 +131,7 @@ class PostgresStorageBackend(StorageBackend):
         if not self._pool:
             await self.connect()
 
-        pool = self._ensure_connected()
+        pool = await self._get_pool()
         async with pool.acquire() as conn:
             # Workflow runs table
             await conn.execute("""
@@ -254,17 +278,62 @@ class PostgresStorageBackend(StorageBackend):
             )
         """)
 
+    async def _get_pool(self) -> asyncpg.Pool:
+        """Get the connection pool, connecting/reconnecting if needed.
+
+        This method ensures the pool is connected and on the correct event loop.
+        It handles automatic reconnection when the event loop has changed.
+        """
+        current_loop_id = id(asyncio.get_running_loop())
+
+        # Check if we need to connect or reconnect:
+        # - If no pool exists, we need to connect
+        # - If pool exists but was created on a different loop, we need to reconnect
+        # - If _pool_loop_id is None but pool exists (e.g., mocked for testing),
+        #   we trust the pool and set the loop ID to current
+        if self._pool is None:
+            await self.connect()
+        elif self._pool_loop_id is not None and self._pool_loop_id != current_loop_id:
+            # Pool was created on a different loop - need to reconnect
+            await self.connect()
+        elif self._pool_loop_id is None:
+            # Pool was set externally (e.g., for testing) - track current loop
+            self._pool_loop_id = current_loop_id
+
+        return self._pool  # type: ignore
+
     def _ensure_connected(self) -> asyncpg.Pool:
-        """Ensure database pool is connected."""
+        """Ensure database pool is connected.
+
+        DEPRECATED: Use _get_pool() instead for automatic reconnection.
+        This method is kept for backward compatibility but will raise an error
+        if the pool is on a different event loop.
+        """
         if not self._pool:
             raise RuntimeError("Database not connected. Call connect() first.")
+
+        # Check if we're on a different event loop than when the pool was created
+        try:
+            current_loop_id = id(asyncio.get_running_loop())
+            if self._pool_loop_id is not None and self._pool_loop_id != current_loop_id:
+                raise RuntimeError(
+                    "Database pool was created on a different event loop. "
+                    "Call connect() to recreate the pool on the current loop."
+                )
+        except RuntimeError as e:
+            if "no running event loop" in str(e).lower():
+                # No running loop - this will fail anyway when we try to use the pool
+                pass
+            else:
+                raise
+
         return self._pool
 
     # Workflow Run Operations
 
     async def create_run(self, run: WorkflowRun) -> None:
         """Create a new workflow run record."""
-        pool = self._ensure_connected()
+        pool = await self._get_pool()
 
         async with pool.acquire() as conn:
             await conn.execute(
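
Every data-access method below follows the same one-line migration, so the remaining hunks of this file are mechanical: the synchronous self._ensure_connected() accessor is replaced by the awaited self._get_pool(), trading "raise if the loop changed" for "transparently reconnect". In sketch form (backend stands in for any connected PostgresStorageBackend; the helper name is hypothetical):

    async def example_query(backend) -> None:
        # Old: pool = backend._ensure_connected()  -> RuntimeError on a new loop
        # New: reconnects on the current loop if needed, then proceeds
        pool = await backend._get_pool()
        async with pool.acquire() as conn:
            await conn.execute("SELECT 1")
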
@@ -302,7 +371,7 @@
 
     async def get_run(self, run_id: str) -> WorkflowRun | None:
         """Retrieve a workflow run by ID."""
-        pool = self._ensure_connected()
+        pool = await self._get_pool()
 
         async with pool.acquire() as conn:
             row = await conn.fetchrow("SELECT * FROM workflow_runs WHERE run_id = $1", run_id)
@@ -314,7 +383,7 @@
 
     async def get_run_by_idempotency_key(self, key: str) -> WorkflowRun | None:
         """Retrieve a workflow run by idempotency key."""
-        pool = self._ensure_connected()
+        pool = await self._get_pool()
 
         async with pool.acquire() as conn:
             row = await conn.fetchrow("SELECT * FROM workflow_runs WHERE idempotency_key = $1", key)
@@ -332,7 +401,7 @@
         error: str | None = None,
     ) -> None:
         """Update workflow run status."""
-        pool = self._ensure_connected()
+        pool = await self._get_pool()
 
         now = datetime.now(UTC)
         completed_at = now if status == RunStatus.COMPLETED else None
@@ -371,7 +440,7 @@
         recovery_attempts: int,
     ) -> None:
         """Update the recovery attempts counter for a workflow run."""
-        pool = self._ensure_connected()
+        pool = await self._get_pool()
 
         async with pool.acquire() as conn:
             await conn.execute(
@@ -391,7 +460,7 @@
         context: dict,
     ) -> None:
         """Update the step context for a workflow run."""
-        pool = self._ensure_connected()
+        pool = await self._get_pool()
 
         async with pool.acquire() as conn:
             await conn.execute(
@@ -420,7 +489,7 @@
         cursor: str | None = None,
     ) -> tuple[list[WorkflowRun], str | None]:
         """List workflow runs with optional filtering and pagination."""
-        pool = self._ensure_connected()
+        pool = await self._get_pool()
 
         conditions = []
         params: list[Any] = []
@@ -482,7 +551,7 @@
 
     async def record_event(self, event: Event) -> None:
         """Record an event to the append-only event log."""
-        pool = self._ensure_connected()
+        pool = await self._get_pool()
 
         async with pool.acquire() as conn, conn.transaction():
             # Get next sequence number and insert in a transaction
@@ -511,7 +580,7 @@
         event_types: list[str] | None = None,
     ) -> list[Event]:
         """Retrieve all events for a workflow run, ordered by sequence."""
-        pool = self._ensure_connected()
+        pool = await self._get_pool()
 
         async with pool.acquire() as conn:
             if event_types:
@@ -538,7 +607,7 @@
         event_type: str | None = None,
     ) -> Event | None:
         """Get the latest event for a run, optionally filtered by type."""
-        pool = self._ensure_connected()
+        pool = await self._get_pool()
 
         async with pool.acquire() as conn:
             if event_type:
@@ -572,7 +641,7 @@
 
     async def create_step(self, step: StepExecution) -> None:
         """Create a step execution record."""
-        pool = self._ensure_connected()
+        pool = await self._get_pool()
 
         async with pool.acquire() as conn:
             await conn.execute(
@@ -598,7 +667,7 @@
 
     async def get_step(self, step_id: str) -> StepExecution | None:
         """Retrieve a step execution by ID."""
-        pool = self._ensure_connected()
+        pool = await self._get_pool()
 
         async with pool.acquire() as conn:
             row = await conn.fetchrow("SELECT * FROM steps WHERE step_id = $1", step_id)
@@ -616,7 +685,7 @@
         error: str | None = None,
     ) -> None:
         """Update step execution status."""
-        pool = self._ensure_connected()
+        pool = await self._get_pool()
 
         updates = ["status = $1"]
         params: list[Any] = [status]
@@ -647,7 +716,7 @@
 
     async def list_steps(self, run_id: str) -> list[StepExecution]:
         """List all steps for a workflow run."""
-        pool = self._ensure_connected()
+        pool = await self._get_pool()
 
         async with pool.acquire() as conn:
             rows = await conn.fetch(
@@ -661,7 +730,7 @@
 
     async def create_hook(self, hook: Hook) -> None:
         """Create a hook record."""
-        pool = self._ensure_connected()
+        pool = await self._get_pool()
 
         async with pool.acquire() as conn:
             await conn.execute(
@@ -684,7 +753,7 @@
 
     async def get_hook(self, hook_id: str) -> Hook | None:
         """Retrieve a hook by ID."""
-        pool = self._ensure_connected()
+        pool = await self._get_pool()
 
         async with pool.acquire() as conn:
             row = await conn.fetchrow("SELECT * FROM hooks WHERE hook_id = $1", hook_id)
@@ -696,7 +765,7 @@
 
     async def get_hook_by_token(self, token: str) -> Hook | None:
         """Retrieve a hook by its token."""
-        pool = self._ensure_connected()
+        pool = await self._get_pool()
 
         async with pool.acquire() as conn:
             row = await conn.fetchrow("SELECT * FROM hooks WHERE token = $1", token)
@@ -713,7 +782,7 @@
         payload: str | None = None,
     ) -> None:
         """Update hook status and optionally payload."""
-        pool = self._ensure_connected()
+        pool = await self._get_pool()
 
         updates = ["status = $1"]
         params: list[Any] = [status.value]
@@ -745,7 +814,7 @@
         offset: int = 0,
    ) -> list[Hook]:
         """List hooks with optional filtering."""
-        pool = self._ensure_connected()
+        pool = await self._get_pool()
 
         conditions = []
         params: list[Any] = []
@@ -780,7 +849,7 @@
 
     async def set_cancellation_flag(self, run_id: str) -> None:
         """Set a cancellation flag for a workflow run."""
-        pool = self._ensure_connected()
+        pool = await self._get_pool()
 
         async with pool.acquire() as conn:
             await conn.execute(
@@ -795,7 +864,7 @@
 
     async def check_cancellation_flag(self, run_id: str) -> bool:
         """Check if a cancellation flag is set for a workflow run."""
-        pool = self._ensure_connected()
+        pool = await self._get_pool()
 
         async with pool.acquire() as conn:
             row = await conn.fetchrow("SELECT 1 FROM cancellation_flags WHERE run_id = $1", run_id)
@@ -804,7 +873,7 @@
 
     async def clear_cancellation_flag(self, run_id: str) -> None:
         """Clear the cancellation flag for a workflow run."""
-        pool = self._ensure_connected()
+        pool = await self._get_pool()
 
         async with pool.acquire() as conn:
             await conn.execute("DELETE FROM cancellation_flags WHERE run_id = $1", run_id)
@@ -817,7 +886,7 @@
         continued_to_run_id: str,
     ) -> None:
         """Update the continuation link for a workflow run."""
-        pool = self._ensure_connected()
+        pool = await self._get_pool()
 
         async with pool.acquire() as conn:
             await conn.execute(
@@ -836,7 +905,7 @@
         run_id: str,
     ) -> list[WorkflowRun]:
         """Get all runs in a continue-as-new chain."""
-        pool = self._ensure_connected()
+        pool = await self._get_pool()
 
         # Find the first run in the chain
         current_id: str | None = run_id
@@ -871,7 +940,7 @@
         status: RunStatus | None = None,
     ) -> list[WorkflowRun]:
         """Get all child workflow runs for a parent workflow."""
-        pool = self._ensure_connected()
+        pool = await self._get_pool()
 
         async with pool.acquire() as conn:
             if status:
@@ -913,7 +982,7 @@
 
     async def create_schedule(self, schedule: Schedule) -> None:
         """Create a new schedule record."""
-        pool = self._ensure_connected()
+        pool = await self._get_pool()
 
         # Derive spec_type from the ScheduleSpec
         spec_type = (
@@ -951,7 +1020,7 @@
 
     async def get_schedule(self, schedule_id: str) -> Schedule | None:
         """Retrieve a schedule by ID."""
-        pool = self._ensure_connected()
+        pool = await self._get_pool()
 
         async with pool.acquire() as conn:
             row = await conn.fetchrow("SELECT * FROM schedules WHERE schedule_id = $1", schedule_id)
@@ -963,7 +1032,7 @@
 
     async def update_schedule(self, schedule: Schedule) -> None:
         """Update an existing schedule."""
-        pool = self._ensure_connected()
+        pool = await self._get_pool()
 
         # Derive spec_type from the ScheduleSpec
         spec_type = (
@@ -1000,7 +1069,7 @@
 
     async def delete_schedule(self, schedule_id: str) -> None:
         """Mark a schedule as deleted (soft delete)."""
-        pool = self._ensure_connected()
+        pool = await self._get_pool()
 
         now = datetime.now(UTC)
         async with pool.acquire() as conn:
@@ -1024,7 +1093,7 @@
         offset: int = 0,
     ) -> list[Schedule]:
         """List schedules with optional filtering."""
-        pool = self._ensure_connected()
+        pool = await self._get_pool()
 
         conditions = []
         params: list[Any] = []
@@ -1057,7 +1126,7 @@
 
     async def get_due_schedules(self, now: datetime) -> list[Schedule]:
         """Get all schedules that are due to run."""
-        pool = self._ensure_connected()
+        pool = await self._get_pool()
 
         async with pool.acquire() as conn:
             rows = await conn.fetch(
--- a/pyworkflow_engine-0.1.10.dist-info/METADATA
+++ b/pyworkflow_engine-0.1.12.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pyworkflow-engine
-Version: 0.1.10
+Version: 0.1.12
 Summary: A Python implementation of durable, event-sourced workflows inspired by Vercel Workflow
 Author: PyWorkflow Contributors
 License: MIT
--- a/pyworkflow_engine-0.1.10.dist-info/RECORD
+++ b/pyworkflow_engine-0.1.12.dist-info/RECORD
@@ -1,14 +1,15 @@
-pyworkflow/__init__.py,sha256=x1UEkpGJteYrluuAL54uE2I3rk2TRRcGeTuH5ZSMH7s,6281
-pyworkflow/config.py,sha256=yw_3sJNzBanI9xIqU0kh__QL4hs3UVUBXkeCEmw5cfA,14164
+pyworkflow/__init__.py,sha256=q49wycfPtEoYrdpy3Ic-ox-k1f_zUw1NaOjUvO3EuRU,6281
+pyworkflow/config.py,sha256=pKwPrpCwBJiDpB-MIjM0U7GW1TFmQFO341pihL5-vTM,14455
 pyworkflow/discovery.py,sha256=snW3l4nvY3Nc067TGlwtn_qdzTU9ybN7YPr8FbvY8iM,8066
 pyworkflow/aws/__init__.py,sha256=Ak_xHcR9LTRX-CwcS0XecYmzrXZw4EM3V9aKBBDEmIk,1741
 pyworkflow/aws/context.py,sha256=Vjyjip6U1Emg-WA5TlBaxFhcg15rf9mVJiPfT4VywHc,8217
 pyworkflow/aws/handler.py,sha256=0SnQuIfQVD99QKMCRFPtrsrV_l1LYKFkzPIRx_2UkSI,5849
 pyworkflow/aws/testing.py,sha256=WrRk9wjbycM-UyHFQWNnA83UE9IrYnhfT38WrbxQT2U,8844
 pyworkflow/celery/__init__.py,sha256=FywVyqnT8AYz9cXkr-wel7_-N7dHFsPNASEPMFESf4Q,1179
-pyworkflow/celery/app.py,sha256=EsmRqervXqnJn7Jl76ZDV9OIcNnIb6fRjDeuZEfYJL8,6456
+pyworkflow/celery/app.py,sha256=sIbnz40KGIvPC84f7X-Ds7_GvFhzOZOciSeslk57Cig,9223
+pyworkflow/celery/loop.py,sha256=mu8cIfMJYgHAoGCN_DdDoNoXK3QHzHpLmrPCyFDQYIY,3016
 pyworkflow/celery/scheduler.py,sha256=Ms4rqRpdpMiLM8l4y3DK-Divunj9afYuUaGGoNQe7P4,11288
-pyworkflow/celery/tasks.py,sha256=HJXwJjlhWu9aKQKZn4Os8b3y5OOHK9CUnHMH6xVnlyk,64740
+pyworkflow/celery/tasks.py,sha256=gPdZZ0YOpqPVcj4fNgwVreSKf8gAGaw6v2yGZyYrUvA,82663
 pyworkflow/cli/__init__.py,sha256=tcbe-fcZmyeEKUy_aEo8bsEF40HsNKOwvyMBZIJZPwc,3844
 pyworkflow/cli/__main__.py,sha256=LxLLS4FEEPXa5rWpLTtKuivn6Xp9pGia-QKGoxt9SS0,148
 pyworkflow/cli/commands/__init__.py,sha256=IXvnTgukALckkO8fTlZhVRq80ojSqpnIIgboAg_-yZU,39
@@ -18,7 +19,7 @@ pyworkflow/cli/commands/runs.py,sha256=dkAx0WSBLyooD-vUUDPqgrmM3ElFwqO4nycEZGkNq
 pyworkflow/cli/commands/scheduler.py,sha256=w2iUoJ1CtEtOg_4TWslTHbzEPVsV-YybqWU9jkf38gs,3706
 pyworkflow/cli/commands/schedules.py,sha256=UCKZLTWsiLwCewCEXmqOVQnptvvuIKsWSTXai61RYbM,23466
 pyworkflow/cli/commands/setup.py,sha256=J-9lvz3m2sZiiLzQtQIfjmX0l8IpJ4L-xp5U4P7UmRY,32256
-pyworkflow/cli/commands/worker.py,sha256=UJ8bQJTXMEk3BoMiivClTCKNt_f-g75jJ5O-khfcfsY,12110
+pyworkflow/cli/commands/worker.py,sha256=PamHnEbgr2GQhFikyEEjT_Oai_iIvSs-a8GGXF4lHv0,12196
 pyworkflow/cli/commands/workflows.py,sha256=zRBFeqCa4Uo_wwEjgk0SBmkqgcaMznS6ghe1N0ub8Zs,42673
 pyworkflow/cli/output/__init__.py,sha256=5VxKL3mXah5rCKmctxcAKVwp42T47qT1oBK5LFVHHEg,48
 pyworkflow/cli/output/formatters.py,sha256=QzsgPR3cjIbH0723wuG_HzUx9xC7XMA6-NkT2y2lwtM,8785
@@ -33,22 +34,22 @@ pyworkflow/cli/utils/interactive.py,sha256=S2Ell-rUzzt3V10diGo5XCgiDcYFYSxoXNYkJ
 pyworkflow/cli/utils/storage.py,sha256=a5Iu2Xe1_mPgBVYc8B6I63MFfW12ko7wURqcpq3RBPA,4018
 pyworkflow/context/__init__.py,sha256=dI5zW1lAFGw68jI2UpKUqyADozDboGNl-RmhEvSTuCI,2150
 pyworkflow/context/aws.py,sha256=MYxrFsRzCgaZ0YQAyE26UOT_ryxuag5DwiDSodclQIg,7571
-pyworkflow/context/base.py,sha256=sq2L5odO3IIzgAd_I_ww1-3hCOe3tyJtjrunriPAI7o,13570
-pyworkflow/context/local.py,sha256=jXlY5h3EisP-7TqNVUSMi7mzHOCZNjAMZgCNf6R5OfU,35991
+pyworkflow/context/base.py,sha256=Hlfm5MNHh_BVbRCgEcILmHiqsn81iYFqt0GSLkFGo00,13772
+pyworkflow/context/local.py,sha256=H9UTuIWjelP4Nsc16guDf_fSMxVsIyfzbymJclvADhw,37118
 pyworkflow/context/mock.py,sha256=TJzQ3P3_ZHm1lCJZJACIFFvz2ydFxz2cT9eEGOQS5I0,12061
-pyworkflow/context/step_context.py,sha256=fW0I1t5A-rWqaBN85MNlNmSLAs3W-qf4arcYne0J4Xw,9261
+pyworkflow/context/step_context.py,sha256=6P2jn1v7MTlYaWCTt6DBq7Nkmxm7nvna4oGpTZJeMbg,8862
 pyworkflow/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pyworkflow/core/exceptions.py,sha256=F2nbXyoed7wlIJMeGfpgsIC8ZyWcYN0iKtOnBA7-xnQ,10719
 pyworkflow/core/registry.py,sha256=ZUf2YTpBvWpC9EehRbMF8soXOk9VsjNruoi6lR4O33M,9361
 pyworkflow/core/scheduled.py,sha256=479A7IvjHiMob7ZrZtfE6VqtypG6DLIGMGhh16jLIWM,10522
-pyworkflow/core/step.py,sha256=RY7i0j44Gjg2aziLrnSpdz63fn5GFas4XVb-PTZw2jQ,22473
+pyworkflow/core/step.py,sha256=8S_O_KVoaCJSkE5oUslj8orR-S4qSR6_vQoE0NtZ4dU,23463
 pyworkflow/core/workflow.py,sha256=dlcICq1B69-nxUJth_n-H8U9TjP3QZyjvquQXxWHcxs,12076
 pyworkflow/engine/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-pyworkflow/engine/events.py,sha256=-ix7EZnNRLfSRk4GJAl5-18uela0BDoCghI_4p5UVKc,25114
-pyworkflow/engine/executor.py,sha256=5b50m-a4XjrOoIS9hS4Rsgk_N114s5js6b-LuW2L0Jw,20333
-pyworkflow/engine/replay.py,sha256=SYHR5PkbyZp1cUdGNwbzx-VKNHus7-SohCR6Tox26vA,10875
+pyworkflow/engine/events.py,sha256=KFtyIqQjr1B9Frtd5V1Zq0ph1iwg_Ky3uPzmTYZ1Tnk,25827
+pyworkflow/engine/executor.py,sha256=l2HlpQfqzSL_0WuTFb1c4pEIgYU9JcN792hkGICqCAk,21109
+pyworkflow/engine/replay.py,sha256=bmMb4wzPKaZwPOage3Z-g_5DndYNoSmavMZ9sPiFzYI,9386
 pyworkflow/observability/__init__.py,sha256=M_Uc3WdtshQSxLnj3T8D0M7f4zcCuFzVs8e8PKCuXDc,380
-pyworkflow/observability/logging.py,sha256=4b_N4bIHUxlgOzEn5u1uB-ngCWPNDSU7daKAKxkjBUM,7018
+pyworkflow/observability/logging.py,sha256=4WYR188z8NppWWpgsUkkJfBLWZp5St6ro3i7DJq4LP4,8803
 pyworkflow/primitives/__init__.py,sha256=rEahSVLhG3nSxvcRhJeM1LBSBIV7AkcRTnxuMLmZMTM,1041
 pyworkflow/primitives/child_handle.py,sha256=7NcIaNUQdZEoxmk5gQH1CJ6uQzpro3eFo-sEaM6l6w0,5466
 pyworkflow/primitives/child_workflow.py,sha256=_T7PCqiH0tjIm_lpJ6NmfUPWCFx-MjH6t-C1orwohKs,13134
@@ -61,9 +62,9 @@ pyworkflow/primitives/shield.py,sha256=MUYakU0euZoYNb6MbFyRfJN8GEXsRFkIbZEo84vRN
 pyworkflow/primitives/sleep.py,sha256=iH1e5CoWY-jZbYNAU3GRW1xR_8EtCuPIcIohzU4jWJo,3097
 pyworkflow/runtime/__init__.py,sha256=DkwTgFCMRGyyW8NGcW7Nyy9beOg5kO1TXhqhysj1-aY,649
 pyworkflow/runtime/base.py,sha256=-X2pct03XuA3o1P6yD5ywTDgegN6_a450gG8MBVeKRE,5190
-pyworkflow/runtime/celery.py,sha256=0hSwN4alL69ZgnIgYiITcJ0s_iTi8A_xrsdKo89k4Hs,9431
+pyworkflow/runtime/celery.py,sha256=FMxiLiRf1pLWD7itEyd6klrH8PjSUOLTxWd7E8TyOG4,9476
 pyworkflow/runtime/factory.py,sha256=TRbqWPfyZ0tPFKb0faI9SkBRXxE5AEVTwGW4pS2diM8,2684
-pyworkflow/runtime/local.py,sha256=WsjaNcS_aJKLIdglaukeOj_YZqrQ75glsv1MeqS6VhM,24128
+pyworkflow/runtime/local.py,sha256=8mhUyMJDseXFsO0XvwJN6QkXd_4tjPEHG1_N_BmJgsE,25868
 pyworkflow/scheduler/__init__.py,sha256=lQQo0Cia_ULIg-KPIrqILV30rUIzybxj1k_ZZTQNZyg,222
 pyworkflow/scheduler/local.py,sha256=CnK4UC6ofD3_AZJUlO9iUAdgAnbMmJvPaL_VucNKs5Q,8154
 pyworkflow/serialization/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -72,20 +73,20 @@ pyworkflow/serialization/encoder.py,sha256=ZBwAxe5Bb4MCfFJePHw7ArJlIbBieSwUgsysG
 pyworkflow/storage/__init__.py,sha256=LhVjLNZdo4Mi5dEC75hjSPnbQr9jBoIsTOrC8vzTGOM,1924
 pyworkflow/storage/base.py,sha256=DxgOB9kr3i1uaitY_E9PzhnNWxaq1U5EvbbSjKyoH8M,16104
 pyworkflow/storage/cassandra.py,sha256=Nig0SUlTyxuNgPjOXnVBlzDq3PAGci4jIT1JI0i-GOk,61428
-pyworkflow/storage/config.py,sha256=M5s0ekcGN1Hnj0UkIeaHE4-n-SpsyawN47LyKxKnIqo,9787
+pyworkflow/storage/config.py,sha256=45UMPxRoqgK4ZwE7HIK9ctxE_eoK3eAE_1tRhn3Psd4,12410
 pyworkflow/storage/dynamodb.py,sha256=tGNQQqESxhZzOP5NJULCZKcQf9UuSQNL17TJo6R1jlw,53301
 pyworkflow/storage/file.py,sha256=lKilavXn_CRiIVL5XeV7tY9lm2vJADH-h9Teg0gA84A,28842
 pyworkflow/storage/memory.py,sha256=r2z6LiRw8J2AbO9Qw2wtYjzGfX-VJlRX_RVI2U8c-hs,19753
 pyworkflow/storage/mysql.py,sha256=f1aGyAL8fGsLnmHkpEwP4MFSwvYTpQxOBECHKCnetGI,42904
-pyworkflow/storage/postgres.py,sha256=s9NRBBHRhCOWbqhhg6vXe_8oX8ld0BgrsKW131j0A5s,41327
+pyworkflow/storage/postgres.py,sha256=KrDVF715YSEFouNVQOG6g-ekNvkTtrNxqWSvJMYueeM,44450
 pyworkflow/storage/schemas.py,sha256=o1ntTYNgQQ5YVuXtPCShtENEsndVjdrXclWrkCgkitg,18002
 pyworkflow/storage/sqlite.py,sha256=oBzJnnOp2uk0-U7hMTQk9QgJq3RBwXPQfrmYpivjdgE,39529
 pyworkflow/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pyworkflow/utils/duration.py,sha256=C-itmiSQQlplw7j6XB679hLF9xYGnyCwm7twO88OF8U,3978
 pyworkflow/utils/schedule.py,sha256=dO_MkGFyfwZpb0LDlW6BGyZzlPuQIA6dc6j9nk9lc4Y,10691
-pyworkflow_engine-0.1.10.dist-info/licenses/LICENSE,sha256=Y49RCTZ5ayn_yzBcRxnyIFdcMCyuYm150aty_FIznfY,1080
-pyworkflow_engine-0.1.10.dist-info/METADATA,sha256=8mRzFtjSIFyJmSui5vzZzzl1jf8KmDM-wQ2gCRKeDbA,19628
-pyworkflow_engine-0.1.10.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-pyworkflow_engine-0.1.10.dist-info/entry_points.txt,sha256=3IGAfuylnS39U0YX0pxnjrj54kB4iT_bNYrmsiDB-dE,51
-pyworkflow_engine-0.1.10.dist-info/top_level.txt,sha256=FLTv9pQmLDBXrQdLOhTMIS3njFibliMsQEfumqmdzBE,11
-pyworkflow_engine-0.1.10.dist-info/RECORD,,
+pyworkflow_engine-0.1.12.dist-info/licenses/LICENSE,sha256=Y49RCTZ5ayn_yzBcRxnyIFdcMCyuYm150aty_FIznfY,1080
+pyworkflow_engine-0.1.12.dist-info/METADATA,sha256=lzNct37FWxwhEN07OT37QwSFOiQ2YPSjo5TxonKN1sY,19628
+pyworkflow_engine-0.1.12.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+pyworkflow_engine-0.1.12.dist-info/entry_points.txt,sha256=3IGAfuylnS39U0YX0pxnjrj54kB4iT_bNYrmsiDB-dE,51
+pyworkflow_engine-0.1.12.dist-info/top_level.txt,sha256=FLTv9pQmLDBXrQdLOhTMIS3njFibliMsQEfumqmdzBE,11
+pyworkflow_engine-0.1.12.dist-info/RECORD,,