pyworkflow-engine 0.1.12__py3-none-any.whl → 0.1.14__py3-none-any.whl
This diff compares publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their respective public registries.
- pyworkflow/__init__.py +1 -1
- pyworkflow/celery/app.py +10 -0
- pyworkflow/celery/singleton.py +370 -0
- pyworkflow/celery/tasks.py +125 -54
- pyworkflow/context/local.py +46 -0
- pyworkflow/core/step.py +8 -0
- pyworkflow/core/validation.py +112 -0
- pyworkflow/primitives/resume_hook.py +2 -1
- pyworkflow/runtime/base.py +4 -0
- pyworkflow/runtime/celery.py +12 -1
- pyworkflow/runtime/local.py +8 -0
- pyworkflow/storage/base.py +4 -1
- pyworkflow/storage/cassandra.py +30 -25
- pyworkflow/storage/dynamodb.py +32 -16
- pyworkflow/storage/file.py +39 -13
- pyworkflow/storage/memory.py +28 -11
- pyworkflow/storage/mysql.py +27 -11
- pyworkflow/storage/postgres.py +29 -12
- pyworkflow/storage/sqlite.py +29 -12
- {pyworkflow_engine-0.1.12.dist-info → pyworkflow_engine-0.1.14.dist-info}/METADATA +1 -1
- {pyworkflow_engine-0.1.12.dist-info → pyworkflow_engine-0.1.14.dist-info}/RECORD +25 -23
- {pyworkflow_engine-0.1.12.dist-info → pyworkflow_engine-0.1.14.dist-info}/WHEEL +0 -0
- {pyworkflow_engine-0.1.12.dist-info → pyworkflow_engine-0.1.14.dist-info}/entry_points.txt +0 -0
- {pyworkflow_engine-0.1.12.dist-info → pyworkflow_engine-0.1.14.dist-info}/licenses/LICENSE +0 -0
- {pyworkflow_engine-0.1.12.dist-info → pyworkflow_engine-0.1.14.dist-info}/top_level.txt +0 -0
pyworkflow/storage/memory.py
CHANGED

@@ -43,10 +43,10 @@ class InMemoryStorageBackend(StorageBackend):
         self._runs: dict[str, WorkflowRun] = {}
         self._events: dict[str, list[Event]] = {}
         self._steps: dict[str, StepExecution] = {}
-        self._hooks: dict[str, Hook] = {}
+        self._hooks: dict[tuple[str, str], Hook] = {}  # (run_id, hook_id) -> Hook
         self._schedules: dict[str, Schedule] = {}
         self._idempotency_index: dict[str, str] = {}  # key -> run_id
-        self._token_index: dict[str, str] = {}  # token -> hook_id
+        self._token_index: dict[str, tuple[str, str]] = {}  # token -> (run_id, hook_id)
         self._cancellation_flags: dict[str, bool] = {}  # run_id -> cancelled
         self._lock = threading.RLock()
         self._event_sequences: dict[str, int] = {}  # run_id -> next sequence
@@ -292,20 +292,28 @@ class InMemoryStorageBackend(StorageBackend):
     async def create_hook(self, hook: Hook) -> None:
         """Create a hook record."""
         with self._lock:
-
-            self.
+            key = (hook.run_id, hook.hook_id)
+            self._hooks[key] = hook
+            self._token_index[hook.token] = key
 
-    async def get_hook(self, hook_id: str) -> Hook | None:
-        """Retrieve a hook by ID."""
+    async def get_hook(self, hook_id: str, run_id: str | None = None) -> Hook | None:
+        """Retrieve a hook by ID (requires run_id for composite key lookup)."""
         with self._lock:
-
+            if run_id:
+                return self._hooks.get((run_id, hook_id))
+            else:
+                # Fallback: find any hook with this ID (may return wrong one if duplicates)
+                for (_r_id, h_id), hook in self._hooks.items():
+                    if h_id == hook_id:
+                        return hook
+                return None
 
     async def get_hook_by_token(self, token: str) -> Hook | None:
         """Retrieve a hook by its token."""
         with self._lock:
-
-            if
-            return self._hooks.get(
+            key = self._token_index.get(token)
+            if key:
+                return self._hooks.get(key)
             return None
 
     async def update_hook_status(
@@ -313,10 +321,19 @@ class InMemoryStorageBackend(StorageBackend):
         hook_id: str,
         status: HookStatus,
         payload: str | None = None,
+        run_id: str | None = None,
     ) -> None:
         """Update hook status and optionally payload."""
         with self._lock:
-
+            if run_id:
+                hook = self._hooks.get((run_id, hook_id))
+            else:
+                # Fallback: find any hook with this ID
+                hook = None
+                for (_r_id, h_id), h in self._hooks.items():
+                    if h_id == hook_id:
+                        hook = h
+                        break
             if hook:
                 hook.status = status
                 if payload is not None:
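
The substance of the in-memory change: hooks are now stored under a (run_id, hook_id) tuple, and the token index resolves a token straight to that tuple, so the same hook_id can exist in several runs without collisions. A standalone sketch of the pattern, using illustrative names rather than the package's own classes:

    # Minimal illustration of the composite-key + token-index layout (not pyworkflow code).
    hooks: dict[tuple[str, str], dict] = {}        # (run_id, hook_id) -> hook record
    token_index: dict[str, tuple[str, str]] = {}   # token -> (run_id, hook_id)

    def create(run_id: str, hook_id: str, token: str) -> None:
        key = (run_id, hook_id)
        hooks[key] = {"run_id": run_id, "hook_id": hook_id, "token": token}
        token_index[token] = key

    def get(hook_id: str, run_id: str | None = None) -> dict | None:
        if run_id:
            return hooks.get((run_id, hook_id))
        # Unscoped fallback: first matching hook_id, ambiguous if two runs reuse the id.
        return next((h for (_r, h_id), h in hooks.items() if h_id == hook_id), None)

    create("run-1", "approval", "tok-a")
    create("run-2", "approval", "tok-b")
    assert get("approval", run_id="run-2")["token"] == "tok-b"
    assert hooks[token_index["tok-a"]]["run_id"] == "run-1"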
pyworkflow/storage/mysql.py
CHANGED

@@ -175,11 +175,11 @@ class MySQLStorageBackend(StorageBackend):
             ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
             """)
 
-            # Hooks table
+            # Hooks table (composite PK: run_id + hook_id since hook_id is only unique per run)
             await cur.execute("""
                 CREATE TABLE IF NOT EXISTS hooks (
-                    hook_id VARCHAR(255) PRIMARY KEY,
                     run_id VARCHAR(255) NOT NULL,
+                    hook_id VARCHAR(255) NOT NULL,
                     token VARCHAR(255) UNIQUE NOT NULL,
                     created_at DATETIME(6) NOT NULL,
                     received_at DATETIME(6),
@@ -187,6 +187,7 @@ class MySQLStorageBackend(StorageBackend):
                     status VARCHAR(50) NOT NULL,
                     payload LONGTEXT,
                     metadata LONGTEXT DEFAULT '{}',
+                    PRIMARY KEY (run_id, hook_id),
                     UNIQUE INDEX idx_hooks_token (token),
                     INDEX idx_hooks_run_id (run_id),
                     INDEX idx_hooks_status (status),
@@ -660,12 +661,19 @@ class MySQLStorageBackend(StorageBackend):
             ),
         )
 
-    async def get_hook(self, hook_id: str) -> Hook | None:
-        """Retrieve a hook by ID."""
+    async def get_hook(self, hook_id: str, run_id: str | None = None) -> Hook | None:
+        """Retrieve a hook by ID (requires run_id for composite key lookup)."""
         pool = self._ensure_connected()
 
         async with pool.acquire() as conn, conn.cursor(aiomysql.DictCursor) as cur:
-
+            if run_id:
+                await cur.execute(
+                    "SELECT * FROM hooks WHERE run_id = %s AND hook_id = %s",
+                    (run_id, hook_id),
+                )
+            else:
+                # Fallback: find any hook with this ID (may return wrong one if duplicates)
+                await cur.execute("SELECT * FROM hooks WHERE hook_id = %s", (hook_id,))
             row = await cur.fetchone()
 
             if not row:
@@ -691,6 +699,7 @@ class MySQLStorageBackend(StorageBackend):
         hook_id: str,
         status: HookStatus,
         payload: str | None = None,
+        run_id: str | None = None,
     ) -> None:
         """Update hook status and optionally payload."""
         pool = self._ensure_connected()
@@ -706,13 +715,20 @@ class MySQLStorageBackend(StorageBackend):
             updates.append("received_at = %s")
             params.append(datetime.now(UTC))
 
-        params.append(hook_id)
-
         async with pool.acquire() as conn, conn.cursor() as cur:
-
-
-
-
+            if run_id:
+                params.append(run_id)
+                params.append(hook_id)
+                await cur.execute(
+                    f"UPDATE hooks SET {', '.join(updates)} WHERE run_id = %s AND hook_id = %s",
+                    tuple(params),
+                )
+            else:
+                params.append(hook_id)
+                await cur.execute(
+                    f"UPDATE hooks SET {', '.join(updates)} WHERE hook_id = %s",
+                    tuple(params),
+                )
 
     async def list_hooks(
         self,
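
One detail worth noting in the MySQL update path: the WHERE values are appended to the same params list that already holds the SET values, so the placeholder order in the composed statement has to mirror the append order. A rough sketch of the composed UPDATE, with invented column values:

    # Illustrative composition of the UPDATE built by update_hook_status (values invented).
    updates = ["status = %s", "payload = %s", "received_at = %s"]
    params: list[str] = ["received", '{"approved": true}', "2024-01-01 00:00:00"]

    run_id, hook_id = "run-1", "approval"
    params += [run_id, hook_id]  # WHERE values go last, after every SET value

    sql = f"UPDATE hooks SET {', '.join(updates)} WHERE run_id = %s AND hook_id = %s"
    # UPDATE hooks SET status = %s, payload = %s, received_at = %s
    #   WHERE run_id = %s AND hook_id = %s
    print(sql, tuple(params))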
pyworkflow/storage/postgres.py
CHANGED

@@ -216,18 +216,19 @@ class PostgresStorageBackend(StorageBackend):
             # Indexes for steps
             await conn.execute("CREATE INDEX IF NOT EXISTS idx_steps_run_id ON steps(run_id)")
 
-            # Hooks table
+            # Hooks table (composite PK: run_id + hook_id since hook_id is only unique per run)
             await conn.execute("""
                 CREATE TABLE IF NOT EXISTS hooks (
-                    hook_id TEXT PRIMARY KEY,
                     run_id TEXT NOT NULL REFERENCES workflow_runs(run_id) ON DELETE CASCADE,
+                    hook_id TEXT NOT NULL,
                     token TEXT UNIQUE NOT NULL,
                     created_at TIMESTAMPTZ NOT NULL,
                     received_at TIMESTAMPTZ,
                     expires_at TIMESTAMPTZ,
                     status TEXT NOT NULL,
                     payload TEXT,
-                    metadata TEXT DEFAULT '{}'
+                    metadata TEXT DEFAULT '{}',
+                    PRIMARY KEY (run_id, hook_id)
                 )
             """)
 
@@ -751,12 +752,20 @@ class PostgresStorageBackend(StorageBackend):
             json.dumps(hook.metadata),
         )
 
-    async def get_hook(self, hook_id: str) -> Hook | None:
-        """Retrieve a hook by ID."""
+    async def get_hook(self, hook_id: str, run_id: str | None = None) -> Hook | None:
+        """Retrieve a hook by ID (requires run_id for composite key lookup)."""
         pool = await self._get_pool()
 
         async with pool.acquire() as conn:
-
+            if run_id:
+                row = await conn.fetchrow(
+                    "SELECT * FROM hooks WHERE run_id = $1 AND hook_id = $2",
+                    run_id,
+                    hook_id,
+                )
+            else:
+                # Fallback: find any hook with this ID (may return wrong one if duplicates)
+                row = await conn.fetchrow("SELECT * FROM hooks WHERE hook_id = $1", hook_id)
 
             if not row:
                 return None
@@ -780,6 +789,7 @@ class PostgresStorageBackend(StorageBackend):
         hook_id: str,
         status: HookStatus,
         payload: str | None = None,
+        run_id: str | None = None,
    ) -> None:
         """Update hook status and optionally payload."""
         pool = await self._get_pool()
@@ -798,13 +808,20 @@ class PostgresStorageBackend(StorageBackend):
             params.append(datetime.now(UTC))
             param_idx += 1
 
-        params.append(hook_id)
-
         async with pool.acquire() as conn:
-
-
-
-
+            if run_id:
+                params.append(run_id)
+                params.append(hook_id)
+                await conn.execute(
+                    f"UPDATE hooks SET {', '.join(updates)} WHERE run_id = ${param_idx} AND hook_id = ${param_idx + 1}",
+                    *params,
+                )
+            else:
+                params.append(hook_id)
+                await conn.execute(
+                    f"UPDATE hooks SET {', '.join(updates)} WHERE hook_id = ${param_idx}",
+                    *params,
+                )
 
     async def list_hooks(
         self,
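
The Postgres variant does the same thing with numbered placeholders, so param_idx must end up pointing one past the last SET parameter before the WHERE clause is built. A small sketch of how the numbering lines up (two SET columns assumed purely for illustration):

    # Illustrative placeholder numbering for the asyncpg-style UPDATE (values invented).
    updates: list[str] = []
    params: list[str] = []
    param_idx = 1
    for column, value in [("status", "received"), ("received_at", "2024-01-01T00:00:00Z")]:
        updates.append(f"{column} = ${param_idx}")
        params.append(value)
        param_idx += 1

    run_id, hook_id = "run-1", "approval"
    params += [run_id, hook_id]
    sql = (
        f"UPDATE hooks SET {', '.join(updates)} "
        f"WHERE run_id = ${param_idx} AND hook_id = ${param_idx + 1}"
    )
    # UPDATE hooks SET status = $1, received_at = $2 WHERE run_id = $3 AND hook_id = $4
    print(sql, params)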
pyworkflow/storage/sqlite.py
CHANGED

@@ -164,11 +164,11 @@ class SQLiteStorageBackend(StorageBackend):
             # Indexes for steps
             await db.execute("CREATE INDEX IF NOT EXISTS idx_steps_run_id ON steps(run_id)")
 
-            # Hooks table
+            # Hooks table (composite PK: run_id + hook_id since hook_id is only unique per run)
             await db.execute("""
                 CREATE TABLE IF NOT EXISTS hooks (
-                    hook_id TEXT PRIMARY KEY,
                     run_id TEXT NOT NULL,
+                    hook_id TEXT NOT NULL,
                     token TEXT UNIQUE NOT NULL,
                     created_at TIMESTAMP NOT NULL,
                     received_at TIMESTAMP,
@@ -176,6 +176,7 @@ class SQLiteStorageBackend(StorageBackend):
                     status TEXT NOT NULL,
                     payload TEXT,
                     metadata TEXT DEFAULT '{}',
+                    PRIMARY KEY (run_id, hook_id),
                     FOREIGN KEY (run_id) REFERENCES workflow_runs(run_id) ON DELETE CASCADE
                 )
             """)
@@ -645,12 +646,20 @@ class SQLiteStorageBackend(StorageBackend):
             )
             await db.commit()
 
-    async def get_hook(self, hook_id: str) -> Hook | None:
-        """Retrieve a hook by ID."""
+    async def get_hook(self, hook_id: str, run_id: str | None = None) -> Hook | None:
+        """Retrieve a hook by ID (requires run_id for composite key lookup)."""
         db = self._ensure_connected()
 
-
-
+        if run_id:
+            async with db.execute(
+                "SELECT * FROM hooks WHERE run_id = ? AND hook_id = ?",
+                (run_id, hook_id),
+            ) as cursor:
+                row = await cursor.fetchone()
+        else:
+            # Fallback: find any hook with this ID (may return wrong one if duplicates)
+            async with db.execute("SELECT * FROM hooks WHERE hook_id = ?", (hook_id,)) as cursor:
+                row = await cursor.fetchone()
 
         if not row:
             return None
@@ -674,6 +683,7 @@ class SQLiteStorageBackend(StorageBackend):
         hook_id: str,
         status: HookStatus,
         payload: str | None = None,
+        run_id: str | None = None,
     ) -> None:
         """Update hook status and optionally payload."""
         db = self._ensure_connected()
@@ -689,12 +699,19 @@ class SQLiteStorageBackend(StorageBackend):
             updates.append("received_at = ?")
             params.append(datetime.now(UTC).isoformat())
 
-
-
-
-
-
-
+        if run_id:
+            params.append(run_id)
+            params.append(hook_id)
+            await db.execute(
+                f"UPDATE hooks SET {', '.join(updates)} WHERE run_id = ? AND hook_id = ?",
+                tuple(params),
+            )
+        else:
+            params.append(hook_id)
+            await db.execute(
+                f"UPDATE hooks SET {', '.join(updates)} WHERE hook_id = ?",
+                tuple(params),
+            )
         await db.commit()
 
     async def list_hooks(
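
Across all four backends the caller-facing change is the same: get_hook and update_hook_status accept an optional run_id, and passing it selects the unambiguous composite-key path. A hedged caller-side sketch follows; how the backend is constructed and connected is not shown in this diff and is left out here.

    # Sketch of calling the 0.1.14 hook API; backend setup is an assumption, not from the diff.
    from pyworkflow.storage.sqlite import SQLiteStorageBackend

    async def resolve_hook(storage: SQLiteStorageBackend, run_id: str, hook_id: str):
        # Scoped lookup: unambiguous even if several runs define the same hook_id.
        hook = await storage.get_hook(hook_id, run_id=run_id)
        if hook is None:
            # Unscoped fallback kept for backwards compatibility; may match another run's hook.
            hook = await storage.get_hook(hook_id)
        return hook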
{pyworkflow_engine-0.1.12.dist-info → pyworkflow_engine-0.1.14.dist-info}/RECORD
CHANGED

@@ -1,4 +1,4 @@
-pyworkflow/__init__.py,sha256=
+pyworkflow/__init__.py,sha256=_VlxxYt3VmBKyIU9jbX7QfZ8DEHbNTU6UhogeO-Gm-s,6281
 pyworkflow/config.py,sha256=pKwPrpCwBJiDpB-MIjM0U7GW1TFmQFO341pihL5-vTM,14455
 pyworkflow/discovery.py,sha256=snW3l4nvY3Nc067TGlwtn_qdzTU9ybN7YPr8FbvY8iM,8066
 pyworkflow/aws/__init__.py,sha256=Ak_xHcR9LTRX-CwcS0XecYmzrXZw4EM3V9aKBBDEmIk,1741
@@ -6,10 +6,11 @@ pyworkflow/aws/context.py,sha256=Vjyjip6U1Emg-WA5TlBaxFhcg15rf9mVJiPfT4VywHc,821
 pyworkflow/aws/handler.py,sha256=0SnQuIfQVD99QKMCRFPtrsrV_l1LYKFkzPIRx_2UkSI,5849
 pyworkflow/aws/testing.py,sha256=WrRk9wjbycM-UyHFQWNnA83UE9IrYnhfT38WrbxQT2U,8844
 pyworkflow/celery/__init__.py,sha256=FywVyqnT8AYz9cXkr-wel7_-N7dHFsPNASEPMFESf4Q,1179
-pyworkflow/celery/app.py,sha256=
+pyworkflow/celery/app.py,sha256=UwZauZjVzOxMPX3WmPilRi8Emg5_VbMjHjNn7uz7R14,9670
 pyworkflow/celery/loop.py,sha256=mu8cIfMJYgHAoGCN_DdDoNoXK3QHzHpLmrPCyFDQYIY,3016
 pyworkflow/celery/scheduler.py,sha256=Ms4rqRpdpMiLM8l4y3DK-Divunj9afYuUaGGoNQe7P4,11288
-pyworkflow/celery/
+pyworkflow/celery/singleton.py,sha256=J4a5LY5GsSFbO2evkql4Pw7h38tA2rQbR3J2cXkJRZg,13155
+pyworkflow/celery/tasks.py,sha256=BNHZwWTSRc3q8EgAy4tEmXAm6O0vtVLgrG7MrO0ZZXA,86049
 pyworkflow/cli/__init__.py,sha256=tcbe-fcZmyeEKUy_aEo8bsEF40HsNKOwvyMBZIJZPwc,3844
 pyworkflow/cli/__main__.py,sha256=LxLLS4FEEPXa5rWpLTtKuivn6Xp9pGia-QKGoxt9SS0,148
 pyworkflow/cli/commands/__init__.py,sha256=IXvnTgukALckkO8fTlZhVRq80ojSqpnIIgboAg_-yZU,39
@@ -35,14 +36,15 @@ pyworkflow/cli/utils/storage.py,sha256=a5Iu2Xe1_mPgBVYc8B6I63MFfW12ko7wURqcpq3RB
 pyworkflow/context/__init__.py,sha256=dI5zW1lAFGw68jI2UpKUqyADozDboGNl-RmhEvSTuCI,2150
 pyworkflow/context/aws.py,sha256=MYxrFsRzCgaZ0YQAyE26UOT_ryxuag5DwiDSodclQIg,7571
 pyworkflow/context/base.py,sha256=Hlfm5MNHh_BVbRCgEcILmHiqsn81iYFqt0GSLkFGo00,13772
-pyworkflow/context/local.py,sha256=
+pyworkflow/context/local.py,sha256=eKBF-e_WSkVIqbynVysQy6rH02rXmPts29KtjY41IQI,38853
 pyworkflow/context/mock.py,sha256=TJzQ3P3_ZHm1lCJZJACIFFvz2ydFxz2cT9eEGOQS5I0,12061
 pyworkflow/context/step_context.py,sha256=6P2jn1v7MTlYaWCTt6DBq7Nkmxm7nvna4oGpTZJeMbg,8862
 pyworkflow/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pyworkflow/core/exceptions.py,sha256=F2nbXyoed7wlIJMeGfpgsIC8ZyWcYN0iKtOnBA7-xnQ,10719
 pyworkflow/core/registry.py,sha256=ZUf2YTpBvWpC9EehRbMF8soXOk9VsjNruoi6lR4O33M,9361
 pyworkflow/core/scheduled.py,sha256=479A7IvjHiMob7ZrZtfE6VqtypG6DLIGMGhh16jLIWM,10522
-pyworkflow/core/step.py,sha256=
+pyworkflow/core/step.py,sha256=9JG9udEDph_6ecfruVdY0qC3ruoC6bjd0F91chg8QZM,23913
+pyworkflow/core/validation.py,sha256=0VaZyQ9YGK8WFy4ZG4Bjt9MYAp0vz6xEOe80kcgaP5g,3362
 pyworkflow/core/workflow.py,sha256=dlcICq1B69-nxUJth_n-H8U9TjP3QZyjvquQXxWHcxs,12076
 pyworkflow/engine/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pyworkflow/engine/events.py,sha256=KFtyIqQjr1B9Frtd5V1Zq0ph1iwg_Ky3uPzmTYZ1Tnk,25827
@@ -56,37 +58,37 @@ pyworkflow/primitives/child_workflow.py,sha256=_T7PCqiH0tjIm_lpJ6NmfUPWCFx-MjH6t
 pyworkflow/primitives/continue_as_new.py,sha256=NKcimHsgr5ExkvRvfO28hxgPw_I7Q74Vz9WL8r0PhPc,3329
 pyworkflow/primitives/define_hook.py,sha256=gNzk7DuObfWG1T9AdHnDnGLHNKjnApiVRlCKPObugfY,4443
 pyworkflow/primitives/hooks.py,sha256=ws9U81ymsY8M4FFTvJ2X4EMGmIrilb3vCKZ0V_EGZdE,3085
-pyworkflow/primitives/resume_hook.py,sha256=
+pyworkflow/primitives/resume_hook.py,sha256=vwa0znU1DuasStzQVTES-jp9XUzbVq4vimmbpiZH6yg,6245
 pyworkflow/primitives/schedule.py,sha256=2hVM2Swl9dRx3RHd5nblJLaU8HaSy-NHYue2Cf9TOcU,14961
 pyworkflow/primitives/shield.py,sha256=MUYakU0euZoYNb6MbFyRfJN8GEXsRFkIbZEo84vRN9c,2924
 pyworkflow/primitives/sleep.py,sha256=iH1e5CoWY-jZbYNAU3GRW1xR_8EtCuPIcIohzU4jWJo,3097
 pyworkflow/runtime/__init__.py,sha256=DkwTgFCMRGyyW8NGcW7Nyy9beOg5kO1TXhqhysj1-aY,649
-pyworkflow/runtime/base.py,sha256
-pyworkflow/runtime/celery.py,sha256=
+pyworkflow/runtime/base.py,sha256=ATlPeheYzUMlk_v-9abLyUQf4y4iYB53VpaoQ73pYsc,5465
+pyworkflow/runtime/celery.py,sha256=JadRjmpL1rsp-zNAHq3vL8XdAhKucu1vEsxGvshaLjk,9961
 pyworkflow/runtime/factory.py,sha256=TRbqWPfyZ0tPFKb0faI9SkBRXxE5AEVTwGW4pS2diM8,2684
-pyworkflow/runtime/local.py,sha256=
+pyworkflow/runtime/local.py,sha256=UPOdJsejrWzdz5p1XpTfMgWC0EK85qT7HHBUjmJ4xOQ,26228
 pyworkflow/scheduler/__init__.py,sha256=lQQo0Cia_ULIg-KPIrqILV30rUIzybxj1k_ZZTQNZyg,222
 pyworkflow/scheduler/local.py,sha256=CnK4UC6ofD3_AZJUlO9iUAdgAnbMmJvPaL_VucNKs5Q,8154
 pyworkflow/serialization/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pyworkflow/serialization/decoder.py,sha256=F7Ofuw1Yzo82iSFFXiK2yoW_v2YRbLMpX3CQbKjm0Ls,3860
 pyworkflow/serialization/encoder.py,sha256=ZBwAxe5Bb4MCfFJePHw7ArJlIbBieSwUgsysGCI2iPU,4108
 pyworkflow/storage/__init__.py,sha256=LhVjLNZdo4Mi5dEC75hjSPnbQr9jBoIsTOrC8vzTGOM,1924
-pyworkflow/storage/base.py,sha256=
-pyworkflow/storage/cassandra.py,sha256=
+pyworkflow/storage/base.py,sha256=ZS0jTdBRmT4nhADIPpr_MmOvqQvhse3LSb6CDt5rr-4,16324
+pyworkflow/storage/cassandra.py,sha256=DTJJr4yqAV-941YjQQdoO1-5SsFoqke82sXpBrXPyKg,61741
 pyworkflow/storage/config.py,sha256=45UMPxRoqgK4ZwE7HIK9ctxE_eoK3eAE_1tRhn3Psd4,12410
-pyworkflow/storage/dynamodb.py,sha256=
-pyworkflow/storage/file.py,sha256=
-pyworkflow/storage/memory.py,sha256=
-pyworkflow/storage/mysql.py,sha256=
-pyworkflow/storage/postgres.py,sha256=
+pyworkflow/storage/dynamodb.py,sha256=8ClKZeP3TURDVJN2SJMAoLWWSXAgqysQXnqUXoRhaos,54159
+pyworkflow/storage/file.py,sha256=g2eO-MOaQKliM97_gTVGPS27GmH5mahi5GBZ99RP8Rs,30133
+pyworkflow/storage/memory.py,sha256=J5ZA2DsXE5tEQDn5dmtJ3mNEWy9okR6sTYZ-xp3slWY,20554
+pyworkflow/storage/mysql.py,sha256=lA2TRBvYlWEq5mQcnfyRivFO1n0kviq_yH3mEzGp8rs,43764
+pyworkflow/storage/postgres.py,sha256=GUW2OjV6CHWi9fUkUPswmGwvaY7c76QxIQ9kRL8BM6o,45351
 pyworkflow/storage/schemas.py,sha256=o1ntTYNgQQ5YVuXtPCShtENEsndVjdrXclWrkCgkitg,18002
-pyworkflow/storage/sqlite.py,sha256=
+pyworkflow/storage/sqlite.py,sha256=qDhFjyFAenwYq6MF_66FFhDaBG7CEr7ni9Uy72X7MvQ,40377
 pyworkflow/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pyworkflow/utils/duration.py,sha256=C-itmiSQQlplw7j6XB679hLF9xYGnyCwm7twO88OF8U,3978
 pyworkflow/utils/schedule.py,sha256=dO_MkGFyfwZpb0LDlW6BGyZzlPuQIA6dc6j9nk9lc4Y,10691
-pyworkflow_engine-0.1.
-pyworkflow_engine-0.1.
-pyworkflow_engine-0.1.
-pyworkflow_engine-0.1.
-pyworkflow_engine-0.1.
-pyworkflow_engine-0.1.
+pyworkflow_engine-0.1.14.dist-info/licenses/LICENSE,sha256=Y49RCTZ5ayn_yzBcRxnyIFdcMCyuYm150aty_FIznfY,1080
+pyworkflow_engine-0.1.14.dist-info/METADATA,sha256=9-fZ4yxwmOu2fjwAIYAaDqrlBqE_F4WRFsSTwt-6JaY,19628
+pyworkflow_engine-0.1.14.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+pyworkflow_engine-0.1.14.dist-info/entry_points.txt,sha256=3IGAfuylnS39U0YX0pxnjrj54kB4iT_bNYrmsiDB-dE,51
+pyworkflow_engine-0.1.14.dist-info/top_level.txt,sha256=FLTv9pQmLDBXrQdLOhTMIS3njFibliMsQEfumqmdzBE,11
+pyworkflow_engine-0.1.14.dist-info/RECORD,,
{pyworkflow_engine-0.1.12.dist-info → pyworkflow_engine-0.1.14.dist-info}/WHEEL
File without changes
{pyworkflow_engine-0.1.12.dist-info → pyworkflow_engine-0.1.14.dist-info}/entry_points.txt
File without changes
{pyworkflow_engine-0.1.12.dist-info → pyworkflow_engine-0.1.14.dist-info}/licenses/LICENSE
File without changes
{pyworkflow_engine-0.1.12.dist-info → pyworkflow_engine-0.1.14.dist-info}/top_level.txt
File without changes