ddeutil-workflow 0.0.48__py3-none-any.whl → 0.0.50__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- ddeutil/workflow/__about__.py +1 -1
- ddeutil/workflow/__init__.py +8 -1
- ddeutil/workflow/api/routes/logs.py +6 -5
- ddeutil/workflow/conf.py +40 -40
- ddeutil/workflow/exceptions.py +3 -3
- ddeutil/workflow/job.py +132 -76
- ddeutil/workflow/logs.py +145 -81
- ddeutil/workflow/result.py +20 -10
- ddeutil/workflow/reusables.py +3 -3
- ddeutil/workflow/scheduler.py +54 -44
- ddeutil/workflow/stages.py +514 -114
- ddeutil/workflow/utils.py +44 -40
- ddeutil/workflow/workflow.py +125 -112
- {ddeutil_workflow-0.0.48.dist-info → ddeutil_workflow-0.0.50.dist-info}/METADATA +5 -6
- ddeutil_workflow-0.0.50.dist-info/RECORD +31 -0
- ddeutil_workflow-0.0.48.dist-info/RECORD +0 -31
- {ddeutil_workflow-0.0.48.dist-info → ddeutil_workflow-0.0.50.dist-info}/WHEEL +0 -0
- {ddeutil_workflow-0.0.48.dist-info → ddeutil_workflow-0.0.50.dist-info}/licenses/LICENSE +0 -0
- {ddeutil_workflow-0.0.48.dist-info → ddeutil_workflow-0.0.50.dist-info}/top_level.txt +0 -0
ddeutil/workflow/utils.py
CHANGED
@@ -15,7 +15,7 @@ from inspect import isfunction
 from itertools import chain, islice, product
 from pathlib import Path
 from random import randrange
-from typing import Any, TypeVar
+from typing import Any, Final, TypeVar
 from zoneinfo import ZoneInfo

 from ddeutil.core import hash_str
@@ -23,12 +23,14 @@ from ddeutil.core import hash_str
 from .__types import DictData, Matrix

 T = TypeVar("T")
-UTC = ZoneInfo("UTC")
+UTC: Final[ZoneInfo] = ZoneInfo("UTC")


-def get_dt_now(
-    tz: ZoneInfo | None = None, offset: float = 0.0
-) -> datetime:
+def replace_sec(dt: datetime) -> datetime:
+    return dt.replace(second=0, microsecond=0)
+
+
+def get_dt_now(tz: ZoneInfo | None = None, offset: float = 0.0) -> datetime:
     """Return the current datetime object.

     :param tz: A ZoneInfo object for replace timezone of return datetime object.
@@ -54,42 +56,31 @@ def get_d_now(
     return (datetime.now(tz=(tz or UTC)) - timedelta(seconds=offset)).date()


-def get_diff_sec(
-    dt: datetime, tz: ZoneInfo | None = None, offset: float = 0.0
-) -> int:  # pragma: no cov
+def get_diff_sec(dt: datetime, offset: float = 0.0) -> int:
     """Return second value that come from diff of an input datetime and the
     current datetime with specific timezone.

-    :param dt:
-    :param
-    :param offset: An offset second value.
+    :param dt: (datetime) A datetime object that want to get different second value.
+    :param offset: (float) An offset second value.

     :rtype: int
     """
     return round(
         (
-            dt - datetime.now(tz=
+            dt - datetime.now(tz=dt.tzinfo) - timedelta(seconds=offset)
         ).total_seconds()
     )


-def reach_next_minute(
-    dt: datetime, tz: ZoneInfo | None = None, offset: float = 0.0
-) -> bool:
+def reach_next_minute(dt: datetime, offset: float = 0.0) -> bool:
     """Check this datetime object is not in range of minute level on the current
     datetime.

-    :param dt:
-    :param
-    :param offset: An offset second value.
+    :param dt: (datetime) A datetime object that want to check.
+    :param offset: (float) An offset second value.
     """
     diff: float = (
-        dt
-        - (
-            get_dt_now(tz=(tz or UTC), offset=offset).replace(
-                second=0, microsecond=0
-            )
-        )
+        replace_sec(dt) - replace_sec(get_dt_now(tz=dt.tzinfo, offset=offset))
     ).total_seconds()
     if diff >= 60:
         return True
@@ -106,7 +97,7 @@ def wait_to_next_minute(
     dt: datetime, second: float = 0
 ) -> None:  # pragma: no cov
     """Wait with sleep to the next minute with an offset second value."""
-    future = dt.replace(second=0, microsecond=0) + timedelta(minutes=1)
+    future: datetime = replace_sec(dt) + timedelta(minutes=1)
     time.sleep((future - dt).total_seconds() + second)


@@ -114,7 +105,7 @@ def delay(second: float = 0) -> None:  # pragma: no cov
     """Delay time that use time.sleep with random second value between
     0.00 - 0.99 seconds.

-    :param second: A second number that want to adds-on random value.
+    :param second: (float) A second number that want to adds-on random value.
     """
     time.sleep(second + randrange(0, 99, step=10) / 100)


@@ -124,32 +115,42 @@ def gen_id(
     *,
     sensitive: bool = True,
     unique: bool = False,
+    simple_mode: bool | None = None,
+    extras: DictData | None = None,
 ) -> str:
-    """Generate running ID for able to tracking. This generates process use
-    algorithm function if
-    false. But it will cut this hashing value length to 10 it the setting
-    set to true.
+    """Generate running ID for able to tracking. This generates process use
+    `md5` algorithm function if `WORKFLOW_CORE_WORKFLOW_ID_SIMPLE_MODE` set
+    to false. But it will cut this hashing value length to 10 it the setting
+    value set to true.
+
+    Simple Mode:
+
+        ... 0000 00 00 00 00 00 000000 T 0000000000
+        ... year month day hour minute second microsecond sep simple-id

     :param value: A value that want to add to prefix before hashing with md5.
     :param sensitive: A flag that convert the value to lower case before hashing
     :param unique: A flag that add timestamp at microsecond level to value
         before hashing.
+    :param simple_mode: A flag for generate ID by simple mode.
+    :param extras: An extra parameter that use for override config value.

     :rtype: str
     """
-    from .conf import
+    from .conf import dynamic

     if not isinstance(value, str):
         value: str = str(value)

-
-
-
-
+    dt: datetime = datetime.now(tz=dynamic("tz", extras=extras))
+    if dynamic("generate_id_simple_mode", f=simple_mode, extras=extras):
+        return (f"{dt:%Y%m%d%H%M%S%f}T" if unique else "") + hash_str(
+            f"{(value if sensitive else value.lower())}", n=10
+        )

     return md5(
         (
-            (f"{
+            (f"{dt}T" if unique else "")
             + f"{(value if sensitive else value.lower())}"
         ).encode()
     ).hexdigest()
@@ -243,12 +244,15 @@ def cut_id(run_id: str, *, num: int = 6) -> str:
     """Cutting running ID with length.

     Example:
-        >>> cut_id(run_id='
-        '
+        >>> cut_id(run_id='20240101081330000000T1354680202')
+        '202401010813680202'

-    :param run_id:
+    :param run_id: A running ID That want to cut
     :param num:

     :rtype: str
     """
-
+    if "T" in run_id:
+        dt, simple = run_id.split("T", maxsplit=1)
+        return dt[:12] + simple[-num:]
+    return run_id[:12] + run_id[-num:]
ddeutil/workflow/workflow.py
CHANGED
@@ -4,7 +4,9 @@
 # license information.
 # ------------------------------------------------------------------------------
 # [x] Use dynamic config
-"""A Workflow module that is the core
+"""A Workflow module that is the core module of this package. It keeps Release
+and Workflow Pydantic models.
+"""
 from __future__ import annotations

 import copy
@@ -98,8 +100,8 @@ class Release:

     :param dt: (datetime | str) A datetime object or string that want to
         construct to the Release object.
-    :param extras: An extra parameters that want to pass to
-        config.
+    :param extras: (DictData) An extra parameters that want to pass to
+        override config values.

     :raise TypeError: If the type of the dt argument does not valid with
         datetime or str object.
@@ -159,7 +161,7 @@ class ReleaseQueue:
     complete: list[Release] = field(default_factory=list)
     extras: DictData = Field(
         default_factory=dict,
-        description="An extra override config values.",
+        description="An extra parameters that want to override config values.",
     )

     @classmethod
@@ -172,7 +174,8 @@ class ReleaseQueue:
             with list of datetime or list of Release.

         :param queue:
-        :param extras: An extra parameter that want to override core config
+        :param extras: An extra parameter that want to override core config
+            values.

         :raise TypeError: If the type of input queue does not valid.

@@ -251,7 +254,7 @@ class ReleaseQueue:
         heappush(self.complete, value)

         # NOTE: Remove complete queue on workflow that keep more than the
-        #   maximum config.
+        #   maximum config value.
         num_complete_delete: int = len(self.complete) - dynamic(
             "max_queue_complete_hist", extras=self.extras
         )
@@ -262,6 +265,70 @@ class ReleaseQueue:

         return self

+    def gen(
+        self,
+        end_date: datetime,
+        audit: type[Audit],
+        runner: CronRunner,
+        name: str,
+        *,
+        offset: float = 0,
+        force_run: bool = False,
+        extras: Optional[DictData] = None,
+    ) -> Self:
+        """Generate Release model to queue.
+
+        Steps:
+            - Create Release object from the current date that not reach the end
+              date.
+            - Check this release do not store on the release queue object.
+              Generate the next date if it exists.
+            - Push this release to the release queue
+
+        :param end_date: (datetime) An end datetime object.
+        :param audit: (type[Audit]) An audit class that want to make audit
+            instance.
+        :param runner: (CronRunner) A CronRunner object.
+        :param name: (str) A target name that want to check at pointer of audit.
+        :param offset: (float) An offset in second unit for time travel.
+        :param force_run: A flag that allow to release workflow if the audit
+            with that release was pointed.
+        :param extras: An extra parameter that want to override core config.
+
+        :rtype: ReleaseQueue
+
+        """
+        if runner.date > end_date:
+            return self
+
+        workflow_release = Release(
+            date=runner.date,
+            offset=offset,
+            end_date=end_date,
+            runner=runner,
+            type=ReleaseType.POKE,
+        )
+
+        while self.check_queue(workflow_release) or (
+            audit.is_pointed(
+                name=name, release=workflow_release.date, extras=extras
+            )
+            and not force_run
+        ):
+            workflow_release = Release(
+                date=runner.next,
+                offset=offset,
+                end_date=end_date,
+                runner=runner,
+                type=ReleaseType.POKE,
+            )
+
+            if runner.date > end_date:
+                return self
+
+        heappush(self.queue, workflow_release)
+        return self
+

 class Workflow(BaseModel):
     """Workflow Pydantic model.
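The new ReleaseQueue.gen() above centralizes the queue-filling loop that 0.0.48 duplicated inside Workflow.queue and WorkflowTask.queue (both call sites are rewritten later in this diff). A hypothetical caller-side sketch based only on the signature and docstring in this hunk; queue, runner, audit, and end_date are assumed to already exist in the caller's scope:

    # `queue` is a ReleaseQueue, `runner` a CronRunner, and `audit` an Audit
    # class (for example the one returned by get_audit(), as later hunks use).
    queue = queue.gen(
        end_date,              # stop once runner.date passes this datetime
        audit,                 # audit.is_pointed() skips already-released dates
        runner,                # yields the candidate release datetimes
        "my-workflow",         # pointer name checked in the audit store
        offset=0,
        force_run=False,       # True enqueues even already-audited releases
        extras=None,
    )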
@@ -274,7 +341,7 @@ class Workflow(BaseModel):

     extras: DictData = Field(
         default_factory=dict,
-        description="An extra override config values.",
+        description="An extra parameters that want to override config values.",
     )

     name: str = Field(description="A workflow name.")
@@ -344,8 +411,8 @@ class Workflow(BaseModel):

         :param name: (str) A workflow name that want to pass to Loader object.
         :param path: (Path) A config path that want to search.
-        :param extras: An extra parameters that want to
-
+        :param extras: (DictData) An extra parameters that want to override core
+            config values.

         :raise ValueError: If the type does not match with current object.

@@ -376,10 +443,10 @@ class Workflow(BaseModel):
     ) -> DictData:
         """Bypass the on data to loaded config data.

-        :param data: A data to construct to this Workflow model.
-        :param path: A config path.
-        :param extras: An extra parameters that want to
-
+        :param data: (DictData) A data to construct to this Workflow model.
+        :param path: (Path) A config path.
+        :param extras: (DictData) An extra parameters that want to override core
+            config values.

         :rtype: DictData
         """
@@ -456,7 +523,7 @@ class Workflow(BaseModel):

         extras: Optional[DictData] = info.data.get("extras")
         if len(set_ons) > (
-            conf := dynamic("
+            conf := dynamic("max_cron_per_workflow", extras=extras)
         ):
             raise ValueError(
                 f"The number of the on should not more than {conf} crontabs."
@@ -494,8 +561,9 @@ class Workflow(BaseModel):
         return self

     def job(self, name: str) -> Job:
-        """Return the workflow's
-
+        """Return the workflow's Job model that getting by an input job's name
+        or job's ID. This method will pass an extra parameter from this model
+        to the returned Job model.

         :param name: (str) A job name or ID that want to get from a mapping of
             job models.
@@ -712,14 +780,6 @@ class Workflow(BaseModel):
         """Generate Release from all on values from the on field and store them
         to the ReleaseQueue object.

-        Steps:
-            - For-loop all the on value in the on field.
-            - Create Release object from the current date that not reach the end
-              date.
-            - Check this release do not store on the release queue object.
-              Generate the next date if it exists.
-            - Push this release to the release queue
-
         :param offset: An offset in second unit for time travel.
         :param end_date: An end datetime object.
         :param queue: A workflow queue object.
@@ -731,40 +791,19 @@ class Workflow(BaseModel):
         """
         for on in self.on:

-
-
-
-
-
-
-
-
-
-            workflow_release = Release(
-                date=runner.date,
+            queue.gen(
+                end_date,
+                audit,
+                on.next(
+                    get_dt_now(
+                        tz=dynamic("tz", extras=self.extras), offset=offset
+                    ).replace(microsecond=0)
+                ),
+                self.name,
                 offset=offset,
-                end_date=end_date,
-                runner=runner,
-                type=ReleaseType.POKE,
+                force_run=force_run,
             )

-            while queue.check_queue(workflow_release) or (
-                audit.is_pointed(name=self.name, release=workflow_release.date)
-                and not force_run
-            ):
-                workflow_release = Release(
-                    date=runner.next,
-                    offset=offset,
-                    end_date=end_date,
-                    runner=runner,
-                    type=ReleaseType.POKE,
-                )
-
-                if runner.date > end_date:
-                    continue
-
-            heappush(queue.queue, workflow_release)
-
         return queue

     def poke(
@@ -803,7 +842,7 @@ class Workflow(BaseModel):
         :rtype: Result
         :return: A list of all results that return from `self.release` method.
         """
-        audit: type[Audit] = audit or get_audit()
+        audit: type[Audit] = audit or get_audit(extras=self.extras)
         result: Result = Result(
             run_id=(run_id or gen_id(self.name, unique=True))
         )
@@ -884,15 +923,11 @@ class Workflow(BaseModel):
             # NOTE: Pop the latest Release object from the release queue.
             release: Release = heappop(q.queue)

-            if reach_next_minute(
-                release.date,
-                tz=dynamic("tz", extras=self.extras),
-                offset=offset,
-            ):
+            if reach_next_minute(release.date, offset=offset):
                 result.trace.debug(
-                    f"[POKING]:
-                    f"{release.date:%Y-%m-%d %H:%M:%S},
-                    f"
+                    f"[POKING]: Latest Release, "
+                    f"{release.date:%Y-%m-%d %H:%M:%S}, can not run on "
+                    f"this time"
                 )
                 heappush(q.queue, release)
                 wait_to_next_minute(
@@ -937,7 +972,6 @@ class Workflow(BaseModel):
         *,
         result: Result | None = None,
         event: Event | None = None,
-        raise_error: bool = True,
     ) -> Result:
         """Job execution with passing dynamic parameters from the main workflow
         execution to the target job object via job's ID.
@@ -948,7 +982,6 @@ class Workflow(BaseModel):

         :raise WorkflowException: If execute with not exist job's ID.
         :raise WorkflowException: If the job execution raise JobException.
-        :raise NotImplementedError: If set raise_error argument to False.

         :param job_id: A job ID that want to execute.
         :param params: A params that was parameterized from workflow execution.
@@ -956,8 +989,6 @@ class Workflow(BaseModel):
             data.
         :param event: (Event) An event manager that pass to the
             PoolThreadExecutor.
-        :param raise_error: A flag that raise error instead catching to result
-            if it gets exception from job execution.

         :rtype: Result
         :return: Return the result object that receive the job execution result
@@ -982,10 +1013,10 @@ class Workflow(BaseModel):
         try:
             job: Job = self.jobs[job_id]
             if job.is_skipped(params=params):
-                result.trace.info(f"[
+                result.trace.info(f"[WORKFLOW]: Skip job: {job_id!r}")
                 job.set_outputs(output={"SKIP": {"skipped": True}}, to=params)
             else:
-                result.trace.info(f"[
+                result.trace.info(f"[WORKFLOW]: Execute: {job_id!r}")
                 job.set_outputs(
                     job.execute(
                         params=params,
@@ -997,12 +1028,8 @@ class Workflow(BaseModel):
             )
         except JobException as e:
             result.trace.error(f"[WORKFLOW]: {e.__class__.__name__}: {e}")
-            if raise_error:
-                raise WorkflowException(
-                    f"Get job execution error {job_id}: JobException: {e}"
-                ) from None
-            raise NotImplementedError(
-                "Handle error from the job execution does not support yet."
+            raise WorkflowException(
+                f"Get job execution error {job_id}: JobException: {e}"
             ) from None

         return result.catch(status=SUCCESS, context=params)
@@ -1237,7 +1264,7 @@ class Workflow(BaseModel):
             max_workers=1,
             thread_name_prefix="wf_exec_non_threading_",
         ) as executor:
-            future: Future
+            future: Optional[Future] = None

             while not job_queue.empty() and (
                 not_timeout_flag := ((time.monotonic() - ts) < timeout)
@@ -1277,14 +1304,13 @@ class Workflow(BaseModel):

                     future = None
                     job_queue.put(job_id)
-                elif future.running():
+                elif future.running() or "state=pending" in str(future):
                     time.sleep(0.075)
                     job_queue.put(job_id)
                 else:  # pragma: no cov
                     job_queue.put(job_id)
-                    result.trace.
-                        f"Execution non-threading
-                        f"that not running."
+                    result.trace.warning(
+                        f"... Execution non-threading not handle: {future}."
                     )

                 job_queue.task_done()
@@ -1313,12 +1339,19 @@ class WorkflowTask:

     This dataclass has the release method for itself that prepare necessary
     arguments before passing to the parent release method.
+
+    :param alias: (str) An alias name of Workflow model.
+    :param workflow: (Workflow) A Workflow model instance.
+    :param runner: (CronRunner)
+    :param values:
+    :param extras:
     """

-    alias: str
-    workflow: Workflow
-    runner: CronRunner
+    alias: str
+    workflow: Workflow
+    runner: CronRunner
     values: DictData = field(default_factory=dict)
+    extras: DictData = field(default_factory=dict)

     def release(
         self,
@@ -1345,7 +1378,7 @@ class WorkflowTask:

         :rtype: Result
         """
-        audit: type[Audit] = audit or get_audit()
+        audit: type[Audit] = audit or get_audit(extras=self.extras)

         if release is None:

@@ -1395,35 +1428,15 @@ class WorkflowTask:

         :rtype: ReleaseQueue
         """
-
-
-
-
-
-
-
-            runner=self.runner,
-            type=ReleaseType.TASK,
+        return queue.gen(
+            end_date,
+            audit,
+            self.runner,
+            self.alias,
+            force_run=force_run,
+            extras=self.extras,
         )

-        while queue.check_queue(workflow_release) or (
-            audit.is_pointed(name=self.alias, release=workflow_release.date)
-            and not force_run
-        ):
-            workflow_release = Release(
-                date=self.runner.next,
-                offset=0,
-                end_date=end_date,
-                runner=self.runner,
-                type=ReleaseType.TASK,
-            )
-
-            if self.runner.date > end_date:
-                return queue
-
-        heappush(queue.queue, workflow_release)
-        return queue
-
     def __repr__(self) -> str:
         """Override the `__repr__` method.

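With the raise_error flag removed, Workflow.execute_job in 0.0.50 always re-raises a failing job as WorkflowException instead of reaching the old NotImplementedError branch. A hedged caller-side sketch; the wf object and job ID are placeholders, and the import path assumes the exceptions module listed at the top of this diff exposes WorkflowException:

    from ddeutil.workflow.exceptions import WorkflowException

    def run_single_job(wf, job_id: str, params: dict):
        """Run one job of an already-loaded Workflow `wf` (placeholder object)
        and surface the 0.0.50 failure behaviour."""
        try:
            return wf.execute_job(job_id, params=params)
        except WorkflowException as exc:
            # 0.0.50 wraps the JobException, e.g.
            # "Get job execution error <job_id>: JobException: ..."
            raise RuntimeError(f"workflow job failed: {exc}") from exc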
{ddeutil_workflow-0.0.48.dist-info → ddeutil_workflow-0.0.50.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ddeutil-workflow
-Version: 0.0.48
+Version: 0.0.50
 Summary: Lightweight workflow orchestration
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -39,6 +39,8 @@ Requires-Dist: ujson; extra == "api"
 Provides-Extra: async
 Requires-Dist: aiofiles; extra == "async"
 Requires-Dist: aiohttp; extra == "async"
+Provides-Extra: docker
+Requires-Dist: docker==7.1.0; extra == "docker"
 Dynamic: license-file

 # Workflow Orchestration
@@ -262,15 +264,12 @@ it will use default value and do not raise any error to you.

 | Name                         | Component | Default                      | Description                                                                  |
 |:-----------------------------|:---------:|:-----------------------------|:-----------------------------------------------------------------------------|
-| **ROOT_PATH**                | Core      | `.`                          | Root path or the project path for this workflow engine.                      |
 | **REGISTRY_CALLER**          | Core      | `.`                          | List of importable string for the call stage.                                |
 | **REGISTRY_FILTER**          | Core      | `ddeutil.workflow.templates` | List of importable string for the filter template.                           |
-| **CONF_PATH**                | Core      |
+| **CONF_PATH**                | Core      | `./conf`                     | The config path that keep all template `.yaml` files.                        |
 | **TIMEZONE**                 | Core      | `Asia/Bangkok`               | A Timezone string value that will pass to `ZoneInfo` object.                 |
-| **STAGE_DEFAULT_ID**         | Core      | `
+| **STAGE_DEFAULT_ID**         | Core      | `false`                      | A flag that enable default stage ID that use for catch an execution output.  |
 | **STAGE_RAISE_ERROR**        | Core      | `false`                      | A flag that all stage raise StageException from stage execution.             |
-| **JOB_DEFAULT_ID**           | Core      | `false`                      | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
-| **JOB_RAISE_ERROR**          | Core      | `true`                       | A flag that all job raise JobException from job strategy execution.          |
 | **MAX_CRON_PER_WORKFLOW**    | Core      | `5`                          |                                                                              |
 | **MAX_QUEUE_COMPLETE_HIST**  | Core      | `16`                         |                                                                              |
 | **GENERATE_ID_SIMPLE_MODE**  | Core      | `true`                       | A flog that enable generating ID with `md5` algorithm.                       |
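Several hunks in this release read settings through dynamic(<key>, extras=...), for example "tz", "max_cron_per_workflow", and "generate_id_simple_mode", so a per-call extras mapping can override the defaults listed in the table above. A hedged sketch, assuming the extras keys mirror the config names as the docstrings in this diff describe:

    from ddeutil.workflow.conf import dynamic

    extras = {"max_cron_per_workflow": 10, "generate_id_simple_mode": False}

    # Expected to yield 10 instead of the packaged default of 5 if extras
    # override config values per key, as the docstrings above describe.
    limit = dynamic("max_cron_per_workflow", extras=extras)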
ddeutil_workflow-0.0.50.dist-info/RECORD
ADDED
@@ -0,0 +1,31 @@
+ddeutil/workflow/__about__.py,sha256=K4g7cm4iInR43bd_K3fsuGDTVpz7fbAASmKh5_jH8_U,28
+ddeutil/workflow/__cron.py,sha256=h8rLeIUAAEB2SdZ4Jhch7LU1Yl3bbJ-iNNJ3tQ0eYVM,28095
+ddeutil/workflow/__init__.py,sha256=3u-yGnTyfY4BFrKqA5UGaMVe_Q4cZNODuC9qZ5meOXo,2048
+ddeutil/workflow/__main__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddeutil/workflow/__types.py,sha256=8jBdbfb3aZSetjz0mvNrpGHwwxJff7mK8_4v41cLqlc,4316
+ddeutil/workflow/conf.py,sha256=o6RyqcjjeQXdPBZi3lMs5sSQ5aYvsUgMdJMoRYMWcN0,12492
+ddeutil/workflow/cron.py,sha256=80SijzMdDOBxTWRsiF-Fmuz7Ym7leY0XT2lzRAPGdXc,8781
+ddeutil/workflow/exceptions.py,sha256=r4Jrf9qtVPALU4wh4bnb_OYqC-StqSQJEmFC-_QK934,1408
+ddeutil/workflow/job.py,sha256=-VUsv6ub3T199GyugplRjI8Vs6CKQ9QHY6yhlzvUF9w,32495
+ddeutil/workflow/logs.py,sha256=GG8tqs2BQv-iXSPWqxfrRJvKwJBvXn86Uq2lW1HrM9U,26455
+ddeutil/workflow/params.py,sha256=xCtFEh0-G-G-f8y_SXxyf31bU6Ox5p5Z-WbBFXrjy8M,9960
+ddeutil/workflow/result.py,sha256=27nPQq9CETLCVczv4vvFEF9w2TllHZ_ROfyDoLFxRWM,5647
+ddeutil/workflow/reusables.py,sha256=hIpehea6J4OWeXX55kjYzo-c9-_Cc0YRwLRRbcaUkZs,17539
+ddeutil/workflow/scheduler.py,sha256=F783QaJfPg8tvYyvJvkwl8Sa42vsJzj6BzzROZFvm9I,28153
+ddeutil/workflow/stages.py,sha256=rQtW8W8btMavJxnseusHOH4HAv7SA_WLm9zQsCK22f8,63237
+ddeutil/workflow/utils.py,sha256=zbVttaMFMRLuuBJdSJf7D9qtz8bOnQIBq-rHI3Eqy4M,7821
+ddeutil/workflow/workflow.py,sha256=4jp7wm8TkSv8CXOKrCC-dlFgTP2d0OXgRHimoXnjSvY,50430
+ddeutil/workflow/api/__init__.py,sha256=F53NMBWtb9IKaDWkPU5KvybGGfKAcbehgn6TLBwHuuM,21
+ddeutil/workflow/api/api.py,sha256=CWtPLgOv2Jus9E7nzG5mG2Z32ZEkUK3JWQ2htZyMRpA,5244
+ddeutil/workflow/api/log.py,sha256=NMTnOnsBrDB5129329xF2myLdrb-z9k1MQrmrP7qXJw,1818
+ddeutil/workflow/api/repeat.py,sha256=uTtUFVLpiYYahXvCVx8sueRQ03K2Xw1id_gW3IMmX1U,5295
+ddeutil/workflow/api/routes/__init__.py,sha256=qoGtOMyVgQ5nTUc8J8wH27A8isaxl3IFCX8qoyibeCY,484
+ddeutil/workflow/api/routes/job.py,sha256=oPwBVP0Mxwxv-bGPlfmxQQ9PcVl0ev9HoPzndpYDCCQ,1954
+ddeutil/workflow/api/routes/logs.py,sha256=U6vOni3wd-ZTOwd3yVdSOpgyRmNdcgfngU5KlLM3Cww,5383
+ddeutil/workflow/api/routes/schedules.py,sha256=EgUjyRGhsm6UNaMj5luh6TcY6l571sCHcla-BL1iOfY,4829
+ddeutil/workflow/api/routes/workflows.py,sha256=JcDOrn1deK8ztFRcMTNATQejG6KMA7JxZLVc4QeBsP4,4527
+ddeutil_workflow-0.0.50.dist-info/licenses/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
+ddeutil_workflow-0.0.50.dist-info/METADATA,sha256=jh7H6NtEXQ5lKiuSpZ63LAwKDboVmV-AKGSZPiATwn4,18036
+ddeutil_workflow-0.0.50.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+ddeutil_workflow-0.0.50.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
+ddeutil_workflow-0.0.50.dist-info/RECORD,,