ddeutil-workflow 0.0.47__py3-none-any.whl → 0.0.49__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ddeutil/workflow/__about__.py +1 -1
- ddeutil/workflow/__init__.py +4 -2
- ddeutil/workflow/api/api.py +2 -1
- ddeutil/workflow/api/repeat.py +2 -1
- ddeutil/workflow/api/routes/job.py +1 -1
- ddeutil/workflow/api/routes/logs.py +6 -5
- ddeutil/workflow/api/routes/schedules.py +2 -1
- ddeutil/workflow/api/routes/workflows.py +2 -2
- ddeutil/workflow/conf.py +61 -66
- ddeutil/workflow/job.py +13 -5
- ddeutil/workflow/logs.py +282 -105
- ddeutil/workflow/result.py +19 -8
- ddeutil/workflow/reusables.py +4 -5
- ddeutil/workflow/scheduler.py +70 -50
- ddeutil/workflow/stages.py +288 -83
- ddeutil/workflow/utils.py +3 -3
- ddeutil/workflow/workflow.py +135 -103
- {ddeutil_workflow-0.0.47.dist-info → ddeutil_workflow-0.0.49.dist-info}/METADATA +24 -26
- ddeutil_workflow-0.0.49.dist-info/RECORD +31 -0
- ddeutil_workflow-0.0.47.dist-info/RECORD +0 -31
- {ddeutil_workflow-0.0.47.dist-info → ddeutil_workflow-0.0.49.dist-info}/WHEEL +0 -0
- {ddeutil_workflow-0.0.47.dist-info → ddeutil_workflow-0.0.49.dist-info}/licenses/LICENSE +0 -0
- {ddeutil_workflow-0.0.47.dist-info → ddeutil_workflow-0.0.49.dist-info}/top_level.txt +0 -0
ddeutil/workflow/result.py
CHANGED
```diff
@@ -3,6 +3,7 @@
 # Licensed under the MIT License. See LICENSE in the project root for
 # license information.
 # ------------------------------------------------------------------------------
+# [x] Use dynamic config
 """Result module. It is the data context transfer objects that use by all object
 in this package. This module provide Status enum object and Result dataclass.
 """
@@ -19,8 +20,9 @@ from pydantic.functional_validators import model_validator
 from typing_extensions import Self

 from .__types import DictData
-from .
-from .
+from .conf import dynamic
+from .logs import Trace, get_dt_tznow, get_trace
+from .utils import default_gen_id, gen_id, get_dt_now


 class Status(IntEnum):
@@ -66,8 +68,8 @@ class Result:
     parent_run_id: Optional[str] = field(default=None, compare=False)
     ts: datetime = field(default_factory=get_dt_tznow, compare=False)

-    trace: Optional[
-    extras: DictData = field(default_factory=dict)
+    trace: Optional[Trace] = field(default=None, compare=False, repr=False)
+    extras: DictData = field(default_factory=dict, compare=False, repr=False)

     @classmethod
     def construct_with_rs_or_id(
@@ -111,17 +113,24 @@ class Result:
         :rtype: Self
         """
         if self.trace is None:  # pragma: no cov
-            self.trace:
+            self.trace: Trace = get_trace(
+                self.run_id,
+                parent_run_id=self.parent_run_id,
+                extras=self.extras,
+            )
         return self

     def set_parent_run_id(self, running_id: str) -> Self:
         """Set a parent running ID.

-        :param running_id: A running ID that want to update on this model.
+        :param running_id: (str) A running ID that want to update on this model.
+
         :rtype: Self
         """
         self.parent_run_id: str = running_id
-        self.trace:
+        self.trace: Trace = get_trace(
+            self.run_id, parent_run_id=running_id, extras=self.extras
+        )
         return self

     def catch(
@@ -152,4 +161,6 @@ class Result:

         :rtype: float
         """
-        return (
+        return (
+            get_dt_now(tz=dynamic("tz", extras=self.extras)) - self.ts
+        ).total_seconds()
```
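Taken together, these hunks move `Result` off the global config: the trace object is now built through `get_trace(run_id, parent_run_id=..., extras=...)` and the elapsed-time calculation reads its timezone through `dynamic("tz", extras=...)`. A minimal sketch of that wiring, assuming the `extras` override carries the same type the config would return (a tzinfo object for `tz`); the values are illustrative only:

```python
from zoneinfo import ZoneInfo

from ddeutil.workflow.result import Result

# Per-result overrides instead of the global config; the "tz" key mirrors the
# dynamic("tz", extras=...) lookup in the hunk above, the value type is assumed.
rs: Result = Result(extras={"tz": ZoneInfo("Asia/Bangkok")})

# Rebuilds rs.trace via get_trace(run_id, parent_run_id=..., extras=rs.extras).
rs = rs.set_parent_run_id("01-parent-run")
```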
ddeutil/workflow/reusables.py
CHANGED
```diff
@@ -91,7 +91,7 @@ def make_filter_registry(
     :rtype: dict[str, FilterRegistry]
     """
     rs: dict[str, FilterRegistry] = {}
-    for module in dynamic("
+    for module in dynamic("registry_filter", f=registers):
         # NOTE: try to sequential import task functions
         try:
             importer = import_module(module)
@@ -343,7 +343,7 @@ def param2template(
     :returns: An any getter value from the params input.
     """
     registers: Optional[list[str]] = (
-        extras.get("
+        extras.get("registry_filter") if extras else None
     )
     filters: dict[str, FilterRegistry] = filters or make_filter_registry(
         registers=registers
@@ -449,7 +449,7 @@ def make_registry(
     """
     rs: dict[str, Registry] = {}
     regis_calls: list[str] = dynamic(
-        "
+        "registry_caller", f=registries
     )  # pragma: no cov
     regis_calls.extend(["ddeutil.vendors"])

@@ -534,8 +534,7 @@ def extract_call(

     call: CallSearchData = CallSearchData(**found.groupdict())
     rgt: dict[str, Registry] = make_registry(
-        submodule=f"{call.path}",
-        registries=registries,
+        submodule=f"{call.path}", registries=registries
     )

     if call.func not in rgt:
```
ddeutil/workflow/scheduler.py
CHANGED
```diff
@@ -3,7 +3,7 @@
 # Licensed under the MIT License. See LICENSE in the project root for
 # license information.
 # ------------------------------------------------------------------------------
-# [x] Use
+# [x] Use dynamic config
 """The main schedule running is `schedule_runner` function that trigger the
 multiprocess of `schedule_control` function for listing schedules on the
 config by `Loader.finds(Schedule)`.
@@ -36,7 +36,7 @@ from textwrap import dedent
 from threading import Thread
 from typing import Callable, Optional, TypedDict, Union

-from pydantic import BaseModel, Field
+from pydantic import BaseModel, Field, ValidationInfo
 from pydantic.functional_validators import field_validator, model_validator
 from typing_extensions import Self

@@ -52,7 +52,7 @@ except ImportError: # pragma: no cov

 from .__cron import CronRunner
 from .__types import DictData, TupleStr
-from .conf import Loader, SimLoad,
+from .conf import Loader, SimLoad, dynamic
 from .cron import On
 from .exceptions import ScheduleException, WorkflowException
 from .logs import Audit, get_audit
@@ -62,7 +62,6 @@ from .workflow import Release, ReleaseQueue, Workflow, WorkflowTask

 P = ParamSpec("P")

-logger = get_logger("ddeutil.workflow")
 logging.getLogger("schedule").setLevel(logging.INFO)


@@ -90,6 +89,11 @@ class ScheduleWorkflow(BaseModel):
     not want to change on the workflow model.
     """

+    extras: DictData = Field(
+        default_factory=dict,
+        description="An extra parameters that want to override config values.",
+    )
+
     alias: Optional[str] = Field(
         default=None,
         description="An alias name of workflow that use for schedule model.",
@@ -122,14 +126,17 @@ class ScheduleWorkflow(BaseModel):
         if not values.get("alias"):
             values["alias"] = values.get("name")

-        cls.__bypass_on(values)
+        cls.__bypass_on(values, extras=values.get("extras"))
         return values

     @classmethod
-    def __bypass_on(
+    def __bypass_on(
+        cls, data: DictData, *, extras: Optional[DictData] = None
+    ) -> DictData:
         """Bypass and prepare the on data to loaded config data.

         :param data: A data that want to validate for model initialization.
+        :param extras: An extra parameter that want to override core config.

         :rtype: DictData
         """
@@ -144,14 +151,14 @@ class ScheduleWorkflow(BaseModel):
             # NOTE: Pass on value to Loader and keep on model object to on
             #   field.
             data["on"] = [
-                Loader(n, externals=
+                Loader(n, externals=extras).data if isinstance(n, str) else n
                 for n in on
             ]

         return data

     @field_validator("on", mode="after")
-    def __on_no_dup__(cls, value: list[On]) -> list[On]:
+    def __on_no_dup__(cls, value: list[On], info: ValidationInfo) -> list[On]:
         """Validate the on fields should not contain duplicate values and if it
         contains every minute value, it should have only one on value.

@@ -163,10 +170,12 @@
                 "The on fields should not contain duplicate on value."
             )

-
+        extras: Optional[DictData] = info.data.get("extras")
+        if len(set_ons) > (
+            conf := dynamic("max_cron_per_workflow", extras=extras)
+        ):
             raise ValueError(
-                f"The number of the on should not more than "
-                f"{config.max_on_per_workflow} crontab."
+                f"The number of the on should not more than {conf} crontabs."
             )

         return value
```
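With the new `extras` field and the `ValidationInfo`-aware `__on_no_dup__` validator, the crontab limit is read per model through `dynamic("max_cron_per_workflow", extras=...)` rather than a fixed `config` attribute. A hedged sketch of overriding it, assuming the `on` entries name `On` configs that exist under the configured conf path; the workflow name and limit are placeholders:

```python
from ddeutil.workflow.scheduler import ScheduleWorkflow

# "every_5_minute" / "every_hour" are assumed On config names loaded via
# Loader(...); the extras key mirrors dynamic("max_cron_per_workflow", ...).
schedule_wf = ScheduleWorkflow(
    name="stream-etl",
    on=["every_5_minute", "every_hour"],
    extras={"max_cron_per_workflow": 10},
)
```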
```diff
@@ -175,8 +184,6 @@ class ScheduleWorkflow(BaseModel):
         self,
         start_date: datetime,
         queue: dict[str, ReleaseQueue],
-        *,
-        extras: DictData | None = None,
     ) -> list[WorkflowTask]:
         """Return the list of WorkflowTask object from the specific input
         datetime that mapping with the on field.
@@ -186,17 +193,15 @@

         :param start_date: A start date that get from the workflow schedule.
         :param queue: A mapping of name and list of datetime for queue.
-        :param extras: An extra parameters that pass to the Loader object.

         :rtype: list[WorkflowTask]
         :return: Return the list of WorkflowTask object from the specific
             input datetime that mapping with the on field.
         """
         workflow_tasks: list[WorkflowTask] = []
-        extras: DictData = extras or {}

         # NOTE: Loading workflow model from the name of workflow.
-        wf: Workflow = Workflow.from_conf(self.name, extras=extras)
+        wf: Workflow = Workflow.from_conf(self.name, extras=self.extras)
         wf_queue: ReleaseQueue = queue[self.alias]

         # IMPORTANT: Create the default 'on' value if it does not pass the `on`
@@ -218,6 +223,7 @@ class ScheduleWorkflow(BaseModel):
                     workflow=wf,
                     runner=runner,
                     values=self.values,
+                    extras=self.extras,
                 ),
             )

@@ -231,6 +237,11 @@ class Schedule(BaseModel):
     enhance the workflow object by adding the alias and values fields.
     """

+    extras: DictData = Field(
+        default_factory=dict,
+        description="An extra parameters that want to override config values.",
+    )
+
     desc: Optional[str] = Field(
         default=None,
         description=(
@@ -281,6 +292,9 @@ class Schedule(BaseModel):
         # NOTE: Add name to loader data
         loader_data["name"] = name.replace(" ", "_")

+        if extras:
+            loader_data["extras"] = extras
+
         return cls.model_validate(obj=loader_data)

     @classmethod
@@ -288,7 +302,7 @@ class Schedule(BaseModel):
         cls,
         name: str,
         path: Path,
-
+        extras: DictData | None = None,
     ) -> Self:
         """Create Schedule instance from the SimLoad object that receive an
         input schedule name and conf path. The loader object will use this
@@ -297,7 +311,7 @@ class Schedule(BaseModel):

         :param name: (str) A schedule name that want to pass to Loader object.
         :param path: (Path) A config path that want to search.
-        :param
+        :param extras: An external parameters that want to pass to Loader
             object.

         :raise ValueError: If the type does not match with current object.
@@ -305,7 +319,7 @@ class Schedule(BaseModel):
         :rtype: Self
         """
         loader: SimLoad = SimLoad(
-            name, conf_path=path, externals=(
+            name, conf_path=path, externals=(extras or {})
         )

         # NOTE: Validate the config type match with current connection model
@@ -317,14 +331,15 @@ class Schedule(BaseModel):
         # NOTE: Add name to loader data
         loader_data["name"] = name.replace(" ", "_")

+        if extras:
+            loader_data["extras"] = extras
+
         return cls.model_validate(obj=loader_data)

     def tasks(
         self,
         start_date: datetime,
         queue: dict[str, ReleaseQueue],
-        *,
-        extras: DictData | None = None,
     ) -> list[WorkflowTask]:
         """Return the list of WorkflowTask object from the specific input
         datetime that mapping with the on field from workflow schedule model.
@@ -332,8 +347,6 @@ class Schedule(BaseModel):
         :param start_date: A start date that get from the workflow schedule.
         :param queue: (dict[str, ReleaseQueue]) A mapping of name and list of
             datetime for queue.
-        :param extras: (DictData) An extra parameters that pass to the Loader
-            object.

         :rtype: list[WorkflowTask]
         :return: Return the list of WorkflowTask object from the specific
@@ -342,13 +355,13 @@ class Schedule(BaseModel):
         workflow_tasks: list[WorkflowTask] = []

         for workflow in self.workflows:
+            if self.extras:
+                workflow.extras = self.extras

             if workflow.alias not in queue:
                 queue[workflow.alias] = ReleaseQueue()

-            workflow_tasks.extend(
-                workflow.tasks(start_date, queue=queue, extras=extras)
-            )
+            workflow_tasks.extend(workflow.tasks(start_date, queue=queue))

         return workflow_tasks

```
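`Schedule` now carries its own `extras`, stores whatever `from_conf` receives in `loader_data["extras"]`, and copies it onto each workflow inside `tasks()`, so the old `extras` argument on `tasks()` is gone. A sketch of the new flow, with a placeholder schedule name that would have to exist in the configured conf path:

```python
from datetime import datetime

from ddeutil.workflow.scheduler import Schedule

# "sales-pipeline" is a placeholder schedule name; the extras dict rides along
# to every ScheduleWorkflow and Workflow that tasks() builds.
schedule: Schedule = Schedule.from_conf(
    "sales-pipeline", extras={"max_cron_per_workflow": 10}
)
tasks = schedule.tasks(datetime.now(), queue={})
```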
```diff
@@ -356,24 +369,26 @@
         self,
         *,
         stop: datetime | None = None,
-        extras: DictData | None = None,
         audit: type[Audit] | None = None,
         parent_run_id: str | None = None,
     ) -> Result:  # pragma: no cov
         """Pending this schedule tasks with the schedule package.

         :param stop: A datetime value that use to stop running schedule.
-        :param extras: An extra parameters that pass to Loader.
         :param audit: An audit class that use on the workflow task release for
             writing its release audit context.
         :param parent_run_id: A parent workflow running ID for this release.
         """
-        audit: type[Audit] = audit or get_audit()
+        audit: type[Audit] = audit or get_audit(extras=self.extras)
         result: Result = Result().set_parent_run_id(parent_run_id)

         # NOTE: Create the start and stop datetime.
-        start_date: datetime = datetime.now(
-
+        start_date: datetime = datetime.now(
+            tz=dynamic("tz", extras=self.extras)
+        )
+        stop_date: datetime = stop or (
+            start_date + dynamic("stop_boundary_delta", extras=self.extras)
+        )

         # IMPORTANT: Create main mapping of queue and thread object.
         queue: dict[str, ReleaseQueue] = {}
@@ -384,7 +399,7 @@
         ) + timedelta(minutes=1)

         scheduler_pending(
-            tasks=self.tasks(start_date_waiting, queue=queue
+            tasks=self.tasks(start_date_waiting, queue=queue),
             stop=stop_date,
             queue=queue,
             threads=threads,
@@ -459,6 +474,7 @@ def schedule_task(
     audit: type[Audit],
     *,
     parent_run_id: str | None = None,
+    extras: Optional[DictData] = None,
 ) -> ResultOrCancel:
     """Schedule task function that generate thread of workflow task release
     method in background. This function do the same logic as the workflow poke
@@ -473,12 +489,13 @@
     :param threads: A mapping of alias name and Thread object.
     :param audit: An audit class that want to make audit object.
     :param parent_run_id: A parent workflow running ID for this release.
+    :param extras: An extra parameter that want to override the core config.

     :rtype: ResultOrCancel
     """
     result: Result = Result().set_parent_run_id(parent_run_id)
-    current_date: datetime = datetime.now(tz=
-    if current_date > stop.replace(tzinfo=
+    current_date: datetime = datetime.now(tz=dynamic("tz", extras=extras))
+    if current_date > stop.replace(tzinfo=dynamic("tz", extras=extras)):
         return CancelJob

     # IMPORTANT:
@@ -559,7 +576,7 @@ def schedule_task(

         threads[thread_name] = {
             "thread": thread,
-            "start_date": datetime.now(tz=
+            "start_date": datetime.now(tz=dynamic("tz", extras=extras)),
             "release_date": release.date,
         }

@@ -704,8 +721,8 @@ def scheduler_pending(
 def schedule_control(
     schedules: list[str],
     stop: datetime | None = None,
-    extras: DictData | None = None,
     *,
+    extras: DictData | None = None,
     audit: type[Audit] | None = None,
     parent_run_id: str | None = None,
 ) -> Result:  # pragma: no cov
@@ -715,19 +732,21 @@ def schedule_control(

     :param schedules: A list of workflow names that want to schedule running.
     :param stop: A datetime value that use to stop running schedule.
-    :param extras: An extra parameters that
+    :param extras: An extra parameters that want to override core config.
     :param audit: An audit class that use on the workflow task release for
         writing its release audit context.
     :param parent_run_id: A parent workflow running ID for this release.

     :rtype: Result
     """
-    audit: type[Audit] = audit or get_audit()
+    audit: type[Audit] = audit or get_audit(extras=extras)
     result: Result = Result().set_parent_run_id(parent_run_id)

     # NOTE: Create the start and stop datetime.
-    start_date: datetime = datetime.now(tz=
-    stop_date: datetime = stop or (
+    start_date: datetime = datetime.now(tz=dynamic("tz", extras=extras))
+    stop_date: datetime = stop or (
+        start_date + dynamic("stop_boundary_delta", extras=extras)
+    )

     # IMPORTANT: Create main mapping of queue and thread object.
     queue: dict[str, ReleaseQueue] = {}
@@ -740,10 +759,10 @@
     tasks: list[WorkflowTask] = []
     for name in schedules:
         tasks.extend(
-
-
-
-
+            (
+                Schedule.from_conf(name, extras=extras).tasks(
+                    start_date_waiting, queue=queue
+                )
             ),
         )

@@ -761,7 +780,8 @@

 def schedule_runner(
     stop: datetime | None = None,
-
+    *,
+    extras: DictData | None = None,
     excluded: list[str] | None = None,
 ) -> Result:  # pragma: no cov
     """Schedule runner function it the multiprocess controller function for
@@ -770,7 +790,7 @@
     path by `WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS` value.

     :param stop: A stop datetime object that force stop running scheduler.
-    :param
+    :param extras: An extra parameter that want to override core config.
     :param excluded: A list of schedule name that want to exclude from finding.

     This function will get all workflows that include on value that was
@@ -793,7 +813,7 @@
     context: DictData = {"schedules": [], "threads": []}

     with ProcessPoolExecutor(
-        max_workers=
+        max_workers=dynamic("max_schedule_process", extras=extras),
     ) as executor:

         futures: list[Future] = [
@@ -801,12 +821,12 @@
                 schedule_control,
                 schedules=[load[0] for load in loader],
                 stop=stop,
-
+                extras=extras,
                 parent_run_id=result.parent_run_id,
             )
             for loader in batch(
                 Loader.finds(Schedule, excluded=excluded),
-                n=
+                n=dynamic("max_schedule_per_process", extras=extras),
             )
         ]

@@ -821,4 +841,4 @@
             context["schedule"].extend(rs.context.get("schedules", []))
             context["threads"].extend(rs.context.get("threads", []))

-    return result.catch(status=
+    return result.catch(status=SUCCESS, context=context)
```