ddeutil-workflow 0.0.54__py3-none-any.whl → 0.0.56__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ddeutil/workflow/__about__.py +1 -1
- ddeutil/workflow/__init__.py +4 -2
- ddeutil/workflow/__main__.py +30 -0
- ddeutil/workflow/api/__init__.py +170 -1
- ddeutil/workflow/api/routes/job.py +22 -21
- ddeutil/workflow/api/routes/schedules.py +0 -2
- ddeutil/workflow/api/routes/workflows.py +3 -4
- ddeutil/workflow/conf.py +144 -94
- ddeutil/workflow/{cron.py → event.py} +36 -20
- ddeutil/workflow/exceptions.py +10 -1
- ddeutil/workflow/job.py +23 -14
- ddeutil/workflow/result.py +1 -0
- ddeutil/workflow/scheduler.py +33 -74
- ddeutil/workflow/stages.py +169 -116
- ddeutil/workflow/workflow.py +57 -106
- {ddeutil_workflow-0.0.54.dist-info → ddeutil_workflow-0.0.56.dist-info}/METADATA +5 -7
- ddeutil_workflow-0.0.56.dist-info/RECORD +31 -0
- ddeutil_workflow-0.0.56.dist-info/entry_points.txt +2 -0
- ddeutil/workflow/api/api.py +0 -170
- ddeutil_workflow-0.0.54.dist-info/RECORD +0 -31
- {ddeutil_workflow-0.0.54.dist-info → ddeutil_workflow-0.0.56.dist-info}/WHEEL +0 -0
- {ddeutil_workflow-0.0.54.dist-info → ddeutil_workflow-0.0.56.dist-info}/licenses/LICENSE +0 -0
- {ddeutil_workflow-0.0.54.dist-info → ddeutil_workflow-0.0.56.dist-info}/top_level.txt +0 -0
ddeutil/workflow/conf.py
CHANGED
@@ -7,24 +7,26 @@ from __future__ import annotations
 
 import json
 import os
+from abc import ABC, abstractmethod
 from collections.abc import Iterator
 from datetime import timedelta
 from functools import cached_property
+from inspect import isclass
 from pathlib import Path
-from typing import Final, Optional, TypeVar
+from typing import Final, Optional, Protocol, TypeVar, Union
 from zoneinfo import ZoneInfo
 
 from ddeutil.core import str2bool
 from ddeutil.io import YamlFlResolve
 from ddeutil.io.paths import glob_files, is_ignored, read_ignore
 
-from .__types import DictData
+from .__types import DictData
 
 T = TypeVar("T")
 PREFIX: Final[str] = "WORKFLOW"
 
 
-def env(var: str, default: str | None = None) -> str | None:
+def env(var: str, default: str | None = None) -> str | None:
     """Get environment variable with uppercase and adding prefix string.
 
     :param var: (str) A env variable name.
@@ -35,17 +37,6 @@ def env(var: str, default: str | None = None) -> str | None:  # pragma: no cov
     return os.getenv(f"{PREFIX}_{var.upper().replace(' ', '_')}", default)
 
 
-__all__: TupleStr = (
-    "api_config",
-    "env",
-    "Config",
-    "SimLoad",
-    "Loader",
-    "config",
-    "dynamic",
-)
-
-
 class Config:  # pragma: no cov
     """Config object for keeping core configurations on the current session
     without changing when if the application still running.
@@ -188,7 +179,7 @@ class Config:  # pragma: no cov
             return timedelta(**json.loads(stop_boundary_delta_str))
         except Exception as err:
             raise ValueError(
-                "Config
+                "Config `WORKFLOW_APP_STOP_BOUNDARY_DELTA` can not parsing to"
                 f"timedelta with {stop_boundary_delta_str}."
             ) from err
 
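
The value behind this message comes from the `WORKFLOW_APP_STOP_BOUNDARY_DELTA` environment variable and is unpacked straight into `timedelta`, so it must be a JSON object of `timedelta` keyword arguments. A minimal standalone sketch of that parsing pattern (the JSON payload below is a hypothetical value, not a package default):

    import json
    from datetime import timedelta

    # Hypothetical value for WORKFLOW_APP_STOP_BOUNDARY_DELTA; keys must be
    # valid timedelta keyword arguments because they are unpacked directly.
    stop_boundary_delta_str = '{"minutes": 5, "seconds": 20}'
    try:
        delta = timedelta(**json.loads(stop_boundary_delta_str))
    except Exception as err:
        raise ValueError(
            "Config `WORKFLOW_APP_STOP_BOUNDARY_DELTA` can not parsing to"
            f"timedelta with {stop_boundary_delta_str}."
        ) from err
    print(delta)  # 0:05:20
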
@@ -209,110 +200,194 @@ class APIConfig:
         return str2bool(env("API_ENABLE_ROUTE_SCHEDULE", "true"))
 
 
-class
-
-
+class BaseLoad(ABC):
+
+    @classmethod
+    @abstractmethod
+    def find(cls, name: str, *args, **kwargs) -> DictData: ...
+
+    @classmethod
+    @abstractmethod
+    def finds(
+        cls, obj: object, *args, **kwargs
+    ) -> Iterator[tuple[str, DictData]]: ...
 
-
-
-
+
+class FileLoad(BaseLoad):
+    """Base Load object that use to search config data by given some identity
+    value like name of `Workflow` or `On` templates.
+
+    :param name: (str) A name of key of config data that read with YAML
+        Environment object.
+    :param path: (Path) A config path object.
+    :param externals: (DictData) An external config data that want to add to
+        loaded config data.
+    :param extras: (DictDdata) An extra parameters that use to override core
+        config values.
+
+    :raise ValueError: If the data does not find on the config path with the
+        name parameter.
 
     Noted:
-        The config data should have
+        The config data should have `type` key for modeling validation that
        make this loader know what is config should to do pass to.
 
        ... <identity-key>:
        ... type: <importable-object>
        ... <key-data-1>: <value-data-1>
        ... <key-data-2>: <value-data-2>
+
+    This object support multiple config paths if you pass the `conf_paths`
+    key to the `extras` parameter.
     """
 
     def __init__(
         self,
         name: str,
-
+        *,
+        path: Optional[Union[str, Path]] = None,
         externals: DictData | None = None,
+        extras: DictData | None = None,
     ) -> None:
-        self.
+        self.path: Path = Path(dynamic("conf_path", f=path, extras=extras))
         self.externals: DictData = externals or {}
-
-        self.data: DictData =
-
-
-
-
-
-        if data := self.filter_yaml(file, name=name):
-            self.data = data
+        self.extras: DictData = extras or {}
+        self.data: DictData = self.find(
+            name,
+            path=path,
+            paths=self.extras.get("conf_paths"),
+            extras=extras,
+        )
 
         # VALIDATE: check the data that reading should not empty.
         if not self.data:
             raise ValueError(
-                f"Config {name!r} does not found on conf path: "
-                f"{self.conf_path}."
+                f"Config {name!r} does not found on the conf path: {self.path}."
             )
 
         self.data.update(self.externals)
 
+    @classmethod
+    def find(
+        cls,
+        name: str,
+        *,
+        path: Optional[Path] = None,
+        paths: Optional[list[Path]] = None,
+        extras: Optional[DictData] = None,
+    ) -> DictData:
+        """Find data with specific key and return the latest modify date data if
+        this key exists multiple files.
+
+        :param name: (str) A name of data that want to find.
+        :param path: (Path) A config path object.
+        :param paths: (list[Path]) A list of config path object.
+        :param extras: (DictData) An extra parameter that use to override core
+            config values.
+
+        :rtype: DictData
+        """
+        path: Path = dynamic("conf_path", f=path, extras=extras)
+        if not paths:
+            paths: list[Path] = [path]
+        elif not isinstance(paths, list):
+            raise TypeError(
+                f"Multi-config paths does not support for type: {type(paths)}"
+            )
+        else:
+            paths.append(path)
+
+        all_data: list[tuple[float, DictData]] = []
+        for path in paths:
+            for file in glob_files(path):
+
+                if cls.is_ignore(file, path):
+                    continue
+
+                if data := cls.filter_yaml(file, name=name):
+                    all_data.append((file.lstat().st_mtime, data))
+
+        return {} if not all_data else max(all_data, key=lambda x: x[0])[1]
+
     @classmethod
     def finds(
         cls,
         obj: object,
-        conf_path: Path,
         *,
-
+        path: Optional[Path] = None,
+        paths: Optional[list[Path]] = None,
         excluded: list[str] | None = None,
+        extras: Optional[DictData] = None,
     ) -> Iterator[tuple[str, DictData]]:
         """Find all data that match with object type in config path. This class
         method can use include and exclude list of identity name for filter and
         adds-on.
 
         :param obj: An object that want to validate matching before return.
-        :param
-        :param
-            data if any key exist.
+        :param path: A config path object.
+        :param paths: (list[Path]) A list of config path object.
         :param excluded: An included list of data key that want to filter from
             data.
+        :param extras: (DictData) An extra parameter that use to override core
+            config values.
 
         :rtype: Iterator[tuple[str, DictData]]
         """
-
-
+        excluded: list[str] = excluded or []
+        path: Path = dynamic("conf_path", f=path, extras=extras)
+        if not paths:
+            paths: list[Path] = [path]
+        else:
+            paths.append(path)
+
+        all_data: dict[str, list[tuple[float, DictData]]] = {}
+        for path in paths:
+            for file in glob_files(path):
+
+                if cls.is_ignore(file, path):
+                    continue
 
-
-                    continue
+                for key, data in cls.filter_yaml(file).items():
 
-
+                    if key in excluded:
+                        continue
 
-
-
+                    if (
+                        data.get("type", "")
+                        == (obj if isclass(obj) else obj.__class__).__name__
+                    ):
+                        marking: tuple[float, DictData] = (
+                            file.lstat().st_mtime,
+                            data,
+                        )
+                        if key in all_data:
+                            all_data[key].append(marking)
+                        else:
+                            all_data[key] = [marking]
 
-
-
-                        {k: data[k] for k in data if k in included}
-                        if included
-                        else data
-                    )
+        for key in all_data:
+            yield key, max(all_data[key], key=lambda x: x[0])[1]
 
     @classmethod
     def is_ignore(
         cls,
         file: Path,
-
+        path: Path,
         *,
         ignore_filename: Optional[str] = None,
     ) -> bool:
         """Check this file was ignored.
 
         :param file: (Path) A file path that want to check.
-        :param
+        :param path: (Path) A config path that want to read the config
             ignore file.
-        :param ignore_filename: (str) An ignore filename.
+        :param ignore_filename: (str) An ignore filename. Default is
+            `.confignore` filename.
 
         :rtype: bool
         """
         ignore_filename: str = ignore_filename or ".confignore"
-        return is_ignored(file, read_ignore(
+        return is_ignored(file, read_ignore(path / ignore_filename))
 
     @classmethod
     def filter_yaml(cls, file: Path, name: str | None = None) -> DictData:
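
The new `FileLoad.find` resolves duplicate keys across several config paths by keeping the copy from the most recently modified file. A standalone sketch of that selection rule, with a hypothetical `find_latest` helper standing in for the real loader (it assumes PyYAML and skips the ignore-file handling that `FileLoad` does via `is_ignore`):

    from pathlib import Path
    from typing import Any

    import yaml  # assumption: PyYAML is available for this sketch

    def find_latest(name: str, paths: list[Path]) -> dict[str, Any]:
        # Collect (mtime, data) for every file that defines `name`, then keep
        # the entry from the newest file; an empty dict means nothing matched.
        all_data: list[tuple[float, dict[str, Any]]] = []
        for path in paths:
            for file in path.rglob("*.y*ml"):
                data = (yaml.safe_load(file.read_text()) or {}).get(name)
                if data:
                    all_data.append((file.lstat().st_mtime, data))
        return {} if not all_data else max(all_data, key=lambda x: x[0])[1]
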
@@ -369,44 +444,19 @@ def dynamic(
     return rsx if rsx is not None else rs
 
 
-class Loader(
-
+class Loader(Protocol):  # pragma: no cov
+    type: str
+    path: Path
+    data: DictData
+    extras: DictData
+    externals: DictData
 
-
-    :param externals: (DictData) An external parameters
-    """
+    def __init__(self, *args, **kwargs) -> None: ...
 
     @classmethod
-    def
-        cls,
-        obj: object,
-        *,
-        path: Path | None = None,
-        included: list[str] | None = None,
-        excluded: list[str] | None = None,
-        **kwargs,
-    ) -> Iterator[tuple[str, DictData]]:
-        """Override the find class method from the Simple Loader object.
+    def find(cls, name: str, *args, **kwargs) -> DictData: ...
 
-
-
-
-
-        :param excluded: An included list of data key that want to filter from
-            data.
-
-        :rtype: Iterator[tuple[str, DictData]]
-        """
-        return super().finds(
-            obj=obj,
-            conf_path=(path or config.conf_path),
-            included=included,
-            excluded=excluded,
-        )
-
-    def __init__(self, name: str, externals: DictData) -> None:
-        super().__init__(
-            name,
-            conf_path=dynamic("conf_path", extras=externals),
-            externals=externals,
-        )
+    @classmethod
+    def finds(
+        cls, obj: object, *args, **kwargs
+    ) -> Iterator[tuple[str, DictData]]: ...
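
`Loader` is now a `typing.Protocol` rather than a concrete loader class, so any class that exposes the same attributes and `find`/`finds` classmethods (such as the new `FileLoad`) is accepted wherever a loader is expected, without inheriting from it. A minimal standalone sketch of that structural typing; `LoaderLike` and `InMemoryLoad` are hypothetical names, not part of the package:

    from typing import Any, Protocol

    class LoaderLike(Protocol):
        type: str
        data: dict[str, Any]

        @classmethod
        def find(cls, name: str, *args, **kwargs) -> dict[str, Any]: ...

    class InMemoryLoad:
        # Satisfies LoaderLike structurally, with no inheritance.
        store: dict[str, dict[str, Any]] = {"wf-demo": {"type": "Workflow"}}

        def __init__(self, name: str) -> None:
            self.data = self.find(name)
            self.type = self.data.get("type", "")

        @classmethod
        def find(cls, name: str, *args, **kwargs) -> dict[str, Any]:
            return cls.store.get(name, {})

    def describe(loader: LoaderLike) -> str:
        return f"{loader.type}: {loader.data}"

    print(describe(InMemoryLoad("wf-demo")))  # Workflow: {'type': 'Workflow'}
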
ddeutil/workflow/{cron.py → event.py}
RENAMED
@@ -3,11 +3,14 @@
 # Licensed under the MIT License. See LICENSE in the project root for
 # license information.
 # ------------------------------------------------------------------------------
+"""Event module that store all event object. Now, it has only `On` and `OnYear`
+model these are schedule with crontab event.
+"""
 from __future__ import annotations
 
 from dataclasses import fields
 from datetime import datetime
-from typing import Annotated, Literal, Union
+from typing import Annotated, Any, Literal, Union
 from zoneinfo import ZoneInfo, ZoneInfoNotFoundError
 
 from pydantic import BaseModel, ConfigDict, Field, ValidationInfo
@@ -17,11 +20,13 @@ from typing_extensions import Self
 
 from .__cron import WEEKDAYS, CronJob, CronJobYear, CronRunner, Options
 from .__types import DictData, DictStr
-from .conf import
+from .conf import FileLoad
+
+Interval = Literal["daily", "weekly", "monthly"]
 
 
 def interval2crontab(
-    interval:
+    interval: Interval,
     *,
     day: str | None = None,
     time: str = "00:00",
@@ -59,10 +64,11 @@ def interval2crontab(
 
 
 class On(BaseModel):
-    """On
+    """On model (Warped crontab object by Pydantic model) to keep crontab value
+    and generate CronRunner object from this crontab value.
 
-
-
+    Methods:
+        - generate: is the main use-case of this schedule object.
     """
 
     model_config = ConfigDict(arbitrary_types_allowed=True)
@@ -90,19 +96,24 @@ class On(BaseModel):
             description="A timezone string value",
             alias="timezone",
         ),
-    ] = "
+    ] = "UTC"
 
     @classmethod
     def from_value(cls, value: DictStr, extras: DictData) -> Self:
         """Constructor from values that will generate crontab by function.
 
-        :param value: A mapping value that will generate crontab
-            schedule model.
-        :param extras: An
+        :param value: (DictStr) A mapping value that will generate crontab
+            before create schedule model.
+        :param extras: (DictData) An extra parameter that use to override core
+            config value.
         """
         passing: DictStr = {}
+
         if "timezone" in value:
             passing["tz"] = value.pop("timezone")
+        elif "tz" in value:
+            passing["tz"] = value.pop("tz")
+
         passing["cronjob"] = interval2crontab(
             **{v: value[v] for v in value if v in ("interval", "day", "time")}
         )
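
A standalone sketch of the key handling added to `On.from_value` above: callers may now pass either `timezone` or `tz`, and both are forwarded as the `tz` argument (the input dictionaries here are made up):

    def normalize_tz(value: dict) -> dict:
        # Mirrors the branch added above: prefer "timezone", fall back to "tz".
        passing: dict = {}
        if "timezone" in value:
            passing["tz"] = value.pop("timezone")
        elif "tz" in value:
            passing["tz"] = value.pop("tz")
        return passing

    print(normalize_tz({"timezone": "Asia/Bangkok", "interval": "daily"}))  # {'tz': 'Asia/Bangkok'}
    print(normalize_tz({"tz": "UTC", "interval": "weekly"}))                # {'tz': 'UTC'}
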
@@ -112,18 +123,20 @@ class On(BaseModel):
     def from_conf(
         cls,
         name: str,
+        *,
         extras: DictData | None = None,
     ) -> Self:
-        """Constructor from the name of config that will use loader
-        getting the data.
+        """Constructor from the name of config loader that will use loader
+        object for getting the `On` data.
 
-        :param name: A name of config that will get from loader.
-        :param extras: An extra parameter that
+        :param name: (str) A name of config that will get from loader.
+        :param extras: (DictData) An extra parameter that use to override core
+            config values.
 
         :rtype: Self
         """
         extras: DictData = extras or {}
-        loader:
+        loader: FileLoad = FileLoad(name, extras=extras)
 
         # NOTE: Validate the config type match with current connection model
         if loader.type != cls.__name__:
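
`On.from_conf` now builds a `FileLoad` directly and then checks that the loaded entry's `type` field names the model class. A standalone sketch of that check against a hypothetical config entry (the key and its values are made up; `cronjob` and `timezone` are field names visible elsewhere in this diff):

    # What a loader would return for a made-up config key "every-day-noon".
    config_data = {
        "type": "On",
        "cronjob": "0 12 * * *",
        "timezone": "UTC",
    }

    class On:  # stand-in class used only for the name comparison below
        pass

    # Mirrors `if loader.type != cls.__name__:` in from_conf.
    if config_data.get("type") != On.__name__:
        raise ValueError("Config type does not match the On model.")
    print("type check passed")
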
@@ -155,17 +168,17 @@ class On(BaseModel):
         )
 
     @model_validator(mode="before")
-    def __prepare_values(cls,
+    def __prepare_values(cls, data: Any) -> Any:
         """Extract tz key from value and change name to timezone key.
 
-        :param
+        :param data: (DictData) A data that want to pass for create an On
             model.
 
         :rtype: DictData
         """
-        if tz :=
-
-        return
+        if isinstance(data, dict) and (tz := data.pop("tz", None)):
+            data["timezone"] = tz
+        return data
 
     @field_validator("tz")
     def __validate_tz(cls, value: str) -> str:
@@ -238,6 +251,9 @@ class On(BaseModel):
         """Return a next datetime from Cron runner object that start with any
         date that given from input.
 
+        :param start: (str | datetime) A start datetime that use to generate
+            the CronRunner object.
+
         :rtype: CronRunner
         """
         runner: CronRunner = self.generate(start=start)
ddeutil/workflow/exceptions.py
CHANGED
@@ -22,7 +22,12 @@ ErrorData = TypedDict(
 
 
 def to_dict(exception: Exception) -> ErrorData:  # pragma: no cov
-    """Create dict data from exception instance.
+    """Create dict data from exception instance.
+
+    :param exception: An exception object.
+
+    :rtype: ErrorData
+    """
     return {
         "class": exception,
         "name": exception.__class__.__name__,
@@ -33,6 +38,10 @@ def to_dict(exception: Exception) -> ErrorData:  # pragma: no cov
 class BaseWorkflowException(Exception):
 
     def to_dict(self) -> ErrorData:
+        """Return ErrorData data from the current exception object.
+
+        :rtype: ErrorData
+        """
         return to_dict(self)
 
 
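
As far as this diff shows, `ErrorData` carries at least the exception object itself under `class` and its class name under `name`. A standalone sketch of that mapping (any further keys of `ErrorData` are not visible here):

    def to_dict(exception: Exception) -> dict:
        # Only the keys visible in this diff; ErrorData may define more.
        return {
            "class": exception,
            "name": exception.__class__.__name__,
        }

    err = ValueError("Config 'demo' does not found on the conf path.")
    print(to_dict(err)["name"])  # ValueError
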
ddeutil/workflow/job.py
CHANGED
@@ -140,14 +140,19 @@ class Strategy(BaseModel):
 
     fail_fast: bool = Field(
         default=False,
+        description=(
+            "A fail-fast flag that use to cancel strategy execution when it "
+            "has some execution was failed."
+        ),
         alias="fail-fast",
     )
     max_parallel: int = Field(
         default=1,
         gt=0,
+        lt=10,
         description=(
             "The maximum number of executor thread pool that want to run "
-            "parallel"
+            "parallel. This value should gather than 0 and less than 10."
         ),
         alias="max-parallel",
     )
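
The new `lt=10` bound means `max-parallel` must now be strictly between 0 and 10. A minimal standalone Pydantic v2 sketch mirroring just that field (the `StrategySketch` model is a stand-in, not the package's `Strategy`):

    from pydantic import BaseModel, Field, ValidationError

    class StrategySketch(BaseModel):
        # Mirrors the bounds shown above: 0 < max_parallel < 10.
        max_parallel: int = Field(default=1, gt=0, lt=10, alias="max-parallel")

    print(StrategySketch(**{"max-parallel": 4}).max_parallel)  # 4
    try:
        StrategySketch(**{"max-parallel": 10})
    except ValidationError as err:
        print(err.errors()[0]["type"])  # less_than
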
@@ -540,6 +545,11 @@ class Job(BaseModel):
             }
         }
 
+        The keys that will set to the received context is `strategies`,
+        `errors`, and `skipped` keys. The `errors` and `skipped` keys will
+        extract from the result context if it exists. If it does not found, it
+        will not set on the received context.
+
         :raise JobException: If the job's ID does not set and the setting
             default job ID flag does not set.
 
@@ -599,7 +609,7 @@
 
         :param params: (DictData) A parameter data.
         :param run_id: (str) A job running ID.
-        :param parent_run_id: (str) A parent
+        :param parent_run_id: (str) A parent running ID.
         :param event: (Event) An Event manager instance that use to cancel this
             execution if it forces stopped by parent execution.
 
@@ -667,15 +677,15 @@ def local_execute_strategy(
         `set_outputs` method for reconstruct result context data.
 
     :param job: (Job) A job model that want to execute.
-    :param strategy: A strategy metrix value
-
+    :param strategy: (DictData) A strategy metrix value. This value will pass
+        to the `matrix` key for templating in context data.
     :param params: (DictData) A parameter data.
     :param result: (Result) A Result instance for return context and status.
    :param event: (Event) An Event manager instance that use to cancel this
        execution if it forces stopped by parent execution.
 
-    :raise JobException: If
-        `UtilException`.
+    :raise JobException: If stage execution raise any error as `StageException`
+        or `UtilException`.
 
     :rtype: Result
     """
@@ -683,17 +693,16 @@
         run_id=gen_id(job.id or "not-set", unique=True),
         extras=job.extras,
     )
-
-    strategy_id: str = gen_id(strategy)
-    context: DictData = copy.deepcopy(params)
-    context.update({"matrix": strategy, "stages": {}})
-
     if strategy:
+        strategy_id: str = gen_id(strategy)
         result.trace.info(f"[JOB]: Start Strategy: {strategy_id!r}")
         result.trace.info(f"[JOB]: ... matrix: {strategy!r}")
     else:
-
+        strategy_id: str = "EMPTY"
+        result.trace.info("[JOB]: Start Strategy: 'EMPTY'")
 
+    context: DictData = copy.deepcopy(params)
+    context.update({"matrix": strategy, "stages": {}})
     for stage in job.stages:
 
         if job.extras:
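
The rearrangement above moves the strategy ID and the start log into the `if strategy:` branch (an empty matrix now gets the literal ID `'EMPTY'`) and builds the execution context afterwards in both cases. A standalone sketch of that control flow; `make_id` stands in for the package's `gen_id` and `print` replaces the trace logger:

    import copy

    def make_id(value: dict) -> str:
        # Stand-in for gen_id(): a stable-ish ID derived from the matrix values.
        return str(abs(hash(tuple(sorted(value.items())))))

    def start_strategy(strategy: dict, params: dict) -> dict:
        if strategy:
            strategy_id = make_id(strategy)
            print(f"[JOB]: Start Strategy: {strategy_id!r}")
            print(f"[JOB]: ... matrix: {strategy!r}")
        else:
            strategy_id = "EMPTY"
            print("[JOB]: Start Strategy: 'EMPTY'")

        # The context is prepared the same way whether or not a matrix was given.
        context = copy.deepcopy(params)
        context.update({"matrix": strategy, "stages": {}})
        return context

    print(start_strategy({}, {"name": "demo"})["matrix"])  # {}
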
@@ -707,7 +716,7 @@
         if event and event.is_set():
             error_msg: str = (
                 "Job strategy was canceled from event that had set before "
-                "strategy execution."
+                "job strategy execution."
            )
            return result.catch(
                status=CANCEL,
@@ -820,7 +829,7 @@
             context={
                 "errors": JobException(
                     "Job was canceled from event that had set before "
-                    "local execution."
+                    "local job execution."
                 ).to_dict()
             },
         )
ddeutil/workflow/result.py
CHANGED