ddeutil-workflow 0.0.14__py3-none-any.whl → 0.0.16__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1 +1 @@
-__version__: str = "0.0.14"
+__version__: str = "0.0.16"
@@ -6,21 +6,50 @@
 from __future__ import annotations
 
 import re
+from collections.abc import Iterator
+from dataclasses import dataclass
 from re import (
     IGNORECASE,
     MULTILINE,
     UNICODE,
     VERBOSE,
+    Match,
     Pattern,
 )
-from typing import Any, Union
+from typing import Any, Optional, TypedDict, Union
+
+from typing_extensions import Self
 
 TupleStr = tuple[str, ...]
 DictData = dict[str, Any]
 DictStr = dict[str, str]
 Matrix = dict[str, Union[list[str], list[int]]]
-MatrixInclude = list[dict[str, Union[str, int]]]
-MatrixExclude = list[dict[str, Union[str, int]]]
+
+
+class Context(TypedDict):
+    params: dict[str, Any]
+    jobs: dict[str, Any]
+
+
+@dataclass(frozen=True)
+class CallerRe:
+    """Caller dataclass that catches the result of matching a string against
+    the Re.RE_CALLER pattern.
+    """
+
+    full: str
+    caller: str
+    caller_prefix: Optional[str]
+    caller_last: str
+    post_filters: str
+
+    @classmethod
+    def from_regex(cls, match: Match[str]) -> Self:
+        """Construct this dataclass from a regex matching result.
+
+        :rtype: Self
+        """
+        return cls(full=match.group(0), **match.groupdict())
 
 
 class Re:
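
The new Context typed dict names the two top-level keys that execution contexts carry. A minimal conforming value is sketched below; the import path is inferred from job.py's `from .__types import ...` later in this diff, since the file header for this section is missing, and the payload values are hypothetical:

    from ddeutil.workflow.__types import Context

    ctx: Context = {
        # Caller-supplied inputs for a run.
        "params": {"source": {"schema": "public", "table": "orders"}},
        # Per-job outputs, filled in by Job.set_outputs (see job.py below).
        "jobs": {},
    }
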
@@ -30,28 +59,33 @@ class Re:
     # Regular expression:
     # - Version 1:
     #   \${{\s*(?P<caller>[a-zA-Z0-9_.\s'\"\[\]\(\)\-\{}]+?)\s*(?P<post_filters>(?:\|\s*(?:[a-zA-Z0-9_]{3,}[a-zA-Z0-9_.,-\\%\s'\"[\]()\{}]+)\s*)*)}}
-    # - Version 2 (2024-09-30):
-    #   \${{\s*(?P<caller>(?P<caller_prefix>[a-zA-Z_-]+\.)*(?P<caller_last>[a-zA-Z0-9_\-.'\"(\)[\]{}]+))\s*(?P<post_filters>(?:\|\s*(?:[a-zA-Z0-9_]{3,}[a-zA-Z0-9_.,-\\%\s'\"[\]()\{}]+)\s*)*)}}
+    # - Version 2 (2024-09-30):
+    #   \${{\s*(?P<caller>(?P<caller_prefix>(?:[a-zA-Z_-]+\.)*)(?P<caller_last>[a-zA-Z0-9_\-.'\"(\)[\]{}]+))\s*(?P<post_filters>(?:\|\s*(?:[a-zA-Z0-9_]{3,}[a-zA-Z0-9_.,-\\%\s'\"[\]()\{}]+)\s*)*)}}
+    # - Version 3 (2024-10-05):
+    #   \${{\s*(?P<caller>(?P<caller_prefix>(?:[a-zA-Z_-]+\??\.)*)(?P<caller_last>[a-zA-Z0-9_\-.'\"(\)[\]{}]+\??))\s*(?P<post_filters>(?:\|\s*(?:[a-zA-Z0-9_]{3,}[a-zA-Z0-9_.,-\\%\s'\"[\]()\{}]+)\s*)*)}}
     #
     # Examples:
-    # - ${{ params.asat_dt }}
+    # - ${{ params.data_dt }}
     # - ${{ params.source.table }}
+    # - ${{ params.datetime | fmt('%Y-%m-%d') }}
+    # - ${{ params.source?.schema }}
     #
     __re_caller: str = r"""
        \$
        {{
            \s*
            (?P<caller>
-                (?P<caller_prefix>[a-zA-Z_-]+\.)*
-                (?P<caller_last>[a-zA-Z0-9_\-.'\"(\)[\]{}]+)
+                (?P<caller_prefix>(?:[a-zA-Z_-]+\??\.)*)
+                (?P<caller_last>[a-zA-Z0-9_\-.'\"(\)[\]{}]+\??)
            )
            \s*
            (?P<post_filters>
                (?:
-                    \|
-                    \s*
-                    (?:[a-zA-Z0-9_]{3,}[a-zA-Z0-9_.,-\\%\s'\"[\]()\{}]*)
-                    \s*
+                    \|\s*
+                    (?:
+                        [a-zA-Z0-9_]{3,}
+                        [a-zA-Z0-9_.,-\\%\s'\"[\]()\{}]*
+                    )\s*
                )*
            )
        }}
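
Version 3's only change against Version 2 is the optional `\??` after each dotted segment, which lets a template mark a segment as nullable, as in `${{ params.source?.schema }}`. A sketch of what the named groups capture, assuming this module is `ddeutil.workflow.__types` (the diff omits the file header):

    from ddeutil.workflow.__types import Re

    m = Re.RE_CALLER.search("${{ params.source?.schema | fmt('%Y-%m-%d') }}")
    assert m is not None
    print(m.group("caller_prefix"))  # params.source?.
    print(m.group("caller_last"))    # schema
    print(m.group("post_filters"))   # | fmt('%Y-%m-%d')  (plus trailing space)
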
@@ -62,7 +96,8 @@ class Re:
 
     # NOTE:
     # Regular expression:
-    #   ^(?P<path>[^/@]+)/(?P<func>[^@]+)@(?P<tag>.+)$
+    # - Version 1:
+    #   ^(?P<path>[^/@]+)/(?P<func>[^@]+)@(?P<tag>.+)$
     #
     # Examples:
     # - tasks/function@dummy
@@ -78,3 +113,13 @@ class Re:
     RE_TASK_FMT: Pattern = re.compile(
         __re_task_fmt, MULTILINE | IGNORECASE | UNICODE | VERBOSE
     )
+
+    @classmethod
+    def finditer_caller(cls, value: str) -> Iterator[CallerRe]:
+        """Generate CallerRe objects created from the match objects that the
+        re.finditer function extracts from an input value.
+
+        :rtype: Iterator[CallerRe]
+        """
+        for found in cls.RE_CALLER.finditer(value):
+            yield CallerRe.from_regex(found)
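
The new finditer_caller helper pairs the compiled pattern with the frozen dataclass, so call sites get typed attributes instead of raw match groups. A usage sketch over a made-up template string:

    from ddeutil.workflow.__types import Re

    template = "select * from ${{ params.source.table }} where dt = '${{ params.data_dt }}'"

    for caller in Re.finditer_caller(template):
        # Each item is a frozen CallerRe with .full, .caller, .caller_prefix,
        # .caller_last, and .post_filters attributes.
        print(caller.caller_prefix, caller.caller_last)
    # -> params.source. table
    # -> params. data_dt
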
ddeutil/workflow/api.py CHANGED
@@ -7,7 +7,6 @@ from __future__ import annotations
 
 import asyncio
 import contextlib
-import os
 import uuid
 from collections.abc import AsyncIterator
 from datetime import datetime, timedelta
@@ -15,7 +14,6 @@ from queue import Empty, Queue
 from threading import Thread
 from typing import TypedDict
 
-from ddeutil.core import str2bool
 from dotenv import load_dotenv
 from fastapi import FastAPI
 from fastapi.middleware.gzip import GZipMiddleware
@@ -23,6 +21,7 @@ from fastapi.responses import UJSONResponse
 from pydantic import BaseModel
 
 from .__about__ import __version__
+from .conf import config
 from .log import get_logger
 from .repeat import repeat_at, repeat_every
 from .scheduler import WorkflowTaskData
@@ -131,12 +130,12 @@ async def message_upper(payload: Payload):
     return await get_result(request_id)
 
 
-if str2bool(os.getenv("WORKFLOW_API_ENABLE_ROUTE_WORKFLOW", "true")):
+if config.enable_route_workflow:
     from .route import workflow
 
     app.include_router(workflow)
 
-if str2bool(os.getenv("WORKFLOW_API_ENABLE_ROUTE_SCHEDULE", "true")):
+if config.enable_route_schedule:
     from .route import schedule
     from .scheduler import workflow_task
 
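The route toggles now flow through the shared Config object rather than ad-hoc os.getenv calls, so the same defaults apply everywhere. A sketch of disabling the schedule router; the environment variable must be set before the api module is imported, because Config reads it at import time:

    import os

    os.environ["WORKFLOW_API_ENABLE_ROUTE_SCHEDULE"] = "false"

    from ddeutil.workflow.api import app  # noqa: E402

    # Only the workflow routes are mounted; the schedule endpoints are absent.
    print([route.path for route in app.routes])
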
ddeutil/workflow/cli.py CHANGED
@@ -6,15 +6,14 @@
 from __future__ import annotations
 
 import json
-import os
 from datetime import datetime
 from enum import Enum
 from typing import Annotated, Optional
-from zoneinfo import ZoneInfo
 
 from ddeutil.core import str2list
 from typer import Argument, Option, Typer
 
+from .conf import config
 from .log import get_logger
 
 logger = get_logger("ddeutil.workflow")
@@ -73,9 +72,7 @@ def schedule(
     excluded: list[str] = str2list(excluded) if excluded else []
     externals: str = externals or "{}"
     if stop:
-        stop: datetime = stop.astimezone(
-            tz=ZoneInfo(os.getenv("WORKFLOW_CORE_TIMEZONE", "UTC"))
-        )
+        stop: datetime = stop.astimezone(tz=config.tz)
 
     from .scheduler import workflow_runner
 
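config.tz is the ZoneInfo built once from WORKFLOW_CORE_TIMEZONE, so the CLI no longer re-reads the environment on every call. A quick sketch of the conversion it performs; the zone value is illustrative:

    from datetime import datetime, timezone
    from zoneinfo import ZoneInfo

    tz = ZoneInfo("Asia/Bangkok")  # what config.tz holds for this env value

    stop = datetime(2024, 10, 5, 12, 0, tzinfo=timezone.utc)
    print(stop.astimezone(tz=tz))  # 2024-10-05 19:00:00+07:00
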
ddeutil/workflow/conf.py CHANGED
@@ -5,37 +5,314 @@
 # ------------------------------------------------------------------------------
 from __future__ import annotations
 
+import json
 import os
+from collections.abc import Iterator
+from datetime import timedelta
+from functools import cached_property
+from pathlib import Path
+from typing import Any, TypeVar
 from zoneinfo import ZoneInfo
 
-from ddeutil.core import str2bool
+from ddeutil.core import import_string, str2bool
+from ddeutil.io import Paths, PathSearch, YamlFlResolve
 from dotenv import load_dotenv
+from pydantic import BaseModel, Field
+from pydantic.functional_validators import model_validator
 
 load_dotenv()
 env = os.getenv
+DictData = dict[str, Any]
+AnyModel = TypeVar("AnyModel", bound=BaseModel)
+AnyModelType = type[AnyModel]
 
 
 class Config:
+    """Config object that keeps the application configuration for the current
+    session and does not change while the application is still running.
+    """
+
     # NOTE: Core
+    root_path: Path = Path(os.getenv("WORKFLOW_ROOT_PATH", "."))
     tz: ZoneInfo = ZoneInfo(env("WORKFLOW_CORE_TIMEZONE", "UTC"))
+    workflow_id_simple_mode: bool = str2bool(
+        os.getenv("WORKFLOW_CORE_WORKFLOW_ID_SIMPLE_MODE", "true")
+    )
+
+    # NOTE: Logging
+    debug: bool = str2bool(os.getenv("WORKFLOW_LOG_DEBUG_MODE", "true"))
+    enable_write_log: bool = str2bool(
+        os.getenv("WORKFLOW_LOG_ENABLE_WRITE", "false")
+    )
 
     # NOTE: Stage
     stage_raise_error: bool = str2bool(
-        env("WORKFLOW_CORE_STAGE_RAISE_ERROR", "true")
+        env("WORKFLOW_CORE_STAGE_RAISE_ERROR", "false")
     )
     stage_default_id: bool = str2bool(
         env("WORKFLOW_CORE_STAGE_DEFAULT_ID", "false")
     )
 
+    # NOTE: Job
+    job_default_id: bool = str2bool(
+        env("WORKFLOW_CORE_JOB_DEFAULT_ID", "false")
+    )
+
     # NOTE: Workflow
     max_job_parallel: int = int(env("WORKFLOW_CORE_MAX_JOB_PARALLEL", "2"))
+    max_poking_pool_worker: int = int(
+        os.getenv("WORKFLOW_CORE_MAX_NUM_POKING", "4")
+    )
+
+    # NOTE: Schedule App
+    max_schedule_process: int = int(env("WORKFLOW_APP_MAX_PROCESS", "2"))
+    max_schedule_per_process: int = int(
+        env("WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS", "100")
+    )
+    __stop_boundary_delta: str = env(
+        "WORKFLOW_APP_STOP_BOUNDARY_DELTA", '{"minutes": 5, "seconds": 20}'
+    )
+
+    # NOTE: API
+    enable_route_workflow: bool = str2bool(
+        os.getenv("WORKFLOW_API_ENABLE_ROUTE_WORKFLOW", "true")
+    )
+    enable_route_schedule: bool = str2bool(
+        os.getenv("WORKFLOW_API_ENABLE_ROUTE_SCHEDULE", "true")
+    )
 
     def __init__(self):
         if self.max_job_parallel < 0:
             raise ValueError(
-                f"MAX_JOB_PARALLEL should more than 0 but got "
+                f"``MAX_JOB_PARALLEL`` should be more than 0 but got "
                 f"{self.max_job_parallel}."
             )
+        try:
+            self.stop_boundary_delta: timedelta = timedelta(
+                **json.loads(self.__stop_boundary_delta)
+            )
+        except Exception as err:
+            raise ValueError(
+                "Config ``WORKFLOW_APP_STOP_BOUNDARY_DELTA`` cannot be parsed "
+                f"to timedelta with {self.__stop_boundary_delta}."
+            ) from err
+
+    def refresh_dotenv(self):
+        """Reload environment variables from the current environment."""
+        self.tz: ZoneInfo = ZoneInfo(env("WORKFLOW_CORE_TIMEZONE", "UTC"))
+        self.stage_raise_error: bool = str2bool(
+            env("WORKFLOW_CORE_STAGE_RAISE_ERROR", "false")
+        )
+
+
+class Engine(BaseModel):
+    """Engine Pydantic Model for keeping application paths."""
+
+    paths: Paths = Field(default_factory=Paths)
+    registry: list[str] = Field(
+        default_factory=lambda: ["ddeutil.workflow"],  # pragma: no cover
+    )
+    registry_filter: list[str] = Field(
+        default_factory=lambda: ["ddeutil.workflow.utils"],  # pragma: no cover
+    )
+
+    @model_validator(mode="before")
+    def __prepare_registry(cls, values: DictData) -> DictData:
+        """Prepare a registry value that is passed as a string type by
+        converting it to a list of strings.
+        """
+        if (_regis := values.get("registry")) and isinstance(_regis, str):
+            values["registry"] = [_regis]
+        if (_regis_filter := values.get("registry_filter")) and isinstance(
+            _regis_filter, str
+        ):
+            values["registry_filter"] = [_regis_filter]
+        return values
+
+
+class ConfParams(BaseModel):
+    """Params Model"""
+
+    engine: Engine = Field(
+        default_factory=Engine,
+        description="An engine mapping values.",
+    )
+
+
+def load_config() -> ConfParams:
+    """Load config data from the ``workflows-conf.yaml`` file.
+
+    Configuration Docs:
+    ---
+    :var engine.registry:
+    :var engine.registry_filter:
+    :var paths.root:
+    :var paths.conf:
+    """
+    root_path: Path = config.root_path
+
+    regis: list[str] = ["ddeutil.workflow"]
+    if regis_env := os.getenv("WORKFLOW_CORE_REGISTRY"):
+        regis = [r.strip() for r in regis_env.split(",")]
+
+    regis_filter: list[str] = ["ddeutil.workflow.utils"]
+    if regis_filter_env := os.getenv("WORKFLOW_CORE_REGISTRY_FILTER"):
+        regis_filter = [r.strip() for r in regis_filter_env.split(",")]
+
+    conf_path: str | None = (
+        f"{root_path}/{conf_env}"
+        if (conf_env := os.getenv("WORKFLOW_CORE_PATH_CONF"))
+        else None
+    )
+    return ConfParams.model_validate(
+        obj={
+            "engine": {
+                "registry": regis,
+                "registry_filter": regis_filter,
+                "paths": {
+                    "root": root_path,
+                    "conf": conf_path,
+                },
+            },
+        }
+    )
+
+
+class SimLoad:
+    """Simple Load Object that searches config data by a given identity
+    value, like the name of a workflow or an ``on`` schedule.
+
+    :param name: A name of config data that will be read by the YAML loader.
+    :param params: A Params model object.
+    :param externals: External parameters.
+
+    Noted:
+
+        The config data should have a ``type`` key for model validation so
+        that this loader knows which config model the data should be passed
+        to, such as;
+
+        ... <identity-key>:
+        ...     type: <importable-object>
+        ...     <key-data>: <value-data>
+        ...     ...
+
+    """
+
+    def __init__(
+        self,
+        name: str,
+        params: ConfParams,
+        externals: DictData | None = None,
+    ) -> None:
+        self.data: DictData = {}
+        for file in PathSearch(params.engine.paths.conf).files:
+            if any(file.suffix.endswith(s) for s in (".yml", ".yaml")) and (
+                data := YamlFlResolve(file).read().get(name, {})
+            ):
+                self.data = data
+
+        # VALIDATE: check that the data read from the config path is not
+        #   empty.
+        if not self.data:
+            raise ValueError(f"Config {name!r} was not found on the conf path")
+
+        self.conf_params: ConfParams = params
+        self.externals: DictData = externals or {}
+        self.data.update(self.externals)
+
+    @classmethod
+    def finds(
+        cls,
+        obj: object,
+        params: ConfParams,
+        *,
+        include: list[str] | None = None,
+        exclude: list[str] | None = None,
+    ) -> Iterator[tuple[str, DictData]]:
+        """Find all data that match the object type in the config path. This
+        class method can use include and exclude lists of identity names for
+        filtering.
+
+        :param obj: An object type that the data must match before being
+            yielded.
+        :param params: A config params object.
+        :param include: A list of keys that must exist in the data.
+        :param exclude: A list of identity names that should be skipped.
+        :rtype: Iterator[tuple[str, DictData]]
+        """
+        exclude: list[str] = exclude or []
+        for file in PathSearch(params.engine.paths.conf).files:
+            if any(file.suffix.endswith(s) for s in (".yml", ".yaml")) and (
+                values := YamlFlResolve(file).read()
+            ):
+                for key, data in values.items():
+                    if key in exclude:
+                        continue
+                    if issubclass(get_type(data["type"], params), obj) and (
+                        include is None or all(i in data for i in include)
+                    ):
+                        yield key, data
+
+    @cached_property
+    def type(self) -> AnyModelType:
+        """Return the object type that the importable string in the ``type``
+        key resolves to via the registries.
+
+        :rtype: AnyModelType
+        """
+        if not (_typ := self.data.get("type")):
+            raise ValueError(
+                f"the 'type' value: {_typ} does not exist in config data."
+            )
+        return get_type(_typ, self.conf_params)
+
+
+class Loader(SimLoad):
+    """Loader Object that gets the config ``yaml`` file from the current
+    path.
+
+    :param name: A name of config data that will be read by the YAML loader.
+    :param externals: External parameters.
+    """
+
+    @classmethod
+    def finds(
+        cls,
+        obj: object,
+        *,
+        include: list[str] | None = None,
+        exclude: list[str] | None = None,
+        **kwargs,
+    ) -> Iterator[tuple[str, DictData]]:
+        """Override the finds class method from the SimLoad object.
+
+        :param obj: An object type that the data must match before being
+            yielded.
+        :param include: A list of keys that must exist in the data.
+        :param exclude: A list of identity names that should be skipped.
+        """
+        return super().finds(
+            obj=obj, params=load_config(), include=include, exclude=exclude
+        )
+
+    def __init__(self, name: str, externals: DictData) -> None:
+        super().__init__(name, load_config(), externals)
+
+
+def get_type(t: str, params: ConfParams) -> AnyModelType:
+    """Return the imported type from an importable string value in the
+    ``type`` key.
+
+    :param t: An importable type string.
+    :param params: A config parameters object whose registry is used to
+        search for this type.
+    :rtype: AnyModelType
+    """
+    try:
+        # NOTE: Automatically add the module prefix if it is not set.
+        return import_string(f"ddeutil.workflow.{t}")
+    except ModuleNotFoundError:
+        for registry in params.engine.registry:
+            try:
+                return import_string(f"{registry}.{t}")
+            except ModuleNotFoundError:
+                continue
+        return import_string(f"{t}")
 
 
 config = Config()
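
Taken together, load_config, SimLoad, and get_type resolve a named YAML document to a model class through its ``type`` key. A minimal usage sketch; the YAML file, its location, and the "wf-ingest" name are hypothetical, and it assumes a Workflow model is importable from the default ddeutil.workflow registry:

    # example.yaml (hypothetical file under engine.paths.conf):
    #
    #   wf-ingest:
    #     type: Workflow
    #     params:
    #       data_dt: datetime

    from ddeutil.workflow.conf import Loader

    loader = Loader("wf-ingest", externals={})
    print(loader.type)  # get_type resolves "Workflow" via the registry list
    print(loader.data)  # raw dict under the "wf-ingest" key, updated with externals
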
ddeutil/workflow/job.py CHANGED
@@ -4,6 +4,9 @@
 # license information.
 # ------------------------------------------------------------------------------
 """Job Model that is used for keeping stages and the node that runs those stages.
+The job handles the lineage of stages and the location where they execute; that
+is, the job model can define a ``runs-on`` key that lets you choose where to run
+this job.
 """
 from __future__ import annotations
 
@@ -19,21 +22,15 @@ from concurrent.futures import (
 from functools import lru_cache
 from textwrap import dedent
 from threading import Event
-from typing import Optional
+from typing import Optional, Union
 
 from ddeutil.core import freeze_args
 from pydantic import BaseModel, Field
 from pydantic.functional_validators import field_validator, model_validator
 from typing_extensions import Self
 
-from .__types import (
-    DictData,
-    DictStr,
-    Matrix,
-    MatrixExclude,
-    MatrixInclude,
-    TupleStr,
-)
+from .__types import DictData, DictStr, Matrix, TupleStr
+from .conf import config
 from .exceptions import (
     JobException,
     StageException,
@@ -51,6 +48,8 @@ from .utils import (
 )
 
 logger = get_logger("ddeutil.workflow")
+MatrixInclude = list[dict[str, Union[str, int]]]
+MatrixExclude = list[dict[str, Union[str, int]]]
 
 
 __all__: TupleStr = (
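
MatrixInclude and MatrixExclude moved from __types into job.py, next to the strategy model that consumes them. A short sketch of the values the three aliases describe (the matrix data is illustrative):

    from typing import Union

    Matrix = dict[str, Union[list[str], list[int]]]
    MatrixInclude = list[dict[str, Union[str, int]]]
    MatrixExclude = list[dict[str, Union[str, int]]]

    # Cross-product source for strategies ...
    matrix: Matrix = {"python": ["3.9", "3.10"], "os": ["linux", "macos"]}
    # ... plus one extra combination, minus one generated combination.
    include: MatrixInclude = [{"python": "3.11", "os": "linux"}]
    exclude: MatrixExclude = [{"python": "3.9", "os": "macos"}]
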
@@ -262,7 +261,7 @@ class Job(BaseModel):
     )
 
     @model_validator(mode="before")
-    def __prepare_keys(cls, values: DictData) -> DictData:
+    def __prepare_keys__(cls, values: DictData) -> DictData:
        """Rename keys that use a dash to use an underscore instead, because
        Python does not allow this character in variable names.
 
@@ -273,12 +272,12 @@ class Job(BaseModel):
         return values
 
     @field_validator("desc", mode="after")
-    def ___prepare_desc(cls, value: str) -> str:
+    def ___prepare_desc__(cls, value: str) -> str:
         """Prepare a description string that was created on a template."""
         return dedent(value)
 
     @model_validator(mode="after")
-    def __prepare_running_id(self) -> Self:
+    def __prepare_running_id__(self) -> Self:
         """Prepare the job running ID.
 
         :rtype: Self
@@ -319,31 +318,43 @@ class Job(BaseModel):
         For example, if you receive an execution output and want to set it
         on the `to` context like;
 
-            ... (i)   output: {'strategy01': bar, 'strategy02': bar}
-            ... (ii)  to: {}
+            ... (i)   output: {'strategy-01': bar, 'strategy-02': bar}
+            ... (ii)  to: {'jobs': {}}
 
         The result of the `to` variable will be;
 
             ... (iii) to: {
-                'strategies': {
-                    'strategy01': bar, 'strategy02': bar
+                'jobs': {
+                    '<job-id>': {
+                        'strategies': {
+                            'strategy-01': bar,
+                            'strategy-02': bar,
+                        }
                     }
                 }
+            }
 
         :param output: An output context.
         :param to: A context data to which the output result is added.
         :rtype: DictData
         """
-        if self.id is None:
+        if self.id is None and not config.job_default_id:
             raise JobException(
-                "This job does not set the ID before setting output."
+                "This job does not set the ID before setting the execution "
+                "output."
             )
 
-        to[self.id] = (
+        # NOTE: Create a jobs key to receive output from the job execution.
+        if "jobs" not in to:
+            to["jobs"] = {}
+
+        # NOTE: If the job ID is not set, use the index of the jobs key
+        #   instead.
+        _id: str = self.id or str(len(to["jobs"]) + 1)
+
+        logger.debug(f"({self.run_id}) [JOB]: Set outputs on: {_id}")
+        to["jobs"][_id] = (
             {"strategies": output}
             if self.strategy.is_set()
-            # NOTE:
-            #   This is the best way to get a single key from a dict.
             else output[next(iter(output))]
         )
         return to
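
The reshaped nesting is easiest to see in isolation. Below is a minimal sketch of just the dictionary logic from set_outputs; the real method lives on the Job model and only wraps the output in 'strategies' when self.strategy.is_set() is true, which is assumed here:

    def set_outputs_sketch(output: dict, to: dict, job_id: str | None = None) -> dict:
        # All job results now live under a shared "jobs" key in the context.
        if "jobs" not in to:
            to["jobs"] = {}
        # Fall back to a 1-based index when the job has no explicit ID.
        _id = job_id or str(len(to["jobs"]) + 1)
        to["jobs"][_id] = {"strategies": output}
        return to

    ctx: dict = {"jobs": {}}
    set_outputs_sketch({"strategy-01": "bar", "strategy-02": "bar"}, ctx, "first-job")
    print(ctx)
    # {'jobs': {'first-job': {'strategies': {'strategy-01': 'bar',
    #                                        'strategy-02': 'bar'}}}}
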
@@ -382,6 +393,7 @@ class Job(BaseModel):
         #   "params": { ... },  <== Current input params
         #   "jobs": { ... },  <== Current input params
         #   "matrix": { ... }  <== Current strategy value
+        #   "stages": { ... }  <== Catching stage outputs
         # }
         #
         context: DictData = copy.deepcopy(params)
@@ -489,15 +501,18 @@ class Job(BaseModel):
         :param params: Input parameters that are used in the job execution.
         :rtype: Result
         """
-        context: DictData = {}
+
+        # NOTE: This condition exists because this method allows passing
+        #   empty params, and it should not create a new dict object then.
         params: DictData = {} if params is None else params
+        context: DictData = {}
 
         # NOTE: Normal Job execution without parallel strategy.
         if (not self.strategy.is_set()) or self.strategy.max_parallel == 1:
             for strategy in self.strategy.make():
                 rs: Result = self.execute_strategy(
                     strategy=strategy,
-                    params=copy.deepcopy(params),
+                    params=params,
                 )
                 context.update(rs.context)
             return Result(
@@ -505,11 +520,15 @@ class Job(BaseModel):
             context=context,
         )
 
-        # NOTE: Create an event for cancelling the executor's running state.
+        # NOTE: Create an event for cancelling the executor via a
+        #   stop-running trigger.
        event: Event = Event()
 
+        # IMPORTANT: Start strategy execution with multithreading, because
+        #   each strategy value runs without waiting for the previous
+        #   execution to finish.
         with ThreadPoolExecutor(
-            max_workers=self.strategy.max_parallel
+            max_workers=self.strategy.max_parallel,
+            thread_name_prefix="job_strategy_exec_",
         ) as executor:
             futures: list[Future] = [
                 executor.submit(