ddeutil-workflow 0.0.34__py3-none-any.whl → 0.0.36__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -3,37 +3,42 @@
  # Licensed under the MIT License. See LICENSE in the project root for
  # license information.
  # ------------------------------------------------------------------------------
- """Param Model that use for parsing incoming parameters that pass to the
- Workflow and Schedule objects.
+ """This module include all Param Pydantic Models that use for parsing an
+ incoming parameters that was passed to the Workflow and Schedule objects before
+ execution or release methods.
+
+ The Param model allow you to handle validation and preparation steps before
+ passing an input value to target execution method.
  """
  from __future__ import annotations

  import decimal
- import logging
  from abc import ABC, abstractmethod
  from datetime import date, datetime
- from typing import Any, Literal, Optional, Union
+ from typing import Annotated, Any, Literal, Optional, TypeVar, Union

  from pydantic import BaseModel, Field

  from .__types import TupleStr
  from .exceptions import ParamValueException
- from .utils import get_dt_now
-
- logger = logging.getLogger("ddeutil.workflow")
+ from .utils import get_d_now, get_dt_now

  __all__: TupleStr = (
  "ChoiceParam",
  "DatetimeParam",
+ "DateParam",
  "IntParam",
  "Param",
  "StrParam",
  )

+ T = TypeVar("T")
+

  class BaseParam(BaseModel, ABC):
- """Base Parameter that use to make any Params Model. The type will dynamic
- with the type field that made from literal string."""
+ """Base Parameter that use to make any Params Models. The parameter type
+ will dynamic with the setup type field that made from literal string.
+ """

  desc: Optional[str] = Field(
  default=None, description="A description of parameter providing."
@@ -45,7 +50,7 @@ class BaseParam(BaseModel, ABC):
  type: str = Field(description="A type of parameter.")

  @abstractmethod
- def receive(self, value: Optional[Any] = None) -> Any:
+ def receive(self, value: Optional[T] = None) -> T:
  raise NotImplementedError(
  "Receive value and validate typing before return valid value."
  )
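
The abstract receive signature above now uses a module-level TypeVar so each concrete Param narrows both the accepted and the returned type. A minimal, self-contained sketch of that pattern (illustrative only; it binds the TypeVar through Generic, which the module itself does not do):

    from abc import ABC, abstractmethod
    from typing import Generic, Optional, TypeVar

    T = TypeVar("T")

    class BaseReceiver(ABC, Generic[T]):
        @abstractmethod
        def receive(self, value: Optional[T] = None) -> T:
            """Validate and coerce an incoming value before it is used."""
            raise NotImplementedError

    class IntReceiver(BaseReceiver[int]):
        def receive(self, value: Optional[int] = None) -> int:
            # Fall back to a default when nothing is passed, otherwise coerce.
            return 0 if value is None else int(value)

    print(IntReceiver().receive(42))  # 42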
@@ -72,17 +77,42 @@ class DefaultParam(BaseParam):
  )


- # TODO: Not implement this parameter yet
  class DateParam(DefaultParam): # pragma: no cov
- """Date parameter."""
+ """Date parameter model."""

  type: Literal["date"] = "date"
+ default: date = Field(default_factory=get_d_now)
+
+ def receive(self, value: Optional[str | datetime | date] = None) -> date:
+ """Receive value that match with date. If an input value pass with
+ None, it will use default value instead.
+
+ :param value: A value that want to validate with date parameter type.
+
+ :rtype: date
+ """
+ if value is None:
+ return self.default

- def receive(self, value: Optional[str | date] = None) -> date: ...
+ if isinstance(value, datetime):
+ return value.date()
+ elif isinstance(value, date):
+ return value
+ elif not isinstance(value, str):
+ raise ParamValueException(
+ f"Value that want to convert to date does not support for "
+ f"type: {type(value)}"
+ )
+ try:
+ return date.fromisoformat(value)
+ except ValueError:
+ raise ParamValueException(
+ f"Invalid the ISO format string for date: {value!r}"
+ ) from None


  class DatetimeParam(DefaultParam):
- """Datetime parameter."""
+ """Datetime parameter model."""

  type: Literal["datetime"] = "datetime"
  default: datetime = Field(default_factory=get_dt_now)
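
DateParam.receive above accepts a str, datetime, or date and falls back to the model default when it gets None. A standalone sketch of the same coercion rules (plain ValueError standing in for ParamValueException; not the package's code):

    from datetime import date, datetime

    def receive_date(value=None, default=None):
        # Same order as the diff: None -> default, datetime -> its date,
        # date -> unchanged, str -> ISO parse, anything else -> error.
        if value is None:
            return default or date.today()
        if isinstance(value, datetime):
            return value.date()
        if isinstance(value, date):
            return value
        if not isinstance(value, str):
            raise ValueError(f"unsupported type: {type(value)}")
        return date.fromisoformat(value)  # raises ValueError on a bad ISO string

    print(receive_date("2024-06-01"))                 # 2024-06-01
    print(receive_date(datetime(2024, 6, 1, 8, 30)))  # 2024-06-01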
@@ -93,6 +123,7 @@ class DatetimeParam(DefaultParam):

  :param value: A value that want to validate with datetime parameter
  type.
+
  :rtype: datetime
  """
  if value is None:
@@ -111,7 +142,7 @@ class DatetimeParam(DefaultParam):
  return datetime.fromisoformat(value)
  except ValueError:
  raise ParamValueException(
- f"Invalid the ISO format string: {value!r}"
+ f"Invalid the ISO format string for datetime: {value!r}"
  ) from None


@@ -169,9 +200,11 @@ class ChoiceParam(BaseParam):
  """Choice parameter."""

  type: Literal["choice"] = "choice"
- options: list[str] = Field(description="A list of choice parameters.")
+ options: Union[list[str], list[int]] = Field(
+ description="A list of choice parameters that able be str or int.",
+ )

- def receive(self, value: str | None = None) -> str:
+ def receive(self, value: Union[str, int] | None = None) -> Union[str, int]:
  """Receive value that match with options.

  :param value: A value that want to select from the options field.
@@ -188,9 +221,41 @@ class ChoiceParam(BaseParam):
  return value


- Param = Union[
- ChoiceParam,
- DatetimeParam,
- IntParam,
- StrParam,
+ # TODO: Not implement this parameter yet
+ class MapParam(DefaultParam): # pragma: no cov
+
+ type: Literal["map"] = "map"
+ default: dict[Any, Any] = Field(default_factory=dict)
+
+ def receive(self, value: Optional[dict[Any, Any]] = None) -> dict[Any, Any]:
+ if value is None:
+ return self.default
+
+
+ # TODO: Not implement this parameter yet
+ class ArrayParam(DefaultParam): # pragma: no cov
+
+ type: Literal["array"] = "array"
+ default: list[Any] = Field(default_factory=list)
+
+ def receive(self, value: Optional[list[T]] = None) -> list[T]:
+ if value is None:
+ return self.default
+ if not isinstance(value, list):
+ raise ParamValueException(
+ f"Value that want to convert to array does not support for "
+ f"type: {type(value)}"
+ )
+ return value
+
+
+ Param = Annotated[
+ Union[
+ ChoiceParam,
+ DatetimeParam,
+ DateParam,
+ IntParam,
+ StrParam,
+ ],
+ Field(discriminator="type"),
  ]
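
Param is now an Annotated discriminated union, so pydantic picks the concrete Param model by reading the literal type field instead of trying each member in turn. A small self-contained sketch of how such a discriminator behaves (simplified stand-in models, not the package's own classes):

    from typing import Annotated, Literal, Union
    from pydantic import BaseModel, Field

    class StrP(BaseModel):
        type: Literal["str"] = "str"

    class ChoiceP(BaseModel):
        type: Literal["choice"] = "choice"
        options: Union[list[str], list[int]]

    # The discriminator names the field whose literal value selects the model.
    ParamT = Annotated[Union[StrP, ChoiceP], Field(discriminator="type")]

    class Params(BaseModel):
        params: dict[str, ParamT]

    p = Params(params={
        "name": {"type": "str"},
        "env": {"type": "choice", "options": ["dev", "prod"]},
    })
    assert isinstance(p.params["env"], ChoiceP)  # chosen via the "type" value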
@@ -4,36 +4,29 @@
  # license information.
  # ------------------------------------------------------------------------------
  """This is the Result module. It is the data context transfer objects that use
- by all object in this package.
+ by all object in this package. This module provide Result dataclass.
  """
  from __future__ import annotations

- import os
- from abc import ABC, abstractmethod
  from dataclasses import field
  from datetime import datetime
  from enum import IntEnum
- from inspect import Traceback, currentframe, getframeinfo
- from pathlib import Path
- from threading import Event, get_ident
+ from threading import Event
  from typing import Optional

  from pydantic import ConfigDict
  from pydantic.dataclasses import dataclass
+ from pydantic.functional_validators import model_validator
  from typing_extensions import Self

  from .__types import DictData, TupleStr
- from .conf import config, get_logger
- from .utils import cut_id, gen_id, get_dt_now
-
- logger = get_logger("ddeutil.workflow")
+ from .logs import TraceLog, get_dt_tznow, get_trace
+ from .utils import gen_id

  __all__: TupleStr = (
  "Result",
  "Status",
- "TraceLog",
  "default_gen_id",
- "get_dt_tznow",
  )


@@ -46,14 +39,6 @@ def default_gen_id() -> str:
  return gen_id("manual", unique=True)


- def get_dt_tznow() -> datetime:
- """Return the current datetime object that passing the config timezone.
-
- :rtype: datetime
- """
- return get_dt_now(tz=config.tz)
-
-
  class Status(IntEnum):
  """Status Int Enum object."""

@@ -62,111 +47,6 @@ class Status(IntEnum):
  WAIT: int = 2


- @dataclass(frozen=True)
- class BaseTraceLog(ABC): # pragma: no cov
- """Base Trace Log dataclass object."""
-
- run_id: str
- parent_run_id: Optional[str] = None
-
- @abstractmethod
- def writer(self, message: str, is_err: bool = False) -> None: ...
-
- @abstractmethod
- def make_message(self, message: str) -> str: ...
-
- def debug(self, message: str):
- msg: str = self.make_message(message)
-
- # NOTE: Write file if debug mode.
- if config.debug:
- self.writer(msg)
-
- logger.debug(msg, stacklevel=2)
-
- def info(self, message: str):
- msg: str = self.make_message(message)
- self.writer(msg)
- logger.info(msg, stacklevel=2)
-
- def warning(self, message: str):
- msg: str = self.make_message(message)
- self.writer(msg)
- logger.warning(msg, stacklevel=2)
-
- def error(self, message: str):
- msg: str = self.make_message(message)
- self.writer(msg, is_err=True)
- logger.error(msg, stacklevel=2)
-
-
- class TraceLog(BaseTraceLog): # pragma: no cov
- """Trace Log object that write file to the local storage."""
-
- @property
- def log_file(self) -> Path:
- log_file: Path = (
- config.log_path / f"run_id={self.parent_run_id or self.run_id}"
- )
- if not log_file.exists():
- log_file.mkdir(parents=True)
- return log_file
-
- @property
- def cut_id(self) -> str:
- """Combine cutting ID of parent running ID if it set."""
- cut_run_id: str = cut_id(self.run_id)
- if not self.parent_run_id:
- return f"{cut_run_id} -> {' ' * 6}"
-
- cut_parent_run_id: str = cut_id(self.parent_run_id)
- return f"{cut_parent_run_id} -> {cut_run_id}"
-
- def make_message(self, message: str) -> str:
- return f"({self.cut_id}) {message}"
-
- def writer(self, message: str, is_err: bool = False) -> None:
- """The path of logging data will store by format:
-
- ... ./logs/run_id=<run-id>/stdout.txt
- ... ./logs/run_id=<run-id>/stderr.txt
-
- :param message:
- :param is_err:
- """
- if not config.enable_write_log:
- return
-
- frame_info: Traceback = getframeinfo(currentframe().f_back.f_back)
- filename: str = frame_info.filename.split(os.path.sep)[-1]
- lineno: int = frame_info.lineno
-
- # NOTE: set process and thread IDs.
- process: int = os.getpid()
- thread: int = get_ident()
-
- write_file: str = "stderr.txt" if is_err else "stdout.txt"
- with (self.log_file / write_file).open(
- mode="at", encoding="utf-8"
- ) as f:
- msg_fmt: str = f"{config.log_format_file}\n"
- print(msg_fmt)
- f.write(
- msg_fmt.format(
- **{
- "datetime": get_dt_tznow().strftime(
- config.log_datetime_format
- ),
- "process": process,
- "thread": thread,
- "message": message,
- "filename": filename,
- "lineno": lineno,
- }
- )
- )
-
-
  @dataclass(
  config=ConfigDict(arbitrary_types_allowed=True, use_enum_values=True)
  )
@@ -182,12 +62,12 @@ class Result:
  status: Status = field(default=Status.WAIT)
  context: DictData = field(default_factory=dict)
  run_id: Optional[str] = field(default_factory=default_gen_id)
-
- # NOTE: Ignore this field to compare another result model with __eq__.
  parent_run_id: Optional[str] = field(default=None, compare=False)
- event: Event = field(default_factory=Event, compare=False)
  ts: datetime = field(default_factory=get_dt_tznow, compare=False)

+ event: Event = field(default_factory=Event, compare=False, repr=False)
+ trace: Optional[TraceLog] = field(default=None, compare=False, repr=False)
+
  @classmethod
  def construct_with_rs_or_id(
  cls,
@@ -208,13 +88,12 @@ class Result:
  result.set_parent_run_id(parent_run_id)
  return result

- def set_run_id(self, running_id: str) -> Self:
- """Set a running ID.
+ @model_validator(mode="after")
+ def __prepare_trace(self) -> Self:
+ """Prepare trace field that want to pass after its initialize step."""
+ if self.trace is None: # pragma: no cove
+ self.trace: TraceLog = get_trace(self.run_id, self.parent_run_id)

- :param running_id: A running ID that want to update on this model.
- :rtype: Self
- """
- self.run_id: str = running_id
  return self

  def set_parent_run_id(self, running_id: str) -> Self:
@@ -224,6 +103,7 @@ class Result:
  :rtype: Self
  """
  self.parent_run_id: str = running_id
+ self.trace: TraceLog = get_trace(self.run_id, running_id)
  return self

  def catch(
@@ -244,13 +124,9 @@
  self.__dict__["context"].update(context or {})
  return self

- @property
- def trace(self) -> TraceLog:
- """Return TraceLog object that passing its running ID.
+ def alive_time(self) -> float: # pragma: no cov
+ """Return total seconds that this object use since it was created.

- :rtype: TraceLog
+ :rtype: float
  """
- return TraceLog(self.run_id, self.parent_run_id)
-
- def alive_time(self) -> float: # pragma: no cov
  return (get_dt_tznow() - self.ts).total_seconds()
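
Result now stores its trace as a real field: a model_validator(mode="after") builds it from run_id/parent_run_id once validation finishes, and set_parent_run_id refreshes it, instead of the old read-only trace property. A minimal sketch of that derived-field pattern on a pydantic dataclass (stand-in names, not the package's Result):

    from dataclasses import field
    from typing import Optional
    from pydantic import ConfigDict
    from pydantic.dataclasses import dataclass
    from pydantic.functional_validators import model_validator
    from typing_extensions import Self

    @dataclass(config=ConfigDict(arbitrary_types_allowed=True))
    class Record:
        run_id: str
        parent_run_id: Optional[str] = None
        # Derived field: skipped for comparison and repr, filled in after init.
        label: Optional[str] = field(default=None, compare=False, repr=False)

        @model_validator(mode="after")
        def __prepare_label(self) -> Self:
            if self.label is None:
                self.label = f"run_id={self.parent_run_id or self.run_id}"
            return self

    print(Record(run_id="01").label)  # run_id=01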
@@ -4,18 +4,18 @@
  # license information.
  # ------------------------------------------------------------------------------
  """
- The main schedule running is ``schedule_runner`` function that trigger the
- multiprocess of ``schedule_control`` function for listing schedules on the
- config by ``Loader.finds(Schedule)``.
+ The main schedule running is `schedule_runner` function that trigger the
+ multiprocess of `schedule_control` function for listing schedules on the
+ config by `Loader.finds(Schedule)`.

- The ``schedule_control`` is the scheduler function that release 2 schedule
- functions; ``workflow_task``, and ``workflow_monitor``.
+ The `schedule_control` is the scheduler function that release 2 schedule
+ functions; `workflow_task`, and `workflow_monitor`.

- ``schedule_control`` --- Every minute at :02 --> ``schedule_task``
- --- Every 5 minutes --> ``monitor``
+ `schedule_control` ---( Every minute at :02 )--> `schedule_task`
+ ---( Every 5 minutes )--> `monitor`

- The ``schedule_task`` will run ``task.release`` method in threading object
- for multithreading strategy. This ``release`` method will run only one crontab
+ The `schedule_task` will run `task.release` method in threading object
+ for multithreading strategy. This `release` method will run only one crontab
  value with the on field.
  """
  from __future__ import annotations
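
The docstring above sketches the control loop: schedule_control registers schedule_task to fire every minute at second :02 and monitor every five minutes. The registration uses the third-party schedule package, roughly like this sketch (illustrative job bodies, not the module's own):

    import time
    import schedule

    def schedule_task() -> None:
        print("release pending workflow tasks")       # placeholder body

    def monitor() -> None:
        print("check long-running release threads")   # placeholder body

    scheduler = schedule.Scheduler()
    scheduler.every(1).minutes.at(":02").do(schedule_task).tag("control")
    scheduler.every(5).minutes.do(monitor).tag("monitor")

    # Drive the registered jobs; the real module loops until a stop datetime.
    for _ in range(3):
        scheduler.run_pending()
        time.sleep(1)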
@@ -134,7 +134,7 @@ class ScheduleWorkflow(BaseModel):
  on: list[str] = [on]

  if any(not isinstance(n, (dict, str)) for n in on):
- raise TypeError("The ``on`` key should be list of str or dict")
+ raise TypeError("The `on` key should be list of str or dict")

  # NOTE: Pass on value to Loader and keep on model object to on
  # field.
@@ -344,7 +344,7 @@ class Schedule(BaseModel):
  tasks=self.tasks(
  start_date_waiting, queue=queue, externals=externals
  ),
- stop_date=stop_date,
+ stop=stop_date,
  queue=queue,
  threads=threads,
  result=result,
@@ -359,12 +359,16 @@ ReturnResultOrCancel = Callable[P, ResultOrCancel]
  DecoratorCancelJob = Callable[[ReturnResultOrCancel], ReturnResultOrCancel]


- def catch_exceptions(cancel_on_failure: bool = False) -> DecoratorCancelJob:
+ def catch_exceptions(
+ cancel_on_failure: bool = False,
+ parent_run_id: str | None = None,
+ ) -> DecoratorCancelJob:
  """Catch exception error from scheduler job that running with schedule
  package and return CancelJob if this function raise an error.

  :param cancel_on_failure: A flag that allow to return the CancelJob or not
  it will raise.
+ :param parent_run_id:

  :rtype: DecoratorCancelJob
  """
@@ -375,10 +379,17 @@ def catch_exceptions(cancel_on_failure: bool = False) -> DecoratorCancelJob:

  @wraps(func)
  def wrapper(*args: P.args, **kwargs: P.kwargs) -> ResultOrCancel:
+
  try:
  return func(*args, **kwargs)
+
  except Exception as err:
- logger.exception(err)
+ if parent_run_id:
+ (
+ Result(parent_run_id=parent_run_id).trace.exception(
+ str(err)
+ )
+ )
  if cancel_on_failure:
  return CancelJob
  raise err
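
catch_exceptions wraps a scheduled callable so a failure is reported through the run's trace and, with cancel_on_failure set, the wrapper returns schedule.CancelJob, which the schedule package treats as a request to unschedule that job. A stripped-down sketch of the same decorator shape (print standing in for the trace object):

    from functools import wraps
    from schedule import CancelJob

    def catch_exceptions_sketch(cancel_on_failure: bool = False):
        def decorator(func):
            @wraps(func)
            def wrapper(*args, **kwargs):
                try:
                    return func(*args, **kwargs)
                except Exception as err:
                    print(f"scheduled job failed: {err}")
                    if cancel_on_failure:
                        # Returning CancelJob tells `schedule` to drop this job.
                        return CancelJob
                    raise
            return wrapper
        return decorator

    @catch_exceptions_sketch(cancel_on_failure=True)
    def flaky_job():
        raise RuntimeError("boom")

    print(flaky_job())  # logs the failure and returns CancelJob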
@@ -399,13 +410,13 @@ class ReleaseThread(TypedDict):
  ReleaseThreads = dict[str, ReleaseThread]


- @catch_exceptions(cancel_on_failure=True)
  def schedule_task(
  tasks: list[WorkflowTask],
  stop: datetime,
  queue: dict[str, ReleaseQueue],
  threads: ReleaseThreads,
  audit: type[Audit],
+ *,
  parent_run_id: str | None = None,
  ) -> ResultOrCancel:
  """Schedule task function that generate thread of workflow task release
@@ -491,8 +502,14 @@ def schedule_task(
  # job.
  thread_name: str = f"{task.alias}|{release.date:%Y%m%d%H%M}"
  thread: Thread = Thread(
- target=catch_exceptions(cancel_on_failure=True)(task.release),
- kwargs={"release": release, "queue": q, "audit": audit},
+ target=catch_exceptions(
+ cancel_on_failure=True,
+ )(task.release),
+ kwargs={
+ "release": release,
+ "queue": q,
+ "audit": audit,
+ },
  name=thread_name,
  daemon=True,
  )
@@ -508,22 +525,28 @@ def schedule_task(
  delay()

  result.trace.debug(
- f"[SCHEDULE]: End schedule task at {current_date:%Y-%m-%d %H:%M:%S} "
- f"{'=' * 80}"
+ f"[SCHEDULE]: End schedule task that run since "
+ f"{current_date:%Y-%m-%d %H:%M:%S} {'=' * 30}"
  )
  return result.catch(
  status=Status.SUCCESS, context={"task_date": current_date}
  )


- def monitor(threads: ReleaseThreads) -> None: # pragma: no cov
+ def monitor(
+ threads: ReleaseThreads,
+ parent_run_id: str | None = None,
+ ) -> None: # pragma: no cov
  """Monitoring function that running every five minute for track long-running
  thread instance from the schedule_control function that run every minute.

  :param threads: A mapping of Thread object and its name.
+ :param parent_run_id: A parent workflow running ID for this release.
+
  :type threads: ReleaseThreads
  """
- logger.debug("[MONITOR]: Start checking long running schedule task.")
+ result: Result = Result().set_parent_run_id(parent_run_id)
+ result.trace.debug("[MONITOR]: Start checking long running schedule task.")

  snapshot_threads: list[str] = list(threads.keys())
  for thread_name in snapshot_threads:
@@ -538,20 +561,20 @@ def monitor(threads: ReleaseThreads) -> None: # pragma: no cov

  def scheduler_pending(
  tasks: list[WorkflowTask],
- stop_date,
- queue,
- threads,
+ stop: datetime,
+ queue: dict[str, ReleaseQueue],
+ threads: ReleaseThreads,
  result: Result,
  audit: type[Audit],
  ) -> Result: # pragma: no cov
- """
+ """Scheduler pending function.

- :param tasks:
- :param stop_date:
- :param queue:
- :param threads:
- :param result:
- :param audit:
+ :param tasks: A list of WorkflowTask object.
+ :param stop: A stop datetime object that force stop running scheduler.
+ :param queue: A mapping of alias name and ReleaseQueue object.
+ :param threads: A mapping of alias name and Thread object.
+ :param result: A result object.
+ :param audit: An audit class that want to make audit object.

  :rtype: Result
  """
@@ -569,9 +592,12 @@ def scheduler_pending(
  scheduler.every(1)
  .minutes.at(":02")
  .do(
- schedule_task,
+ catch_exceptions(
+ cancel_on_failure=True,
+ parent_run_id=result.parent_run_id,
+ )(schedule_task),
  tasks=tasks,
- stop=stop_date,
+ stop=stop,
  queue=queue,
  threads=threads,
  audit=audit,
@@ -588,13 +614,14 @@ def scheduler_pending(
  .do(
  monitor,
  threads=threads,
+ parent_run_id=result.parent_run_id,
  )
  .tag("monitor")
  )

  # NOTE: Start running schedule
  result.trace.info(
- f"[SCHEDULE]: Schedule with stopper: {stop_date:%Y-%m-%d %H:%M:%S}"
+ f"[SCHEDULE]: Schedule with stopper: {stop:%Y-%m-%d %H:%M:%S}"
  )

  while True:
@@ -611,7 +638,7 @@ def scheduler_pending(
  "running in background."
  )
  delay(10)
- monitor(threads)
+ monitor(threads, parent_run_id=result.parent_run_id)

  break

@@ -681,7 +708,7 @@ def schedule_control(

  scheduler_pending(
  tasks=tasks,
- stop_date=stop_date,
+ stop=stop_date,
  queue=queue,
  threads=threads,
  result=result,
@@ -707,15 +734,16 @@ def schedule_runner(

  This function will get all workflows that include on value that was
  created in config path and chuck it with application config variable
- ``WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS`` env var to multiprocess executor
+ `WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS` env var to multiprocess executor
  pool.

  The current workflow logic that split to process will be below diagram:

- MAIN ==> process 01 ==> schedule --> thread of release task 01 01
- --> thread of release task 01 02
- ==> schedule --> thread of release task 02 01
- --> thread of release task 02 02
+ MAIN ==> process 01 ==> schedule ==> thread 01 --> 01
+ ==> thread 01 --> 02
+ ==> schedule ==> thread 02 --> 01
+ ==> thread 02 --> 02
+ ==> ...
  ==> process 02 ==> ...

  :rtype: Result
@@ -745,7 +773,7 @@ def schedule_runner(

  # NOTE: Raise error when it has any error from schedule_control.
  if err := future.exception():
- logger.error(str(err))
+ result.trace.error(str(err))
  raise WorkflowException(str(err)) from err

  rs: Result = future.result(timeout=1)
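
The schedule_runner docstring above describes the fan-out: schedules are chunked by the WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS setting, each chunk runs schedule_control in its own process, and each process fans out to release threads. A rough sketch of that chunk-and-dispatch shape with concurrent.futures, including the future.exception() check used in the last hunk (stand-in schedule_control, not the package's):

    from concurrent.futures import ProcessPoolExecutor, as_completed

    def schedule_control(names: list[str]) -> list[str]:
        # Stand-in for the real control loop: pretend each schedule was released.
        return [f"released:{n}" for n in names]

    def chunks(seq: list[str], size: int):
        for i in range(0, len(seq), size):
            yield seq[i:i + size]

    if __name__ == "__main__":
        schedules = ["sched-01", "sched-02", "sched-03", "sched-04"]
        with ProcessPoolExecutor(max_workers=2) as pool:
            futures = [pool.submit(schedule_control, c) for c in chunks(schedules, 2)]
            for future in as_completed(futures):
                if err := future.exception():
                    raise RuntimeError(str(err)) from err
                print(future.result())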