ddeutil-workflow 0.0.34__py3-none-any.whl → 0.0.35__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,214 @@
+ # ------------------------------------------------------------------------------
+ # Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+ # Licensed under the MIT License. See LICENSE in the project root for
+ # license information.
+ # ------------------------------------------------------------------------------
+ """This is the Logs module. This module provides the TraceLog dataclasses.
+ """
+ from __future__ import annotations
+
+ import os
+ from abc import ABC, abstractmethod
+ from collections.abc import Iterator
+ from datetime import datetime
+ from inspect import Traceback, currentframe, getframeinfo
+ from pathlib import Path
+ from threading import get_ident
+ from typing import Optional, Union
+
+ from pydantic.dataclasses import dataclass
+
+ from .__types import TupleStr
+ from .conf import config, get_logger
+ from .utils import cut_id, get_dt_now
+
+ logger = get_logger("ddeutil.workflow")
+
+ __all__: TupleStr = (
+     "FileTraceLog",
+     "TraceLog",
+     "get_dt_tznow",
+     "get_trace",
+ )
+
+
+ def get_dt_tznow() -> datetime:
+     """Return the current datetime object that uses the config timezone.
+
+     :rtype: datetime
+     """
+     return get_dt_now(tz=config.tz)
+
+
+ @dataclass(frozen=True)
+ class BaseTraceLog(ABC):  # pragma: no cov
+     """Base Trace Log dataclass object."""
+
+     run_id: str
+     parent_run_id: Optional[str] = None
+
+     @abstractmethod
+     def writer(self, message: str, is_err: bool = False) -> None:
+         raise NotImplementedError(
+             "Create writer logic for this trace object before using."
+         )
+
+     @abstractmethod
+     def make_message(self, message: str) -> str:
+         raise NotImplementedError(
+             "Adjust make message method for this trace object before using."
+         )
+
+     def debug(self, message: str):
+         msg: str = self.make_message(message)
+
+         # NOTE: Write file if debug mode.
+         if config.debug:
+             self.writer(msg)
+
+         logger.debug(msg, stacklevel=2)
+
+     def info(self, message: str):
+         msg: str = self.make_message(message)
+         self.writer(msg)
+         logger.info(msg, stacklevel=2)
+
+     def warning(self, message: str):
+         msg: str = self.make_message(message)
+         self.writer(msg)
+         logger.warning(msg, stacklevel=2)
+
+     def error(self, message: str):
+         msg: str = self.make_message(message)
+         self.writer(msg, is_err=True)
+         logger.error(msg, stacklevel=2)
+
+
+ class FileTraceLog(BaseTraceLog):  # pragma: no cov
+     """Trace Log object that writes a file to the local storage."""
+
+     @classmethod
+     def find_logs(cls) -> Iterator[dict[str, str]]:  # pragma: no cov
+         for file in config.log_path.glob("./run_id=*"):
+             data: dict[str, str] = {}
+
+             if (file / "stdout.txt").exists():
+                 data["stdout"] = (file / "stdout.txt").read_text(
+                     encoding="utf-8"
+                 )
+
+             if (file / "stderr.txt").exists():
+                 data["stderr"] = (file / "stderr.txt").read_text(
+                     encoding="utf-8"
+                 )
+
+             yield data
+
+     @classmethod
+     def find_log_with_id(cls, run_id: str) -> dict[str, str]:
+         file: Path = config.log_path / f"run_id={run_id}"
+         data: dict[str, str] = {}
+
+         if (file / "stdout.txt").exists():
+             data["stdout"] = (file / "stdout.txt").read_text(encoding="utf-8")
+
+         if (file / "stderr.txt").exists():
+             data["stderr"] = (file / "stderr.txt").read_text(encoding="utf-8")
+
+         return data
+
+     @property
+     def log_file(self) -> Path:
+         log_file: Path = (
+             config.log_path / f"run_id={self.parent_run_id or self.run_id}"
+         )
+         if not log_file.exists():
+             log_file.mkdir(parents=True)
+         return log_file
+
+     @property
+     def cut_id(self) -> str:
+         """Combine the cut run ID with the cut parent run ID if set."""
+         cut_run_id: str = cut_id(self.run_id)
+         if not self.parent_run_id:
+             return f"{cut_run_id} -> {' ' * 6}"
+
+         cut_parent_run_id: str = cut_id(self.parent_run_id)
+         return f"{cut_parent_run_id} -> {cut_run_id}"
+
+     def make_message(self, message: str) -> str:
+         return f"({self.cut_id}) {message}"
+
+     def writer(self, message: str, is_err: bool = False) -> None:
+         """Write a trace message to the log file. The file path is formed by:
+
+             ... ./logs/run_id=<run-id>/stdout.txt
+             ... ./logs/run_id=<run-id>/stderr.txt
+
+         :param message: A message that want to write to the log file.
+         :param is_err: A flag that routes this message to the stderr file.
+         """
+         if not config.enable_write_log:
+             return
+
+         frame_info: Traceback = getframeinfo(currentframe().f_back.f_back)
+         filename: str = frame_info.filename.split(os.path.sep)[-1]
+         lineno: int = frame_info.lineno
+
+         # NOTE: set process and thread IDs.
+         process: int = os.getpid()
+         thread: int = get_ident()
+
+         write_file: str = "stderr.txt" if is_err else "stdout.txt"
+         with (self.log_file / write_file).open(
+             mode="at", encoding="utf-8"
+         ) as f:
+             msg_fmt: str = f"{config.log_format_file}\n"
+             print(msg_fmt)
+             f.write(
+                 msg_fmt.format(
+                     **{
+                         "datetime": get_dt_tznow().strftime(
+                             config.log_datetime_format
+                         ),
+                         "process": process,
+                         "thread": thread,
+                         "message": message,
+                         "filename": filename,
+                         "lineno": lineno,
+                     }
+                 )
+             )
+
+
+ class SQLiteTraceLog(BaseTraceLog):  # pragma: no cov
+
+     @classmethod
+     def find_logs(cls) -> Iterator[dict[str, str]]: ...
+
+     @classmethod
+     def find_log_with_id(cls, run_id: str) -> dict[str, str]: ...
+
+     def make_message(self, message: str) -> str: ...
+
+     def writer(self, message: str, is_err: bool = False) -> None: ...
+
+
+ TraceLog = Union[
+     FileTraceLog,
+     SQLiteTraceLog,
+ ]
+
+
+ def get_trace(
+     run_id: str, parent_run_id: str | None = None
+ ) -> TraceLog:  # pragma: no cov
+     if config.log_path.is_file():
+         return SQLiteTraceLog(run_id, parent_run_id=parent_run_id)
+     return FileTraceLog(run_id, parent_run_id=parent_run_id)
+
+
+ def get_trace_obj() -> type[TraceLog]:  # pragma: no cov
+     if config.log_path.is_file():
+         return SQLiteTraceLog
+     return FileTraceLog
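
For context on how the new module is meant to be consumed: get_trace returns a FileTraceLog unless the configured log path points at a file, and every level method routes through make_message and writer. A minimal usage sketch follows, assuming the file ships as ddeutil.workflow.logs and that write-out is enabled in the package config; the run IDs are made up for illustration.

    # Usage sketch for the trace API above (not part of the released diff).
    from ddeutil.workflow.logs import FileTraceLog, get_trace

    # Hypothetical run IDs; get_trace picks FileTraceLog while config.log_path
    # is a directory.
    trace = get_trace("20240101120000000002", parent_run_id="20240101120000000001")

    trace.info("[DEMO]: workflow started")    # -> logging + stdout.txt when enabled
    trace.error("[DEMO]: something failed")   # -> logging + stderr.txt when enabled

    # Files land under config.log_path / f"run_id={parent_run_id or run_id}".
    print(FileTraceLog.find_log_with_id("20240101120000000001"))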
@@ -3,8 +3,8 @@
  # Licensed under the MIT License. See LICENSE in the project root for
  # license information.
  # ------------------------------------------------------------------------------
- """Param Model that use for parsing incoming parameters that pass to the
- Workflow and Schedule objects.
+ """This module includes all Param Models that are used for parsing incoming
+ parameters that are passed to the Workflow and Schedule objects.
  """
  from __future__ import annotations

@@ -12,7 +12,7 @@ import decimal
  import logging
  from abc import ABC, abstractmethod
  from datetime import date, datetime
- from typing import Any, Literal, Optional, Union
+ from typing import Annotated, Any, Literal, Optional, Union

  from pydantic import BaseModel, Field

@@ -32,8 +32,9 @@ __all__: TupleStr = (


  class BaseParam(BaseModel, ABC):
-     """Base Parameter that use to make any Params Model. The type will dynamic
-     with the type field that made from literal string."""
+     """Base Parameter that is used to make any Params Models. The parameter
+     type will be dynamic with the setup type field made from a literal string.
+     """

      desc: Optional[str] = Field(
          default=None, description="A description of parameter providing."
@@ -169,9 +170,11 @@ class ChoiceParam(BaseParam):
      """Choice parameter."""

      type: Literal["choice"] = "choice"
-     options: list[str] = Field(description="A list of choice parameters.")
+     options: Union[list[str], list[int]] = Field(
+         description="A list of choice parameters that can be str or int.",
+     )

-     def receive(self, value: str | None = None) -> str:
+     def receive(self, value: Union[str, int] | None = None) -> Union[str, int]:
          """Receive value that match with options.

          :param value: A value that want to select from the options field.
@@ -188,9 +191,34 @@ class ChoiceParam(BaseParam):
          return value


- Param = Union[
-     ChoiceParam,
-     DatetimeParam,
-     IntParam,
-     StrParam,
+ # TODO: Not implement this parameter yet
+ class MappingParam(DefaultParam):  # pragma: no cov
+
+     type: Literal["map"] = "map"
+     default: dict[Any, Any] = Field(default_factory=dict)
+
+     def receive(self, value: Optional[dict[Any, Any]] = None) -> dict[Any, Any]:
+         if value is None:
+             return self.default
+
+
+ # TODO: Not implement this parameter yet
+ class ArrayParam(DefaultParam):  # pragma: no cov
+
+     type: Literal["array"] = "array"
+     default: list[Any] = Field(default_factory=list)
+
+     def receive(self, value: Optional[list[Any]] = None) -> list[Any]:
+         if value is None:
+             return self.default
+
+
+ Param = Annotated[
+     Union[
+         ChoiceParam,
+         DatetimeParam,
+         IntParam,
+         StrParam,
+     ],
+     Field(discriminator="type"),
  ]
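
Taken together, the ChoiceParam widening and the new Annotated union mean pydantic can pick the concrete Param model straight from the literal type field instead of trying each union member in order. A small sketch of that behavior, assuming the models live in ddeutil.workflow.params, pydantic v2's TypeAdapter is available, and StrParam keeps an optional default as in 0.0.34; the payloads are illustrative only.

    # Sketch: resolving the discriminated Param union by its "type" key.
    from pydantic import TypeAdapter

    from ddeutil.workflow.params import ChoiceParam, Param, StrParam

    adapter = TypeAdapter(Param)

    # The discriminator routes each mapping to exactly one concrete model.
    p1 = adapter.validate_python({"type": "choice", "options": [1, 3, 5]})
    p2 = adapter.validate_python({"type": "str"})

    assert isinstance(p1, ChoiceParam)  # int options are allowed after this change
    assert isinstance(p2, StrParam)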
@@ -4,36 +4,32 @@
  # license information.
  # ------------------------------------------------------------------------------
  """This is the Result module. It is the data context transfer objects that use
- by all object in this package.
+ by all objects in this package. This module provides the Result dataclass.
  """
  from __future__ import annotations

- import os
- from abc import ABC, abstractmethod
  from dataclasses import field
  from datetime import datetime
  from enum import IntEnum
- from inspect import Traceback, currentframe, getframeinfo
- from pathlib import Path
- from threading import Event, get_ident
+ from threading import Event
  from typing import Optional

  from pydantic import ConfigDict
  from pydantic.dataclasses import dataclass
+ from pydantic.functional_validators import model_validator
  from typing_extensions import Self

  from .__types import DictData, TupleStr
- from .conf import config, get_logger
- from .utils import cut_id, gen_id, get_dt_now
+ from .conf import get_logger
+ from .logs import TraceLog, get_dt_tznow, get_trace
+ from .utils import gen_id

  logger = get_logger("ddeutil.workflow")

  __all__: TupleStr = (
      "Result",
      "Status",
-     "TraceLog",
      "default_gen_id",
-     "get_dt_tznow",
  )


@@ -46,14 +42,6 @@ def default_gen_id() -> str:
      return gen_id("manual", unique=True)


- def get_dt_tznow() -> datetime:
-     """Return the current datetime object that passing the config timezone.
-
-     :rtype: datetime
-     """
-     return get_dt_now(tz=config.tz)
-
-
  class Status(IntEnum):
      """Status Int Enum object."""

@@ -62,111 +50,6 @@ class Status(IntEnum):
      WAIT: int = 2


- @dataclass(frozen=True)
- class BaseTraceLog(ABC):  # pragma: no cov
-     """Base Trace Log dataclass object."""
-
-     run_id: str
-     parent_run_id: Optional[str] = None
-
-     @abstractmethod
-     def writer(self, message: str, is_err: bool = False) -> None: ...
-
-     @abstractmethod
-     def make_message(self, message: str) -> str: ...
-
-     def debug(self, message: str):
-         msg: str = self.make_message(message)
-
-         # NOTE: Write file if debug mode.
-         if config.debug:
-             self.writer(msg)
-
-         logger.debug(msg, stacklevel=2)
-
-     def info(self, message: str):
-         msg: str = self.make_message(message)
-         self.writer(msg)
-         logger.info(msg, stacklevel=2)
-
-     def warning(self, message: str):
-         msg: str = self.make_message(message)
-         self.writer(msg)
-         logger.warning(msg, stacklevel=2)
-
-     def error(self, message: str):
-         msg: str = self.make_message(message)
-         self.writer(msg, is_err=True)
-         logger.error(msg, stacklevel=2)
-
-
- class TraceLog(BaseTraceLog):  # pragma: no cov
-     """Trace Log object that write file to the local storage."""
-
-     @property
-     def log_file(self) -> Path:
-         log_file: Path = (
-             config.log_path / f"run_id={self.parent_run_id or self.run_id}"
-         )
-         if not log_file.exists():
-             log_file.mkdir(parents=True)
-         return log_file
-
-     @property
-     def cut_id(self) -> str:
-         """Combine cutting ID of parent running ID if it set."""
-         cut_run_id: str = cut_id(self.run_id)
-         if not self.parent_run_id:
-             return f"{cut_run_id} -> {' ' * 6}"
-
-         cut_parent_run_id: str = cut_id(self.parent_run_id)
-         return f"{cut_parent_run_id} -> {cut_run_id}"
-
-     def make_message(self, message: str) -> str:
-         return f"({self.cut_id}) {message}"
-
-     def writer(self, message: str, is_err: bool = False) -> None:
-         """The path of logging data will store by format:
-
-             ... ./logs/run_id=<run-id>/stdout.txt
-             ... ./logs/run_id=<run-id>/stderr.txt
-
-         :param message:
-         :param is_err:
-         """
-         if not config.enable_write_log:
-             return
-
-         frame_info: Traceback = getframeinfo(currentframe().f_back.f_back)
-         filename: str = frame_info.filename.split(os.path.sep)[-1]
-         lineno: int = frame_info.lineno
-
-         # NOTE: set process and thread IDs.
-         process: int = os.getpid()
-         thread: int = get_ident()
-
-         write_file: str = "stderr.txt" if is_err else "stdout.txt"
-         with (self.log_file / write_file).open(
-             mode="at", encoding="utf-8"
-         ) as f:
-             msg_fmt: str = f"{config.log_format_file}\n"
-             print(msg_fmt)
-             f.write(
-                 msg_fmt.format(
-                     **{
-                         "datetime": get_dt_tznow().strftime(
-                             config.log_datetime_format
-                         ),
-                         "process": process,
-                         "thread": thread,
-                         "message": message,
-                         "filename": filename,
-                         "lineno": lineno,
-                     }
-                 )
-             )
-
-
  @dataclass(
      config=ConfigDict(arbitrary_types_allowed=True, use_enum_values=True)
  )
@@ -182,11 +65,10 @@ class Result:
      status: Status = field(default=Status.WAIT)
      context: DictData = field(default_factory=dict)
      run_id: Optional[str] = field(default_factory=default_gen_id)
-
-     # NOTE: Ignore this field to compare another result model with __eq__.
      parent_run_id: Optional[str] = field(default=None, compare=False)
      event: Event = field(default_factory=Event, compare=False)
      ts: datetime = field(default_factory=get_dt_tznow, compare=False)
+     trace: Optional[TraceLog] = field(default=None)

      @classmethod
      def construct_with_rs_or_id(
@@ -208,13 +90,12 @@ class Result:
          result.set_parent_run_id(parent_run_id)
          return result

-     def set_run_id(self, running_id: str) -> Self:
-         """Set a running ID.
+     @model_validator(mode="after")
+     def __prepare_trace(self) -> Self:
+         """Prepare the trace field after the model initialization step."""
+         if self.trace is None:  # pragma: no cov
+             self.trace: TraceLog = get_trace(self.run_id, self.parent_run_id)

-         :param running_id: A running ID that want to update on this model.
-         :rtype: Self
-         """
-         self.run_id: str = running_id
          return self

      def set_parent_run_id(self, running_id: str) -> Self:
@@ -224,6 +105,7 @@ class Result:
          :rtype: Self
          """
          self.parent_run_id: str = running_id
+         self.trace: TraceLog = get_trace(self.run_id, running_id)
          return self

      def catch(
@@ -244,13 +126,9 @@ class Result:
          self.__dict__["context"].update(context or {})
          return self

-     @property
-     def trace(self) -> TraceLog:
-         """Return TraceLog object that passing its running ID.
+     def alive_time(self) -> float:  # pragma: no cov
+         """Return total seconds that this object has used since it was created.

-         :rtype: TraceLog
+         :rtype: float
          """
-         return TraceLog(self.run_id, self.parent_run_id)
-
-     def alive_time(self) -> float:  # pragma: no cov
          return (get_dt_tznow() - self.ts).total_seconds()
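
With the Result hunks above, a TraceLog is now attached once by the model validator (and refreshed by set_parent_run_id) instead of being rebuilt on every .trace access. A rough sketch of the resulting call pattern, assuming the dataclass lives in ddeutil.workflow.result; the run IDs are illustrative.

    # Sketch: Result carries its trace after construction (not part of the diff).
    from ddeutil.workflow.result import Result

    result = Result(run_id="20240101120000000002")
    result.trace.info("[DEMO]: trace attached by the model validator")

    # Re-pointing the parent run ID rebuilds the trace with the parent prefix,
    # so later messages land under the parent's run_id=... log folder.
    result.set_parent_run_id("20240101120000000001")
    result.trace.debug("[DEMO]: now grouped under the parent run ID")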
@@ -4,18 +4,18 @@
  # license information.
  # ------------------------------------------------------------------------------
  """
- The main schedule running is ``schedule_runner`` function that trigger the
- multiprocess of ``schedule_control`` function for listing schedules on the
- config by ``Loader.finds(Schedule)``.
+ The main schedule running is `schedule_runner` function that trigger the
+ multiprocess of `schedule_control` function for listing schedules on the
+ config by `Loader.finds(Schedule)`.

- The ``schedule_control`` is the scheduler function that release 2 schedule
- functions; ``workflow_task``, and ``workflow_monitor``.
+ The `schedule_control` is the scheduler function that release 2 schedule
+ functions; `workflow_task`, and `workflow_monitor`.

-     ``schedule_control`` --- Every minute at :02 --> ``schedule_task``
-                          --- Every 5 minutes    --> ``monitor``
+     `schedule_control` ---( Every minute at :02 )--> `schedule_task`
+                        ---( Every 5 minutes )--> `monitor`

- The ``schedule_task`` will run ``task.release`` method in threading object
- for multithreading strategy. This ``release`` method will run only one crontab
+ The `schedule_task` will run `task.release` method in threading object
+ for multithreading strategy. This `release` method will run only one crontab
  value with the on field.
  """
  from __future__ import annotations
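
The cadence described in that docstring (a release pass every minute at second :02 and a monitor pass every five minutes) matches the third-party schedule package that scheduler_pending's .do(...) calls suggest; treat the exact API in this sketch as an assumption rather than the package's internals.

    # Sketch of the documented cadence with the "schedule" library (illustrative).
    import time

    import schedule

    def schedule_task_stub():
        print("release pending workflow tasks")      # stand-in for schedule_task

    def monitor_stub():
        print("check long-running release threads")  # stand-in for monitor

    schedule.every().minute.at(":02").do(schedule_task_stub)
    schedule.every(5).minutes.do(monitor_stub)

    # scheduler_pending keeps a loop like this alive until the stop datetime:
    for _ in range(3):
        schedule.run_pending()
        time.sleep(1)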
@@ -134,7 +134,7 @@ class ScheduleWorkflow(BaseModel):
              on: list[str] = [on]

          if any(not isinstance(n, (dict, str)) for n in on):
-             raise TypeError("The ``on`` key should be list of str or dict")
+             raise TypeError("The `on` key should be list of str or dict")

          # NOTE: Pass on value to Loader and keep on model object to on
          # field.
@@ -344,7 +344,7 @@ class Schedule(BaseModel):
              tasks=self.tasks(
                  start_date_waiting, queue=queue, externals=externals
              ),
-             stop_date=stop_date,
+             stop=stop_date,
              queue=queue,
              threads=threads,
              result=result,
@@ -509,21 +509,27 @@ def schedule_task(

      result.trace.debug(
          f"[SCHEDULE]: End schedule task at {current_date:%Y-%m-%d %H:%M:%S} "
-         f"{'=' * 80}"
+         f"{'=' * 60}"
      )
      return result.catch(
          status=Status.SUCCESS, context={"task_date": current_date}
      )


- def monitor(threads: ReleaseThreads) -> None:  # pragma: no cov
+ def monitor(
+     threads: ReleaseThreads,
+     parent_run_id: str | None = None,
+ ) -> None:  # pragma: no cov
      """Monitoring function that running every five minute for track long-running
      thread instance from the schedule_control function that run every minute.

      :param threads: A mapping of Thread object and its name.
+     :param parent_run_id: A parent workflow running ID for this release.
+
      :type threads: ReleaseThreads
      """
-     logger.debug("[MONITOR]: Start checking long running schedule task.")
+     result: Result = Result().set_parent_run_id(parent_run_id)
+     result.trace.debug("[MONITOR]: Start checking long running schedule task.")

      snapshot_threads: list[str] = list(threads.keys())
      for thread_name in snapshot_threads:
@@ -538,20 +544,20 @@ def monitor(threads: ReleaseThreads) -> None: # pragma: no cov

  def scheduler_pending(
      tasks: list[WorkflowTask],
-     stop_date,
-     queue,
-     threads,
+     stop: datetime,
+     queue: dict[str, ReleaseQueue],
+     threads: ReleaseThreads,
      result: Result,
      audit: type[Audit],
  ) -> Result:  # pragma: no cov
-     """
+     """Scheduler pending function.

-     :param tasks:
-     :param stop_date:
-     :param queue:
-     :param threads:
-     :param result:
-     :param audit:
+     :param tasks: A list of WorkflowTask object.
+     :param stop: A stop datetime object that force stop running scheduler.
+     :param queue: A mapping of alias name and ReleaseQueue object.
+     :param threads: A mapping of alias name and Thread object.
+     :param result: A result object.
+     :param audit: An audit class that want to make audit object.

      :rtype: Result
      """
@@ -571,7 +577,7 @@ def scheduler_pending(
          .do(
              schedule_task,
              tasks=tasks,
-             stop=stop_date,
+             stop=stop,
              queue=queue,
              threads=threads,
              audit=audit,
@@ -594,7 +600,7 @@ def scheduler_pending(

      # NOTE: Start running schedule
      result.trace.info(
-         f"[SCHEDULE]: Schedule with stopper: {stop_date:%Y-%m-%d %H:%M:%S}"
+         f"[SCHEDULE]: Schedule with stopper: {stop:%Y-%m-%d %H:%M:%S}"
      )

      while True:
@@ -681,7 +687,7 @@ def schedule_control(

      scheduler_pending(
          tasks=tasks,
-         stop_date=stop_date,
+         stop=stop_date,
          queue=queue,
          threads=threads,
          result=result,
@@ -707,15 +713,16 @@ def schedule_runner(

      This function will get all workflows that include on value that was
      created in config path and chuck it with application config variable
-     ``WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS`` env var to multiprocess executor
+     `WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS` env var to multiprocess executor
      pool.

      The current workflow logic that split to process will be below diagram:

-         MAIN ==> process 01 ==> schedule --> thread of release task 01 01
-                                          --> thread of release task 01 02
-                              ==> schedule --> thread of release task 02 01
-                                           --> thread of release task 02 02
+         MAIN ==> process 01 ==> schedule ==> thread 01 --> 01
+                                          ==> thread 01 --> 02
+                              ==> schedule ==> thread 02 --> 01
+                                           ==> thread 02 --> 02
+                                           ==> ...
              ==> process 02 ==> ...

      :rtype: Result
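
The diagram above describes splitting the discovered schedules across worker processes and fanning each schedule out to release threads. A simplified sketch of that chunking step, assuming a hypothetical batch() helper and an illustrative max-per-process value; it is not the package's exact internals.

    # Sketch of the process fan-out described above (illustrative only).
    from concurrent.futures import ProcessPoolExecutor


    def batch(items, n):
        """Yield successive chunks of size n (hypothetical helper)."""
        for i in range(0, len(items), n):
            yield items[i : i + n]


    def run_chunk(schedule_names):
        # Stand-in for schedule_control: each worker process owns a chunk of
        # schedules and releases their workflow tasks on threads.
        return f"handled {len(schedule_names)} schedules"


    if __name__ == "__main__":
        schedules = ["sched-01", "sched-02", "sched-03", "sched-04", "sched-05"]
        max_per_process = 2  # analogous to WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS

        with ProcessPoolExecutor() as pool:
            print(list(pool.map(run_chunk, batch(schedules, max_per_process))))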