ddeutil-workflow 0.0.35__py3-none-any.whl → 0.0.37__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ddeutil/workflow/logs.py CHANGED
@@ -3,10 +3,11 @@
3
3
  # Licensed under the MIT License. See LICENSE in the project root for
4
4
  # license information.
5
5
  # ------------------------------------------------------------------------------
6
- """This is the Logs module. This module provide TraceLog dataclasses.
6
+ """A Logs module contains a TraceLog dataclass.
7
7
  """
8
8
  from __future__ import annotations
9
9
 
10
+ import json
10
11
  import os
11
12
  from abc import ABC, abstractmethod
12
13
  from collections.abc import Iterator
@@ -14,11 +15,13 @@ from datetime import datetime
14
15
  from inspect import Traceback, currentframe, getframeinfo
15
16
  from pathlib import Path
16
17
  from threading import get_ident
17
- from typing import Optional, Union
18
+ from typing import ClassVar, Literal, Optional, Union
18
19
 
20
+ from pydantic import BaseModel, Field
19
21
  from pydantic.dataclasses import dataclass
22
+ from typing_extensions import Self
20
23
 
21
- from .__types import TupleStr
24
+ from .__types import DictStr, TupleStr
22
25
  from .conf import config, get_logger
23
26
  from .utils import cut_id, get_dt_now
24
27
 
@@ -26,13 +29,17 @@ logger = get_logger("ddeutil.workflow")
26
29
 
27
30
  __all__: TupleStr = (
28
31
  "FileTraceLog",
32
+ "SQLiteTraceLog",
33
+ "TraceData",
34
+ "TraceMeda",
29
35
  "TraceLog",
30
36
  "get_dt_tznow",
31
37
  "get_trace",
38
+ "get_trace_obj",
32
39
  )
33
40
 
34
41
 
35
- def get_dt_tznow() -> datetime:
42
+ def get_dt_tznow() -> datetime: # pragma: no cov
36
43
  """Return the current datetime object that passing the config timezone.
37
44
 
38
45
  :rtype: datetime
@@ -49,76 +56,154 @@ class BaseTraceLog(ABC): # pragma: no cov
49
56
 
50
57
  @abstractmethod
51
58
  def writer(self, message: str, is_err: bool = False) -> None:
59
+ """Write a trace message after making to the target pointer object. The
60
+ target can be anything that inherits this class and overrides this method,
61
+ such as a file, console, or database.
62
+
63
+ :param message: A message after making.
64
+ :param is_err: A flag for writing with an error trace or not.
65
+ """
52
66
  raise NotImplementedError(
53
67
  "Create writer logic for this trace object before using."
54
68
  )
55
69
 
56
70
  @abstractmethod
57
71
  def make_message(self, message: str) -> str:
72
+ """Prepare and Make a message before write and log processes.
73
+
74
+ :param message: A message that want to prepare and make before.
75
+
76
+ :rtype: str
77
+ """
58
78
  raise NotImplementedError(
59
79
  "Adjust make message method for this trace object before using."
60
80
  )
61
81
 
62
82
  def debug(self, message: str):
83
+ """Write trace log with append mode and logging this message with the
84
+ DEBUG level.
85
+
86
+ :param message: (str) A message that want to log.
87
+ """
63
88
  msg: str = self.make_message(message)
64
89
 
65
- # NOTE: Write file if debug mode.
90
+ # NOTE: Write file if debug mode was enabled.
66
91
  if config.debug:
67
92
  self.writer(msg)
68
93
 
69
94
  logger.debug(msg, stacklevel=2)
70
95
 
71
- def info(self, message: str):
96
+ def info(self, message: str) -> None:
97
+ """Write trace log with append mode and logging this message with the
98
+ INFO level.
99
+
100
+ :param message: (str) A message that want to log.
101
+ """
72
102
  msg: str = self.make_message(message)
73
103
  self.writer(msg)
74
104
  logger.info(msg, stacklevel=2)
75
105
 
76
- def warning(self, message: str):
106
+ def warning(self, message: str) -> None:
107
+ """Write trace log with append mode and logging this message with the
108
+ WARNING level.
109
+
110
+ :param message: (str) A message that want to log.
111
+ """
77
112
  msg: str = self.make_message(message)
78
113
  self.writer(msg)
79
114
  logger.warning(msg, stacklevel=2)
80
115
 
81
- def error(self, message: str):
116
+ def error(self, message: str) -> None:
117
+ """Write trace log with append mode and logging this message with the
118
+ ERROR level.
119
+
120
+ :param message: (str) A message that want to log.
121
+ """
82
122
  msg: str = self.make_message(message)
83
123
  self.writer(msg, is_err=True)
84
124
  logger.error(msg, stacklevel=2)
85
125
 
126
+ def exception(self, message: str) -> None:
127
+ """Write trace log with append mode and logging this message with the
128
+ EXCEPTION level.
86
129
 
87
- class FileTraceLog(BaseTraceLog): # pragma: no cov
88
- """Trace Log object that write file to the local storage."""
130
+ :param message: (str) A message that want to log.
131
+ """
132
+ msg: str = self.make_message(message)
133
+ self.writer(msg, is_err=True)
134
+ logger.exception(msg, stacklevel=2)
135
+
136
+
137
+ class TraceMeda(BaseModel): # pragma: no cov
138
+ mode: Literal["stdout", "stderr"]
139
+ datetime: str
140
+ process: int
141
+ thread: int
142
+ message: str
143
+ filename: str
144
+ lineno: int
145
+
146
+
147
+ class TraceData(BaseModel): # pragma: no cov
148
+ stdout: str = Field(description="A standard output trace data.")
149
+ stderr: str = Field(description="A standard error trace data.")
150
+ meta: list[TraceMeda] = Field(
151
+ default_factory=list,
152
+ description=(
153
+ "A metadata mapping of this output and error before making it to "
154
+ "standard value."
155
+ ),
156
+ )
89
157
 
90
158
  @classmethod
91
- def find_logs(cls) -> Iterator[dict[str, str]]: # pragma: no cov
92
- for file in config.log_path.glob("./run_id=*"):
93
- data: dict[str, str] = {}
159
+ def from_path(cls, file: Path) -> Self:
160
+ data: DictStr = {"stdout": "", "stderr": "", "meta": []}
94
161
 
95
- if (file / "stdout.txt").exists():
96
- data["stdout"] = (file / "stdout.txt").read_text(
97
- encoding="utf-8"
98
- )
162
+ if (file / "stdout.txt").exists():
163
+ data["stdout"] = (file / "stdout.txt").read_text(encoding="utf-8")
99
164
 
100
- if (file / "stderr.txt").exists():
101
- data["stdout"] = (file / "stdout.txt").read_text(
102
- encoding="utf-8"
165
+ if (file / "stderr.txt").exists():
166
+ data["stderr"] = (file / "stderr.txt").read_text(encoding="utf-8")
167
+
168
+ if (file / "metadata.json").exists():
169
+ data["meta"] = [
170
+ json.loads(line)
171
+ for line in (
172
+ (file / "metadata.json")
173
+ .read_text(encoding="utf-8")
174
+ .splitlines()
103
175
  )
176
+ ]
104
177
 
105
- yield data
178
+ return cls.model_validate(data)
106
179
 
107
- @classmethod
108
- def find_log_with_id(cls, run_id: str) -> dict[str, str]:
109
- file: Path = config.log_path / f"run_id={run_id}"
110
- data: dict[str, str] = {}
111
180
 
112
- if (file / "stdout.txt").exists():
113
- data["stdout"] = (file / "stdout.txt").read_text(encoding="utf-8")
181
+ class FileTraceLog(BaseTraceLog): # pragma: no cov
182
+ """Trace Log object that write file to the local storage."""
114
183
 
115
- if (file / "stderr.txt").exists():
116
- data["stdout"] = (file / "stdout.txt").read_text(encoding="utf-8")
184
+ @classmethod
185
+ def find_logs(cls) -> Iterator[TraceData]: # pragma: no cov
186
+ for file in sorted(
187
+ config.log_path.glob("./run_id=*"),
188
+ key=lambda f: f.lstat().st_mtime,
189
+ ):
190
+ yield TraceData.from_path(file)
117
191
 
118
- return data
192
+ @classmethod
193
+ def find_log_with_id(
194
+ cls, run_id: str, force_raise: bool = True
195
+ ) -> TraceData:
196
+ file: Path = config.log_path / f"run_id={run_id}"
197
+ if file.exists():
198
+ return TraceData.from_path(file)
199
+ elif force_raise:
200
+ raise FileNotFoundError(
201
+ f"Trace log on path 'run_id={run_id}' does not found."
202
+ )
203
+ return {}
119
204
 
120
205
  @property
121
- def log_file(self) -> Path:
206
+ def pointer(self) -> Path:
122
207
  log_file: Path = (
123
208
  config.log_path / f"run_id={self.parent_run_id or self.run_id}"
124
209
  )
@@ -128,7 +213,10 @@ class FileTraceLog(BaseTraceLog): # pragma: no cov
128
213
 
129
214
  @property
130
215
  def cut_id(self) -> str:
131
- """Combine cutting ID of parent running ID if it set."""
216
+ """Combine cutting ID of parent running ID if it set.
217
+
218
+ :rtype: str
219
+ """
132
220
  cut_run_id: str = cut_id(self.run_id)
133
221
  if not self.parent_run_id:
134
222
  return f"{cut_run_id} -> {' ' * 6}"
@@ -137,16 +225,26 @@ class FileTraceLog(BaseTraceLog): # pragma: no cov
137
225
  return f"{cut_parent_run_id} -> {cut_run_id}"
138
226
 
139
227
  def make_message(self, message: str) -> str:
228
+ """Prepare and Make a message before write and log processes.
229
+
230
+ :param message: A message that want to prepare and make before.
231
+
232
+ :rtype: str
233
+ """
140
234
  return f"({self.cut_id}) {message}"
141
235
 
142
236
  def writer(self, message: str, is_err: bool = False) -> None:
143
- """The path of logging data will store by format:
237
+ """Write a trace message after making to target file and write metadata
238
+ in the same path of standard files.
239
+
240
+ The path of logging data will store by format:
144
241
 
242
+ ... ./logs/run_id=<run-id>/metadata.json
145
243
  ... ./logs/run_id=<run-id>/stdout.txt
146
244
  ... ./logs/run_id=<run-id>/stderr.txt
147
245
 
148
- :param message:
149
- :param is_err:
246
+ :param message: A message after making.
247
+ :param is_err: A flag for writing with an error trace or not.
150
248
  """
151
249
  if not config.enable_write_log:
152
250
  return
@@ -160,34 +258,47 @@ class FileTraceLog(BaseTraceLog): # pragma: no cov
160
258
  thread: int = get_ident()
161
259
 
162
260
  write_file: str = "stderr.txt" if is_err else "stdout.txt"
163
- with (self.log_file / write_file).open(
261
+ write_data: dict[str, Union[str, int]] = {
262
+ "datetime": get_dt_tznow().strftime(config.log_datetime_format),
263
+ "process": process,
264
+ "thread": thread,
265
+ "message": message,
266
+ "filename": filename,
267
+ "lineno": lineno,
268
+ }
269
+
270
+ with (self.pointer / write_file).open(mode="at", encoding="utf-8") as f:
271
+ msg_fmt: str = f"{config.log_format_file}\n"
272
+ f.write(msg_fmt.format(**write_data))
273
+
274
+ with (self.pointer / "metadata.json").open(
164
275
  mode="at", encoding="utf-8"
165
276
  ) as f:
166
- msg_fmt: str = f"{config.log_format_file}\n"
167
- print(msg_fmt)
168
277
  f.write(
169
- msg_fmt.format(
170
- **{
171
- "datetime": get_dt_tznow().strftime(
172
- config.log_datetime_format
173
- ),
174
- "process": process,
175
- "thread": thread,
176
- "message": message,
177
- "filename": filename,
178
- "lineno": lineno,
179
- }
180
- )
278
+ json.dumps({"mode": write_file.split(".")[0]} | write_data)
279
+ + "\n"
181
280
  )
182
281
 
183
282
 
184
283
  class SQLiteTraceLog(BaseTraceLog): # pragma: no cov
284
+ """Trace Log object that write trace log to the SQLite database file."""
285
+
286
+ table_name: ClassVar[str] = "audits"
287
+ schemas: ClassVar[
288
+ str
289
+ ] = """
290
+ run_id int,
291
+ stdout str,
292
+ stderr str,
293
+ update datetime
294
+ primary key ( run_id )
295
+ """
185
296
 
186
297
  @classmethod
187
- def find_logs(cls) -> Iterator[dict[str, str]]: ...
298
+ def find_logs(cls) -> Iterator[DictStr]: ...
188
299
 
189
300
  @classmethod
190
- def find_log_with_id(cls, run_id: str) -> dict[str, str]: ...
301
+ def find_log_with_id(cls, run_id: str) -> DictStr: ...
191
302
 
192
303
  def make_message(self, message: str) -> str: ...
193
304
 
@@ -203,6 +314,7 @@ TraceLog = Union[
203
314
  def get_trace(
204
315
  run_id: str, parent_run_id: str | None = None
205
316
  ) -> TraceLog: # pragma: no cov
317
+ """Get dynamic TraceLog object from the setting config."""
206
318
  if config.log_path.is_file():
207
319
  return SQLiteTraceLog(run_id, parent_run_id=parent_run_id)
208
320
  return FileTraceLog(run_id, parent_run_id=parent_run_id)
@@ -3,33 +3,37 @@
3
3
  # Licensed under the MIT License. See LICENSE in the project root for
4
4
  # license information.
5
5
  # ------------------------------------------------------------------------------
6
- """This module include all Param Models that use for parsing incoming parameters
7
- that pass to the Workflow and Schedule objects.
6
+ """This module includes all Param Pydantic Models that are used for parsing
7
+ incoming parameters that were passed to the Workflow and Schedule objects before
8
+ execution or release methods.
9
+
10
+ The Param model allows you to handle validation and preparation steps before
11
+ passing an input value to target execution method.
8
12
  """
9
13
  from __future__ import annotations
10
14
 
11
15
  import decimal
12
- import logging
13
16
  from abc import ABC, abstractmethod
14
17
  from datetime import date, datetime
15
- from typing import Annotated, Any, Literal, Optional, Union
18
+ from typing import Annotated, Any, Literal, Optional, TypeVar, Union
16
19
 
17
20
  from pydantic import BaseModel, Field
18
21
 
19
22
  from .__types import TupleStr
20
23
  from .exceptions import ParamValueException
21
- from .utils import get_dt_now
22
-
23
- logger = logging.getLogger("ddeutil.workflow")
24
+ from .utils import get_d_now, get_dt_now
24
25
 
25
26
  __all__: TupleStr = (
26
27
  "ChoiceParam",
27
28
  "DatetimeParam",
29
+ "DateParam",
28
30
  "IntParam",
29
31
  "Param",
30
32
  "StrParam",
31
33
  )
32
34
 
35
+ T = TypeVar("T")
36
+
33
37
 
34
38
  class BaseParam(BaseModel, ABC):
35
39
  """Base Parameter that use to make any Params Models. The parameter type
@@ -46,7 +50,7 @@ class BaseParam(BaseModel, ABC):
46
50
  type: str = Field(description="A type of parameter.")
47
51
 
48
52
  @abstractmethod
49
- def receive(self, value: Optional[Any] = None) -> Any:
53
+ def receive(self, value: Optional[T] = None) -> T:
50
54
  raise NotImplementedError(
51
55
  "Receive value and validate typing before return valid value."
52
56
  )
@@ -73,17 +77,42 @@ class DefaultParam(BaseParam):
73
77
  )
74
78
 
75
79
 
76
- # TODO: Not implement this parameter yet
77
80
  class DateParam(DefaultParam): # pragma: no cov
78
- """Date parameter."""
81
+ """Date parameter model."""
79
82
 
80
83
  type: Literal["date"] = "date"
84
+ default: date = Field(default_factory=get_d_now)
85
+
86
+ def receive(self, value: Optional[str | datetime | date] = None) -> date:
87
+ """Receive value that match with date. If an input value pass with
88
+ None, it will use default value instead.
81
89
 
82
- def receive(self, value: Optional[str | date] = None) -> date: ...
90
+ :param value: A value that want to validate with date parameter type.
91
+
92
+ :rtype: date
93
+ """
94
+ if value is None:
95
+ return self.default
96
+
97
+ if isinstance(value, datetime):
98
+ return value.date()
99
+ elif isinstance(value, date):
100
+ return value
101
+ elif not isinstance(value, str):
102
+ raise ParamValueException(
103
+ f"Value that want to convert to date does not support for "
104
+ f"type: {type(value)}"
105
+ )
106
+ try:
107
+ return date.fromisoformat(value)
108
+ except ValueError:
109
+ raise ParamValueException(
110
+ f"Invalid the ISO format string for date: {value!r}"
111
+ ) from None
83
112
 
84
113
 
85
114
  class DatetimeParam(DefaultParam):
86
- """Datetime parameter."""
115
+ """Datetime parameter model."""
87
116
 
88
117
  type: Literal["datetime"] = "datetime"
89
118
  default: datetime = Field(default_factory=get_dt_now)
@@ -94,6 +123,7 @@ class DatetimeParam(DefaultParam):
94
123
 
95
124
  :param value: A value that want to validate with datetime parameter
96
125
  type.
126
+
97
127
  :rtype: datetime
98
128
  """
99
129
  if value is None:
@@ -112,7 +142,7 @@ class DatetimeParam(DefaultParam):
112
142
  return datetime.fromisoformat(value)
113
143
  except ValueError:
114
144
  raise ParamValueException(
115
- f"Invalid the ISO format string: {value!r}"
145
+ f"Invalid the ISO format string for datetime: {value!r}"
116
146
  ) from None
117
147
 
118
148
 
@@ -192,7 +222,7 @@ class ChoiceParam(BaseParam):
192
222
 
193
223
 
194
224
  # TODO: Not implement this parameter yet
195
- class MappingParam(DefaultParam): # pragma: no cov
225
+ class MapParam(DefaultParam): # pragma: no cov
196
226
 
197
227
  type: Literal["map"] = "map"
198
228
  default: dict[Any, Any] = Field(default_factory=dict)
@@ -208,15 +238,22 @@ class ArrayParam(DefaultParam): # pragma: no cov
208
238
  type: Literal["array"] = "array"
209
239
  default: list[Any] = Field(default_factory=list)
210
240
 
211
- def receive(self, value: Optional[list[Any]] = None) -> list[Any]:
241
+ def receive(self, value: Optional[list[T]] = None) -> list[T]:
212
242
  if value is None:
213
243
  return self.default
244
+ if not isinstance(value, list):
245
+ raise ParamValueException(
246
+ f"Value that want to convert to array does not support for "
247
+ f"type: {type(value)}"
248
+ )
249
+ return value
214
250
 
215
251
 
216
252
  Param = Annotated[
217
253
  Union[
218
254
  ChoiceParam,
219
255
  DatetimeParam,
256
+ DateParam,
220
257
  IntParam,
221
258
  StrParam,
222
259
  ],
@@ -20,12 +20,9 @@ from pydantic.functional_validators import model_validator
20
20
  from typing_extensions import Self
21
21
 
22
22
  from .__types import DictData, TupleStr
23
- from .conf import get_logger
24
23
  from .logs import TraceLog, get_dt_tznow, get_trace
25
24
  from .utils import gen_id
26
25
 
27
- logger = get_logger("ddeutil.workflow")
28
-
29
26
  __all__: TupleStr = (
30
27
  "Result",
31
28
  "Status",
@@ -66,9 +63,10 @@ class Result:
66
63
  context: DictData = field(default_factory=dict)
67
64
  run_id: Optional[str] = field(default_factory=default_gen_id)
68
65
  parent_run_id: Optional[str] = field(default=None, compare=False)
69
- event: Event = field(default_factory=Event, compare=False)
70
66
  ts: datetime = field(default_factory=get_dt_tznow, compare=False)
71
- trace: Optional[TraceLog] = field(default=None)
67
+
68
+ event: Event = field(default_factory=Event, compare=False, repr=False)
69
+ trace: Optional[TraceLog] = field(default=None, compare=False, repr=False)
72
70
 
73
71
  @classmethod
74
72
  def construct_with_rs_or_id(
@@ -83,6 +83,11 @@ class ScheduleWorkflow(BaseModel):
83
83
  the Schedule model. it should not use Workflow model directly because on the
84
84
  schedule config it can adjust crontab value that different from the Workflow
85
85
  model.
86
+
87
+ This on field is not equal to the on field of the Workflow model, but it
88
+ uses the same logic to generate the running release date with a crontab
89
+ object. It is used to override the on field if the schedule time has changed
90
+ but you do not want to change it on the workflow model.
86
91
  """
87
92
 
88
93
  alias: Optional[str] = Field(
@@ -97,7 +102,7 @@ class ScheduleWorkflow(BaseModel):
97
102
  values: DictData = Field(
98
103
  default_factory=dict,
99
104
  description=(
100
- "A value that want to pass to the workflow parameters when "
105
+ "A value that want to pass to the workflow params field when auto "
101
106
  "calling release method."
102
107
  ),
103
108
  alias="params",
@@ -222,8 +227,8 @@ class ScheduleWorkflow(BaseModel):
222
227
  class Schedule(BaseModel):
223
228
  """Schedule Pydantic model that use to run with any scheduler package.
224
229
 
225
- It does not equal the on value in Workflow model, but it uses same logic
226
- to running release date with crontab interval.
230
+ The workflows field of this model includes ScheduleWorkflow objects that
231
+ enhance the workflow object by adding the alias and values fields.
227
232
  """
228
233
 
229
234
  desc: Optional[str] = Field(
@@ -359,12 +364,16 @@ ReturnResultOrCancel = Callable[P, ResultOrCancel]
359
364
  DecoratorCancelJob = Callable[[ReturnResultOrCancel], ReturnResultOrCancel]
360
365
 
361
366
 
362
- def catch_exceptions(cancel_on_failure: bool = False) -> DecoratorCancelJob:
367
+ def catch_exceptions(
368
+ cancel_on_failure: bool = False,
369
+ parent_run_id: str | None = None,
370
+ ) -> DecoratorCancelJob:
363
371
  """Catch exception error from scheduler job that running with schedule
364
372
  package and return CancelJob if this function raise an error.
365
373
 
366
374
  :param cancel_on_failure: A flag that allow to return the CancelJob or not
367
375
  it will raise.
376
+ :param parent_run_id:
368
377
 
369
378
  :rtype: DecoratorCancelJob
370
379
  """
@@ -375,10 +384,17 @@ def catch_exceptions(cancel_on_failure: bool = False) -> DecoratorCancelJob:
375
384
 
376
385
  @wraps(func)
377
386
  def wrapper(*args: P.args, **kwargs: P.kwargs) -> ResultOrCancel:
387
+
378
388
  try:
379
389
  return func(*args, **kwargs)
390
+
380
391
  except Exception as err:
381
- logger.exception(err)
392
+ if parent_run_id:
393
+ (
394
+ Result(parent_run_id=parent_run_id).trace.exception(
395
+ str(err)
396
+ )
397
+ )
382
398
  if cancel_on_failure:
383
399
  return CancelJob
384
400
  raise err
@@ -399,13 +415,13 @@ class ReleaseThread(TypedDict):
399
415
  ReleaseThreads = dict[str, ReleaseThread]
400
416
 
401
417
 
402
- @catch_exceptions(cancel_on_failure=True)
403
418
  def schedule_task(
404
419
  tasks: list[WorkflowTask],
405
420
  stop: datetime,
406
421
  queue: dict[str, ReleaseQueue],
407
422
  threads: ReleaseThreads,
408
423
  audit: type[Audit],
424
+ *,
409
425
  parent_run_id: str | None = None,
410
426
  ) -> ResultOrCancel:
411
427
  """Schedule task function that generate thread of workflow task release
@@ -466,7 +482,9 @@ def schedule_task(
466
482
  current_release: datetime = current_date.replace(
467
483
  second=0, microsecond=0
468
484
  )
469
- if (first_date := q.first_queue.date) > current_release:
485
+ if (
486
+ first_date := q.first_queue.date
487
+ ) > current_release: # pragma: no cov
470
488
  result.trace.debug(
471
489
  f"[WORKFLOW]: Skip schedule "
472
490
  f"{first_date:%Y-%m-%d %H:%M:%S} for : {task.alias!r}"
@@ -491,8 +509,14 @@ def schedule_task(
491
509
  # job.
492
510
  thread_name: str = f"{task.alias}|{release.date:%Y%m%d%H%M}"
493
511
  thread: Thread = Thread(
494
- target=catch_exceptions(cancel_on_failure=True)(task.release),
495
- kwargs={"release": release, "queue": q, "audit": audit},
512
+ target=catch_exceptions(
513
+ cancel_on_failure=True,
514
+ )(task.release),
515
+ kwargs={
516
+ "release": release,
517
+ "queue": q,
518
+ "audit": audit,
519
+ },
496
520
  name=thread_name,
497
521
  daemon=True,
498
522
  )
@@ -508,8 +532,8 @@ def schedule_task(
508
532
  delay()
509
533
 
510
534
  result.trace.debug(
511
- f"[SCHEDULE]: End schedule task at {current_date:%Y-%m-%d %H:%M:%S} "
512
- f"{'=' * 60}"
535
+ f"[SCHEDULE]: End schedule task that run since "
536
+ f"{current_date:%Y-%m-%d %H:%M:%S} {'=' * 30}"
513
537
  )
514
538
  return result.catch(
515
539
  status=Status.SUCCESS, context={"task_date": current_date}
@@ -575,7 +599,10 @@ def scheduler_pending(
575
599
  scheduler.every(1)
576
600
  .minutes.at(":02")
577
601
  .do(
578
- schedule_task,
602
+ catch_exceptions(
603
+ cancel_on_failure=True,
604
+ parent_run_id=result.parent_run_id,
605
+ )(schedule_task),
579
606
  tasks=tasks,
580
607
  stop=stop,
581
608
  queue=queue,
@@ -594,6 +621,7 @@ def scheduler_pending(
594
621
  .do(
595
622
  monitor,
596
623
  threads=threads,
624
+ parent_run_id=result.parent_run_id,
597
625
  )
598
626
  .tag("monitor")
599
627
  )
@@ -617,7 +645,7 @@ def scheduler_pending(
617
645
  "running in background."
618
646
  )
619
647
  delay(10)
620
- monitor(threads)
648
+ monitor(threads, parent_run_id=result.parent_run_id)
621
649
 
622
650
  break
623
651
 
@@ -752,7 +780,7 @@ def schedule_runner(
752
780
 
753
781
  # NOTE: Raise error when it has any error from schedule_control.
754
782
  if err := future.exception():
755
- logger.error(str(err))
783
+ result.trace.error(str(err))
756
784
  raise WorkflowException(str(err)) from err
757
785
 
758
786
  rs: Result = future.result(timeout=1)