ddeutil-workflow 0.0.37__py3-none-any.whl → 0.0.39__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ddeutil/workflow/job.py CHANGED
@@ -27,7 +27,7 @@ from threading import Event
 from typing import Annotated, Any, Literal, Optional, Union
 
 from ddeutil.core import freeze_args
-from pydantic import BaseModel, ConfigDict, Field
+from pydantic import BaseModel, ConfigDict, Discriminator, Field, Tag
 from pydantic.functional_validators import field_validator, model_validator
 from typing_extensions import Self
 
@@ -40,13 +40,8 @@ from .exceptions import (
 )
 from .result import Result, Status
 from .stages import Stage
-from .templates import has_template
-from .utils import (
-    cross_product,
-    dash2underscore,
-    filter_func,
-    gen_id,
-)
+from .templates import has_template, param2template
+from .utils import cross_product, filter_func, gen_id
 
 MatrixFilter = list[dict[str, Union[str, int]]]
 
@@ -55,6 +50,7 @@ __all__: TupleStr = (
     "Strategy",
     "Job",
     "TriggerRules",
+    "TriggerState",
     "RunsOn",
     "RunsOnLocal",
     "RunsOnSelfHosted",
@@ -155,7 +151,7 @@ class Strategy(BaseModel):
 
     fail_fast: bool = Field(
         default=False,
-        serialization_alias="fail-fast",
+        alias="fail-fast",
     )
     max_parallel: int = Field(
         default=1,
@@ -164,7 +160,7 @@ class Strategy(BaseModel):
             "The maximum number of executor thread pool that want to run "
             "parallel"
         ),
-        serialization_alias="max-parallel",
+        alias="max-parallel",
     )
     matrix: Matrix = Field(
         default_factory=dict,
@@ -181,18 +177,6 @@ class Strategy(BaseModel):
         description="A list of exclude matrix that want to filter-out.",
     )
 
-    @model_validator(mode="before")
-    def __prepare_keys(cls, values: DictData) -> DictData:
-        """Rename key that use dash to underscore because Python does not
-        support this character exist in any variable name.
-
-        :param values: A parsing values to these models
-        :rtype: DictData
-        """
-        dash2underscore("max-parallel", values)
-        dash2underscore("fail-fast", values)
-        return values
-
     def is_set(self) -> bool:
         """Return True if this strategy was set from yaml template.
 
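The removed `__prepare_keys` validator above is covered by the new `alias` arguments: pydantic v2 resolves aliases at validation time, so the dashed YAML keys parse directly. A minimal sketch of that behavior, outside the package source (the model name and sample keys here are illustrative):

from pydantic import BaseModel, ConfigDict, Field

class StrategySketch(BaseModel):
    # populate_by_name lets callers use either "fail_fast" or "fail-fast".
    model_config = ConfigDict(populate_by_name=True)

    fail_fast: bool = Field(default=False, alias="fail-fast")
    max_parallel: int = Field(default=1, alias="max-parallel")

# The dashed keys validate directly; no dash2underscore step is needed.
print(StrategySketch.model_validate({"fail-fast": True, "max-parallel": 4}))
# fail_fast=True max_parallel=4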
@@ -221,6 +205,16 @@ class TriggerRules(str, Enum):
     none_skipped: str = "none_skipped"
 
 
+class TriggerState(str, Enum):
+    waiting: str = "waiting"
+    passed: str = "passed"
+    skipped: str = "skipped"
+    failed: str = "failed"
+
+    def is_waiting(self):
+        return self.value == "waiting"
+
+
 class RunsOnType(str, Enum):
     """Runs-On enum object."""
 
@@ -268,13 +262,17 @@ class RunsOnK8s(BaseRunsOn):  # pragma: no cov
     type: Literal[RunsOnType.K8S] = Field(default=RunsOnType.K8S)
 
 
+def get_discriminator_runs_on(model: dict[str, Any]) -> str:
+    return model.get("type", "local")
+
+
 RunsOn = Annotated[
     Union[
-        RunsOnLocal,
-        RunsOnSelfHosted,
-        RunsOnK8s,
+        Annotated[RunsOnK8s, Tag(RunsOnType.K8S)],
+        Annotated[RunsOnSelfHosted, Tag(RunsOnType.SELF_HOSTED)],
+        Annotated[RunsOnLocal, Tag(RunsOnType.LOCAL)],
     ],
-    Field(discriminator="type"),
+    Discriminator(get_discriminator_runs_on),
 ]
 
 
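A minimal sketch, outside the package source, of the callable-discriminator pattern used above: with pydantic `Discriminator`/`Tag` (v2.5+), the union can resolve even when the `type` key is absent by falling back to a default tag (the model names here are illustrative):

from typing import Annotated, Literal, Union

from pydantic import BaseModel, Discriminator, Tag, TypeAdapter

class Local(BaseModel):
    type: Literal["local"] = "local"

class K8s(BaseModel):
    type: Literal["k8s"] = "k8s"

def pick_runs_on(value: dict) -> str:
    # Fall back to the "local" tag when no "type" key is given.
    return value.get("type", "local")

RunsOnSketch = Annotated[
    Union[Annotated[K8s, Tag("k8s")], Annotated[Local, Tag("local")]],
    Discriminator(pick_runs_on),
]

print(TypeAdapter(RunsOnSketch).validate_python({}))               # type='local'
print(TypeAdapter(RunsOnSketch).validate_python({"type": "k8s"}))  # type='k8s'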
@@ -319,7 +317,12 @@ class Job(BaseModel):
     runs_on: RunsOn = Field(
         default_factory=RunsOnLocal,
         description="A target node for this job to use for execution.",
-        serialization_alias="runs-on",
+        alias="runs-on",
+    )
+    condition: Optional[str] = Field(
+        default=None,
+        description="A job condition statement to allow job executable.",
+        alias="if",
     )
     stages: list[Stage] = Field(
         default_factory=list,
@@ -327,8 +330,11 @@ class Job(BaseModel):
     )
     trigger_rule: TriggerRules = Field(
         default=TriggerRules.all_success,
-        description="A trigger rule of tracking needed jobs.",
-        serialization_alias="trigger-rule",
+        description=(
+            "A trigger rule of tracking needed jobs if feature will use when "
+            "the `raise_error` did not set from job and stage executions."
+        ),
+        alias="trigger-rule",
     )
     needs: list[str] = Field(
         default_factory=list,
@@ -339,18 +345,6 @@ class Job(BaseModel):
         description="A strategy matrix that want to generate.",
     )
 
-    @model_validator(mode="before")
-    def __prepare_keys__(cls, values: DictData) -> DictData:
-        """Rename key that use dash to underscore because Python does not
-        support this character exist in any variable name.
-
-        :param values: A passing value that coming for initialize this object.
-        :rtype: DictData
-        """
-        dash2underscore("runs-on", values)
-        dash2underscore("trigger-rule", values)
-        return values
-
     @field_validator("desc", mode="after")
     def ___prepare_desc__(cls, value: str) -> str:
         """Prepare description string that was created on a template.
@@ -403,12 +397,87 @@ class Job(BaseModel):
                 return stage
         raise ValueError(f"Stage ID {stage_id} does not exists")
 
-    def check_needs(self, jobs: dict[str, Any]) -> bool:
+    def check_needs(
+        self, jobs: dict[str, Any]
+    ) -> TriggerState:  # pragma: no cov
         """Return True if job's need exists in an input list of job's ID.
 
+        :param jobs: A mapping of job model and its ID.
+
+        :rtype: TriggerState
+        """
+        if not self.needs:
+            return TriggerState.passed
+
+        def make_return(result: bool) -> TriggerState:
+            return TriggerState.passed if result else TriggerState.failed
+
+        need_exist: dict[str, Any] = {
+            need: jobs[need] for need in self.needs if need in jobs
+        }
+        if len(need_exist) != len(self.needs):
+            return TriggerState.waiting
+        elif all("skipped" in need_exist[job] for job in need_exist):
+            return TriggerState.skipped
+        elif self.trigger_rule == TriggerRules.all_done:
+            return TriggerState.passed
+        elif self.trigger_rule == TriggerRules.all_success:
+            rs = all(
+                k not in need_exist[job]
+                for k in ("errors", "skipped")
+                for job in need_exist
+            )
+        elif self.trigger_rule == TriggerRules.all_failed:
+            rs = all("errors" in need_exist[job] for job in need_exist)
+        elif self.trigger_rule == TriggerRules.one_success:
+            rs = sum(
+                k not in need_exist[job]
+                for k in ("errors", "skipped")
+                for job in need_exist
+            ) + 1 == len(self.needs)
+        elif self.trigger_rule == TriggerRules.one_failed:
+            rs = sum("errors" in need_exist[job] for job in need_exist) == 1
+        elif self.trigger_rule == TriggerRules.none_skipped:
+            rs = all("skipped" not in need_exist[job] for job in need_exist)
+        elif self.trigger_rule == TriggerRules.none_failed:
+            rs = all("errors" not in need_exist[job] for job in need_exist)
+        else:  # pragma: no cov
+            raise NotImplementedError(
+                f"Trigger rule: {self.trigger_rule} does not support yet."
+            )
+        return make_return(rs)
+
+    def is_skipped(self, params: DictData | None = None) -> bool:
+        """Return true if condition of this job do not correct. This process
+        use build-in eval function to execute the if-condition.
+
+        :raise JobException: When it has any error raise from the eval
+            condition statement.
+        :raise JobException: When return type of the eval condition statement
+            does not return with boolean type.
+
+        :param params: (DictData) A parameters that want to pass to condition
+            template.
+
         :rtype: bool
         """
-        return all(need in jobs for need in self.needs)
+        if self.condition is None:
+            return False
+
+        params: DictData = {} if params is None else params
+
+        try:
+            # WARNING: The eval build-in function is very dangerous. So, it
+            #   should use the `re` module to validate eval-string before
+            #   running.
+            rs: bool = eval(
+                param2template(self.condition, params), globals() | params, {}
+            )
+            if not isinstance(rs, bool):
+                raise TypeError("Return type of condition does not be boolean")
+            return not rs
+        except Exception as err:
+            raise JobException(f"{err.__class__.__name__}: {err}") from err
 
     def set_outputs(self, output: DictData, to: DictData) -> DictData:
         """Set an outputs from execution process to the received context. The
@@ -441,15 +510,14 @@ class Job(BaseModel):
 
         :rtype: DictData
         """
+        if "jobs" not in to:
+            to["jobs"] = {}
+
         if self.id is None and not config.job_default_id:
             raise JobException(
                 "This job do not set the ID before setting execution output."
             )
 
-        # NOTE: Create jobs key to receive an output from the job execution.
-        if "jobs" not in to:
-            to["jobs"] = {}
-
         # NOTE: If the job ID did not set, it will use index of jobs key
         #   instead.
         _id: str = self.id or str(len(to["jobs"]) + 1)
@@ -458,11 +526,14 @@ class Job(BaseModel):
             {"errors": output.pop("errors", {})} if "errors" in output else {}
         )
 
-        to["jobs"][_id] = (
-            {"strategies": output, **errors}
-            if self.strategy.is_set()
-            else {**output.get(next(iter(output), "DUMMY"), {}), **errors}
-        )
+        if "SKIP" in output:  # pragma: no cov
+            to["jobs"][_id] = output["SKIP"]
+        elif self.strategy.is_set():
+            to["jobs"][_id] = {"strategies": output, **errors}
+        else:
+            _output = output.get(next(iter(output), "FIRST"), {})
+            _output.pop("matrix", {})
+            to["jobs"][_id] = {**_output, **errors}
         return to
 
     def execute(
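Roughly, the reshaped `set_outputs` above produces contexts like the following (job and strategy IDs are illustrative, not taken from a real run): strategy jobs keep per-strategy contexts under a `strategies` key, while single-strategy jobs are flattened and have their `matrix` key popped:

# Job with a strategy matrix set.
with_strategy = {
    "jobs": {
        "extract-job": {
            "strategies": {
                "2150810470": {"matrix": {"table": "customer"}, "stages": {}},
                "4855178605": {"matrix": {"table": "sales"}, "stages": {}},
            },
        },
    },
}

# Job without a strategy: the single context is flattened under the job ID
# and the "matrix" key is removed.
without_strategy = {
    "jobs": {
        "extract-job": {"stages": {}},
    },
}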
@@ -472,33 +543,46 @@ class Job(BaseModel):
         run_id: str | None = None,
         parent_run_id: str | None = None,
         result: Result | None = None,
+        event: Event | None = None,
     ) -> Result:
         """Job execution with passing dynamic parameters from the workflow
         execution. It will generate matrix values at the first step and run
         multithread on this metrics to the `stages` field of this job.
 
         :param params: An input parameters that use on job execution.
-        :param run_id: A job running ID for this execution.
-        :param parent_run_id: A parent workflow running ID for this release.
+        :param run_id: (str) A job running ID.
+        :param parent_run_id: (str) A parent workflow running ID.
         :param result: (Result) A result object for keeping context and status
             data.
+        :param event: (Event) An event manager that pass to the
+            PoolThreadExecutor.
 
         :rtype: Result
         """
-        if result is None:  # pragma: no cov
-            result: Result = Result(
-                run_id=(run_id or gen_id(self.id or "", unique=True)),
-                parent_run_id=parent_run_id,
-            )
-        elif parent_run_id:  # pragma: no cov
-            result.set_parent_run_id(parent_run_id)
+        result: Result = Result.construct_with_rs_or_id(
+            result,
+            run_id=run_id,
+            parent_run_id=parent_run_id,
+            id_logic=(self.id or "not-set"),
+        )
 
         if self.runs_on.type == RunsOnType.LOCAL:
             return local_execute(
                 job=self,
                 params=params,
                 result=result,
+                event=event,
             )
+        elif self.runs_on.type == RunsOnType.SELF_HOSTED:  # pragma: no cov
+            pass
+        elif self.runs_on.type == RunsOnType.K8S:  # pragma: no cov
+            pass
+
+        # pragma: no cov
+        result.trace.error(
+            f"[JOB]: Job executor does not support for runs-on type: "
+            f"{self.runs_on.type} yet"
+        )
         raise NotImplementedError(
             f"The job runs-on other type: {self.runs_on.type} does not "
             f"support yet."
@@ -512,6 +596,7 @@ def local_execute_strategy(
     *,
     result: Result | None = None,
     event: Event | None = None,
+    raise_error: bool = False,
 ) -> Result:
     """Local job strategy execution with passing dynamic parameters from the
     workflow execution to strategy matrix.
@@ -533,11 +618,12 @@ def local_execute_strategy(
     :param result: (Result) A result object for keeping context and status
         data.
     :param event: (Event) An event manager that pass to the PoolThreadExecutor.
+    :param raise_error: (bool) A flag that all this method raise error
 
     :rtype: Result
     """
-    if result is None:  # pragma: no cov
-        result: Result = Result(run_id=gen_id(job.id or "", unique=True))
+    if result is None:
+        result: Result = Result(run_id=gen_id(job.id or "not-set", unique=True))
 
     strategy_id: str = gen_id(strategy)
 
@@ -556,62 +642,44 @@ def local_execute_strategy(
     context: DictData = copy.deepcopy(params)
     context.update({"matrix": strategy, "stages": {}})
 
+    if strategy:
+        result.trace.info(f"[JOB]: Execute Strategy ID: {strategy_id}")
+        result.trace.info(f"[JOB]: ... Matrix: {strategy_id}")
+
     # IMPORTANT: The stage execution only run sequentially one-by-one.
     for stage in job.stages:
 
         if stage.is_skipped(params=context):
-            result.trace.info(f"[JOB]: Skip stage: {stage.iden!r}")
+            result.trace.info(f"[STAGE]: Skip stage: {stage.iden!r}")
+            stage.set_outputs(output={"skipped": True}, to=context)
             continue
 
-        result.trace.info(f"[JOB]: Execute stage: {stage.iden!r}")
-
-        # NOTE: Logging a matrix that pass on this stage execution.
-        if strategy:
-            result.trace.info(f"[JOB]: ... Matrix: {strategy}")
-
-        # NOTE: Force stop this execution if event was set from main
-        #   execution.
         if event and event.is_set():
             error_msg: str = (
                 "Job strategy was canceled from event that had set before "
                 "strategy execution."
            )
             return result.catch(
-                status=1,
+                status=Status.FAILED,
                 context={
                     strategy_id: {
                         "matrix": strategy,
-                        # NOTE: If job strategy executor use multithreading,
-                        #   it will not filter function object from context.
-                        # ---
-                        # "stages": filter_func(context.pop("stages", {})),
-                        #
                         "stages": context.pop("stages", {}),
-                        "errors": {
-                            "class": JobException(error_msg),
-                            "name": "JobException",
-                            "message": error_msg,
-                        },
+                        "errors": JobException(error_msg).to_dict(),
                     },
                 },
             )
 
         # PARAGRAPH:
         #
-        #       I do not use below syntax because `params` dict be the
-        #   reference memory pointer, and it was changed when I action
-        #   anything like update or re-construct this.
-        #
-        #       ... params |= stage.execute(params=params)
-        #
-        #       This step will add the stage result to `stages` key in
-        #   that stage id. It will have structure like;
+        #       This step will add the stage result to `stages` key in that
+        #   stage id. It will have structure like;
         #
         #   {
         #       "params": { ... },
         #       "jobs": { ... },
         #       "matrix": { ... },
-        #       "stages": { { "stage-id-1": ... }, ... }
+        #       "stages": { { "stage-id-01": { "outputs": { ... } } }, ... }
        #   }
        #
        # IMPORTANT:
@@ -631,30 +699,23 @@ def local_execute_strategy(
             )
         except (StageException, UtilException) as err:
             result.trace.error(f"[JOB]: {err.__class__.__name__}: {err}")
-            if config.job_raise_error:
+            if raise_error or config.job_raise_error:
                 raise JobException(
                     f"Stage execution error: {err.__class__.__name__}: "
                     f"{err}"
                 ) from None
 
             return result.catch(
-                status=1,
+                status=Status.FAILED,
                 context={
                     strategy_id: {
                         "matrix": strategy,
                         "stages": context.pop("stages", {}),
-                        "errors": {
-                            "class": err,
-                            "name": err.__class__.__name__,
-                            "message": f"{err.__class__.__name__}: {err}",
-                        },
+                        "errors": err.to_dict(),
                     },
                 },
             )
 
-        # NOTE: Remove the current stage object for saving memory.
-        del stage
-
     return result.catch(
         status=Status.SUCCESS,
         context={
@@ -673,44 +734,81 @@ def local_execute(
     run_id: str | None = None,
     parent_run_id: str | None = None,
     result: Result | None = None,
+    event: Event | None = None,
+    raise_error: bool = False,
 ) -> Result:
     """Local job execution with passing dynamic parameters from the workflow
     execution. It will generate matrix values at the first step and run
     multithread on this metrics to the `stages` field of this job.
 
-    :param job: A job model that want to execute.
-    :param params: An input parameters that use on job execution.
-    :param run_id: A job running ID for this execution.
-    :param parent_run_id: A parent workflow running ID for this release.
+    This method does not raise any JobException if it runs with
+    multi-threading strategy.
+
+    :param job: (Job) A job model that want to execute.
+    :param params: (DictData) An input parameters that use on job execution.
+    :param run_id: (str) A job running ID for this execution.
+    :param parent_run_id: (str) A parent workflow running ID for this release.
     :param result: (Result) A result object for keeping context and status
         data.
+    :param event: (Event) An event manager that pass to the PoolThreadExecutor.
+    :param raise_error: (bool) A flag that all this method raise error to the
+        strategy execution.
 
     :rtype: Result
     """
-    if result is None:  # pragma: no cov
-        result: Result = Result(
-            run_id=(run_id or gen_id(job.id or "", unique=True)),
-            parent_run_id=parent_run_id,
-        )
-    elif parent_run_id:  # pragma: no cov
-        result.set_parent_run_id(parent_run_id)
+    result: Result = Result.construct_with_rs_or_id(
+        result,
+        run_id=run_id,
+        parent_run_id=parent_run_id,
+        id_logic=(job.id or "not-set"),
+    )
+    event: Event = Event() if event is None else event
 
     # NOTE: Normal Job execution without parallel strategy matrix. It uses
     #   for-loop to control strategy execution sequentially.
     if (not job.strategy.is_set()) or job.strategy.max_parallel == 1:
 
         for strategy in job.strategy.make():
-            result: Result = local_execute_strategy(
+
+            if event and event.is_set():  # pragma: no cov
+                return result.catch(
+                    status=Status.FAILED,
+                    context={
+                        "errors": JobException(
+                            "Job strategy was canceled from event that had set "
+                            "before strategy execution."
+                        ).to_dict()
+                    },
+                )
+
+            local_execute_strategy(
                 job=job,
                 strategy=strategy,
                 params=params,
                 result=result,
+                event=event,
+                raise_error=raise_error,
             )
 
         return result.catch(status=Status.SUCCESS)
 
-    # NOTE: Create event for cancel executor by trigger stop running event.
-    event: Event = Event()
+    fail_fast_flag: bool = job.strategy.fail_fast
+    ls: str = "Fail-Fast" if fail_fast_flag else "All-Completed"
+    result.trace.info(
+        f"[JOB]: Start multithreading: {job.strategy.max_parallel} threads "
+        f"with {ls} mode."
+    )
+
+    if event and event.is_set():  # pragma: no cov
+        return result.catch(
+            status=Status.FAILED,
+            context={
+                "errors": JobException(
+                    "Job strategy was canceled from event that had set "
+                    "before strategy execution."
+                ).to_dict()
+            },
+        )
 
     # IMPORTANT: Start running strategy execution by multithreading because
     #   it will run by strategy values without waiting previous execution.
@@ -727,52 +825,43 @@ def local_execute(
                 params=params,
                 result=result,
                 event=event,
+                raise_error=raise_error,
             )
             for strategy in job.strategy.make()
         ]
 
         context: DictData = {}
         status: Status = Status.SUCCESS
-        fail_fast_flag: bool = job.strategy.fail_fast
 
         if not fail_fast_flag:
             done = as_completed(futures, timeout=1800)
         else:
-            # NOTE: Get results from a collection of tasks with a timeout
-            #   that has the first exception.
             done, not_done = wait(
                 futures, timeout=1800, return_when=FIRST_EXCEPTION
             )
-            nd: str = (
-                f", the strategies do not run is {not_done}" if not_done else ""
-            )
-            result.trace.debug(f"[JOB]: Strategy is set Fail Fast{nd}")
 
-            # NOTE: Stop all running tasks with setting the event manager
-            #   and cancel any scheduled tasks.
             if len(done) != len(futures):
+                result.trace.warning(
+                    "[JOB]: Set the event for stop running stage."
+                )
                 event.set()
                 for future in not_done:
                     future.cancel()
 
+            nd: str = (
+                f", the strategies do not run is {not_done}" if not_done else ""
+            )
+            result.trace.debug(f"[JOB]: Strategy is set Fail Fast{nd}")
+
         for future in done:
             try:
                 future.result()
             except JobException as err:
                 status = Status.FAILED
-                ls: str = "Fail-Fast" if fail_fast_flag else "All-Completed"
                 result.trace.error(
                     f"[JOB]: {ls} Catch:\n\t{err.__class__.__name__}:"
                     f"\n\t{err}"
                 )
-                context.update(
-                    {
-                        "errors": {
-                            "class": err,
-                            "name": err.__class__.__name__,
-                            "message": f"{err.__class__.__name__}: {err}",
-                        },
-                    },
-                )
+                context.update({"errors": err.to_dict()})
 
         return result.catch(status=status, context=context)
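The fail-fast branch above follows the standard `concurrent.futures` pattern: wait for the first exception, set the shared event so running strategies can stop cooperatively, and cancel anything not started yet. A self-contained sketch of that pattern, independent of this package's Job and Result types:

from concurrent.futures import FIRST_EXCEPTION, ThreadPoolExecutor, wait
from threading import Event

def strategy_sketch(n: int, event: Event) -> int:
    if n == 3:
        raise RuntimeError("boom")
    if event.is_set():
        return -1  # cooperative cancel for work already running
    return n

event = Event()
with ThreadPoolExecutor(max_workers=2) as executor:
    futures = [executor.submit(strategy_sketch, n, event) for n in range(6)]
    done, not_done = wait(futures, return_when=FIRST_EXCEPTION)

    # Signal running work to stop and cancel anything not yet scheduled.
    if len(done) != len(futures):
        event.set()
        for future in not_done:
            future.cancel()

    for future in done:
        try:
            future.result()
        except RuntimeError as err:
            print(f"caught: {err}")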
ddeutil/workflow/logs.py CHANGED
@@ -234,7 +234,7 @@ class FileTraceLog(BaseTraceLog):  # pragma: no cov
         return f"({self.cut_id}) {message}"
 
     def writer(self, message: str, is_err: bool = False) -> None:
-        """ "Write a trace message after making to target file and write metadata
+        """Write a trace message after making to target file and write metadata
         in the same path of standard files.
 
         The path of logging data will store by format:
@@ -279,6 +279,11 @@ class FileTraceLog(BaseTraceLog):  # pragma: no cov
                 + "\n"
             )
 
+    async def awriter(
+        self, message: str, is_err: bool = False
+    ):  # pragma: no cov
+        """TODO: Use `aiofiles` for make writer method support async."""
+
 
 class SQLiteTraceLog(BaseTraceLog):  # pragma: no cov
     """Trace Log object that write trace log to the SQLite database file."""
@@ -91,7 +91,7 @@ class Result:
     @model_validator(mode="after")
     def __prepare_trace(self) -> Self:
         """Prepare trace field that want to pass after its initialize step."""
-        if self.trace is None:  # pragma: no cove
+        if self.trace is None:  # pragma: no cov
             self.trace: TraceLog = get_trace(self.run_id, self.parent_run_id)
 
         return self