ddeutil-workflow 0.0.32__py3-none-any.whl → 0.0.34__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
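Across this change set the workflow module swaps the `Log` class from `.conf` (`get_log`) for the new `Audit` class from `.audit` (`get_audit`), and threads a single `Result` object carrying `run_id` and `parent_run_id` through `release`, `poke`, `execute_job`, and `execute` in place of bare `run_id` strings and module-level `logger` calls. The sketch below is a minimal caller-side migration example, not part of the package; the absolute import paths and parameter values are assumptions inferred from the relative imports in this diff, and `wf` stands in for a `Workflow` instance you have already constructed.

from datetime import datetime

# Import paths assumed from the relative imports shown in this diff.
from ddeutil.workflow import Workflow
from ddeutil.workflow.audit import get_audit   # replaces conf.get_log from 0.0.32
from ddeutil.workflow.result import Result

wf: Workflow = ...  # construct/load the workflow however you already do

# 0.0.32: wf.release(release=..., params=..., log=get_log())
# 0.0.34: pass an Audit class and, optionally, a parent running ID.
rs: Result = wf.release(
    release=datetime(2024, 1, 1, 1),   # a datetime or a Release object
    params={"example-param": 1},       # illustrative parameter key
    audit=get_audit(),
    parent_run_id=None,
)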
@@ -34,7 +34,7 @@ from functools import partial, total_ordering
  from heapq import heappop, heappush
  from queue import Queue
  from textwrap import dedent
- from typing import Any, Optional
+ from typing import Optional

  from pydantic import BaseModel, ConfigDict, Field
  from pydantic.dataclasses import dataclass
@@ -43,15 +43,15 @@ from typing_extensions import Self

  from .__cron import CronJob, CronRunner
  from .__types import DictData, TupleStr
- from .conf import Loader, Log, config, get_log, get_logger
+ from .audit import Audit, get_audit
+ from .conf import Loader, config, get_logger
  from .cron import On
  from .exceptions import JobException, WorkflowException
  from .job import Job
  from .params import Param
- from .result import Result
+ from .result import Result, Status
  from .templates import has_template, param2template
  from .utils import (
- cut_id,
  gen_id,
  get_dt_now,
  reach_next_minute,
@@ -485,22 +485,25 @@ class Workflow(BaseModel):
  params: DictData,
  *,
  run_id: str | None = None,
- log: type[Log] = None,
+ parent_run_id: str | None = None,
+ audit: type[Audit] = None,
  queue: ReleaseQueue | None = None,
  override_log_name: str | None = None,
+ result: Result | None = None,
  ) -> Result:
  """Release the workflow execution with overriding parameter with the
  release templating that include logical date (release date), execution
  date, or running id to the params.

- This method allow workflow use log object to save the execution
- result to log destination like file log to the local `/logs` directory.
+ This method allow workflow use audit object to save the execution
+ result to audit destination like file audit to the local `/logs`
+ directory.

  Steps:
  - Initialize ReleaseQueue and Release if they do not pass.
  - Create release data for pass to parameter templating function.
  - Execute this workflow with mapping release data to its parameters.
- - Writing result log
+ - Writing result audit
  - Remove this release on the running queue
  - Push this release to complete queue

@@ -508,17 +511,26 @@ class Workflow(BaseModel):
  :param params: A workflow parameter that pass to execute method.
  :param queue: A ReleaseQueue that use for mark complete.
  :param run_id: A workflow running ID for this release.
- :param log: A log class that want to save the execution result.
+ :param parent_run_id: A parent workflow running ID for this release.
+ :param audit: An audit class that want to save the execution result.
  :param queue: A ReleaseQueue object.
  :param override_log_name: An override logging name that use instead
  the workflow name.
+ :param result: (Result) A result object for keeping context and status
+ data.

  :rtype: Result
  """
- log: type[Log] = log or get_log()
+ audit: type[Audit] = audit or get_audit()
  name: str = override_log_name or self.name
- run_id: str = run_id or gen_id(name, unique=True)
- rs_release: Result = Result(run_id=run_id)
+
+ if result is None:
+ result: Result = Result(
+ run_id=(run_id or gen_id(name, unique=True)),
+ parent_run_id=parent_run_id,
+ )
+ elif parent_run_id:
+ result.set_parent_run_id(parent_run_id)

  if queue is not None and not isinstance(queue, ReleaseQueue):
  raise TypeError(
@@ -526,13 +538,11 @@ class Workflow(BaseModel):
  )

  # VALIDATE: Change release value to Release object.
- rs_release_type: str = "release"
  if isinstance(release, datetime):
- rs_release_type: str = "datetime"
  release: Release = Release.from_dt(release)

- logger.debug(
- f"({cut_id(run_id)}) [RELEASE]: Start release - {name!r} : "
+ result.trace.debug(
+ f"[RELEASE]: Start release - {name!r} : "
  f"{release.date:%Y-%m-%d %H:%M:%S}"
  )

@@ -542,35 +552,38 @@ class Workflow(BaseModel):
  "release": {
  "logical_date": release.date,
  "execute_date": datetime.now(tz=config.tz),
- "run_id": run_id,
+ "run_id": result.run_id,
  "timezone": config.tz,
  }
  }

  # NOTE: Execute workflow with templating params from release mapping.
- rs: Result = self.execute(
+ # The result context that return from execution method is:
+ #
+ # ... {"params": ..., "jobs": ...}
+ #
+ self.execute(
  params=param2template(params, release_params),
- run_id=run_id,
+ result=result,
+ parent_run_id=result.parent_run_id,
  )
- logger.debug(
- f"({cut_id(run_id)}) [RELEASE]: End release - {name!r} : "
+ result.trace.debug(
+ f"[RELEASE]: End release - {name!r} : "
  f"{release.date:%Y-%m-%d %H:%M:%S}"
  )

- rs.set_parent_run_id(run_id)
-
- # NOTE: Saving execution result to destination of the input log object.
- logger.debug(f"({cut_id(run_id)}) [LOG]: Writing log: {name!r}.")
+ # NOTE: Saving execution result to destination of the input audit
+ # object.
+ result.trace.debug(f"[LOG]: Writing audit: {name!r}.")
  (
- log.model_validate(
- {
- "name": name,
- "release": release.date,
- "type": release.type,
- "context": rs.context,
- "parent_run_id": rs.parent_run_id,
- "run_id": rs.run_id,
- }
+ audit(
+ name=name,
+ release=release.date,
+ type=release.type,
+ context=result.context,
+ parent_run_id=result.parent_run_id,
+ run_id=result.run_id,
+ execution_time=result.alive_time(),
  ).save(excluded=None)
  )

@@ -580,20 +593,25 @@ class Workflow(BaseModel):
  queue.mark_complete(release)

  # NOTE: Remove the params key from the result context for deduplicate.
- context: dict[str, Any] = rs.context
- context.pop("params")
+ # This step is prepare result context for this release method.
+ context: DictData = result.context
+ jobs: DictData = context.pop("jobs", {})
+ errors: DictData = (
+ {"errors": context.pop("errors", {})} if "errors" in context else {}
+ )

- return rs_release.catch(
- status=0,
+ return result.catch(
+ status=Status.SUCCESS,
  context={
+ # NOTE: Update the real params that pass in this method.
  "params": params,
  "release": {
- "status": "success",
- "type": rs_release_type,
+ "type": release.type,
  "logical_date": release.date,
  "release": release,
  },
- "outputs": context,
+ "outputs": {"jobs": jobs},
+ **errors,
  },
  )

@@ -602,7 +620,7 @@ class Workflow(BaseModel):
  offset: float,
  end_date: datetime,
  queue: ReleaseQueue,
- log: type[Log],
+ audit: type[Audit],
  *,
  force_run: bool = False,
  ) -> ReleaseQueue:
@@ -620,9 +638,9 @@ class Workflow(BaseModel):
  :param offset: An offset in second unit for time travel.
  :param end_date: An end datetime object.
  :param queue: A workflow queue object.
- :param log: A log class that want to make log object.
- :param force_run: A flag that allow to release workflow if the log with
- that release was pointed.
+ :param audit: An audit class that want to make audit object.
+ :param force_run: A flag that allow to release workflow if the audit
+ with that release was pointed.

  :rtype: ReleaseQueue
  """
@@ -645,7 +663,7 @@ class Workflow(BaseModel):
  )

  while queue.check_queue(workflow_release) or (
- log.is_pointed(name=self.name, release=workflow_release.date)
+ audit.is_pointed(name=self.name, release=workflow_release.date)
  and not force_run
  ):
  workflow_release = Release(
@@ -671,16 +689,16 @@ class Workflow(BaseModel):
  *,
  run_id: str | None = None,
  periods: int = 1,
- log: Log | None = None,
+ audit: Audit | None = None,
  force_run: bool = False,
  timeout: int = 1800,
- ) -> list[Result]:
- """Poke this workflow with start datetime value that passing to its
- ``on`` field with threading executor pool for executing with all its
- schedules that was set on the `on` value.
+ ) -> Result:
+ """Poke function with a start datetime value that will pass to its
+ `on` field on the threading executor pool for execute the `release`
+ method (It run all schedules that was set on the `on` values).

  This method will observe its schedule that nearing to run with the
- ``self.release()`` method.
+ `self.release()` method.

  The limitation of this method is not allow run a date that less
  than the current date.
@@ -689,17 +707,19 @@ class Workflow(BaseModel):
  :param params: A parameters that want to pass to the release method.
  :param run_id: A workflow running ID for this poke.
  :param periods: A periods in minutes value that use to run this poking.
- :param log: A log object that want to use on this poking process.
- :param force_run: A flag that allow to release workflow if the log with
+ :param audit: An audit object that want to use on this poking process.
+ :param force_run: A flag that allow to release workflow if the audit with
  that release was pointed.
  :param timeout: A second value for timeout while waiting all futures
  run completely.

- :rtype: list[Result]
- :return: A list of all results that return from ``self.release`` method.
+ :rtype: Result
+ :return: A list of all results that return from `self.release` method.
  """
- log: type[Log] = log or get_log()
- run_id: str = run_id or gen_id(self.name, unique=True)
+ audit: type[Audit] = audit or get_audit()
+ result: Result = Result(
+ run_id=(run_id or gen_id(self.name, unique=True))
+ )

  # VALIDATE: Check the periods value should gather than 0.
  if periods <= 0:
@@ -710,11 +730,10 @@ class Workflow(BaseModel):
  # NOTE: If this workflow does not set the on schedule, it will return
  # empty result.
  if len(self.on) == 0:
- logger.info(
- f"({cut_id(run_id)}) [POKING]: {self.name!r} does not have any "
- f"schedule to run."
+ result.trace.info(
+ f"[POKING]: {self.name!r} does not have any schedule to run."
  )
- return []
+ return result.catch(status=Status.SUCCESS, context={"outputs": []})

  # NOTE: Create the current date that change microsecond to 0
  current_date: datetime = datetime.now(tz=config.tz).replace(
@@ -737,13 +756,13 @@ class Workflow(BaseModel):
  # periods value.
  end_date: datetime = start_date + timedelta(minutes=periods)

- logger.info(
- f"({cut_id(run_id)}) [POKING]: Start Poking: {self.name!r} from "
+ result.trace.info(
+ f"[POKING]: Start Poking: {self.name!r} from "
  f"{start_date:%Y-%m-%d %H:%M:%S} to {end_date:%Y-%m-%d %H:%M:%S}"
  )

  params: DictData = {} if params is None else params
- results: list[Result] = []
+ context: list[Result] = []

  # NOTE: Create empty ReleaseQueue object.
  q: ReleaseQueue = ReleaseQueue()
@@ -751,17 +770,16 @@ class Workflow(BaseModel):
  # NOTE: Create reusable partial function and add Release to the release
  # queue object.
  partial_queue = partial(
- self.queue, offset, end_date, log=log, force_run=force_run
+ self.queue, offset, end_date, audit=audit, force_run=force_run
  )
  partial_queue(q)

  # NOTE: Return the empty result if it does not have any Release.
  if not q.is_queued:
- logger.info(
- f"({cut_id(run_id)}) [POKING]: {self.name!r} does not have "
- f"any queue."
+ result.trace.info(
+ f"[POKING]: {self.name!r} does not have any queue."
  )
- return []
+ return result.catch(status=Status.SUCCESS, context={"outputs": []})

  # NOTE: Start create the thread pool executor for running this poke
  # process.
@@ -778,8 +796,8 @@ class Workflow(BaseModel):
  release: Release = heappop(q.queue)

  if reach_next_minute(release.date, tz=config.tz, offset=offset):
- logger.debug(
- f"({cut_id(run_id)}) [POKING]: The latest release, "
+ result.trace.debug(
+ f"[POKING]: The latest release, "
  f"{release.date:%Y-%m-%d %H:%M:%S}, is not able to run "
  f"on this minute"
  )
@@ -799,8 +817,9 @@ class Workflow(BaseModel):
  self.release,
  release=release,
  params=params,
- log=log,
+ audit=audit,
  queue=q,
+ parent_run_id=result.run_id,
  )
  )

@@ -809,16 +828,19 @@ class Workflow(BaseModel):
  # WARNING: This poking method does not allow to use fail-fast
  # logic to catching parallel execution result.
  for future in as_completed(futures, timeout=timeout):
- results.append(future.result().set_parent_run_id(run_id))
+ context.append(future.result())

- return results
+ return result.catch(
+ status=Status.SUCCESS,
+ context={"outputs": context},
+ )

  def execute_job(
  self,
  job_id: str,
  params: DictData,
  *,
- run_id: str | None = None,
+ result: Result | None = None,
  raise_error: bool = True,
  ) -> Result:
  """Job execution with passing dynamic parameters from the main workflow
@@ -834,16 +856,17 @@ class Workflow(BaseModel):

  :param job_id: A job ID that want to execute.
  :param params: A params that was parameterized from workflow execution.
- :param run_id: A workflow running ID for this job execution.
  :param raise_error: A flag that raise error instead catching to result
  if it gets exception from job execution.
+ :param result: (Result) A result object for keeping context and status
+ data.

  :rtype: Result
  :return: Return the result object that receive the job execution result
  context.
  """
- run_id: str = run_id or gen_id(self.name, unique=True)
- rs: Result = Result(run_id=run_id)
+ if result is None: # pragma: no cov
+ result: Result = Result(run_id=gen_id(self.name, unique=True))

  # VALIDATE: check a job ID that exists in this workflow or not.
  if job_id not in self.jobs:
@@ -852,26 +875,26 @@ class Workflow(BaseModel):
  f"workflow."
  )

- logger.info(
- f"({cut_id(run_id)}) [WORKFLOW]: Start execute job: {job_id!r}"
- )
+ result.trace.info(f"[WORKFLOW]: Start execute job: {job_id!r}")

  # IMPORTANT:
  # This execution change all job running IDs to the current workflow
- # execution running ID (with passing run_id to the job execution
- # argument).
+ # running ID, but it still trac log to the same parent running ID
+ # (with passing `run_id` and `parent_run_id` to the job execution
+ # arguments).
  #
  try:
  job: Job = self.jobs[job_id]
  job.set_outputs(
- job.execute(params=params, run_id=run_id).context,
+ job.execute(
+ params=params,
+ run_id=result.run_id,
+ parent_run_id=result.parent_run_id,
+ ).context,
  to=params,
  )
  except JobException as err:
- logger.error(
- f"({cut_id(run_id)}) [WORKFLOW]: {err.__class__.__name__}: "
- f"{err}"
- )
+ result.trace.error(f"[WORKFLOW]: {err.__class__.__name__}: {err}")
  if raise_error:
  raise WorkflowException(
  f"Get job execution error {job_id}: JobException: {err}"
@@ -880,14 +903,16 @@ class Workflow(BaseModel):
  "Handle error from the job execution does not support yet."
  ) from None

- return rs.catch(status=0, context=params)
+ return result.catch(status=0, context=params)

  def execute(
  self,
  params: DictData,
  *,
  run_id: str | None = None,
+ parent_run_id: str | None = None,
  timeout: int = 0,
+ result: Result | None = None,
  ) -> Result:
  """Execute workflow with passing a dynamic parameters to all jobs that
  included in this workflow model with ``jobs`` field.
@@ -900,38 +925,38 @@ class Workflow(BaseModel):
  can access it with syntax:

  ... ${job-name}.stages.${stage-id}.outputs.${key}
+ ... ${job-name}.stages.${stage-id}.errors.${key}

  :param params: An input parameters that use on workflow execution that
  will parameterize before using it. Default is None.
- :type params: DictData
-
  :param run_id: A workflow running ID for this job execution.
- :type run_id: str | None (default: None)
- :param timeout: A workflow execution time out in second unit that use
- for limit time of execution and waiting job dependency. This value
- does not force stop the task that still running more than this limit
- time.
- :type timeout: int (default: 0)
+ :param parent_run_id: A parent workflow running ID for this release.
+ :param timeout: (int) A workflow execution time out in second unit that
+ use for limit time of execution and waiting job dependency. This
+ value does not force stop the task that still running more than this
+ limit time. (default: 0)
+ :param result: (Result) A result object for keeping context and status
+ data.

  :rtype: Result
  """
- run_id: str = run_id or gen_id(self.name, unique=True)
- logger.info(
- f"({cut_id(run_id)}) [WORKFLOW]: Start Execute: {self.name!r} ..."
- )
-
- # NOTE: I use this condition because this method allow passing empty
- # params and I do not want to create new dict object.
  ts: float = time.monotonic()
- rs: Result = Result(run_id=run_id)
+ if result is None: # pragma: no cov
+ result: Result = Result(
+ run_id=(run_id or gen_id(self.name, unique=True)),
+ parent_run_id=parent_run_id,
+ )
+ elif parent_run_id:
+ result.set_parent_run_id(parent_run_id)
+
+ result.trace.info(f"[WORKFLOW]: Start Execute: {self.name!r} ...")

  # NOTE: It should not do anything if it does not have job.
  if not self.jobs:
- logger.warning(
- f"({cut_id(run_id)}) [WORKFLOW]: This workflow: {self.name!r} "
- f"does not have any jobs"
+ result.trace.warning(
+ f"[WORKFLOW]: Workflow: {self.name!r} does not have any jobs"
  )
- return rs.catch(status=0, context=params)
+ return result.catch(status=0, context=params)

  # NOTE: Create a job queue that keep the job that want to run after
  # its dependency condition.
@@ -952,7 +977,7 @@ class Workflow(BaseModel):
  try:
  if config.max_job_parallel == 1:
  self.__exec_non_threading(
- run_id=run_id,
+ result=result,
  context=context,
  ts=ts,
  job_queue=jq,
@@ -960,7 +985,7 @@ class Workflow(BaseModel):
  )
  else:
  self.__exec_threading(
- run_id=run_id,
+ result=result,
  context=context,
  ts=ts,
  job_queue=jq,
@@ -970,15 +995,18 @@ class Workflow(BaseModel):
  status: int = 1
  context.update(
  {
- "error": err,
- "error_message": f"{err.__class__.__name__}: {err}",
+ "errors": {
+ "class": err,
+ "name": err.__class__.__name__,
+ "message": f"{err.__class__.__name__}: {err}",
+ },
  },
  )
- return rs.catch(status=status, context=context)
+ return result.catch(status=status, context=context)

  def __exec_threading(
  self,
- run_id: str,
+ result: Result,
  context: DictData,
  ts: float,
  job_queue: Queue,
@@ -991,6 +1019,7 @@ class Workflow(BaseModel):
  If a job need dependency, it will check dependency job ID from
  context data before allow it run.

+ :param result: A result model.
  :param context: A context workflow data that want to downstream passing.
  :param ts: A start timestamp that use for checking execute time should
  time out.
@@ -1002,9 +1031,7 @@ class Workflow(BaseModel):
  """
  not_timeout_flag: bool = True
  timeout: int = timeout or config.max_job_exec_timeout
- logger.debug(
- f"({cut_id(run_id)}) [WORKFLOW]: Run {self.name!r} with threading."
- )
+ result.trace.debug(f"[WORKFLOW]: Run {self.name!r} with threading.")

  # IMPORTANT: The job execution can run parallel and waiting by
  # needed.
@@ -1039,8 +1066,9 @@ class Workflow(BaseModel):
  futures.append(
  executor.submit(
  self.execute_job,
- job_id,
+ job_id=job_id,
  params=context,
+ result=result,
  ),
  )

@@ -1055,7 +1083,7 @@ class Workflow(BaseModel):

  for future in as_completed(futures, timeout=thread_timeout):
  if err := future.exception():
- logger.error(f"({cut_id(run_id)}) [WORKFLOW]: {err}")
+ result.trace.error(f"[WORKFLOW]: {err}")
  raise WorkflowException(str(err))

  # NOTE: This getting result does not do anything.
@@ -1067,15 +1095,12 @@ class Workflow(BaseModel):
  future.cancel()

  # NOTE: Raise timeout error.
- logger.warning(
- f"({cut_id(run_id)}) [WORKFLOW]: Execution: {self.name!r} "
- f"was timeout."
- )
+ result.trace.error(f"[WORKFLOW]: Execution: {self.name!r} was timeout.")
  raise WorkflowException(f"Execution: {self.name!r} was timeout.")

  def __exec_non_threading(
  self,
- run_id: str,
+ result: Result,
  context: DictData,
  ts: float,
  job_queue: Queue,
@@ -1088,6 +1113,7 @@ class Workflow(BaseModel):
  If a job need dependency, it will check dependency job ID from
  context data before allow it run.

+ :param result: A result model.
  :param context: A context workflow data that want to downstream passing.
  :param ts: A start timestamp that use for checking execute time should
  time out.
@@ -1097,10 +1123,7 @@ class Workflow(BaseModel):
  """
  not_timeout_flag: bool = True
  timeout: int = timeout or config.max_job_exec_timeout
- logger.debug(
- f"({cut_id(run_id)}) [WORKFLOW]: Run {self.name!r} with "
- f"non-threading."
- )
+ result.trace.debug(f"[WORKFLOW]: Run {self.name!r} with non-threading.")

  while not job_queue.empty() and (
  not_timeout_flag := ((time.monotonic() - ts) < timeout)
@@ -1123,7 +1146,7 @@ class Workflow(BaseModel):
  # 'params': <input-params>,
  # 'jobs': {},
  # }
- self.execute_job(job_id=job_id, params=context, run_id=run_id)
+ self.execute_job(job_id=job_id, params=context, result=result)

  # NOTE: Mark this job queue done.
  job_queue.task_done()
@@ -1137,10 +1160,7 @@ class Workflow(BaseModel):
  return context

  # NOTE: Raise timeout error.
- logger.warning(
- f"({cut_id(run_id)}) [WORKFLOW]: Execution: {self.name!r} "
- f"was timeout."
- )
+ result.trace.error(f"[WORKFLOW]: Execution: {self.name!r} was timeout.")
  raise WorkflowException(f"Execution: {self.name!r} was timeout.")


@@ -1152,8 +1172,8 @@ class WorkflowTask:
  This dataclass object is mapping 1-to-1 with workflow and cron runner
  objects.

- This dataclass has the release method for itself.
-
+ This dataclass has the release method for itself that prepare necessary
+ arguments before passing to the parent release method.
  """

  alias: str
@@ -1165,19 +1185,23 @@ class WorkflowTask:
  self,
  release: datetime | Release | None = None,
  run_id: str | None = None,
- log: type[Log] = None,
+ audit: type[Audit] = None,
  queue: ReleaseQueue | None = None,
  ) -> Result:
- """Release the workflow task data.
+ """Release the workflow task that passing an override parameter to
+ the parent release method with the `values` field.
+
+ This method can handler not passing release value by default
+ generate step. It uses the `runner` field for generate release object.

  :param release: A release datetime or Release object.
  :param run_id: A workflow running ID for this release.
- :param log: A log class that want to save the execution result.
+ :param audit: An audit class that want to save the execution result.
  :param queue: A ReleaseQueue object that use to mark complete.

  :rtype: Result
  """
- log: type[Log] = log or get_log()
+ audit: type[Audit] = audit or get_audit()

  if release is None:

@@ -1204,7 +1228,7 @@ class WorkflowTask:
  release=release,
  params=self.values,
  run_id=run_id,
- log=log,
+ audit=audit,
  queue=queue,
  override_log_name=self.alias,
  )
@@ -1213,7 +1237,7 @@ class WorkflowTask:
  self,
  end_date: datetime,
  queue: ReleaseQueue,
- log: type[Log],
+ audit: type[Audit],
  *,
  force_run: bool = False,
  ) -> ReleaseQueue:
@@ -1222,9 +1246,9 @@ class WorkflowTask:

  :param end_date: An end datetime object.
  :param queue: A workflow queue object.
- :param log: A log class that want to make log object.
- :param force_run: A flag that allow to release workflow if the log with
- that release was pointed.
+ :param audit: An audit class that want to make audit object.
+ :param force_run: (bool) A flag that allow to release workflow if the
+ audit with that release was pointed.

  :rtype: ReleaseQueue
  """
@@ -1240,7 +1264,7 @@ class WorkflowTask:
  )

  while queue.check_queue(workflow_release) or (
- log.is_pointed(name=self.alias, release=workflow_release.date)
+ audit.is_pointed(name=self.alias, release=workflow_release.date)
  and not force_run
  ):
  workflow_release = Release(
@@ -1260,7 +1284,7 @@ class WorkflowTask:
  return queue

  def __repr__(self) -> str:
- """Override ___repr__ method."""
+ """Override the `__repr__` method."""
  return (
  f"{self.__class__.__name__}(alias={self.alias!r}, "
  f"workflow={self.workflow.name!r}, runner={self.runner!r}, "