ddeutil-workflow 0.0.33__py3-none-any.whl → 0.0.34__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -34,7 +34,7 @@ from functools import partial, total_ordering
  from heapq import heappop, heappush
  from queue import Queue
  from textwrap import dedent
- from typing import Any, Optional
+ from typing import Optional

  from pydantic import BaseModel, ConfigDict, Field
  from pydantic.dataclasses import dataclass
@@ -49,10 +49,9 @@ from .cron import On
  from .exceptions import JobException, WorkflowException
  from .job import Job
  from .params import Param
- from .result import Result
+ from .result import Result, Status
  from .templates import has_template, param2template
  from .utils import (
- cut_id,
  gen_id,
  get_dt_now,
  reach_next_minute,
@@ -486,22 +485,25 @@ class Workflow(BaseModel):
  params: DictData,
  *,
  run_id: str | None = None,
- log: type[Audit] = None,
+ parent_run_id: str | None = None,
+ audit: type[Audit] = None,
  queue: ReleaseQueue | None = None,
  override_log_name: str | None = None,
+ result: Result | None = None,
  ) -> Result:
  """Release the workflow execution with overriding parameter with the
  release templating that include logical date (release date), execution
  date, or running id to the params.

- This method allow workflow use log object to save the execution
- result to log destination like file log to the local `/logs` directory.
+ This method allow workflow use audit object to save the execution
+ result to audit destination like file audit to the local `/logs`
+ directory.

  Steps:
  - Initialize ReleaseQueue and Release if they do not pass.
  - Create release data for pass to parameter templating function.
  - Execute this workflow with mapping release data to its parameters.
- - Writing result log
+ - Writing result audit
  - Remove this release on the running queue
  - Push this release to complete queue

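The hunk above renames the `log` keyword of `Workflow.release()` to `audit` and adds `parent_run_id` and `result` keywords. A minimal caller-side sketch of the new signature, assuming `wf` is an already constructed `Workflow` instance and that the parameter names and values below are hypothetical:

```python
from datetime import datetime

# Sketch only: `wf` is assumed to be a Workflow instance built elsewhere, and
# the "asat-dt" parameter is a placeholder, not part of this package.
rs = wf.release(
    release=datetime(2024, 1, 1, 1),              # a datetime or a Release object
    params={"asat-dt": datetime(2024, 1, 1, 1)},
    parent_run_id="parent-run-01",                # new keyword in 0.0.34
    # audit=...,                                  # optional; defaults to get_audit()
)
```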
@@ -509,17 +511,26 @@ class Workflow(BaseModel):
  :param params: A workflow parameter that pass to execute method.
  :param queue: A ReleaseQueue that use for mark complete.
  :param run_id: A workflow running ID for this release.
- :param log: A log class that want to save the execution result.
+ :param parent_run_id: A parent workflow running ID for this release.
+ :param audit: An audit class that want to save the execution result.
  :param queue: A ReleaseQueue object.
  :param override_log_name: An override logging name that use instead
  the workflow name.
+ :param result: (Result) A result object for keeping context and status
+ data.

  :rtype: Result
  """
- log: type[Audit] = log or get_audit()
+ audit: type[Audit] = audit or get_audit()
  name: str = override_log_name or self.name
- run_id: str = run_id or gen_id(name, unique=True)
- rs_release: Result = Result(run_id=run_id)
+
+ if result is None:
+ result: Result = Result(
+ run_id=(run_id or gen_id(name, unique=True)),
+ parent_run_id=parent_run_id,
+ )
+ elif parent_run_id:
+ result.set_parent_run_id(parent_run_id)

  if queue is not None and not isinstance(queue, ReleaseQueue):
  raise TypeError(
@@ -527,13 +538,11 @@ class Workflow(BaseModel):
  )

  # VALIDATE: Change release value to Release object.
- rs_release_type: str = "release"
  if isinstance(release, datetime):
- rs_release_type: str = "datetime"
  release: Release = Release.from_dt(release)

- logger.debug(
- f"({cut_id(run_id)}) [RELEASE]: Start release - {name!r} : "
+ result.trace.debug(
+ f"[RELEASE]: Start release - {name!r} : "
  f"{release.date:%Y-%m-%d %H:%M:%S}"
  )

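Because the method still coerces a plain `datetime` through `Release.from_dt()`, the two call styles below should reach the same path; the removed `rs_release_type` bookkeeping is no longer needed since the result now records `release.type` directly. A sketch, assuming `Release` is importable from the package (the import path is an assumption):

```python
from datetime import datetime

from ddeutil.workflow import Release  # assumed public import path

# Both forms are expected to be equivalent after the Release.from_dt() coercion.
wf.release(release=datetime(2024, 1, 1), params={})
wf.release(release=Release.from_dt(datetime(2024, 1, 1)), params={})
```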
@@ -543,34 +552,38 @@ class Workflow(BaseModel):
  "release": {
  "logical_date": release.date,
  "execute_date": datetime.now(tz=config.tz),
- "run_id": run_id,
+ "run_id": result.run_id,
  "timezone": config.tz,
  }
  }

  # NOTE: Execute workflow with templating params from release mapping.
- rs: Result = self.execute(
+ # The result context that return from execution method is:
+ #
+ # ... {"params": ..., "jobs": ...}
+ #
+ self.execute(
  params=param2template(params, release_params),
- run_id=run_id,
+ result=result,
+ parent_run_id=result.parent_run_id,
  )
- logger.debug(
- f"({cut_id(run_id)}) [RELEASE]: End release - {name!r} : "
+ result.trace.debug(
+ f"[RELEASE]: End release - {name!r} : "
  f"{release.date:%Y-%m-%d %H:%M:%S}"
  )

- rs.set_parent_run_id(run_id)
-
- # NOTE: Saving execution result to destination of the input log object.
- logger.debug(f"({cut_id(run_id)}) [LOG]: Writing log: {name!r}.")
+ # NOTE: Saving execution result to destination of the input audit
+ # object.
+ result.trace.debug(f"[LOG]: Writing audit: {name!r}.")
  (
- log(
+ audit(
  name=name,
  release=release.date,
  type=release.type,
- context=rs.context,
- parent_run_id=rs.parent_run_id,
- run_id=rs.run_id,
- execution_time=rs.alive_time(),
+ context=result.context,
+ parent_run_id=result.parent_run_id,
+ run_id=result.run_id,
+ execution_time=result.alive_time(),
  ).save(excluded=None)
  )

@@ -580,20 +593,25 @@ class Workflow(BaseModel):
  queue.mark_complete(release)

  # NOTE: Remove the params key from the result context for deduplicate.
- context: dict[str, Any] = rs.context
- context.pop("params")
+ # This step is prepare result context for this release method.
+ context: DictData = result.context
+ jobs: DictData = context.pop("jobs", {})
+ errors: DictData = (
+ {"errors": context.pop("errors", {})} if "errors" in context else {}
+ )

- return rs_release.catch(
- status=0,
+ return result.catch(
+ status=Status.SUCCESS,
  context={
+ # NOTE: Update the real params that pass in this method.
  "params": params,
  "release": {
- "status": "success",
- "type": rs_release_type,
+ "type": release.type,
  "logical_date": release.date,
  "release": release,
  },
- "outputs": context,
+ "outputs": {"jobs": jobs},
+ **errors,
  },
  )

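With this change the release result context is reshaped: job outputs move under `outputs.jobs`, the release block drops the old `status` and string-type fields in favour of `release.type`, and any caught errors surface under an `errors` key. A hedged sketch of reading it, reusing the `rs` result from a prior `wf.release(...)` call:

```python
# Keys below are taken from the hunk above; `rs` is the Result from wf.release().
release_info = rs.context["release"]          # {"type": ..., "logical_date": ..., "release": ...}
job_outputs = rs.context["outputs"]["jobs"]   # job contexts only; params are kept separately
errors = rs.context.get("errors")             # only present when execution caught an error
```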
@@ -602,7 +620,7 @@ class Workflow(BaseModel):
  offset: float,
  end_date: datetime,
  queue: ReleaseQueue,
- log: type[Audit],
+ audit: type[Audit],
  *,
  force_run: bool = False,
  ) -> ReleaseQueue:
@@ -620,9 +638,9 @@ class Workflow(BaseModel):
  :param offset: An offset in second unit for time travel.
  :param end_date: An end datetime object.
  :param queue: A workflow queue object.
- :param log: A log class that want to make log object.
- :param force_run: A flag that allow to release workflow if the log with
- that release was pointed.
+ :param audit: An audit class that want to make audit object.
+ :param force_run: A flag that allow to release workflow if the audit
+ with that release was pointed.

  :rtype: ReleaseQueue
  """
@@ -645,7 +663,7 @@ class Workflow(BaseModel):
  )

  while queue.check_queue(workflow_release) or (
- log.is_pointed(name=self.name, release=workflow_release.date)
+ audit.is_pointed(name=self.name, release=workflow_release.date)
  and not force_run
  ):
  workflow_release = Release(
@@ -671,16 +689,16 @@ class Workflow(BaseModel):
  *,
  run_id: str | None = None,
  periods: int = 1,
- log: Audit | None = None,
+ audit: Audit | None = None,
  force_run: bool = False,
  timeout: int = 1800,
- ) -> list[Result]:
- """Poke this workflow with start datetime value that passing to its
- ``on`` field with threading executor pool for executing with all its
- schedules that was set on the `on` value.
+ ) -> Result:
+ """Poke function with a start datetime value that will pass to its
+ `on` field on the threading executor pool for execute the `release`
+ method (It run all schedules that was set on the `on` values).

  This method will observe its schedule that nearing to run with the
- ``self.release()`` method.
+ `self.release()` method.

  The limitation of this method is not allow run a date that less
  than the current date.
@@ -689,17 +707,19 @@ class Workflow(BaseModel):
  :param params: A parameters that want to pass to the release method.
  :param run_id: A workflow running ID for this poke.
  :param periods: A periods in minutes value that use to run this poking.
- :param log: A log object that want to use on this poking process.
- :param force_run: A flag that allow to release workflow if the log with
+ :param audit: An audit object that want to use on this poking process.
+ :param force_run: A flag that allow to release workflow if the audit with
  that release was pointed.
  :param timeout: A second value for timeout while waiting all futures
  run completely.

- :rtype: list[Result]
- :return: A list of all results that return from ``self.release`` method.
+ :rtype: Result
+ :return: A list of all results that return from `self.release` method.
  """
- log: type[Audit] = log or get_audit()
- run_id: str = run_id or gen_id(self.name, unique=True)
+ audit: type[Audit] = audit or get_audit()
+ result: Result = Result(
+ run_id=(run_id or gen_id(self.name, unique=True))
+ )

  # VALIDATE: Check the periods value should gather than 0.
  if periods <= 0:
@@ -710,11 +730,10 @@ class Workflow(BaseModel):
  # NOTE: If this workflow does not set the on schedule, it will return
  # empty result.
  if len(self.on) == 0:
- logger.info(
- f"({cut_id(run_id)}) [POKING]: {self.name!r} does not have any "
- f"schedule to run."
+ result.trace.info(
+ f"[POKING]: {self.name!r} does not have any schedule to run."
  )
- return []
+ return result.catch(status=Status.SUCCESS, context={"outputs": []})

  # NOTE: Create the current date that change microsecond to 0
  current_date: datetime = datetime.now(tz=config.tz).replace(
@@ -737,13 +756,13 @@ class Workflow(BaseModel):
  # periods value.
  end_date: datetime = start_date + timedelta(minutes=periods)

- logger.info(
- f"({cut_id(run_id)}) [POKING]: Start Poking: {self.name!r} from "
+ result.trace.info(
+ f"[POKING]: Start Poking: {self.name!r} from "
  f"{start_date:%Y-%m-%d %H:%M:%S} to {end_date:%Y-%m-%d %H:%M:%S}"
  )

  params: DictData = {} if params is None else params
- results: list[Result] = []
+ context: list[Result] = []

  # NOTE: Create empty ReleaseQueue object.
  q: ReleaseQueue = ReleaseQueue()
@@ -751,17 +770,16 @@ class Workflow(BaseModel):
  # NOTE: Create reusable partial function and add Release to the release
  # queue object.
  partial_queue = partial(
- self.queue, offset, end_date, log=log, force_run=force_run
+ self.queue, offset, end_date, audit=audit, force_run=force_run
  )
  partial_queue(q)

  # NOTE: Return the empty result if it does not have any Release.
  if not q.is_queued:
- logger.info(
- f"({cut_id(run_id)}) [POKING]: {self.name!r} does not have "
- f"any queue."
+ result.trace.info(
+ f"[POKING]: {self.name!r} does not have any queue."
  )
- return []
+ return result.catch(status=Status.SUCCESS, context={"outputs": []})

  # NOTE: Start create the thread pool executor for running this poke
  # process.
@@ -778,8 +796,8 @@ class Workflow(BaseModel):
  release: Release = heappop(q.queue)

  if reach_next_minute(release.date, tz=config.tz, offset=offset):
- logger.debug(
- f"({cut_id(run_id)}) [POKING]: The latest release, "
+ result.trace.debug(
+ f"[POKING]: The latest release, "
  f"{release.date:%Y-%m-%d %H:%M:%S}, is not able to run "
  f"on this minute"
  )
@@ -799,8 +817,9 @@ class Workflow(BaseModel):
  self.release,
  release=release,
  params=params,
- log=log,
+ audit=audit,
  queue=q,
+ parent_run_id=result.run_id,
  )
  )

@@ -809,18 +828,20 @@ class Workflow(BaseModel):
  # WARNING: This poking method does not allow to use fail-fast
  # logic to catching parallel execution result.
  for future in as_completed(futures, timeout=timeout):
- results.append(future.result().set_parent_run_id(run_id))
+ context.append(future.result())

- return results
+ return result.catch(
+ status=Status.SUCCESS,
+ context={"outputs": context},
+ )

  def execute_job(
  self,
  job_id: str,
  params: DictData,
  *,
- run_id: str | None = None,
- raise_error: bool = True,
  result: Result | None = None,
+ raise_error: bool = True,
  ) -> Result:
  """Job execution with passing dynamic parameters from the main workflow
  execution to the target job object via job's ID.
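Since `poke()` now returns a single `Result` instead of `list[Result]`, callers that iterated the old list should read the per-release results from `context["outputs"]`. A small sketch, assuming `wf` is a `Workflow` instance with at least one `on` schedule:

```python
# Sketch: aggregate poke result in 0.0.34. Each element of "outputs" is the
# Result returned by an individual self.release() call.
rs = wf.poke(params={}, periods=5)
for release_result in rs.context["outputs"]:
    print(release_result.run_id, release_result.context["release"]["logical_date"])
```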
@@ -835,7 +856,6 @@ class Workflow(BaseModel):

  :param job_id: A job ID that want to execute.
  :param params: A params that was parameterized from workflow execution.
- :param run_id: A workflow running ID for this job execution.
  :param raise_error: A flag that raise error instead catching to result
  if it gets exception from job execution.
  :param result: (Result) A result object for keeping context and status
@@ -846,10 +866,7 @@ class Workflow(BaseModel):
  context.
  """
  if result is None: # pragma: no cov
- run_id: str = run_id or gen_id(self.name, unique=True)
- result: Result = Result(run_id=run_id)
- else:
- run_id: str = result.run_id
+ result: Result = Result(run_id=gen_id(self.name, unique=True))

  # VALIDATE: check a job ID that exists in this workflow or not.
  if job_id not in self.jobs:
@@ -862,13 +879,18 @@ class Workflow(BaseModel):

  # IMPORTANT:
  # This execution change all job running IDs to the current workflow
- # execution running ID (with passing run_id to the job execution
- # argument).
+ # running ID, but it still trac log to the same parent running ID
+ # (with passing `run_id` and `parent_run_id` to the job execution
+ # arguments).
  #
  try:
  job: Job = self.jobs[job_id]
  job.set_outputs(
- job.execute(params=params, run_id=run_id).context,
+ job.execute(
+ params=params,
+ run_id=result.run_id,
+ parent_run_id=result.parent_run_id,
+ ).context,
  to=params,
  )
  except JobException as err:
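`execute_job()` no longer accepts `run_id`; the running ID and the new parent running ID both travel on the `result` object, which is then forwarded to `Job.execute()`. A sketch under the assumption that `Result` is importable from `ddeutil.workflow.result` and that the job ID and params shown are hypothetical:

```python
from ddeutil.workflow.result import Result

# Hypothetical job ID and params; the point is that run_id/parent_run_id now
# come from the Result object rather than a separate run_id argument.
rs = Result(run_id="manual-run-01", parent_run_id="scheduler-run-01")
wf.execute_job(job_id="extract-job", params={"params": {}, "jobs": {}}, result=rs)
```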
@@ -888,6 +910,7 @@ class Workflow(BaseModel):
  params: DictData,
  *,
  run_id: str | None = None,
+ parent_run_id: str | None = None,
  timeout: int = 0,
  result: Result | None = None,
  ) -> Result:
@@ -902,37 +925,36 @@ class Workflow(BaseModel):
  can access it with syntax:

  ... ${job-name}.stages.${stage-id}.outputs.${key}
+ ... ${job-name}.stages.${stage-id}.errors.${key}

  :param params: An input parameters that use on workflow execution that
  will parameterize before using it. Default is None.
- :type params: DictData
-
  :param run_id: A workflow running ID for this job execution.
- :type run_id: str | None (default: None)
- :param timeout: (int) A workflow execution time out in second unit that use
- for limit time of execution and waiting job dependency. This value
- does not force stop the task that still running more than this limit
- time. (default: 0)
+ :param parent_run_id: A parent workflow running ID for this release.
+ :param timeout: (int) A workflow execution time out in second unit that
+ use for limit time of execution and waiting job dependency. This
+ value does not force stop the task that still running more than this
+ limit time. (default: 0)
  :param result: (Result) A result object for keeping context and status
  data.

  :rtype: Result
  """
- # NOTE: I use this condition because this method allow passing empty
- # params and I do not want to create new dict object.
  ts: float = time.monotonic()
  if result is None: # pragma: no cov
  result: Result = Result(
- run_id=(run_id or gen_id(self.name, unique=True))
+ run_id=(run_id or gen_id(self.name, unique=True)),
+ parent_run_id=parent_run_id,
  )
+ elif parent_run_id:
+ result.set_parent_run_id(parent_run_id)

  result.trace.info(f"[WORKFLOW]: Start Execute: {self.name!r} ...")

  # NOTE: It should not do anything if it does not have job.
  if not self.jobs:
  result.trace.warning(
- f"[WORKFLOW]: This workflow: {self.name!r} does not have any "
- f"jobs"
+ f"[WORKFLOW]: Workflow: {self.name!r} does not have any jobs"
  )
  return result.catch(status=0, context=params)

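`execute()` picks up the same `parent_run_id` keyword, either building a fresh `Result` with it or stamping it onto a passed-in `Result`. A caller-side sketch with placeholder IDs and params:

```python
# Placeholder IDs; parent_run_id is new in 0.0.34 and can also be carried on a
# pre-built Result passed through the `result` keyword instead.
rs = wf.execute(
    params={"asat-dt": "2024-01-01"},
    run_id="child-run-01",
    parent_run_id="release-run-01",
)
```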
@@ -973,8 +995,11 @@ class Workflow(BaseModel):
  status: int = 1
  context.update(
  {
- "error": err,
- "error_message": f"{err.__class__.__name__}: {err}",
+ "errors": {
+ "class": err,
+ "name": err.__class__.__name__,
+ "message": f"{err.__class__.__name__}: {err}",
+ },
  },
  )
  return result.catch(status=status, context=context)
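Error details in the execute context move from the flat `error`/`error_message` keys to a nested `errors` mapping, so downstream checks need a small adjustment. A sketch of the new access pattern:

```python
# Reading the reshaped error payload; keys follow the hunk above.
rs = wf.execute(params={})
if "errors" in rs.context:
    err = rs.context["errors"]
    print(err["name"], err["message"])  # e.g. "WorkflowException", "WorkflowException: ..."
    # err["class"] holds the original exception object if re-raising is wanted.
```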
@@ -1041,7 +1066,7 @@ class Workflow(BaseModel):
  futures.append(
  executor.submit(
  self.execute_job,
- job_id,
+ job_id=job_id,
  params=context,
  result=result,
  ),
@@ -1070,9 +1095,7 @@ class Workflow(BaseModel):
  future.cancel()

  # NOTE: Raise timeout error.
- result.trace.warning(
- f"[WORKFLOW]: Execution: {self.name!r} was timeout."
- )
+ result.trace.error(f"[WORKFLOW]: Execution: {self.name!r} was timeout.")
  raise WorkflowException(f"Execution: {self.name!r} was timeout.")

  def __exec_non_threading(
@@ -1137,9 +1160,7 @@ class Workflow(BaseModel):
  return context

  # NOTE: Raise timeout error.
- result.trace.warning(
- f"[WORKFLOW]: Execution: {self.name!r} was timeout."
- )
+ result.trace.error(f"[WORKFLOW]: Execution: {self.name!r} was timeout.")
  raise WorkflowException(f"Execution: {self.name!r} was timeout.")


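Both executors now log the timeout at error level before raising, but the exception type is unchanged, so existing handlers keep working. A sketch of catching it, with a hypothetical 60-second limit:

```python
from ddeutil.workflow.exceptions import WorkflowException

try:
    wf.execute(params={}, timeout=60)  # hypothetical limit in seconds
except WorkflowException as err:
    # Raised when jobs are still pending past the timeout; the trace now logs
    # this at error level instead of warning.
    print(f"workflow timed out: {err}")
```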
@@ -1151,8 +1172,8 @@ class WorkflowTask:
  This dataclass object is mapping 1-to-1 with workflow and cron runner
  objects.

- This dataclass has the release method for itself.
-
+ This dataclass has the release method for itself that prepare necessary
+ arguments before passing to the parent release method.
  """

  alias: str
@@ -1164,19 +1185,23 @@ class WorkflowTask:
  self,
  release: datetime | Release | None = None,
  run_id: str | None = None,
- log: type[Audit] = None,
+ audit: type[Audit] = None,
  queue: ReleaseQueue | None = None,
  ) -> Result:
- """Release the workflow task data.
+ """Release the workflow task that passing an override parameter to
+ the parent release method with the `values` field.
+
+ This method can handler not passing release value by default
+ generate step. It uses the `runner` field for generate release object.

  :param release: A release datetime or Release object.
  :param run_id: A workflow running ID for this release.
- :param log: A log class that want to save the execution result.
+ :param audit: An audit class that want to save the execution result.
  :param queue: A ReleaseQueue object that use to mark complete.

  :rtype: Result
  """
- log: type[Audit] = log or get_audit()
+ audit: type[Audit] = audit or get_audit()

  if release is None:

@@ -1203,7 +1228,7 @@ class WorkflowTask:
  release=release,
  params=self.values,
  run_id=run_id,
- log=log,
+ audit=audit,
  queue=queue,
  override_log_name=self.alias,
  )
@@ -1212,7 +1237,7 @@ class WorkflowTask:
  self,
  end_date: datetime,
  queue: ReleaseQueue,
- log: type[Audit],
+ audit: type[Audit],
  *,
  force_run: bool = False,
  ) -> ReleaseQueue:
@@ -1221,9 +1246,9 @@ class WorkflowTask:

  :param end_date: An end datetime object.
  :param queue: A workflow queue object.
- :param log: A log class that want to make log object.
+ :param audit: An audit class that want to make audit object.
  :param force_run: (bool) A flag that allow to release workflow if the
- log with that release was pointed.
+ audit with that release was pointed.

  :rtype: ReleaseQueue
  """
@@ -1239,7 +1264,7 @@ class WorkflowTask:
  )

  while queue.check_queue(workflow_release) or (
- log.is_pointed(name=self.alias, release=workflow_release.date)
+ audit.is_pointed(name=self.alias, release=workflow_release.date)
  and not force_run
  ):
  workflow_release = Release(
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: ddeutil-workflow
- Version: 0.0.33
+ Version: 0.0.34
  Summary: Lightweight workflow orchestration
  Author-email: ddeutils <korawich.anu@gmail.com>
  License: MIT
@@ -74,22 +74,22 @@ you should to set the data layer separate this core program before run this appl

  ```mermaid
  flowchart LR
- subgraph Interface
- A((User))
- subgraph Docker Container
+ A((fa:fa-user User))
+
+ subgraph Docker Container
+ direction TB
  G@{ shape: rounded, label: "Observe<br>Application" }
- end
  end

- A --->|action| B(Workflow<br>Application)
- B ---> |response| A
- B -..-> |response| G
- G -..-> |request| B
-
  subgraph Docker Container
- B
+ direction TB
+ B@{ shape: rounded, label: "Workflow<br>Application" }
  end

+ A <--->|action &<br>response| B
+ B -....-> |response| G
+ G -....-> |request| B
+
  subgraph Data Context
  D@{ shape: processes, label: "Logs" }
  E@{ shape: lin-cyl, label: "Audit<br>Logs" }
@@ -138,9 +138,9 @@ This is examples that use workflow file for running common Data Engineering
  use-case.

  > [!IMPORTANT]
- > I recommend you to use the `hook` stage for all actions that you want to do
+ > I recommend you to use the `call` stage for all actions that you want to do
  > with workflow activity that you want to orchestrate. Because it is able to
- > dynamic an input argument with the same hook function that make you use less
+ > dynamic an input argument with the same call function that make you use less
  > time to maintenance your data workflows.

  ```yaml
@@ -182,7 +182,7 @@ run-py-local:
  writing_mode: flatten
  aws_s3_path: my-data/open-data/${{ params.source-extract }}

- # This Authentication code should implement with your custom hook
+ # This Authentication code should implement with your custom call
  # function. The template allow you to use environment variable.
  aws_access_client_id: ${AWS_ACCESS_CLIENT_ID}
  aws_access_client_secret: ${AWS_ACCESS_CLIENT_SECRET}
@@ -247,7 +247,7 @@ it will use default value and do not raise any error to you.
  | Name | Component | Default | Description |
  |:-----------------------------|:---------:|:--------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------|
  | **ROOT_PATH** | Core | `.` | The root path of the workflow application. |
- | **REGISTRY** | Core | `.` | List of importable string for the hook stage. |
+ | **REGISTRY** | Core | `.` | List of importable string for the call stage. |
  | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | List of importable string for the filter template. |
  | **CONF_PATH** | Core | `conf` | The config path that keep all template `.yaml` files. |
  | **TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
@@ -264,9 +264,10 @@ it will use default value and do not raise any error to you.
  | **PATH** | Log | `./logs` | The log path of the workflow saving log. |
  | **DEBUG_MODE** | Log | `true` | A flag that enable logging with debug level mode. |
  | **FORMAT** | Log | `%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d,%(thread)-5d) [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)` | |
+ | **FORMAT_FILE** | Log | `{datetime} ({process:5d}, {thread:5d}) {message:120s} ({filename}:{lineno})` | |
  | **DATETIME_FORMAT** | Log | `%Y-%m-%d %H:%M:%S` | |
- | **ENABLE_ROTATED_FILE** | Log | `false` | |
- | **PATH** | Audit | `./logs` | |
+ | **ENABLE_WRITE** | Log | `false` | |
+ | **PATH** | Audit | `./audits` | |
  | **ENABLE_WRITE** | Audit | `true` | A flag that enable logging object saving log to its destination. |
  | **MAX_PROCESS** | App | `2` | The maximum process worker number that run in scheduler app module. |
  | **MAX_SCHEDULE_PER_PROCESS** | App | `100` | A schedule per process that run parallel. |