ddeutil-workflow 0.0.21__py3-none-any.whl → 0.0.23__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
@@ -33,7 +33,7 @@ from functools import total_ordering
 from heapq import heappop, heappush
 from queue import Queue
 from textwrap import dedent
-from typing import Optional
+from typing import Any, Optional
 
 from pydantic import BaseModel, ConfigDict, Field
 from pydantic.dataclasses import dataclass
@@ -46,9 +46,10 @@ from .conf import FileLog, Loader, Log, config, get_logger
 from .exceptions import JobException, WorkflowException
 from .job import Job
 from .on import On
+from .params import Param
+from .result import Result
 from .utils import (
-    Param,
-    Result,
+    cut_id,
     delay,
     gen_id,
     get_diff_sec,
@@ -85,13 +86,16 @@ class WorkflowRelease:
         return f"{self.date:%Y-%m-%d %H:%M:%S}"
 
     @classmethod
-    def from_dt(cls, dt: datetime) -> Self:
+    def from_dt(cls, dt: datetime | str) -> Self:
        """Construct WorkflowRelease via datetime object only.
 
         :param dt: A datetime object.
 
         :rtype: Self
         """
+        if isinstance(dt, str):
+            dt: datetime = datetime.fromisoformat(dt)
+
         return cls(
             date=dt,
             offset=0,
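Note on the hunk above: `from_dt` now also accepts an ISO-8601 string and parses it with `datetime.fromisoformat` before building the release. A minimal usage sketch, assuming `WorkflowRelease` is importable from the package's top-level `ddeutil.workflow` namespace (the import path is an assumption, not shown in this diff):

```python
from datetime import datetime

from ddeutil.workflow import WorkflowRelease  # assumed import path

# Both calls should now produce an equivalent release object; the string form
# is parsed with `datetime.fromisoformat` first.
release_a = WorkflowRelease.from_dt(datetime(2024, 1, 1, 0, 0))
release_b = WorkflowRelease.from_dt("2024-01-01 00:00:00")
```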
@@ -123,12 +127,39 @@ class WorkflowRelease:
 
 @dataclass
 class WorkflowQueue:
-    """Workflow Queue object."""
+    """Workflow Queue object that is management of WorkflowRelease objects."""
 
     queue: list[WorkflowRelease] = field(default_factory=list)
     running: list[WorkflowRelease] = field(default_factory=list)
     complete: list[WorkflowRelease] = field(default_factory=list)
 
+    @classmethod
+    def from_list(
+        cls, queue: list[datetime] | list[WorkflowRelease] | None = None
+    ) -> Self:
+        """Construct WorkflowQueue object from an input queue value that passing
+        with list of datetime or list of WorkflowRelease.
+
+        :raise TypeError: If the type of an input queue does not valid.
+
+        :rtype: Self
+        """
+        if queue is None:
+            return cls()
+
+        if isinstance(queue, list):
+
+            if all(isinstance(q, datetime) for q in queue):
+                return cls(queue=[WorkflowRelease.from_dt(q) for q in queue])
+
+            if all(isinstance(q, WorkflowRelease) for q in queue):
+                return cls(queue=queue)
+
+        raise TypeError(
+            "Type of the queue does not valid with WorkflowQueue "
+            "or list of datetime or list of WorkflowRelease."
+        )
+
     @property
     def is_queued(self) -> bool:
         """Return True if it has workflow release object in the queue.
@@ -137,34 +168,35 @@ class WorkflowQueue:
         """
         return len(self.queue) > 0
 
-    def check_queue(self, data: WorkflowRelease) -> bool:
+    def check_queue(self, value: WorkflowRelease) -> bool:
         """Check a WorkflowRelease value already exists in list of tracking
         queues.
 
-        :param data: A workflow release object.
+        :param value: A WorkflowRelease object that want to check it already in
+            queues.
 
         :rtype: bool
         """
         return (
-            (data in self.queue)
-            or (data in self.running)
-            or (data in self.complete)
+            (value in self.queue)
+            or (value in self.running)
+            or (value in self.complete)
         )
 
-    def push_queue(self, data: WorkflowRelease) -> Self:
+    def push_queue(self, value: WorkflowRelease) -> Self:
         """Push data to the queue."""
-        heappush(self.queue, data)
+        heappush(self.queue, value)
         return self
 
-    def push_running(self, data: WorkflowRelease) -> Self:
+    def push_running(self, value: WorkflowRelease) -> Self:
         """Push data to the running."""
-        heappush(self.running, data)
+        heappush(self.running, value)
         return self
 
-    def remove_running(self, data: WorkflowRelease) -> Self:
+    def remove_running(self, value: WorkflowRelease) -> Self:
         """Remove data on the running if it exists."""
-        if data in self.running:
-            self.running.remove(data)
+        if value in self.running:
+            self.running.remove(value)
 
 
 class Workflow(BaseModel):
@@ -283,7 +315,7 @@ class Workflow(BaseModel):
         return dedent(value)
 
     @field_validator("on", mode="after")
-    def __on_no_dup__(cls, value: list[On]) -> list[On]:
+    def __on_no_dup_and_reach_limit__(cls, value: list[On]) -> list[On]:
         """Validate the on fields should not contain duplicate values and if it
         contain the every minute value more than one value, it will remove to
         only one value.
@@ -306,6 +338,12 @@ class Workflow(BaseModel):
         # "If it has every minute cronjob on value, it should has only "
         # "one value in the on field."
         # )
+
+        if len(set_ons) > config.max_on_per_workflow:
+            raise ValueError(
+                f"The number of the on should not more than "
+                f"{config.max_on_per_workflow} crontab."
+            )
         return value
 
     @model_validator(mode="after")
@@ -404,22 +442,21 @@ class Workflow(BaseModel):
         self,
         release: datetime | WorkflowRelease,
         params: DictData,
-        run_id: str | None = None,
         *,
+        run_id: str | None = None,
         log: type[Log] = None,
-        queue: WorkflowQueue | list[datetime] | None = None,
+        queue: (
+            WorkflowQueue | list[datetime] | list[WorkflowRelease] | None
+        ) = None,
     ) -> Result:
         """Release the workflow execution with overriding parameter with the
         release templating that include logical date (release date), execution
         date, or running id to the params.
 
         This method allow workflow use log object to save the execution
-        result to log destination like file log to local `/logs` directory.
+        result to log destination like file log to the local `/logs` directory.
 
-        I will add sleep with 0.15 seconds on every step that interact with
-        the queue object.
-
-        :param release: A release datetime.
+        :param release: A release datetime or WorkflowRelease object.
         :param params: A workflow parameter that pass to execute method.
         :param queue: A list of release time that already queue.
         :param run_id: A workflow running ID for this release.
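Because the `*` marker moved ahead of `run_id`, callers that passed `run_id` positionally to `release` will break on upgrade. A sketch of the new call shape only; `wf` stands for any already-constructed `Workflow` instance and the import path is assumed:

```python
from datetime import datetime

from ddeutil.workflow import Workflow  # assumed import path


def release_once(wf: Workflow):
    # run_id is keyword-only now (log and queue already were); queue additionally
    # accepts a list of WorkflowRelease objects.
    return wf.release(
        datetime(2024, 1, 1, 0, 0),          # or a WorkflowRelease object
        params={},
        run_id="manual-release",
        queue=[datetime(2024, 1, 1, 0, 5)],
    )
```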
@@ -430,23 +467,23 @@
         """
         log: type[Log] = log or FileLog
         run_id: str = run_id or gen_id(self.name, unique=True)
+        rs_release: Result = Result(run_id=run_id)
 
         # VALIDATE: Change queue value to WorkflowQueue object.
-        if queue is None:
-            queue: WorkflowQueue = WorkflowQueue()
-        elif isinstance(queue, list):
-            queue: WorkflowQueue = WorkflowQueue(queue=queue)
+        if queue is None or isinstance(queue, list):
+            queue: WorkflowQueue = WorkflowQueue.from_list(queue)
 
         # VALIDATE: Change release value to WorkflowRelease object.
         if isinstance(release, datetime):
             release: WorkflowRelease = WorkflowRelease.from_dt(release)
 
         logger.debug(
-            f"({run_id}) [RELEASE]: {self.name!r} : "
-            f"Closely to run >> {release.date:%Y-%m-%d %H:%M:%S}"
+            f"({cut_id(run_id)}) [RELEASE]: {self.name!r} : Start release - "
+            f"{release.date:%Y-%m-%d %H:%M:%S}"
         )
 
-        # NOTE: Release parameter that use to change if params has templating.
+        # NOTE: Release parameters that use to templating on the schedule
+        # config data.
         release_params: DictData = {
             "release": {
                 "logical_date": release.date,
@@ -456,14 +493,14 @@
             }
         }
 
-        # WARNING: Re-create workflow object that use new running workflow ID.
+        # NOTE: Execute workflow with templating params from release mapping.
         rs: Result = self.execute(
             params=param2template(params, release_params),
             run_id=run_id,
         )
         logger.debug(
-            f"({run_id}) [RELEASE]: {self.name!r} : "
-            f"End release {release.date:%Y-%m-%d %H:%M:%S}"
+            f"({cut_id(run_id)}) [RELEASE]: {self.name!r} : End release - "
+            f"{release.date:%Y-%m-%d %H:%M:%S}"
         )
 
         rs.set_parent_run_id(run_id)
@@ -485,16 +522,16 @@
         queue.remove_running(release)
         heappush(queue.complete, release)
 
-        return Result(
+        context: dict[str, Any] = rs.context
+        context.pop("params")
+
+        return rs_release.catch(
             status=0,
             context={
                 "params": params,
-                "release": {
-                    "status": "success",
-                    "logical_date": release.date,
-                },
+                "release": {"status": "success", "logical_date": release.date},
+                "outputs": context,
             },
-            run_id=run_id,
         )
 
     def queue_poking(
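The return value assembled above also changes shape: instead of a fresh `Result`, the method now reports through a `Result` created up front (`rs_release`) and nests the execution context under an `outputs` key with its `params` entry popped out. A sketch of the resulting context layout, using placeholder values:

```python
# Placeholder values only; the real entries come from the executed workflow.
expected_release_context = {
    "params": {},  # the params passed into release()
    "release": {"status": "success", "logical_date": "<datetime>"},
    "outputs": {"jobs": {}},  # rs.context with "params" removed
}
```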
@@ -503,14 +540,20 @@
         end_date: datetime,
         queue: WorkflowQueue,
         log: type[Log],
+        *,
+        force_run: bool = False,
     ) -> WorkflowQueue:
         """Generate queue of datetime from the cron runner that initialize from
         the on field. with offset value.
 
-        :param offset:
-        :param end_date:
-        :param queue:
-        :param log:
+        :param offset: A offset in second unit for time travel.
+        :param end_date: An end datetime object.
+        :param queue: A workflow queue object.
+        :param log: A log class that want to making log object.
+        :param force_run: A flag that allow to release workflow if the log with
+            that release was pointed.
+
+        :rtype: WorkflowQueue
         """
         for on in self.on:
 
@@ -518,6 +561,7 @@
                 get_dt_now(tz=config.tz, offset=offset).replace(microsecond=0)
             )
 
+            # NOTE: Skip this runner date if it more than the end date.
            if runner.date > end_date:
                 continue
 
@@ -529,8 +573,9 @@
                 type="poking",
             )
 
-            while queue.check_queue(data=workflow_release) or (
+            while queue.check_queue(workflow_release) or (
                 log.is_pointed(name=self.name, release=workflow_release.date)
+                and not force_run
             ):
                 workflow_release = WorkflowRelease(
                     date=runner.next,
@@ -543,36 +588,50 @@
             if runner.date > end_date:
                 continue
 
+            # NOTE: Push the WorkflowRelease object to queue.
             queue.push_queue(workflow_release)
+
         return queue
 
     def poke(
         self,
         start_date: datetime | None = None,
         params: DictData | None = None,
+        *,
         run_id: str | None = None,
         periods: int = 1,
-        *,
         log: Log | None = None,
+        force_run: bool = False,
+        timeout: int = 1800,
     ) -> list[Result]:
-        """Poke workflow with the ``on`` field with threading executor pool for
-        executing with all its schedules that was set on the `on` value.
-        This method will observe its schedule that nearing to run with the
+        """Poke this workflow with start datetime value that passing to its
+        ``on`` field with threading executor pool for executing with all its
+        schedules that was set on the `on` value.
+
+        This method will observe its schedule that nearing to run with the
         ``self.release()`` method.
 
         :param start_date: A start datetime object.
         :param params: A parameters that want to pass to the release method.
         :param run_id: A workflow running ID for this poke.
-        :param periods: A periods of minutes value to running poke.
+        :param periods: A periods in minutes value that use to run this poking.
         :param log: A log object that want to use on this poking process.
+        :param force_run: A flag that allow to release workflow if the log with
+            that release was pointed.
+        :param timeout: A second value for timeout while waiting all futures
+            run completely.
 
         :rtype: list[Result]
+        :return: A list of all results that return from ``self.release`` method.
         """
+        log: type[Log] = log or FileLog
+        run_id: str = run_id or gen_id(self.name, unique=True)
+
         # NOTE: If this workflow does not set the on schedule, it will return
         # empty result.
         if len(self.on) == 0:
             logger.info(
-                f"({run_id}) [POKING]: {self.name!r} does not have any "
+                f"({cut_id(run_id)}) [POKING]: {self.name!r} does not have any "
                 f"schedule to run."
             )
             return []
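`poke` picks up two new keyword-only options, `force_run` and `timeout`, and `run_id`/`periods` also become keyword-only because the `*` marker moved up. A sketch of the extended call, with `wf` again standing for an already-constructed `Workflow` instance:

```python
from datetime import datetime

from ddeutil.workflow import Workflow  # assumed import path


def poke_once(wf: Workflow):
    return wf.poke(
        start_date=datetime(2024, 1, 1, 0, 0),
        params={},
        periods=1,        # minutes of schedule to generate into the queue
        force_run=True,   # release even when a log for that release already exists
        timeout=600,      # seconds to wait for all submitted release futures
    )
```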
@@ -592,82 +651,97 @@
             start_date: datetime = current_date
             offset: float = 0
 
+        # NOTE: End date is use to stop generate queue with an input periods
+        # value.
         end_date: datetime = start_date + timedelta(minutes=periods)
 
-        log: type[Log] = log or FileLog
-        run_id: str = run_id or gen_id(self.name, unique=True)
         logger.info(
-            f"({run_id}) [POKING]: Start Poking: {self.name!r} from "
+            f"({cut_id(run_id)}) [POKING]: Start Poking: {self.name!r} from "
             f"{start_date:%Y-%m-%d %H:%M:%S} to {end_date:%Y-%m-%d %H:%M:%S}"
         )
 
-        params: DictData = params or {}
-        workflow_queue: WorkflowQueue = WorkflowQueue()
+        params: DictData = {} if params is None else params
         results: list[Result] = []
-        futures: list[Future] = []
 
+        # NOTE: Create empty WorkflowQueue object.
+        wf_queue: WorkflowQueue = WorkflowQueue()
+
+        # NOTE: Make queue to the workflow queue object.
         self.queue_poking(
-            offset, end_date=end_date, queue=workflow_queue, log=log
+            offset,
+            end_date=end_date,
+            queue=wf_queue,
+            log=log,
+            force_run=force_run,
         )
-
-        if len(workflow_queue.queue) == 0:
+        if not wf_queue.is_queued:
             logger.info(
-                f"({run_id}) [POKING]: {self.name!r} does not have any "
-                f"queue to run."
+                f"({cut_id(run_id)}) [POKING]: {self.name!r} does not have "
+                f"any queue."
             )
             return []
 
+        # NOTE: Start create the thread pool executor for running this poke
+        # process.
         with ThreadPoolExecutor(
             max_workers=config.max_poking_pool_worker,
-            thread_name_prefix="workflow_poking_",
+            thread_name_prefix="wf_poking_",
         ) as executor:
 
-            while workflow_queue.is_queued:
+            futures: list[Future] = []
+
+            while wf_queue.is_queued:
+
+                # NOTE: Pop the latest WorkflowRelease object from queue.
+                release: WorkflowRelease = heappop(wf_queue.queue)
 
-                wf_release: WorkflowRelease = heappop(workflow_queue.queue)
                 if (
-                    wf_release.date - get_dt_now(tz=config.tz, offset=offset)
+                    release.date - get_dt_now(tz=config.tz, offset=offset)
                 ).total_seconds() > 60:
                     logger.debug(
-                        f"({run_id}) [POKING]: Waiting because the latest "
-                        f"release has diff time more than 60 seconds "
+                        f"({cut_id(run_id)}) [POKING]: Wait because the latest "
+                        f"release has diff time more than 60 seconds ..."
                     )
-                    heappush(workflow_queue.queue, wf_release)
+                    heappush(wf_queue.queue, release)
                     delay(60)
+
+                    # WARNING: I already call queue poking again because issue
+                    # about the every minute crontab.
                     self.queue_poking(
-                        offset, end_date, queue=workflow_queue, log=log
+                        offset,
+                        end_date,
+                        queue=wf_queue,
+                        log=log,
+                        force_run=force_run,
                     )
                     continue
 
-                # NOTE: Push the workflow release to running queue
-                workflow_queue.push_running(wf_release)
+                # NOTE: Push the latest WorkflowRelease to the running queue.
+                wf_queue.push_running(release)
 
                 futures.append(
                     executor.submit(
                         self.release,
-                        release=wf_release,
+                        release=release,
                         params=params,
                         log=log,
-                        queue=workflow_queue,
+                        queue=wf_queue,
                     )
                 )
 
                 self.queue_poking(
-                    offset, end_date, queue=workflow_queue, log=log
+                    offset,
+                    end_date,
+                    queue=wf_queue,
+                    log=log,
+                    force_run=force_run,
                 )
 
             # WARNING: This poking method does not allow to use fail-fast
             # logic to catching parallel execution result.
-            for future in as_completed(futures):
+            for future in as_completed(futures, timeout=timeout):
                 results.append(future.result().set_parent_run_id(run_id))
 
-        while len(workflow_queue.running) > 0:  # pragma: no cov
-            logger.warning(
-                f"({run_id}) [POKING]: Running does empty when poking "
-                f"process was finishing."
-            )
-            delay(10)
-
         return results
 
     def execute_job(
@@ -685,6 +759,8 @@
         model. It different with ``self.execute`` because this method run only
         one job and return with context of this job data.
 
+        :raise WorkflowException: If execute with not exist job's ID.
+        :raise WorkflowException: If the job execution raise JobException.
         :raise NotImplementedError: If set raise_error argument to False.
 
         :param job_id: A job ID that want to execute.
@@ -694,8 +770,11 @@
             if it get exception from job execution.
 
         :rtype: Result
+        :return: Return the result object that receive the job execution result
+            context.
         """
         run_id: str = run_id or gen_id(self.name, unique=True)
+        rs: Result = Result(run_id=run_id)
 
         # VALIDATE: check a job ID that exists in this workflow or not.
         if job_id not in self.jobs:
@@ -704,10 +783,14 @@
                 f"workflow."
             )
 
-        logger.info(f"({run_id}) [WORKFLOW]: Start execute job: {job_id!r}")
+        logger.info(
+            f"({cut_id(run_id)}) [WORKFLOW]: Start execute job: {job_id!r}"
+        )
 
         # IMPORTANT:
-        # Change any job running IDs to this workflow running ID.
+        # This execution change all job running IDs to the current workflow
+        # execution running ID (with passing run_id to the job execution
+        # argument).
         #
         try:
             job: Job = self.jobs[job_id]
@@ -717,7 +800,8 @@
             )
         except JobException as err:
             logger.error(
-                f"({run_id}) [WORKFLOW]: {err.__class__.__name__}: {err}"
+                f"({cut_id(run_id)}) [WORKFLOW]: {err.__class__.__name__}: "
+                f"{err}"
             )
             if raise_error:
                 raise WorkflowException(
@@ -727,7 +811,7 @@
                     "Handle error from the job execution does not support yet."
                 ) from None
 
-        return Result(status=0, context=params).set_run_id(run_id)
+        return rs.catch(status=0, context=params)
 
     def execute(
         self,
@@ -761,7 +845,9 @@
         :rtype: Result
         """
         run_id: str = run_id or gen_id(self.name, unique=True)
-        logger.info(f"({run_id}) [WORKFLOW]: Start Execute: {self.name!r} ...")
+        logger.info(
+            f"({cut_id(run_id)}) [WORKFLOW]: Start Execute: {self.name!r} ..."
+        )
 
         # NOTE: I use this condition because this method allow passing empty
         # params and I do not want to create new dict object.
@@ -771,7 +857,7 @@
         # NOTE: It should not do anything if it does not have job.
         if not self.jobs:
             logger.warning(
-                f"({run_id}) [WORKFLOW]: This workflow: {self.name!r} "
+                f"({cut_id(run_id)}) [WORKFLOW]: This workflow: {self.name!r} "
                 f"does not have any jobs"
             )
             return rs.catch(status=0, context=params)
@@ -846,14 +932,14 @@
         not_timeout_flag: bool = True
         timeout: int = timeout or config.max_job_exec_timeout
         logger.debug(
-            f"({run_id}) [WORKFLOW]: Run {self.name} with threading executor."
+            f"({cut_id(run_id)}) [WORKFLOW]: Run {self.name!r} with threading."
        )
 
         # IMPORTANT: The job execution can run parallel and waiting by
         # needed.
         with ThreadPoolExecutor(
             max_workers=config.max_job_parallel,
-            thread_name_prefix="workflow_exec_threading_",
+            thread_name_prefix="wf_exec_threading_",
         ) as executor:
             futures: list[Future] = []
 
@@ -863,7 +949,7 @@
                 job_id: str = job_queue.get()
                 job: Job = self.jobs[job_id]
 
-                if any(need not in context["jobs"] for need in job.needs):
+                if not job.check_needs(context["jobs"]):
                     job_queue.task_done()
                     job_queue.put(job_id)
                     time.sleep(0.25)
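The dependency check in both executors is delegated to the new `Job.check_needs` method. Functionally it replaces an inline test equivalent to the sketch below, so a job is only submitted once every job listed in its `needs` has an entry in the shared jobs context:

```python
# Plain-data sketch of the old inline check that check_needs replaces.
needs = ["extract", "transform"]
jobs_context = {"extract": {"status": "done"}}

ready = all(need in jobs_context for need in needs)  # False: "transform" not yet done
```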
@@ -872,10 +958,13 @@
                 # NOTE: Start workflow job execution with deep copy context data
                 # before release.
                 #
+                # Context:
+                # ---
                 # {
                 # 'params': <input-params>,
-                # 'jobs': {},
+                # 'jobs': { <job's-id>: ... },
                 # }
+                #
                 futures.append(
                     executor.submit(
                         self.execute_job,
@@ -895,8 +984,8 @@
 
             for future in as_completed(futures, timeout=thread_timeout):
                 if err := future.exception():
-                    logger.error(f"({run_id}) [WORKFLOW]: {err}")
-                    raise WorkflowException(f"{err}")
+                    logger.error(f"({cut_id(run_id)}) [WORKFLOW]: {err}")
+                    raise WorkflowException(str(err))
 
                 # NOTE: This getting result does not do anything.
                 future.result()
@@ -908,12 +997,10 @@
 
         # NOTE: Raise timeout error.
         logger.warning(
-            f"({run_id}) [WORKFLOW]: Execution of workflow, {self.name!r}, "
-            f"was timeout"
-        )
-        raise WorkflowException(
-            f"Execution of workflow: {self.name} was timeout"
+            f"({cut_id(run_id)}) [WORKFLOW]: Execution: {self.name!r} "
+            f"was timeout."
         )
+        raise WorkflowException(f"Execution: {self.name!r} was timeout.")
 
     def __exec_non_threading(
         self,
@@ -940,8 +1027,8 @@
         not_timeout_flag: bool = True
         timeout: int = timeout or config.max_job_exec_timeout
         logger.debug(
-            f"({run_id}) [WORKFLOW]: Run {self.name} with non-threading "
-            f"executor."
+            f"({cut_id(run_id)}) [WORKFLOW]: Run {self.name!r} with "
+            f"non-threading."
         )
 
         while not job_queue.empty() and (
@@ -951,7 +1038,7 @@
             job: Job = self.jobs[job_id]
 
             # NOTE: Waiting dependency job run successful before release.
-            if any(need not in context["jobs"] for need in job.needs):
+            if not job.check_needs(context["jobs"]):
                 job_queue.task_done()
                 job_queue.put(job_id)
                 time.sleep(0.075)
@@ -980,11 +1067,10 @@
 
         # NOTE: Raise timeout error.
         logger.warning(
-            f"({run_id}) [WORKFLOW]: Execution of workflow was timeout"
-        )
-        raise WorkflowException(
-            f"Execution of workflow: {self.name} was timeout"
+            f"({cut_id(run_id)}) [WORKFLOW]: Execution: {self.name!r} "
+            f"was timeout."
         )
+        raise WorkflowException(f"Execution: {self.name!r} was timeout.")
 
 
 @dataclass(config=ConfigDict(arbitrary_types_allowed=True))
@@ -999,7 +1085,7 @@ class WorkflowTaskData:
     alias: str
     workflow: Workflow
     runner: CronRunner
-    params: DictData
+    params: DictData = field(default_factory=dict)
 
     def release(
         self,
@@ -1009,20 +1095,24 @@
         *,
         waiting_sec: int = 60,
         sleep_interval: int = 15,
-    ) -> None:  # pragma: no cov
-        """Workflow task release that use the same logic of `workflow.release`
-        method.
+    ) -> Result:  # pragma: no cov
+        """Release the workflow task data that use the same logic of
+        `workflow.release` method but use different the queue object for
+        tracking release datetime to run.
 
-        :param queue:
+        :param queue: A mapping of alias name and list of release datetime.
         :param log: A log object for saving result logging from workflow
             execution process.
         :param run_id: A workflow running ID for this release.
         :param waiting_sec: A second period value that allow workflow execute.
         :param sleep_interval: A second value that want to waiting until time
             to execute.
+
+        :rtype: Result
         """
-        log: Log = log or FileLog
-        run_id: str = run_id or gen_id(self.workflow.name, unique=True)
+        log: type[Log] = log or FileLog
+        run_id: str = run_id or gen_id(self.alias, unique=True)
+        rs_release: Result = Result(run_id=run_id)
         runner: CronRunner = self.runner
 
         # NOTE: get next schedule time that generate from now.
@@ -1035,15 +1125,15 @@
         next_time: datetime = runner.next
 
         logger.debug(
-            f"({run_id}) [CORE]: {self.workflow.name!r} : {runner.cron} : "
-            f"{next_time:%Y-%m-%d %H:%M:%S}"
+            f"({cut_id(run_id)}) [CORE]: {self.workflow.name!r} : "
+            f"{runner.cron} : {next_time:%Y-%m-%d %H:%M:%S}"
         )
         heappush(queue[self.alias], next_time)
         start_sec: float = time.monotonic()
 
         if get_diff_sec(next_time, tz=runner.tz) > waiting_sec:
             logger.debug(
-                f"({run_id}) [WORKFLOW]: {self.workflow.name!r} : "
+                f"({cut_id(run_id)}) [WORKFLOW]: {self.workflow.name!r} : "
                 f"{runner.cron} "
                 f": Does not closely >> {next_time:%Y-%m-%d %H:%M:%S}"
             )
@@ -1053,11 +1143,11 @@
             queue[self.alias].remove(next_time)
 
             time.sleep(0.2)
-            return
+            return rs_release.catch(status=0, context={})
 
         logger.debug(
-            f"({run_id}) [CORE]: {self.workflow.name!r} : {runner.cron} : "
-            f"Closely to run >> {next_time:%Y-%m-%d %H:%M:%S}"
+            f"({cut_id(run_id)}) [CORE]: {self.workflow.name!r} : "
+            f"{runner.cron} : Closely to run >> {next_time:%Y-%m-%d %H:%M:%S}"
        )
 
         # NOTE: Release when the time is nearly to schedule time.
@@ -1065,8 +1155,8 @@
             sleep_interval + 5
         ):
             logger.debug(
-                f"({run_id}) [CORE]: {self.workflow.name!r} : {runner.cron} "
-                f": Sleep until: {duration}"
+                f"({cut_id(run_id)}) [CORE]: {self.workflow.name!r} : "
+                f"{runner.cron} : Sleep until: {duration}"
             )
             time.sleep(15)
 
@@ -1077,6 +1167,9 @@
         release_params: DictData = {
             "release": {
                 "logical_date": next_time,
+                "execute_date": datetime.now(tz=config.tz),
+                "run_id": run_id,
+                "timezone": runner.tz,
             },
         }
 
@@ -1085,8 +1178,8 @@
             params=param2template(self.params, release_params),
         )
         logger.debug(
-            f"({run_id}) [CORE]: {self.workflow.name!r} : {runner.cron} : "
-            f"End release - {next_time:%Y-%m-%d %H:%M:%S}"
+            f"({cut_id(run_id)}) [CORE]: {self.workflow.name!r} : "
+            f"{runner.cron} : End release - {next_time:%Y-%m-%d %H:%M:%S}"
         )
 
         # NOTE: Set parent ID on this result.
@@ -1122,6 +1215,18 @@
         # NOTE: Queue next release date.
         logger.debug(f"[CORE]: {'-' * 100}")
 
+        context: dict[str, Any] = rs.context
+        context.pop("params")
+
+        return rs_release.catch(
+            status=0,
+            context={
+                "params": self.params,
+                "release": {"status": "success", "logical_date": next_time},
+                "outputs": context,
+            },
+        )
+
     def __eq__(self, other: WorkflowTaskData) -> bool:
         """Override equal property that will compare only the same type."""
         if isinstance(other, WorkflowTaskData):