ddeutil-workflow 0.0.20__tar.gz → 0.0.21__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. {ddeutil_workflow-0.0.20/src/ddeutil_workflow.egg-info → ddeutil_workflow-0.0.21}/PKG-INFO +2 -1
  2. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/README.md +1 -0
  3. ddeutil_workflow-0.0.21/src/ddeutil/workflow/__about__.py +1 -0
  4. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/src/ddeutil/workflow/__types.py +1 -0
  5. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/src/ddeutil/workflow/conf.py +3 -0
  6. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/src/ddeutil/workflow/job.py +62 -68
  7. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/src/ddeutil/workflow/stage.py +2 -2
  8. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/src/ddeutil/workflow/workflow.py +120 -72
  9. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21/src/ddeutil_workflow.egg-info}/PKG-INFO +2 -1
  10. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/tests/test_job.py +21 -16
  11. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/tests/test_job_exec_strategy.py +48 -11
  12. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/tests/test_stage.py +1 -0
  13. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/tests/test_workflow.py +21 -12
  14. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/tests/test_workflow_exec.py +52 -0
  15. ddeutil_workflow-0.0.20/src/ddeutil/workflow/__about__.py +0 -1
  16. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/LICENSE +0 -0
  17. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/pyproject.toml +0 -0
  18. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/setup.cfg +0 -0
  19. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/src/ddeutil/workflow/__cron.py +0 -0
  20. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/src/ddeutil/workflow/__init__.py +0 -0
  21. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/src/ddeutil/workflow/api.py +0 -0
  22. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/src/ddeutil/workflow/cli.py +0 -0
  23. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/src/ddeutil/workflow/exceptions.py +0 -0
  24. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/src/ddeutil/workflow/on.py +0 -0
  25. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/src/ddeutil/workflow/repeat.py +0 -0
  26. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/src/ddeutil/workflow/route.py +0 -0
  27. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/src/ddeutil/workflow/scheduler.py +0 -0
  28. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/src/ddeutil/workflow/utils.py +0 -0
  29. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/src/ddeutil_workflow.egg-info/SOURCES.txt +0 -0
  30. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
  31. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/src/ddeutil_workflow.egg-info/entry_points.txt +0 -0
  32. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/src/ddeutil_workflow.egg-info/requires.txt +0 -0
  33. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
  34. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/tests/test__cron.py +0 -0
  35. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/tests/test__regex.py +0 -0
  36. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/tests/test_conf.py +0 -0
  37. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/tests/test_conf_log.py +0 -0
  38. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/tests/test_job_exec_py.py +0 -0
  39. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/tests/test_job_strategy.py +0 -0
  40. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/tests/test_on.py +0 -0
  41. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/tests/test_params.py +0 -0
  42. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/tests/test_scheduler.py +0 -0
  43. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/tests/test_scheduler_tasks.py +0 -0
  44. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/tests/test_stage_exec_bash.py +0 -0
  45. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/tests/test_stage_exec_hook.py +0 -0
  46. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/tests/test_stage_exec_py.py +0 -0
  47. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/tests/test_stage_exec_trigger.py +0 -0
  48. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/tests/test_utils.py +0 -0
  49. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/tests/test_utils_filter.py +0 -0
  50. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/tests/test_utils_params.py +0 -0
  51. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/tests/test_utils_result.py +0 -0
  52. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/tests/test_utils_tag.py +0 -0
  53. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/tests/test_utils_template.py +0 -0
  54. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/tests/test_workflow_exec_hook.py +0 -0
  55. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/tests/test_workflow_exec_needs.py +0 -0
  56. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/tests/test_workflow_job_exec.py +0 -0
  57. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/tests/test_workflow_poke.py +0 -0
  58. {ddeutil_workflow-0.0.20 → ddeutil_workflow-0.0.21}/tests/test_workflow_release.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: ddeutil-workflow
3
- Version: 0.0.20
3
+ Version: 0.0.21
4
4
  Summary: Lightweight workflow orchestration with less dependencies
5
5
  Author-email: ddeutils <korawich.anu@gmail.com>
6
6
  License: MIT
@@ -195,6 +195,7 @@ and do not raise any error to you.
195
195
  | `WORKFLOW_CORE_JOB_RAISE_ERROR` | Core | true | A flag that all job raise JobException from job strategy execution. | |
196
196
  | `WORKFLOW_CORE_MAX_NUM_POKING` | Core | 4 | . | |
197
197
  | `WORKFLOW_CORE_MAX_JOB_PARALLEL` | Core | 2 | The maximum job number that able to run parallel in workflow executor. | |
198
+ | `WORKFLOW_CORE_MAX_JOB_EXEC_TIMEOUT` | Core | 600 | | |
198
199
  | `WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE` | Core | true | A flog that enable generating ID with `md5` algorithm. | |
199
200
  | `WORKFLOW_LOG_DEBUG_MODE` | Log | true | A flag that enable logging with debug level mode. | |
200
201
  | `WORKFLOW_LOG_ENABLE_WRITE` | Log | true | A flag that enable logging object saving log to its destination. | |
@@ -162,6 +162,7 @@ and do not raise any error to you.
162
162
  | `WORKFLOW_CORE_JOB_RAISE_ERROR` | Core | true | A flag that all job raise JobException from job strategy execution. | |
163
163
  | `WORKFLOW_CORE_MAX_NUM_POKING` | Core | 4 | . | |
164
164
  | `WORKFLOW_CORE_MAX_JOB_PARALLEL` | Core | 2 | The maximum job number that able to run parallel in workflow executor. | |
165
+ | `WORKFLOW_CORE_MAX_JOB_EXEC_TIMEOUT` | Core | 600 | | |
165
166
  | `WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE` | Core | true | A flog that enable generating ID with `md5` algorithm. | |
166
167
  | `WORKFLOW_LOG_DEBUG_MODE` | Log | true | A flag that enable logging with debug level mode. | |
167
168
  | `WORKFLOW_LOG_ENABLE_WRITE` | Log | true | A flag that enable logging object saving log to its destination. | |
@@ -0,0 +1 @@
1
+ __version__: str = "0.0.21"
@@ -101,6 +101,7 @@ class Re:
101
101
  #
102
102
  # Examples:
103
103
  # - tasks/function@dummy
104
+ #
104
105
  __re_task_fmt: str = r"""
105
106
  ^
106
107
  (?P<path>[^/@]+)
@@ -100,6 +100,9 @@ class Config:
100
100
 
101
101
  # NOTE: Workflow
102
102
  max_job_parallel: int = int(env("WORKFLOW_CORE_MAX_JOB_PARALLEL", "2"))
103
+ max_job_exec_timeout: int = int(
104
+ env("WORKFLOW_CORE_MAX_JOB_EXEC_TIMEOUT", "600")
105
+ )
103
106
  max_poking_pool_worker: int = int(
104
107
  os.getenv("WORKFLOW_CORE_MAX_NUM_POKING", "4")
105
108
  )
@@ -11,7 +11,6 @@ job.
11
11
  from __future__ import annotations
12
12
 
13
13
  import copy
14
- import time
15
14
  from concurrent.futures import (
16
15
  FIRST_EXCEPTION,
17
16
  Future,
@@ -48,13 +47,13 @@ from .utils import (
48
47
  )
49
48
 
50
49
  logger = get_logger("ddeutil.workflow")
51
- MatrixInclude = list[dict[str, Union[str, int]]]
52
- MatrixExclude = list[dict[str, Union[str, int]]]
50
+ MatrixFilter = list[dict[str, Union[str, int]]]
53
51
 
54
52
 
55
53
  __all__: TupleStr = (
56
54
  "Strategy",
57
55
  "Job",
56
+ "TriggerRules",
58
57
  "make",
59
58
  )
60
59
 
@@ -63,8 +62,8 @@ __all__: TupleStr = (
63
62
  @lru_cache
64
63
  def make(
65
64
  matrix: Matrix,
66
- include: MatrixInclude,
67
- exclude: MatrixExclude,
65
+ include: MatrixFilter,
66
+ exclude: MatrixFilter,
68
67
  ) -> list[DictStr]:
69
68
  """Make a list of product of matrix values that already filter with
70
69
  exclude matrix and add specific matrix with include.
@@ -124,7 +123,7 @@ def make(
124
123
 
125
124
 
126
125
  class Strategy(BaseModel):
127
- """Strategy Model that will combine a matrix together for running the
126
+ """Strategy model that will combine a matrix together for running the
128
127
  special job with combination of matrix data.
129
128
 
130
129
  This model does not be the part of job only because you can use it to
@@ -166,11 +165,11 @@ class Strategy(BaseModel):
166
165
  "A matrix values that want to cross product to possible strategies."
167
166
  ),
168
167
  )
169
- include: MatrixInclude = Field(
168
+ include: MatrixFilter = Field(
170
169
  default_factory=list,
171
170
  description="A list of additional matrix that want to adds-in.",
172
171
  )
173
- exclude: MatrixExclude = Field(
172
+ exclude: MatrixFilter = Field(
174
173
  default_factory=list,
175
174
  description="A list of exclude matrix that want to filter-out.",
176
175
  )
@@ -204,7 +203,7 @@ class Strategy(BaseModel):
204
203
 
205
204
 
206
205
  class TriggerRules(str, Enum):
207
- """Trigger Rules enum object."""
206
+ """Trigger rules enum object."""
208
207
 
209
208
  all_success: str = "all_success"
210
209
  all_failed: str = "all_failed"
@@ -215,8 +214,15 @@ class TriggerRules(str, Enum):
215
214
  none_skipped: str = "none_skipped"
216
215
 
217
216
 
217
+ class RunsOn(str, Enum):
218
+ """Runs-On enum object."""
219
+
220
+ local: str = "local"
221
+ docker: str = "docker"
222
+
223
+
218
224
  class Job(BaseModel):
219
- """Job Pydantic model object (group of stages).
225
+ """Job Pydantic model object (short descripte: a group of stages).
220
226
 
221
227
  This job model allow you to use for-loop that call matrix strategy. If
222
228
  you pass matrix mapping and it able to generate, you will see it running
@@ -327,7 +333,10 @@ class Job(BaseModel):
327
333
  return self
328
334
 
329
335
  def stage(self, stage_id: str) -> Stage:
330
- """Return stage model that match with an input stage ID.
336
+ """Return stage instance that exists in this job via passing an input
337
+ stage ID.
338
+
339
+ :raise ValueError: If an input stage ID does not found on this job.
331
340
 
332
341
  :param stage_id: A stage ID that want to extract from this job.
333
342
  :rtype: Stage
@@ -360,8 +369,12 @@ class Job(BaseModel):
360
369
  }
361
370
  }
362
371
 
372
+ :raise JobException: If the job's ID does not set and the setting
373
+ default job ID flag does not set.
374
+
363
375
  :param output: An output context.
364
376
  :param to: A context data that want to add output result.
377
+
365
378
  :rtype: DictData
366
379
  """
367
380
  if self.id is None and not config.job_default_id:
@@ -387,8 +400,8 @@ class Job(BaseModel):
387
400
  self,
388
401
  strategy: DictData,
389
402
  params: DictData,
390
- run_id: str | None = None,
391
403
  *,
404
+ run_id: str | None = None,
392
405
  event: Event | None = None,
393
406
  ) -> Result:
394
407
  """Job Strategy execution with passing dynamic parameters from the
@@ -398,11 +411,15 @@ class Job(BaseModel):
398
411
  It different with ``self.execute`` because this method run only one
399
412
  strategy and return with context of this strategy data.
400
413
 
414
+ The result of this execution will return result with strategy ID
415
+ that generated from the `gen_id` function with a input strategy value.
416
+
401
417
  :raise JobException: If it has any error from ``StageException`` or
402
418
  ``UtilException``.
403
419
 
404
- :param strategy: A metrix strategy value.
405
- :param params: A dynamic parameters.
420
+ :param strategy: A strategy metrix value that use on this execution.
421
+ This value will pass to the `matrix` key for templating.
422
+ :param params: A dynamic parameters that will deepcopy to the context.
406
423
  :param run_id: A job running ID for this strategy execution.
407
424
  :param event: An manger event that pass to the PoolThreadExecutor.
408
425
 
@@ -433,17 +450,19 @@ class Job(BaseModel):
433
450
  logger.info(f"({run_id}) [JOB]: Skip stage: {stage.iden!r}")
434
451
  continue
435
452
 
436
- logger.info(
437
- f"({run_id}) [JOB]: Start execute the stage: {stage.iden!r}"
438
- )
453
+ logger.info(f"({run_id}) [JOB]: Execute stage: {stage.iden!r}")
439
454
 
440
455
  # NOTE: Logging a matrix that pass on this stage execution.
441
456
  if strategy:
442
- logger.info(f"({run_id}) [JOB]: Matrix: {strategy}")
457
+ logger.info(f"({run_id}) [JOB]: ... Matrix: {strategy}")
443
458
 
444
459
  # NOTE: Force stop this execution if event was set from main
445
460
  # execution.
446
461
  if event and event.is_set():
462
+ error_msg: str = (
463
+ "Job strategy was canceled from event that had set before "
464
+ "strategy execution."
465
+ )
447
466
  return Result(
448
467
  status=1,
449
468
  context={
@@ -453,15 +472,10 @@ class Job(BaseModel):
453
472
  # it will not filter function object from context.
454
473
  # ---
455
474
  # "stages": filter_func(context.pop("stages", {})),
475
+ #
456
476
  "stages": context.pop("stages", {}),
457
- "error": JobException(
458
- "Job strategy was canceled from trigger event "
459
- "that had stopped before execution."
460
- ),
461
- "error_message": (
462
- "Job strategy was canceled from trigger event "
463
- "that had stopped before execution."
464
- ),
477
+ "error": JobException(error_msg),
478
+ "error_message": error_msg,
465
479
  },
466
480
  },
467
481
  run_id=run_id,
@@ -512,7 +526,7 @@ class Job(BaseModel):
512
526
  run_id=run_id,
513
527
  )
514
528
 
515
- # NOTE: Remove the current stage object.
529
+ # NOTE: Remove the current stage object for saving memory.
516
530
  del stage
517
531
 
518
532
  return Result(
@@ -542,7 +556,8 @@ class Job(BaseModel):
542
556
  run_id: str = run_id or gen_id(self.id or "", unique=True)
543
557
  context: DictData = {}
544
558
 
545
- # NOTE: Normal Job execution without parallel strategy.
559
+ # NOTE: Normal Job execution without parallel strategy matrix. It use
560
+ # for-loop to control strategy execution sequentially.
546
561
  if (not self.strategy.is_set()) or self.strategy.max_parallel == 1:
547
562
  for strategy in self.strategy.make():
548
563
  rs: Result = self.execute_strategy(
@@ -566,6 +581,7 @@ class Job(BaseModel):
566
581
  max_workers=self.strategy.max_parallel,
567
582
  thread_name_prefix="job_strategy_exec_",
568
583
  ) as executor:
584
+
569
585
  futures: list[Future] = [
570
586
  executor.submit(
571
587
  self.execute_strategy,
@@ -577,11 +593,8 @@ class Job(BaseModel):
577
593
  for strategy in self.strategy.make()
578
594
  ]
579
595
 
580
- # NOTE: Dynamic catching futures object with fail-fast flag.
581
596
  return (
582
- self.__catch_fail_fast(
583
- event=event, futures=futures, run_id=run_id
584
- )
597
+ self.__catch_fail_fast(event, futures=futures, run_id=run_id)
585
598
  if self.strategy.fail_fast
586
599
  else self.__catch_all_completed(futures=futures, run_id=run_id)
587
600
  )
@@ -593,19 +606,17 @@ class Job(BaseModel):
593
606
  run_id: str,
594
607
  *,
595
608
  timeout: int = 1800,
596
- result_timeout: int = 60,
597
609
  ) -> Result:
598
610
  """Job parallel pool futures catching with fail-fast mode. That will
599
- stop all not done futures if it receive the first exception from all
600
- running futures.
611
+ stop and set event on all not done futures if it receive the first
612
+ exception from all running futures.
601
613
 
602
614
  :param event: An event manager instance that able to set stopper on the
603
- observing thread/process.
615
+ observing multithreading.
604
616
  :param futures: A list of futures.
605
617
  :param run_id: A job running ID from execution.
606
618
  :param timeout: A timeout to waiting all futures complete.
607
- :param result_timeout: A timeout of getting result from the future
608
- instance when it was running completely.
619
+
609
620
  :rtype: Result
610
621
  """
611
622
  rs_final: Result = Result()
@@ -615,9 +626,7 @@ class Job(BaseModel):
615
626
  # NOTE: Get results from a collection of tasks with a timeout that has
616
627
  # the first exception.
617
628
  done, not_done = wait(
618
- futures,
619
- timeout=timeout,
620
- return_when=FIRST_EXCEPTION,
629
+ futures, timeout=timeout, return_when=FIRST_EXCEPTION
621
630
  )
622
631
  nd: str = (
623
632
  f", the strategies do not run is {not_done}" if not_done else ""
@@ -635,11 +644,13 @@ class Job(BaseModel):
635
644
 
636
645
  future: Future
637
646
  for future in done:
647
+
648
+ # NOTE: Handle the first exception from feature
638
649
  if err := future.exception():
639
650
  status: int = 1
640
651
  logger.error(
641
- f"({run_id}) [JOB]: One stage failed with: "
642
- f"{future.exception()}, shutting down this future."
652
+ f"({run_id}) [JOB]: Fail-fast catching:\n\t"
653
+ f"{future.exception()}"
643
654
  )
644
655
  context.update(
645
656
  {
@@ -650,7 +661,7 @@ class Job(BaseModel):
650
661
  continue
651
662
 
652
663
  # NOTE: Update the result context to main job context.
653
- context.update(future.result(timeout=result_timeout).context)
664
+ context.update(future.result().context)
654
665
 
655
666
  return rs_final.catch(status=status, context=context)
656
667
 
@@ -660,45 +671,27 @@ class Job(BaseModel):
660
671
  run_id: str,
661
672
  *,
662
673
  timeout: int = 1800,
663
- result_timeout: int = 60,
664
674
  ) -> Result:
665
675
  """Job parallel pool futures catching with all-completed mode.
666
676
 
667
- :param futures: A list of futures that want to catch all completed
668
- result.
677
+ :param futures: A list of futures.
669
678
  :param run_id: A job running ID from execution.
670
679
  :param timeout: A timeout to waiting all futures complete.
671
- :param result_timeout: A timeout of getting result from the future
672
- instance when it was running completely.
680
+
673
681
  :rtype: Result
674
682
  """
675
683
  rs_final: Result = Result()
676
684
  context: DictData = {}
677
685
  status: int = 0
686
+
678
687
  for future in as_completed(futures, timeout=timeout):
679
688
  try:
680
- context.update(future.result(timeout=result_timeout).context)
681
- except TimeoutError: # pragma: no cov
682
- status = 1
683
- logger.warning(
684
- f"({run_id}) [JOB]: Task is hanging. Attempting to "
685
- f"kill."
686
- )
687
- future.cancel()
688
- time.sleep(0.1)
689
-
690
- stmt: str = (
691
- "Failed to cancel the task."
692
- if not future.cancelled()
693
- else "Task canceled successfully."
694
- )
695
- logger.warning(f"({run_id}) [JOB]: {stmt}")
689
+ context.update(future.result().context)
696
690
  except JobException as err:
697
691
  status = 1
698
692
  logger.error(
699
- f"({run_id}) [JOB]: Get stage exception with "
700
- f"fail-fast does not set;\n{err.__class__.__name__}:\n\t"
701
- f"{err}"
693
+ f"({run_id}) [JOB]: All-completed catching:\n\t"
694
+ f"{err.__class__.__name__}:\n\t{err}"
702
695
  )
703
696
  context.update(
704
697
  {
@@ -706,4 +699,5 @@ class Job(BaseModel):
706
699
  "error_message": f"{err.__class__.__name__}: {err}",
707
700
  },
708
701
  )
702
+
709
703
  return rs_final.catch(status=status, context=context)
@@ -496,6 +496,7 @@ class PyStage(BaseStage):
496
496
 
497
497
  :param output: A output data that want to extract to an output key.
498
498
  :param to: A context data that want to add output result.
499
+
499
500
  :rtype: DictData
500
501
  """
501
502
  # NOTE: The output will fileter unnecessary keys from locals.
@@ -638,8 +639,7 @@ class HookStage(BaseStage):
638
639
 
639
640
  :rtype: Result
640
641
  """
641
- t_func_hook: str = param2template(self.uses, params)
642
- t_func: TagFunc = extract_hook(t_func_hook)()
642
+ t_func: TagFunc = extract_hook(param2template(self.uses, params))()
643
643
 
644
644
  # VALIDATE: check input task caller parameters that exists before
645
645
  # calling.