ddeutil-workflow 0.0.6__py3-none-any.whl → 0.0.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,134 @@
1
+ # ------------------------------------------------------------------------------
2
+ # Copyright (c) 2023 Priyanshu Panwar. All rights reserved.
3
+ # Licensed under the MIT License.
4
+ # This code refs from: https://github.com/priyanshu-panwar/fastapi-utilities
5
+ # ------------------------------------------------------------------------------
6
+ import asyncio
7
+ import logging
8
+ from asyncio import ensure_future
9
+ from datetime import datetime
10
+ from functools import wraps
11
+
12
+ from croniter import croniter
13
+ from starlette.concurrency import run_in_threadpool
14
+
15
+
16
def get_delta(cron: str):
    """Return the number of seconds from now until the next scheduled
    execution of the given cron expression.

    :param cron: A cron-style string, eg. '0 0 * * *'.
    """
    current: datetime = datetime.now()
    schedule = croniter(cron, current)
    return (schedule.get_next(datetime) - current).total_seconds()
23
+
24
+
25
def repeat_at(
    *,
    cron: str,
    logger: logging.Logger = None,
    raise_exceptions: bool = False,
    max_repetitions: int = None,
):
    """Return a decorator that makes the wrapped function execute
    periodically according to the given cron expression.

    :param cron: str
        Cron-style string for periodic execution, eg. '0 0 * * *' every midnight
    :param logger: logging.Logger (default None)
        Logger object to log exceptions
    :param raise_exceptions: bool (default False)
        Whether to raise exceptions or log them
    :param max_repetitions: int (default None)
        Maximum number of times to repeat the function. If None, repeat
        indefinitely.
    """

    def decorator(func):
        is_async: bool = asyncio.iscoroutinefunction(func)

        @wraps(func)
        def wrapper(*_args, **_kwargs):
            repetitions: int = 0
            # NOTE: Validate before scheduling the background loop.
            if not croniter.is_valid(cron):
                raise ValueError("Invalid cron expression")

            async def loop(*args, **kwargs):
                nonlocal repetitions
                while max_repetitions is None or repetitions < max_repetitions:
                    try:
                        # NOTE: Sleep until the next cron-scheduled moment.
                        await asyncio.sleep(get_delta(cron))
                        if is_async:
                            await func(*args, **kwargs)
                        else:
                            await run_in_threadpool(func, *args, **kwargs)
                    except Exception as err:
                        if logger is not None:
                            logger.exception(err)
                        if raise_exceptions:
                            raise err
                    repetitions += 1

            ensure_future(loop(*_args, **_kwargs))

        return wrapper

    return decorator
78
+
79
+
80
def repeat_every(
    *,
    seconds: float,
    wait_first: bool = False,
    logger: logging.Logger = None,
    raise_exceptions: bool = False,
    max_repetitions: int = None,
):
    """Return a decorator that schedules the wrapped function to execute
    repeatedly with a fixed delay of ``seconds`` between runs.

    :param seconds: float
        The number of seconds to wait before executing the function again.
    :param wait_first: bool (default False)
        Whether to wait `seconds` seconds before executing the function for the
        first time.
    :param logger: logging.Logger (default None)
        The logger to use for logging exceptions.
    :param raise_exceptions: bool (default False)
        Whether to raise exceptions instead of logging them.
    :param max_repetitions: int (default None)
        The maximum number of times to repeat the function. If None, the
        function will repeat indefinitely.
    """

    def decorator(func):
        is_async: bool = asyncio.iscoroutinefunction(func)

        @wraps(func)
        async def wrapper(*_args, **_kwargs):
            repetitions: int = 0

            async def loop(*args, **kwargs):
                nonlocal repetitions
                if wait_first:
                    await asyncio.sleep(seconds)
                while max_repetitions is None or repetitions < max_repetitions:
                    try:
                        if is_async:
                            await func(*args, **kwargs)
                        else:
                            await run_in_threadpool(func, *args, **kwargs)
                    except Exception as err:
                        if logger is not None:
                            logger.exception(err)
                        if raise_exceptions:
                            raise err
                    repetitions += 1
                    await asyncio.sleep(seconds)

            ensure_future(loop(*_args, **_kwargs))

        return wrapper

    return decorator
@@ -0,0 +1,78 @@
1
+ from enum import Enum
2
+
3
+ from fastapi import APIRouter, Request, status
4
+ from pydantic import BaseModel, ConfigDict, Field
5
+
6
+ from .log import get_logger
7
+
8
# Module-level logger and the router that groups pipeline workflow endpoints.
logger = get_logger(__name__)
workflow_route = APIRouter(prefix="/workflow")
10
+
11
+
12
@workflow_route.get("/{name}")
async def get_pipeline(name: str):
    """Return a simple acknowledgement payload for the requested pipeline."""
    message: str = f"getting pipeline {name}"
    return {"message": message}
15
+
16
+
17
@workflow_route.get("/{name}/logs")
async def get_pipeline_log(name: str):
    """Return a simple acknowledgement payload for the requested pipeline logs."""
    message: str = f"getting pipeline {name} logs"
    return {"message": message}
20
+
21
+
22
class JobNotFoundError(Exception):
    """Raised when a scheduler job cannot be found by its ID."""
24
+
25
+
26
+ schedule_route = APIRouter(prefix="/schedule", tags=["schedule"])
27
+
28
+
29
class TriggerEnum(str, Enum):
    """Trigger types accepted for a scheduled job."""

    interval = "interval"
    cron = "cron"
32
+
33
+
34
class Job(BaseModel):
    """Request body for registering a job with the application scheduler."""

    model_config = ConfigDict(
        json_schema_extra={
            "example": {
                "func": "example.main:pytest_job",
                "trigger": "interval",
                "seconds": 3,
                "id": "pytest_job",
            },
        },
    )
    # Dotted path of the callable to schedule (see the example above,
    # e.g. "example.main:pytest_job").
    func: str = Field()
    trigger: TriggerEnum = Field(title="Trigger type")
    seconds: int = Field(title="Interval in seconds")
    id: str = Field(title="Job ID")
49
+
50
+
51
@schedule_route.post(
    "/", name="scheduler:add_job", status_code=status.HTTP_201_CREATED
)
async def add_job(request: Request, job: Job):
    """Register a new job on the application scheduler.

    :param request: The incoming request; its app is expected to expose a
        ``scheduler`` attribute.
    :param job: The job definition payload.
    :return: A mapping with the created job's ID.
    """
    # NOTE: ``model_dump`` replaces the pydantic-v1 ``.dict()`` that is
    # deprecated under v2 (this module already uses the v2 ``ConfigDict``).
    # Bind the scheduler's job to a new name instead of shadowing ``job``.
    scheduled = request.app.scheduler.add_job(**job.model_dump())
    return {"job": f"{scheduled.id}"}
57
+
58
+
59
@schedule_route.get("/", name="scheduler:get_jobs", response_model=list)
async def get_jobs(request: Request):
    """List every job registered on the application scheduler.

    The ``trigger`` entry is stripped from each job's state before the
    list is returned.
    """
    states: list = []
    for job in request.app.scheduler.get_jobs():
        state = job.__getstate__()
        states.append(
            {key: value for key, value in state.items() if key != "trigger"}
        )
    return states
67
+
68
+
69
@schedule_route.delete("/{job_id}", name="scheduler:remove_job")
async def remove_job(request: Request, job_id: str):
    """Delete the job with the given ID from the application scheduler.

    :param request: The incoming request; its app is expected to expose a
        ``scheduler`` attribute.
    :param job_id: The ID of the job to remove.
    :raises JobNotFoundError: When the scheduler raises AttributeError for
        the given job ID.
    """
    # NOTE: Keep the try body minimal — only the scheduler call should be
    # guarded; an AttributeError raised by the logging call below must not
    # be misreported as a missing job.
    try:
        deleted = request.app.scheduler.remove_job(job_id=job_id)
    except AttributeError as err:
        raise JobNotFoundError(
            f"No job by the id of {job_id} was found"
        ) from err
    logger.debug(f"Job {job_id} deleted: {deleted}")
    return {"job": f"{job_id}"}
ddeutil/workflow/stage.py CHANGED
@@ -3,6 +3,18 @@
3
3
  # Licensed under the MIT License. See LICENSE in the project root for
4
4
  # license information.
5
5
  # ------------------------------------------------------------------------------
6
+ """Stage Model that use for getting stage data template from Job Model.
7
+ The stage that handle the minimize task that run in some thread (same thread at
8
+ its job owner) that mean it is the lowest executor of a pipeline workflow that
9
+ can tracking logs.
10
+
11
+ The output of stage execution only return 0 status because I do not want to
12
+ handle stage error on this stage model. I think stage model should have a lot of
13
+ usecase and it does not worry when I want to create a new one.
14
+
15
+ Execution --> Ok --> Result with 0
16
+ --> Error --> Raise StageException
17
+ """
6
18
  from __future__ import annotations
7
19
 
8
20
  import contextlib
@@ -15,6 +27,7 @@ import uuid
15
27
  from abc import ABC, abstractmethod
16
28
  from collections.abc import Iterator
17
29
  from dataclasses import dataclass
30
+ from functools import wraps
18
31
  from inspect import Parameter
19
32
  from pathlib import Path
20
33
  from subprocess import CompletedProcess
@@ -22,6 +35,7 @@ from typing import Callable, Optional, Union
22
35
 
23
36
  from ddeutil.core import str2bool
24
37
  from pydantic import BaseModel, Field
38
+ from pydantic.functional_validators import model_validator
25
39
 
26
40
  from .__types import DictData, DictStr, Re, TupleStr
27
41
  from .exceptions import StageException
@@ -36,6 +50,35 @@ from .utils import (
36
50
  )
37
51
 
38
52
 
53
def handler_result(message: str | None = None):
    """Decorate a stage ``execute`` method so that its returned context is
    wrapped into a zero-status ``Result`` and any raised error is re-raised
    as a ``StageException`` tagged with the stage class name.

    :param message: An optional message prefix for the raised exception.
    """
    prefix: str = message or ""

    def decorator(func):

        @wraps(func)
        def wrapped(self: BaseStage, *args, **kwargs):
            try:
                context: DictData = func(self, *args, **kwargs)
                return Result(status=0, context=context)
            except StageException as err:
                # NOTE: Keep the original stage error as the chained cause.
                logging.error(
                    f"({self.run_id}) [STAGE]: {err.__class__.__name__}: {err}"
                )
                raise StageException(
                    f"{self.__class__.__name__}: {prefix}\n---\n\t{err}"
                ) from err
            except Exception as err:
                logging.error(
                    f"({self.run_id}) [STAGE]: {err.__class__.__name__}: {err}"
                )
                raise StageException(
                    f"{self.__class__.__name__}: {prefix}\n---\n\t"
                    f"{err.__class__.__name__}: {err}"
                ) from None

        return wrapped

    return decorator
80
+
81
+
39
82
  class BaseStage(BaseModel, ABC):
40
83
  """Base Stage Model that keep only id and name fields for the stage
41
84
  metadata. If you want to implement any custom stage, you can use this class
@@ -56,6 +99,17 @@ class BaseStage(BaseModel, ABC):
56
99
  default=None,
57
100
  alias="if",
58
101
  )
102
+ run_id: Optional[str] = Field(
103
+ default=None,
104
+ description="A running stage ID.",
105
+ repr=False,
106
+ )
107
+
108
+ @model_validator(mode="after")
109
+ def __prepare_running_id(self):
110
+ if self.run_id is None:
111
+ self.run_id = gen_id(self.name + (self.id or ""), unique=True)
112
+ return self
59
113
 
60
114
  @abstractmethod
61
115
  def execute(self, params: DictData) -> Result:
@@ -74,24 +128,40 @@ class BaseStage(BaseModel, ABC):
74
128
  :param params: A context data that want to add output result.
75
129
  :rtype: DictData
76
130
  """
77
- if self.id:
78
- _id: str = param2template(self.id, params)
79
- elif str2bool(os.getenv("WORKFLOW_CORE_DEFAULT_STAGE_ID", "false")):
80
- _id: str = gen_id(param2template(self.name, params))
81
- else:
131
+ if not (
132
+ self.id
133
+ or str2bool(os.getenv("WORKFLOW_CORE_DEFAULT_STAGE_ID", "false"))
134
+ ):
135
+ logging.debug(
136
+ f"({self.run_id}) [STAGE]: Output does not set because this "
137
+ f"stage does not set ID or default stage ID config flag not be "
138
+ f"True."
139
+ )
82
140
  return params
83
141
 
84
142
  # NOTE: Create stages key to receive an output from the stage execution.
85
143
  if "stages" not in params:
86
144
  params["stages"] = {}
87
145
 
146
+ # TODO: Validate stage id and name should not dynamic with params
147
+ # template. (allow only matrix)
148
+ if self.id:
149
+ _id: str = param2template(self.id, params=params)
150
+ else:
151
+ _id: str = gen_id(param2template(self.name, params=params))
152
+
153
+ # NOTE: Set the output to that stage generated ID.
88
154
  params["stages"][_id] = {"outputs": output}
155
+ logging.debug(
156
+ f"({self.run_id}) [STAGE]: Set output complete with stage ID: {_id}"
157
+ )
89
158
  return params
90
159
 
91
- def is_skip(self, params: DictData | None = None) -> bool:
160
+ def is_skipped(self, params: DictData | None = None) -> bool:
92
161
  """Return true if condition of this stage do not correct.
93
162
 
94
163
  :param params: A parameters that want to pass to condition template.
164
+ :rtype: bool
95
165
  """
96
166
  params: DictData = params or {}
97
167
  if self.condition is None:
@@ -99,20 +169,24 @@ class BaseStage(BaseModel, ABC):
99
169
 
100
170
  _g: DictData = globals() | params
101
171
  try:
102
- rs: bool = eval(
103
- param2template(self.condition, params, repr_flag=True), _g, {}
104
- )
172
+ rs: bool = eval(param2template(self.condition, params), _g, {})
105
173
  if not isinstance(rs, bool):
106
174
  raise TypeError("Return type of condition does not be boolean")
107
175
  return not rs
108
176
  except Exception as err:
109
- logging.error(str(err))
177
+ logging.error(f"({self.run_id}) [STAGE]: {err}")
110
178
  raise StageException(str(err)) from err
111
179
 
112
180
 
113
181
  class EmptyStage(BaseStage):
114
182
  """Empty stage that do nothing (context equal empty stage) and logging the
115
183
  name of stage only to stdout.
184
+
185
+ Data Validate:
186
+ >>> stage = {
187
+ ... "name": "Empty stage execution",
188
+ ... "echo": "Hello World",
189
+ ... }
116
190
  """
117
191
 
118
192
  echo: Optional[str] = Field(
@@ -127,7 +201,10 @@ class EmptyStage(BaseStage):
127
201
  :param params: A context data that want to add output result. But this
128
202
  stage does not pass any output.
129
203
  """
130
- logging.info(f"[STAGE]: Empty-Execute: {self.name!r}")
204
+ logging.info(
205
+ f"({self.run_id}) [STAGE]: Empty-Execute: {self.name!r}: "
206
+ f"( {param2template(self.echo, params=params) or '...'} )"
207
+ )
131
208
  return Result(status=0, context={})
132
209
 
133
210
 
@@ -178,12 +255,17 @@ class BashStage(BaseStage):
178
255
  f.write(bash.replace("\r\n", "\n"))
179
256
 
180
257
  make_exec(f"./{f_name}")
258
+ logging.debug(
259
+ f"({self.run_id}) [STAGE]: Start create `.sh` file and running a "
260
+ f"bash statement."
261
+ )
181
262
 
182
263
  yield [f_shebang, f_name]
183
264
 
184
265
  Path(f"./{f_name}").unlink()
185
266
 
186
- def execute(self, params: DictData) -> Result:
267
+ @handler_result()
268
+ def execute(self, params: DictData) -> DictData:
187
269
  """Execute the Bash statement with the Python build-in ``subprocess``
188
270
  package.
189
271
 
@@ -194,7 +276,7 @@ class BashStage(BaseStage):
194
276
  with self.__prepare_bash(
195
277
  bash=bash, env=param2template(self.env, params)
196
278
  ) as sh:
197
- logging.info(f"[STAGE]: Shell-Execute: {sh}")
279
+ logging.info(f"({self.run_id}) [STAGE]: Shell-Execute: {sh}")
198
280
  rs: CompletedProcess = subprocess.run(
199
281
  sh,
200
282
  shell=False,
@@ -207,21 +289,32 @@ class BashStage(BaseStage):
207
289
  if "\\x00" in rs.stderr
208
290
  else rs.stderr
209
291
  )
210
- logging.error(f"{err}\nRunning Statement:\n---\n{bash}")
211
- raise StageException(f"{err}\nRunning Statement:\n---\n{bash}")
212
- return Result(
213
- status=0,
214
- context={
215
- "return_code": rs.returncode,
216
- "stdout": rs.stdout.rstrip("\n"),
217
- "stderr": rs.stderr.rstrip("\n"),
218
- },
219
- )
292
+ logging.error(
293
+ f"({self.run_id}) [STAGE]: {err}\n\n```bash\n{bash}```"
294
+ )
295
+ raise StageException(
296
+ f"{err.__class__.__name__}: {err}\nRunning Statement:"
297
+ f"\n---\n```bash\n{bash}\n```"
298
+ )
299
+ return {
300
+ "return_code": rs.returncode,
301
+ "stdout": rs.stdout.rstrip("\n"),
302
+ "stderr": rs.stderr.rstrip("\n"),
303
+ }
220
304
 
221
305
 
222
306
  class PyStage(BaseStage):
223
307
  """Python executor stage that running the Python statement that receive
224
308
  globals nad additional variables.
309
+
310
+ Data Validate:
311
+ >>> stage = {
312
+ ... "name": "Python stage execution",
313
+ ... "run": 'print("Hello {x}")',
314
+ ... "vars": {
315
+ ... "x": "BAR",
316
+ ... },
317
+ ... }
225
318
  """
226
319
 
227
320
  run: str = Field(
@@ -254,7 +347,8 @@ class PyStage(BaseStage):
254
347
  params.update({k: _globals[k] for k in params if k in _globals})
255
348
  return params
256
349
 
257
- def execute(self, params: DictData) -> Result:
350
+ @handler_result()
351
+ def execute(self, params: DictData) -> DictData:
258
352
  """Execute the Python statement that pass all globals and input params
259
353
  to globals argument on ``exec`` build-in function.
260
354
 
@@ -266,18 +360,10 @@ class PyStage(BaseStage):
266
360
  globals() | params | param2template(self.vars, params)
267
361
  )
268
362
  _locals: DictData = {}
269
- try:
270
- logging.info(f"[STAGE]: Py-Execute: {uuid.uuid4()}")
271
- exec(param2template(self.run, params), _globals, _locals)
272
- except Exception as err:
273
- raise StageException(
274
- f"{err.__class__.__name__}: {err}\nRunning Statement:\n---\n"
275
- f"{self.run}"
276
- ) from None
277
- return Result(
278
- status=0,
279
- context={"locals": _locals, "globals": _globals},
280
- )
363
+ run: str = param2template(self.run, params)
364
+ logging.info(f"({self.run_id}) [STAGE]: Py-Execute: {uuid.uuid4()}")
365
+ exec(run, _globals, _locals)
366
+ return {"locals": _locals, "globals": _globals}
281
367
 
282
368
 
283
369
  @dataclass
@@ -289,6 +375,34 @@ class HookSearch:
289
375
  tag: str
290
376
 
291
377
 
378
def extract_hook(hook: str) -> Callable[[], TagFunc]:
    """Resolve a hook string into the tagged function factory that was
    registered for it.

    :param hook: A hook value that should match the Task format regex.
    :raises ValueError: When ``hook`` does not match the Task regex.
    :raises NotImplementedError: When the function or tag is not registered.
    :rtype: Callable[[], TagFunc]
    """
    found = Re.RE_TASK_FMT.search(hook)
    if found is None:
        raise ValueError("Task does not match with task format regex.")

    # NOTE: Split the matched hook string into `path`, `func`, and `tag`.
    search: HookSearch = HookSearch(**found.groupdict())

    # NOTE: Registry object should implement on this package only.
    registries: dict[str, Registry] = make_registry(f"{search.path}")
    if search.func not in registries:
        raise NotImplementedError(
            f"``REGISTER-MODULES.{search.path}.registries`` does not "
            f"implement registry: {search.func!r}."
        )

    tagged = registries[search.func]
    if search.tag not in tagged:
        raise NotImplementedError(
            f"tag: {search.tag!r} does not found on registry func: "
            f"``REGISTER-MODULES.{search.path}.registries.{search.func}``"
        )
    return tagged[search.tag]
404
+
405
+
292
406
  class HookStage(BaseStage):
293
407
  """Hook executor that hook the Python function from registry with tag
294
408
  decorator function in ``utils`` module and run it with input arguments.
@@ -309,54 +423,33 @@ class HookStage(BaseStage):
309
423
  """
310
424
 
311
425
  uses: str = Field(
312
- description="A pointer that want to load function from registry",
426
+ description="A pointer that want to load function from registry.",
427
+ )
428
+ args: DictData = Field(
429
+ description="An arguments that want to pass to the hook function.",
430
+ alias="with",
313
431
  )
314
- args: DictData = Field(alias="with")
315
-
316
- @staticmethod
317
- def extract_hook(hook: str) -> Callable[[], TagFunc]:
318
- """Extract Hook string value to hook function.
319
-
320
- :param hook: A hook value that able to match with Task regex.
321
- """
322
- if not (found := Re.RE_TASK_FMT.search(hook)):
323
- raise ValueError("Task does not match with task format regex.")
324
-
325
- # NOTE: Pass the searching hook string to `path`, `func`, and `tag`.
326
- hook: HookSearch = HookSearch(**found.groupdict())
327
-
328
- # NOTE: Registry object should implement on this package only.
329
- rgt: dict[str, Registry] = make_registry(f"{hook.path}")
330
- if hook.func not in rgt:
331
- raise NotImplementedError(
332
- f"``REGISTER-MODULES.{hook.path}.registries`` does not "
333
- f"implement registry: {hook.func!r}."
334
- )
335
-
336
- if hook.tag not in rgt[hook.func]:
337
- raise NotImplementedError(
338
- f"tag: {hook.tag!r} does not found on registry func: "
339
- f"``REGISTER-MODULES.{hook.path}.registries.{hook.func}``"
340
- )
341
- return rgt[hook.func][hook.tag]
342
432
 
343
- def execute(self, params: DictData) -> Result:
344
- """Execute the Task function that already mark registry.
433
+ @handler_result()
434
+ def execute(self, params: DictData) -> DictData:
435
+ """Execute the Hook function that already in the hook registry.
345
436
 
346
437
  :param params: A parameter that want to pass before run any statement.
347
438
  :type params: DictData
348
439
  :rtype: Result
349
440
  """
350
- t_func: TagFunc = self.extract_hook(param2template(self.uses, params))()
441
+ t_func_hook: str = param2template(self.uses, params)
442
+ t_func: TagFunc = extract_hook(t_func_hook)()
351
443
  if not callable(t_func):
352
- raise ImportError("Hook caller function does not callable.")
353
-
354
- args: DictData = param2template(self.args, params)
444
+ raise ImportError(
445
+ f"Hook caller {t_func_hook!r} function does not callable."
446
+ )
355
447
  # VALIDATE: check input task caller parameters that exists before
356
448
  # calling.
449
+ args: DictData = param2template(self.args, params)
357
450
  ips = inspect.signature(t_func)
358
451
  if any(
359
- k not in args
452
+ (k.removeprefix("_") not in args and k not in args)
360
453
  for k in ips.parameters
361
454
  if ips.parameters[k].default == Parameter.empty
362
455
  ):
@@ -364,32 +457,62 @@ class HookStage(BaseStage):
364
457
  f"Necessary params, ({', '.join(ips.parameters.keys())}), "
365
458
  f"does not set to args"
366
459
  )
367
-
368
- try:
369
- logging.info(f"[STAGE]: Hook-Execute: {t_func.name}@{t_func.tag}")
370
- rs: DictData = t_func(**param2template(args, params))
371
- except Exception as err:
372
- raise StageException(f"{err.__class__.__name__}: {err}") from err
373
- return Result(status=0, context=rs)
460
+ # NOTE: add '_' prefix if it want to use.
461
+ for k in ips.parameters:
462
+ if k.removeprefix("_") in args:
463
+ args[k] = args.pop(k.removeprefix("_"))
464
+
465
+ logging.info(
466
+ f"({self.run_id}) [STAGE]: Hook-Execute: "
467
+ f"{t_func.name}@{t_func.tag}"
468
+ )
469
+ rs: DictData = t_func(**param2template(args, params))
470
+
471
+ # VALIDATE:
472
+ # Check the result type from hook function, it should be dict.
473
+ if not isinstance(rs, dict):
474
+ raise TypeError(
475
+ f"Return of hook function: {t_func.name}@{t_func.tag} does "
476
+ f"not serialize to result model, you should fix it to "
477
+ f"`dict` type."
478
+ )
479
+ return rs
374
480
 
375
481
 
376
482
  class TriggerStage(BaseStage):
377
- """Trigger Pipeline execution stage that execute another pipeline object."""
483
+ """Trigger Pipeline execution stage that execute another pipeline object.
484
+
485
+ Data Validate:
486
+ >>> stage = {
487
+ ... "name": "Trigger pipeline stage execution",
488
+ ... "trigger": 'pipeline-name-for-loader',
489
+ ... "params": {
490
+ ... "run-date": "2024-08-01",
491
+ ... "source": "src",
492
+ ... },
493
+ ... }
494
+ """
378
495
 
379
496
  trigger: str = Field(description="A trigger pipeline name.")
380
- params: DictData = Field(default_factory=dict)
497
+ params: DictData = Field(
498
+ default_factory=dict,
499
+ description="A parameter that want to pass to pipeline execution.",
500
+ )
381
501
 
382
- def execute(self, params: DictData) -> Result:
383
- """Trigger execution.
502
+ @handler_result("Raise from trigger pipeline")
503
+ def execute(self, params: DictData) -> DictData:
504
+ """Trigger pipeline execution.
384
505
 
385
506
  :param params: A parameter data that want to use in this execution.
386
507
  :rtype: Result
387
508
  """
388
509
  from .pipeline import Pipeline
389
510
 
390
- pipe: Pipeline = Pipeline.from_loader(name=self.trigger, externals={})
391
- rs = pipe.execute(params=self.params)
392
- return Result(status=0, context=rs)
511
+ # NOTE: Loading pipeline object from trigger name.
512
+ _trigger: str = param2template(self.trigger, params=params)
513
+ pipe: Pipeline = Pipeline.from_loader(name=_trigger, externals={})
514
+ rs: Result = pipe.execute(params=param2template(self.params, params))
515
+ return rs.context
393
516
 
394
517
 
395
518
  # NOTE: Order of parsing stage data