ddeutil-workflow 0.0.6__py3-none-any.whl → 0.0.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,78 @@
+from enum import Enum
+
+from fastapi import APIRouter, Request, status
+from pydantic import BaseModel, ConfigDict, Field
+
+from .log import get_logger
+
+logger = get_logger(__name__)
+workflow_route = APIRouter(prefix="/workflow")
+
+
+@workflow_route.get("/{name}")
+async def get_pipeline(name: str):
+    return {"message": f"getting pipeline {name}"}
+
+
+@workflow_route.get("/{name}/logs")
+async def get_pipeline_log(name: str):
+    return {"message": f"getting pipeline {name} logs"}
+
+
+class JobNotFoundError(Exception):
+    pass
+
+
+schedule_route = APIRouter(prefix="/schedule", tags=["schedule"])
+
+
+class TriggerEnum(str, Enum):
+    interval = "interval"
+    cron = "cron"
+
+
+class Job(BaseModel):
+    model_config = ConfigDict(
+        json_schema_extra={
+            "example": {
+                "func": "example.main:pytest_job",
+                "trigger": "interval",
+                "seconds": 3,
+                "id": "pytest_job",
+            },
+        },
+    )
+    func: str = Field()
+    trigger: TriggerEnum = Field(title="Trigger type")
+    seconds: int = Field(title="Interval in seconds")
+    id: str = Field(title="Job ID")
+
+
+@schedule_route.post(
+    "/", name="scheduler:add_job", status_code=status.HTTP_201_CREATED
+)
+async def add_job(request: Request, job: Job):
+    job = request.app.scheduler.add_job(**job.dict())
+    return {"job": f"{job.id}"}
+
+
+@schedule_route.get("/", name="scheduler:get_jobs", response_model=list)
+async def get_jobs(request: Request):
+    jobs = request.app.scheduler.get_jobs()
+    jobs = [
+        {k: v for k, v in job.__getstate__().items() if k != "trigger"}
+        for job in jobs
+    ]
+    return jobs
+
+
+@schedule_route.delete("/{job_id}", name="scheduler:remove_job")
+async def remove_job(request: Request, job_id: str):
+    try:
+        deleted = request.app.scheduler.remove_job(job_id=job_id)
+        logger.debug(f"Job {job_id} deleted: {deleted}")
+        return {"job": f"{job_id}"}
+    except AttributeError as err:
+        raise JobNotFoundError(
+            f"No job by the id of {job_id} was found"
+        ) from err
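
The routers above are only declared in this new module; they still need to be mounted on an application, and the schedule endpoints expect an APScheduler-style object on request.app.scheduler. A minimal sketch of how they might be wired up and exercised, assuming the module is importable as ddeutil.workflow.route (the new file's path is not shown in this diff) and ignoring the scheduler-backed endpoints:

from fastapi import FastAPI
from fastapi.testclient import TestClient

# Hypothetical import path; the new file's name is not part of this hunk.
from ddeutil.workflow.route import schedule_route, workflow_route

app = FastAPI()
app.include_router(workflow_route)
app.include_router(schedule_route)

client = TestClient(app)
print(client.get("/workflow/my-pipeline").json())
# {'message': 'getting pipeline my-pipeline'}
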
ddeutil/workflow/stage.py CHANGED
@@ -99,9 +99,7 @@ class BaseStage(BaseModel, ABC):
 
         _g: DictData = globals() | params
         try:
-            rs: bool = eval(
-                param2template(self.condition, params, repr_flag=True), _g, {}
-            )
+            rs: bool = eval(param2template(self.condition, params), _g, {})
             if not isinstance(rs, bool):
                 raise TypeError("Return type of condition does not be boolean")
             return not rs
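
The condition check above boils down to: render the template string with the stage parameters, then eval it against a namespace built from globals() | params, and require a boolean result. A stand-in sketch, where render is a trivial substitute for param2template that only knows one hard-coded placeholder:

params = {"count": 3}

def render(expr: str, data: dict) -> str:
    # Stand-in for param2template; substitutes a single known placeholder.
    return expr.replace("${{ count }}", str(data["count"]))

_g = globals() | params
rs = eval(render("${{ count }} > 1", params), _g, {})
if not isinstance(rs, bool):
    raise TypeError("Return type of condition does not be boolean")
print(rs)  # True
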
@@ -113,6 +111,12 @@ class BaseStage(BaseModel, ABC):
 class EmptyStage(BaseStage):
     """Empty stage that do nothing (context equal empty stage) and logging the
     name of stage only to stdout.
+
+    Data Validate:
+        >>> stage = {
+        ...     "name": "Empty stage execution",
+        ...     "echo": "Hello World",
+        ... }
     """
 
     echo: Optional[str] = Field(
@@ -127,7 +131,8 @@ class EmptyStage(BaseStage):
         :param params: A context data that want to add output result. But this
             stage does not pass any output.
         """
-        logging.info(f"[STAGE]: Empty-Execute: {self.name!r}")
+        stm: str = param2template(self.echo, params=params) or "..."
+        logging.info(f"[STAGE]: Empty-Execute: {self.name!r}: " f"( {stm} )")
         return Result(status=0, context={})
 
 
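
Taken together with the Data Validate example added to the docstring above, the EmptyStage changes suggest the following round trip; the model_validate call is an assumption based on the pydantic v2 ConfigDict usage elsewhere in this release:

from ddeutil.workflow.stage import EmptyStage

# The mapping mirrors the docstring's Data Validate example.
stage = EmptyStage.model_validate(
    {"name": "Empty stage execution", "echo": "Hello World"}
)

# Per the execute() change above, this now logs the rendered echo statement
# (or "..." when echo is empty) and still returns Result(status=0, context={}).
rs = stage.execute(params={})
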
@@ -207,8 +212,8 @@ class BashStage(BaseStage):
                 if "\\x00" in rs.stderr
                 else rs.stderr
             )
-            logging.error(f"{err}\nRunning Statement:\n---\n{bash}")
-            raise StageException(f"{err}\nRunning Statement:\n---\n{bash}")
+            logging.error(f"{err}\n\n```bash\n{bash}```")
+            raise StageException(f"{err}\n\n```bash\n{bash}```")
         return Result(
             status=0,
             context={
@@ -341,7 +346,7 @@ class HookStage(BaseStage):
         return rgt[hook.func][hook.tag]
 
     def execute(self, params: DictData) -> Result:
-        """Execute the Task function that already mark registry.
+        """Execute the Hook function that already in the hook registry.
 
         :param params: A parameter that want to pass before run any statement.
         :type params: DictData
@@ -351,12 +356,12 @@ class HookStage(BaseStage):
         if not callable(t_func):
             raise ImportError("Hook caller function does not callable.")
 
-        args: DictData = param2template(self.args, params)
         # VALIDATE: check input task caller parameters that exists before
         # calling.
+        args: DictData = param2template(self.args, params)
         ips = inspect.signature(t_func)
         if any(
-            k not in args
+            (k.removeprefix("_") not in args and k not in args)
             for k in ips.parameters
             if ips.parameters[k].default == Parameter.empty
         ):
@@ -365,11 +370,23 @@ class HookStage(BaseStage):
                 f"does not set to args"
             )
 
+        # NOTE: add '_' prefix if it want to use.
+        for k in ips.parameters:
+            if k.removeprefix("_") in args:
+                args[k] = args.pop(k.removeprefix("_"))
+
         try:
             logging.info(f"[STAGE]: Hook-Execute: {t_func.name}@{t_func.tag}")
             rs: DictData = t_func(**param2template(args, params))
         except Exception as err:
             raise StageException(f"{err.__class__.__name__}: {err}") from err
+
+        # VALIDATE: Check the result type from hook function, it should be dict.
+        if not isinstance(rs, dict):
+            raise StageException(
+                f"Return of hook function: {t_func.name}@{t_func.tag} does not "
+                f"serialize to result model, you should fix it to `dict` type."
+            )
         return Result(status=0, context=rs)
 
 
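
The new underscore handling lets a hook declare reserved-looking parameters (e.g. _exec) while the stage args keep the bare name. A standalone illustration of the remapping loop added above, using a made-up hook function that is not part of the package:

import inspect

def my_hook(source: str, *, _exec: str) -> dict:
    # Made-up hook; not registered anywhere, only here to show the mapping.
    return {"source": source, "exec": _exec}

args = {"source": "db", "exec": "SELECT 1"}
ips = inspect.signature(my_hook)

# Same logic as the loop above: move 'exec' under the '_exec' key so the
# required-parameter check and the call both succeed.
for k in ips.parameters:
    if k.removeprefix("_") in args:
        args[k] = args.pop(k.removeprefix("_"))

print(args)             # {'source': 'db', '_exec': 'SELECT 1'}
print(my_hook(**args))  # {'source': 'db', 'exec': 'SELECT 1'}
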
@@ -385,11 +402,23 @@ class TriggerStage(BaseStage):
         :param params: A parameter data that want to use in this execution.
         :rtype: Result
         """
+        from .exceptions import PipelineException
         from .pipeline import Pipeline
 
-        pipe: Pipeline = Pipeline.from_loader(name=self.trigger, externals={})
-        rs = pipe.execute(params=self.params)
-        return Result(status=0, context=rs)
+        try:
+            # NOTE: Loading pipeline object from trigger name.
+            pipe: Pipeline = Pipeline.from_loader(
+                name=self.trigger, externals={}
+            )
+            rs: Result = pipe.execute(
+                params=param2template(self.params, params)
+            )
+        except PipelineException as err:
+            _alias_stage: str = self.id or self.name
+            raise StageException(
+                f"Trigger Stage: {_alias_stage} get trigger pipeline exception."
+            ) from err
+        return rs
 
 
 # NOTE: Order of parsing stage data
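
The TriggerStage rewrite wraps any PipelineException from the nested pipeline into a StageException while keeping the original error chained via `from err`. A generic illustration of that pattern, using stand-in exception classes rather than the package's own:

class PipelineException(Exception): ...
class StageException(Exception): ...

def run_pipeline() -> dict:
    raise PipelineException("pipeline failed")

def execute(alias: str) -> dict:
    try:
        return run_pipeline()
    except PipelineException as err:
        # `from err` stores the pipeline error in __cause__, so the full
        # chain stays visible in the traceback.
        raise StageException(
            f"Trigger Stage: {alias} get trigger pipeline exception."
        ) from err

try:
    execute("trigger-stage")
except StageException as exc:
    print(type(exc.__cause__).__name__)  # PipelineException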