ddeutil-workflow 0.0.40__py3-none-any.whl → 0.0.42__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ddeutil/workflow/__about__.py CHANGED
@@ -1 +1 @@
-__version__: str = "0.0.40"
+__version__: str = "0.0.42"
ddeutil/workflow/__init__.py CHANGED
@@ -5,17 +5,6 @@
 # ------------------------------------------------------------------------------
 from .__cron import CronJob, CronRunner
 from .__types import Re
-from .audit import (
-    Audit,
-    get_audit,
-)
-from .caller import (
-    ReturnTagFunc,
-    TagFunc,
-    extract_call,
-    make_registry,
-    tag,
-)
 from .conf import (
     Config,
     Loader,
@@ -43,8 +32,10 @@ from .job import (
     local_execute_strategy,
 )
 from .logs import (
+    Audit,
     TraceData,
     TraceLog,
+    get_audit,
     get_dt_tznow,
     get_trace,
 )
@@ -58,7 +49,24 @@ from .params import (
 from .result import (
     Result,
     Status,
-    default_gen_id,
+)
+from .reusables import (
+    FILTERS,
+    FilterFunc,
+    FilterRegistry,
+    ReturnTagFunc,
+    TagFunc,
+    custom_filter,
+    extract_call,
+    get_args_const,
+    has_template,
+    make_filter_registry,
+    make_registry,
+    map_post_filter,
+    not_in_template,
+    param2template,
+    str2template,
+    tag,
 )
 from .scheduler import (
     Schedule,
@@ -77,22 +85,10 @@ from .stages import (
     Stage,
     TriggerStage,
 )
-from .templates import (
-    FILTERS,
-    FilterFunc,
-    FilterRegistry,
-    custom_filter,
-    get_args_const,
-    has_template,
-    make_filter_registry,
-    map_post_filter,
-    not_in_template,
-    param2template,
-    str2template,
-)
 from .utils import (
     batch,
     cross_product,
+    default_gen_id,
     delay,
     filter_func,
     gen_id,
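
Taken together, the `__init__.py` hunks show 0.0.42 dissolving the `audit`, `caller`, and `templates` modules: the audit symbols now live in `logs`, while the caller and template helpers merge into the new `reusables` module. A minimal migration sketch, assuming downstream code imported these symbols from the old submodule paths (the names themselves are unchanged, and top-level `from ddeutil.workflow import ...` imports keep working via the re-exports above):

```python
# 0.0.40 imports that break in 0.0.42:
# from ddeutil.workflow.audit import Audit, get_audit
# from ddeutil.workflow.caller import extract_call, make_registry, tag
# from ddeutil.workflow.templates import has_template, param2template

# 0.0.42 equivalents:
from ddeutil.workflow.logs import Audit, get_audit
from ddeutil.workflow.reusables import (
    extract_call,
    has_template,
    make_registry,
    param2template,
    tag,
)
```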
@@ -20,7 +20,7 @@ from fastapi.middleware.gzip import GZipMiddleware
 from fastapi.responses import UJSONResponse
 
 from ..__about__ import __version__
-from ..conf import config, get_logger
+from ..conf import api_config, config, get_logger
 from ..scheduler import ReleaseThread, ReleaseThreads
 from ..workflow import ReleaseQueue, WorkflowTask
 from .repeat import repeat_at
@@ -96,24 +96,24 @@ async def health():
 
 
 # NOTE Add the jobs and logs routes by default.
-app.include_router(job, prefix=config.prefix_path)
-app.include_router(log, prefix=config.prefix_path)
+app.include_router(job, prefix=api_config.prefix_path)
+app.include_router(log, prefix=api_config.prefix_path)
 
 
 # NOTE: Enable the workflows route.
-if config.enable_route_workflow:
+if api_config.enable_route_workflow:
     from .routes import workflow
 
-    app.include_router(workflow, prefix=config.prefix_path)
+    app.include_router(workflow, prefix=api_config.prefix_path)
 
 
 # NOTE: Enable the schedules route.
-if config.enable_route_schedule:
-    from ..audit import get_audit
+if api_config.enable_route_schedule:
+    from ..logs import get_audit
     from ..scheduler import schedule_task
     from .routes import schedule
 
-    app.include_router(schedule, prefix=config.prefix_path)
+    app.include_router(schedule, prefix=api_config.prefix_path)
 
     @schedule.on_event("startup")
     @repeat_at(cron="* * * * *", delay=2)
@@ -10,8 +10,7 @@ from fastapi import APIRouter, Path, Query
 from fastapi import status as st
 from fastapi.responses import UJSONResponse
 
-from ...audit import get_audit
-from ...logs import get_trace_obj
+from ...logs import get_audit, get_trace_obj
 
 log_route = APIRouter(
     prefix="/logs",
@@ -28,7 +28,7 @@ schedule_route = APIRouter(
 async def get_schedules(name: str):
     """Get schedule object."""
     try:
-        schedule: Schedule = Schedule.from_loader(name=name, externals={})
+        schedule: Schedule = Schedule.from_conf(name=name, extras={})
     except ValueError:
         raise HTTPException(
             status_code=st.HTTP_404_NOT_FOUND,
@@ -51,7 +51,7 @@ async def get_deploy_schedulers(request: Request):
 @schedule_route.get(path="/deploy/{name}", status_code=st.HTTP_200_OK)
 async def get_deploy_scheduler(request: Request, name: str):
     if name in request.state.scheduler:
-        schedule = Schedule.from_loader(name)
+        schedule = Schedule.from_conf(name)
         getter: list[dict[str, dict[str, list[datetime]]]] = []
         for workflow in schedule.workflows:
             getter.append(
@@ -94,7 +94,7 @@ async def add_deploy_scheduler(request: Request, name: str):
 
     # NOTE: Create a pair of workflow and on from schedule model.
     try:
-        schedule: Schedule = Schedule.from_loader(name)
+        schedule: Schedule = Schedule.from_conf(name)
     except ValueError as err:
         request.state.scheduler.remove(name)
         logger.exception(err)
@@ -107,7 +107,7 @@ async def add_deploy_scheduler(request: Request, name: str):
         schedule.tasks(
             start_date_waiting,
             queue=request.state.workflow_queue,
-            externals={},
+            extras={},
         ),
     )
     return {
@@ -124,7 +124,7 @@ async def del_deploy_scheduler(request: Request, name: str):
     # NOTE: Remove current schedule name from the state.
     request.state.scheduler.remove(name)
 
-    schedule: Schedule = Schedule.from_loader(name)
+    schedule: Schedule = Schedule.from_conf(name)
 
     for task in schedule.tasks(datetime.now(tz=config.tz), queue={}):
         if task in request.state.workflow_tasks:
@@ -15,8 +15,8 @@ from fastapi.responses import UJSONResponse
 from pydantic import BaseModel
 
 from ...__types import DictData
-from ...audit import Audit, get_audit
 from ...conf import Loader, get_logger
+from ...logs import Audit, get_audit
 from ...result import Result
 from ...workflow import Workflow
 
@@ -44,7 +44,7 @@ async def get_workflows() -> DictData:
 async def get_workflow_by_name(name: str) -> DictData:
     """Return model of workflow that passing an input workflow name."""
     try:
-        workflow: Workflow = Workflow.from_loader(name=name, externals={})
+        workflow: Workflow = Workflow.from_conf(name=name, extras={})
     except ValueError as err:
         logger.exception(err)
         raise HTTPException(
@@ -69,7 +69,7 @@ class ExecutePayload(BaseModel):
 async def workflow_execute(name: str, payload: ExecutePayload) -> DictData:
     """Return model of workflow that passing an input workflow name."""
     try:
-        workflow: Workflow = Workflow.from_loader(name=name, externals={})
+        workflow: Workflow = Workflow.from_conf(name=name, extras={})
     except ValueError:
         raise HTTPException(
             status_code=st.HTTP_404_NOT_FOUND,
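
Every route hunk above applies the same two renames: the constructor `from_loader` becomes `from_conf`, and its `externals` keyword becomes `extras`. A call-site sketch (the config name `my-workflow` is a placeholder):

```python
from ddeutil.workflow.workflow import Workflow

# 0.0.40:
# workflow = Workflow.from_loader(name="my-workflow", externals={})

# 0.0.42:
workflow = Workflow.from_conf(name="my-workflow", extras={})
```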
ddeutil/workflow/conf.py CHANGED
@@ -12,6 +12,7 @@ from collections.abc import Iterator
 from datetime import timedelta
 from functools import cached_property, lru_cache
 from pathlib import Path
+from typing import Optional, TypeVar
 from zoneinfo import ZoneInfo
 
 from ddeutil.core import str2bool
@@ -20,20 +21,24 @@ from ddeutil.io.paths import glob_files, is_ignored, read_ignore
 
 from .__types import DictData, TupleStr
 
+T = TypeVar("T")
 PREFIX: str = "WORKFLOW"
 
 
 def env(var: str, default: str | None = None) -> str | None:  # pragma: no cov
+    """Get environment variable with uppercase and adding prefix string."""
     return os.getenv(f"{PREFIX}_{var.upper().replace(' ', '_')}", default)
 
 
 __all__: TupleStr = (
+    "api_config",
     "env",
     "get_logger",
     "Config",
     "SimLoad",
     "Loader",
     "config",
+    "dynamic",
 )
 
 
@@ -94,11 +99,19 @@ class Config(BaseConfig):  # pragma: no cov
     # NOTE: Register
     @property
     def regis_call(self) -> list[str]:
-        regis_call_str: str = env("CORE_REGISTRY", ".")
+        """Register Caller module importer str.
+
+        :rtype: list[str]
+        """
+        regis_call_str: str = env("CORE_REGISTRY_CALLER", ".")
         return [r.strip() for r in regis_call_str.split(",")]
 
     @property
     def regis_filter(self) -> list[str]:
+        """Register Filter module.
+
+        :rtype: list[str]
+        """
         regis_filter_str: str = env(
             "CORE_REGISTRY_FILTER", "ddeutil.workflow.templates"
         )
@@ -107,7 +120,7 @@ class Config(BaseConfig):  # pragma: no cov
     # NOTE: Log
     @property
     def log_path(self) -> Path:
-        return Path(env("LOG_PATH", "./logs"))
+        return Path(env("LOG_TRACE_PATH", "./logs"))
 
     @property
     def debug(self) -> bool:
@@ -136,16 +149,15 @@ class Config(BaseConfig):  # pragma: no cov
 
     @property
     def enable_write_log(self) -> bool:
-        return str2bool(env("LOG_ENABLE_WRITE", "false"))
+        return str2bool(env("LOG_TRACE_ENABLE_WRITE", "false"))
 
-    # NOTE: Audit Log
     @property
     def audit_path(self) -> Path:
-        return Path(env("AUDIT_PATH", "./audits"))
+        return Path(env("LOG_AUDIT_PATH", "./audits"))
 
     @property
     def enable_write_audit(self) -> bool:
-        return str2bool(env("AUDIT_ENABLE_WRITE", "false"))
+        return str2bool(env("LOG_AUDIT_ENABLE_WRITE", "false"))
 
     @property
     def log_datetime_format(self) -> str:
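
These `Config` hunks re-key several settings under a `LOG_*` namespace (and the earlier hunk renamed `CORE_REGISTRY` to `CORE_REGISTRY_CALLER`). Since `env()` prepends the `WORKFLOW_` prefix, the full environment variable names change as sketched below; the values shown are the defaults read by the properties above:

```python
import os

# 0.0.42 keys; the 0.0.40 key each one replaces is noted per line.
os.environ["WORKFLOW_LOG_TRACE_PATH"] = "./logs"           # was WORKFLOW_LOG_PATH
os.environ["WORKFLOW_LOG_TRACE_ENABLE_WRITE"] = "false"    # was WORKFLOW_LOG_ENABLE_WRITE
os.environ["WORKFLOW_LOG_AUDIT_PATH"] = "./audits"         # was WORKFLOW_AUDIT_PATH
os.environ["WORKFLOW_LOG_AUDIT_ENABLE_WRITE"] = "false"    # was WORKFLOW_AUDIT_ENABLE_WRITE
os.environ["WORKFLOW_CORE_REGISTRY_CALLER"] = "."          # was WORKFLOW_CORE_REGISTRY
```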
@@ -169,29 +181,12 @@ class Config(BaseConfig):  # pragma: no cov
     def job_default_id(self) -> bool:
         return str2bool(env("CORE_JOB_DEFAULT_ID", "false"))
 
-    # NOTE: Workflow
-    @property
-    def max_job_parallel(self) -> int:
-        max_job_parallel = int(env("CORE_MAX_JOB_PARALLEL", "2"))
-
-        # VALIDATE: the MAX_JOB_PARALLEL value should not less than 0.
-        if max_job_parallel < 0:
-            raise ValueError(
-                f"``WORKFLOW_MAX_JOB_PARALLEL`` should more than 0 but got "
-                f"{max_job_parallel}."
-            )
-        return max_job_parallel
-
-    @property
-    def max_job_exec_timeout(self) -> int:
-        return int(env("CORE_MAX_JOB_EXEC_TIMEOUT", "600"))
-
-    @property
-    def max_poking_pool_worker(self) -> int:
-        return int(env("CORE_MAX_NUM_POKING", "4"))
-
     @property
     def max_on_per_workflow(self) -> int:
+        """The maximum on value that store in workflow model.
+
+        :rtype: int
+        """
         return int(env("CORE_MAX_CRON_PER_WORKFLOW", "5"))
 
     @property
@@ -220,7 +215,9 @@ class Config(BaseConfig):  # pragma: no cov
                 f"timedelta with {stop_boundary_delta_str}."
             ) from err
 
-    # NOTE: API
+
+class APIConfig:
+
     @property
     def prefix_path(self) -> str:
         return env("API_PREFIX_PATH", "/api/v1")
@@ -326,7 +323,8 @@ class SimLoad:
     def filter_yaml(cls, file: Path, name: str | None = None) -> DictData:
         if any(file.suffix.endswith(s) for s in (".yml", ".yaml")):
            values: DictData = YamlFlResolve(file).read()
-            return values.get(name, {}) if name else values
+            if values is not None:
+                return values.get(name, {}) if name else values
         return {}
 
     @cached_property
  @cached_property
@@ -379,6 +377,29 @@ class Loader(SimLoad):
379
377
 
380
378
 
381
379
  config: Config = Config()
380
+ api_config: APIConfig = APIConfig()
381
+
382
+
383
+ def dynamic(
384
+ key: Optional[str] = None,
385
+ *,
386
+ f: Optional[T] = None,
387
+ extras: Optional[DictData] = None,
388
+ ) -> Optional[T]:
389
+ """Dynamic get config if extra value was passed at run-time.
390
+
391
+ :param key: (str) A config key that get from Config object.
392
+ :param f: An inner config function scope.
393
+ :param extras: An extra values that pass at run-time.
394
+ """
395
+ rsx: Optional[T] = extras[key] if extras and key in extras else None
396
+ rs: Optional[T] = f or getattr(config, key, None)
397
+ if rsx is not None and not isinstance(rsx, type(rs)):
398
+ raise TypeError(
399
+ f"Type of config {key!r} from extras: {rsx!r} does not valid "
400
+ f"as config {type(rs)}."
401
+ )
402
+ return rsx or rs
382
403
 
383
404
 
384
405
  @lru_cache
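
This new `dynamic()` helper is the mechanism behind the run-time `extras` overrides used throughout `job.py` below. As implemented, a value in `extras` wins, then the explicit `f` argument, then the attribute on the global `config`, and a mismatched type raises `TypeError`. A usage sketch, assuming the relevant `WORKFLOW_*` variables are unset so config defaults apply:

```python
from ddeutil.workflow.conf import dynamic

dynamic("max_on_per_workflow", extras={"max_on_per_workflow": 10})    # -> 10
dynamic("max_on_per_workflow", f=7)                                   # -> 7
dynamic("max_on_per_workflow")                                        # -> 5 (config default)
dynamic("max_on_per_workflow", extras={"max_on_per_workflow": "10"})  # -> TypeError

# Caveat visible in `return rsx or rs`: a falsy override (False, 0, "")
# silently falls back to the config value rather than taking effect.
```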
ddeutil/workflow/cron.py CHANGED
@@ -72,17 +72,16 @@ class On(BaseModel):
 
     model_config = ConfigDict(arbitrary_types_allowed=True)
 
-    # NOTE: This is fields of the base schedule.
     extras: Annotated[
         DictData,
         Field(
             default_factory=dict,
-            description="An extras mapping parameters",
+            description="An extras mapping parameters.",
         ),
     ]
     cronjob: Annotated[
         CronJob,
-        Field(description="Cron job of this schedule"),
+        Field(description="A Cronjob object of this schedule."),
     ]
     tz: Annotated[
         str,
@@ -93,12 +92,12 @@ class On(BaseModel):
     ] = "Etc/UTC"
 
     @classmethod
-    def from_value(cls, value: DictStr, externals: DictData) -> Self:
+    def from_value(cls, value: DictStr, extras: DictData) -> Self:
         """Constructor from values that will generate crontab by function.
 
         :param value: A mapping value that will generate crontab before create
             schedule model.
-        :param externals: An extras external parameter that will keep in extras.
+        :param extras: An extras parameter that will keep in extras.
         """
         passing: DictStr = {}
         if "timezone" in value:
@@ -106,22 +105,22 @@ class On(BaseModel):
         passing["cronjob"] = interval2crontab(
             **{v: value[v] for v in value if v in ("interval", "day", "time")}
         )
-        return cls(extras=externals | passing.pop("extras", {}), **passing)
+        return cls(extras=extras | passing.pop("extras", {}), **passing)
 
     @classmethod
-    def from_loader(
+    def from_conf(
         cls,
         name: str,
-        externals: DictData | None = None,
+        extras: DictData | None = None,
     ) -> Self:
         """Constructor from the name of config that will use loader object for
         getting the data.
 
         :param name: A name of config that will get from loader.
-        :param externals: An extras external parameter that will keep in extras.
+        :param extras: An extra parameter that will keep in extras.
         """
-        externals: DictData = externals or {}
-        loader: Loader = Loader(name, externals=externals)
+        extras: DictData = extras or {}
+        loader: Loader = Loader(name, externals=extras)
 
         # NOTE: Validate the config type match with current connection model
         if loader.type != cls.__name__:
@@ -138,7 +137,7 @@ class On(BaseModel):
                     if v in ("interval", "day", "time")
                 }
             ),
-            extras=externals | loader_data.pop("extras", {}),
+            extras=extras | loader_data.pop("extras", {}),
             **loader_data,
         )
     )
@@ -149,7 +148,7 @@ class On(BaseModel):
         return cls.model_validate(
             obj=dict(
                 cronjob=loader_data.pop("cronjob"),
-                extras=externals | loader_data.pop("extras", {}),
+                extras=extras | loader_data.pop("extras", {}),
                 **loader_data,
             )
         )
ddeutil/workflow/job.py CHANGED
@@ -3,6 +3,7 @@
 # Licensed under the MIT License. See LICENSE in the project root for
 # license information.
 # ------------------------------------------------------------------------------
+# [x] Use dynamic config
 """Job Model that use for keeping stages and node that running its stages.
 The job handle the lineage of stages and location of execution of stages that
 mean the job model able to define `runs-on` key that allow you to run this
@@ -32,15 +33,15 @@ from pydantic.functional_validators import field_validator, model_validator
 from typing_extensions import Self
 
 from .__types import DictData, DictStr, Matrix, TupleStr
-from .conf import config
+from .conf import dynamic
 from .exceptions import (
     JobException,
     StageException,
     UtilException,
 )
 from .result import Result, Status
+from .reusables import has_template, param2template
 from .stages import Stage
-from .templates import has_template, param2template
 from .utils import cross_product, filter_func, gen_id
 
 MatrixFilter = list[dict[str, Union[str, int]]]
@@ -344,6 +345,10 @@ class Job(BaseModel):
         default_factory=Strategy,
         description="A strategy matrix that want to generate.",
     )
+    extras: DictData = Field(
+        default_factory=dict,
+        description="An extra override config values.",
+    )
 
     @field_validator("desc", mode="after")
     def ___prepare_desc__(cls, value: str) -> str:
@@ -394,15 +399,20 @@ class Job(BaseModel):
         """
         for stage in self.stages:
             if stage_id == (stage.id or ""):
+                if self.extras:
+                    stage.extras = self.extras
                 return stage
         raise ValueError(f"Stage ID {stage_id} does not exists")
 
     def check_needs(
-        self, jobs: dict[str, Any]
+        self,
+        jobs: dict[str, Any],
     ) -> TriggerState:  # pragma: no cov
         """Return True if job's need exists in an input list of job's ID.
 
-        :param jobs: A mapping of job model and its ID.
+        :param jobs: A mapping of job ID and result context.
+
+        :raise NotImplementedError: If the job trigger rule out of scope.
 
         :rtype: TriggerState
         """
@@ -471,7 +481,9 @@ class Job(BaseModel):
             # should use the `re` module to validate eval-string before
             # running.
             rs: bool = eval(
-                param2template(self.condition, params), globals() | params, {}
+                param2template(self.condition, params, extras=self.extras),
+                globals() | params,
+                {},
             )
             if not isinstance(rs, bool):
                 raise TypeError("Return type of condition does not be boolean")
@@ -513,7 +525,9 @@ class Job(BaseModel):
         if "jobs" not in to:
             to["jobs"] = {}
 
-        if self.id is None and not config.job_default_id:
+        if self.id is None and not dynamic(
+            "job_default_id", extras=self.extras
+        ):
             raise JobException(
                 "This job do not set the ID before setting execution output."
             )
@@ -596,7 +610,7 @@ def local_execute_strategy(
     *,
     result: Result | None = None,
     event: Event | None = None,
-    raise_error: bool = False,
+    raise_error: bool | None = None,
 ) -> Result:
     """Local job strategy execution with passing dynamic parameters from the
     workflow execution to strategy matrix.
@@ -694,12 +708,16 @@ def local_execute_strategy(
                 params=context,
                 run_id=result.run_id,
                 parent_run_id=result.parent_run_id,
+                event=event,
             ).context,
             to=context,
         )
     except (StageException, UtilException) as err:
         result.trace.error(f"[JOB]: {err.__class__.__name__}: {err}")
-        if raise_error or config.job_raise_error:
+        do_raise: bool = dynamic(
+            "job_raise_error", f=raise_error, extras=job.extras
+        )
+        if do_raise:
             raise JobException(
                 f"Stage execution error: {err.__class__.__name__}: "
                 f"{err}"
@@ -735,7 +753,7 @@ def local_execute(
     parent_run_id: str | None = None,
     result: Result | None = None,
     event: Event | None = None,
-    raise_error: bool = False,
+    raise_error: bool | None = None,
 ) -> Result:
     """Local job execution with passing dynamic parameters from the workflow
     execution. It will generate matrix values at the first step and run
@@ -865,3 +883,44 @@ def local_execute(
         context.update({"errors": err.to_dict()})
 
     return result.catch(status=status, context=context)
+
+
+def self_hosted_execute(
+    job: Job,
+    params: DictData,
+    *,
+    run_id: str | None = None,
+    parent_run_id: str | None = None,
+    result: Result | None = None,
+    event: Event | None = None,
+    raise_error: bool | None = None,
+) -> Result:  # pragma: no cov
+    result: Result = Result.construct_with_rs_or_id(
+        result,
+        run_id=run_id,
+        parent_run_id=parent_run_id,
+        id_logic=(job.id or "not-set"),
+    )
+
+    if event and event.is_set():
+        return result.catch(status=Status.FAILED)
+
+    import requests
+
+    resp = requests.post(
+        job.runs_on.args.host,
+        data={"job": job.model_dump(), "params": params},
+    )
+
+    if resp.status_code != 200:
+        do_raise: bool = dynamic(
+            "job_raise_error", f=raise_error, extras=job.extras
+        )
+        if do_raise:
+            raise JobException(
+                f"Job execution error from request to self-hosted: "
+                f"{job.runs_on.args.host!r}"
+            )
+
+        return result.catch(status=Status.FAILED)
+    return result.catch(status=Status.SUCCESS)
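
The new `self_hosted_execute` POSTs the serialized job and parameters to whatever HTTP endpoint `job.runs_on.args.host` points at, mapping any non-200 reply to `Status.FAILED` (or a `JobException`, under the same `job_raise_error` resolution as above). A minimal, hypothetical receiver for that POST; the package does not ship this endpoint in this diff, and the shape is inferred purely from the client call:

```python
from fastapi import FastAPI, Request

app = FastAPI()

@app.post("/")
async def run_job(request: Request) -> dict:
    # `requests.post(..., data=...)` sends form-encoded fields, so the job
    # and params arrive as form data; deserialize and execute them here,
    # then reply 200 so the client records Status.SUCCESS.
    form = await request.form()
    return {"received": sorted(form.keys())}
```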