ddeutil-workflow 0.0.41__tar.gz → 0.0.42__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69)
  1. {ddeutil_workflow-0.0.41/src/ddeutil_workflow.egg-info → ddeutil_workflow-0.0.42}/PKG-INFO +6 -9
  2. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/README.md +5 -8
  3. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/pyproject.toml +1 -0
  4. ddeutil_workflow-0.0.42/src/ddeutil/workflow/__about__.py +1 -0
  5. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/api/api.py +7 -7
  6. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/api/routes/schedules.py +5 -5
  7. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/api/routes/workflows.py +2 -2
  8. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/conf.py +39 -28
  9. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/cron.py +12 -13
  10. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/job.py +18 -10
  11. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/logs.py +33 -6
  12. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/reusables.py +16 -13
  13. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/scheduler.py +26 -28
  14. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/stages.py +257 -62
  15. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/utils.py +0 -1
  16. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/workflow.py +111 -74
  17. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42/src/ddeutil_workflow.egg-info}/PKG-INFO +6 -9
  18. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil_workflow.egg-info/SOURCES.txt +0 -2
  19. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/tests/test_conf.py +18 -4
  20. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/tests/test_cron_on.py +20 -20
  21. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/tests/test_job_exec.py +11 -23
  22. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/tests/test_job_exec_strategy.py +6 -18
  23. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/tests/test_job_strategy.py +1 -3
  24. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/tests/test_reusables_call_tag.py +1 -0
  25. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/tests/test_schedule.py +8 -8
  26. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/tests/test_schedule_pending.py +1 -1
  27. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/tests/test_schedule_tasks.py +3 -3
  28. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/tests/test_stage.py +16 -1
  29. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/tests/test_stage_handler_exec.py +30 -15
  30. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/tests/test_workflow.py +18 -9
  31. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/tests/test_workflow_exec.py +105 -83
  32. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/tests/test_workflow_exec_poke.py +5 -5
  33. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/tests/test_workflow_exec_release.py +4 -4
  34. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/tests/test_workflow_task.py +7 -9
  35. ddeutil_workflow-0.0.41/src/ddeutil/workflow/__about__.py +0 -1
  36. ddeutil_workflow-0.0.41/src/ddeutil/workflow/context.py +0 -61
  37. ddeutil_workflow-0.0.41/tests/test_context.py +0 -136
  38. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/LICENSE +0 -0
  39. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/setup.cfg +0 -0
  40. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/__cron.py +0 -0
  41. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/__init__.py +0 -0
  42. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/__types.py +0 -0
  43. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/api/__init__.py +0 -0
  44. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/api/log.py +0 -0
  45. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/api/repeat.py +0 -0
  46. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/api/routes/__init__.py +0 -0
  47. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/api/routes/job.py +0 -0
  48. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/api/routes/logs.py +0 -0
  49. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/exceptions.py +0 -0
  50. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/params.py +0 -0
  51. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/result.py +0 -0
  52. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
  53. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil_workflow.egg-info/requires.txt +0 -0
  54. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
  55. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/tests/test__cron.py +0 -0
  56. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/tests/test__regex.py +0 -0
  57. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/tests/test_job.py +0 -0
  58. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/tests/test_logs_audit.py +0 -0
  59. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/tests/test_logs_trace.py +0 -0
  60. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/tests/test_params.py +0 -0
  61. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/tests/test_release.py +0 -0
  62. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/tests/test_release_queue.py +0 -0
  63. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/tests/test_result.py +0 -0
  64. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/tests/test_reusables_template.py +0 -0
  65. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/tests/test_reusables_template_filter.py +0 -0
  66. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/tests/test_schedule_workflow.py +0 -0
  67. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/tests/test_scheduler_control.py +0 -0
  68. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/tests/test_utils.py +0 -0
  69. {ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/tests/test_workflow_exec_job.py +0 -0
{ddeutil_workflow-0.0.41/src/ddeutil_workflow.egg-info → ddeutil_workflow-0.0.42}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ddeutil-workflow
-Version: 0.0.41
+Version: 0.0.42
 Summary: Lightweight workflow orchestration
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -264,7 +264,7 @@ it will use default value and do not raise any error to you.
 | Name | Component | Default | Description |
 |:-----------------------------|:---------:|:--------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------|
 | **ROOT_PATH** | Core | `.` | The root path of the workflow application. |
-| **REGISTRY** | Core | `.` | List of importable string for the call stage. |
+| **REGISTRY_CALLER** | Core | `.` | List of importable string for the call stage. |
 | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | List of importable string for the filter template. |
 | **CONF_PATH** | Core | `conf` | The config path that keep all template `.yaml` files. |
 | **TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
@@ -272,20 +272,17 @@ it will use default value and do not raise any error to you.
 | **STAGE_RAISE_ERROR** | Core | `false` | A flag that all stage raise StageException from stage execution. |
 | **JOB_DEFAULT_ID** | Core | `false` | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
 | **JOB_RAISE_ERROR** | Core | `true` | A flag that all job raise JobException from job strategy execution. |
-| **MAX_NUM_POKING** | Core | `4` | . |
-| **MAX_JOB_PARALLEL** | Core | `2` | The maximum job number that able to run parallel in workflow executor. |
-| **MAX_JOB_EXEC_TIMEOUT** | Core | `600` | |
 | **MAX_CRON_PER_WORKFLOW** | Core | `5` | |
 | **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | |
 | **GENERATE_ID_SIMPLE_MODE** | Core | `true` | A flog that enable generating ID with `md5` algorithm. |
-| **PATH** | Log | `./logs` | The log path of the workflow saving log. |
+| **TRACE_PATH** | Log | `./logs` | The log path of the workflow saving log. |
 | **DEBUG_MODE** | Log | `true` | A flag that enable logging with debug level mode. |
 | **FORMAT** | Log | `%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d,%(thread)-5d) [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)` | |
 | **FORMAT_FILE** | Log | `{datetime} ({process:5d}, {thread:5d}) {message:120s} ({filename}:{lineno})` | |
 | **DATETIME_FORMAT** | Log | `%Y-%m-%d %H:%M:%S` | |
-| **ENABLE_WRITE** | Log | `false` | |
-| **PATH** | Audit | `./audits` | |
-| **ENABLE_WRITE** | Audit | `true` | A flag that enable logging object saving log to its destination. |
+| **TRACE_ENABLE_WRITE** | Log | `false` | |
+| **AUDIT_PATH** | Log | `./audits` | |
+| **AUDIT_ENABLE_WRITE** | Log | `true` | A flag that enable logging object saving log to its destination. |
 | **MAX_PROCESS** | App | `2` | The maximum process worker number that run in scheduler app module. |
 | **MAX_SCHEDULE_PER_PROCESS** | App | `100` | A schedule per process that run parallel. |
 | **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | A time delta value that use to stop scheduler app in json string format. |
{ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/README.md

@@ -220,7 +220,7 @@ it will use default value and do not raise any error to you.
 | Name | Component | Default | Description |
 |:-----------------------------|:---------:|:--------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------|
 | **ROOT_PATH** | Core | `.` | The root path of the workflow application. |
-| **REGISTRY** | Core | `.` | List of importable string for the call stage. |
+| **REGISTRY_CALLER** | Core | `.` | List of importable string for the call stage. |
 | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | List of importable string for the filter template. |
 | **CONF_PATH** | Core | `conf` | The config path that keep all template `.yaml` files. |
 | **TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
@@ -228,20 +228,17 @@ it will use default value and do not raise any error to you.
 | **STAGE_RAISE_ERROR** | Core | `false` | A flag that all stage raise StageException from stage execution. |
 | **JOB_DEFAULT_ID** | Core | `false` | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
 | **JOB_RAISE_ERROR** | Core | `true` | A flag that all job raise JobException from job strategy execution. |
-| **MAX_NUM_POKING** | Core | `4` | . |
-| **MAX_JOB_PARALLEL** | Core | `2` | The maximum job number that able to run parallel in workflow executor. |
-| **MAX_JOB_EXEC_TIMEOUT** | Core | `600` | |
 | **MAX_CRON_PER_WORKFLOW** | Core | `5` | |
 | **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | |
 | **GENERATE_ID_SIMPLE_MODE** | Core | `true` | A flog that enable generating ID with `md5` algorithm. |
-| **PATH** | Log | `./logs` | The log path of the workflow saving log. |
+| **TRACE_PATH** | Log | `./logs` | The log path of the workflow saving log. |
 | **DEBUG_MODE** | Log | `true` | A flag that enable logging with debug level mode. |
 | **FORMAT** | Log | `%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d,%(thread)-5d) [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)` | |
 | **FORMAT_FILE** | Log | `{datetime} ({process:5d}, {thread:5d}) {message:120s} ({filename}:{lineno})` | |
 | **DATETIME_FORMAT** | Log | `%Y-%m-%d %H:%M:%S` | |
-| **ENABLE_WRITE** | Log | `false` | |
-| **PATH** | Audit | `./audits` | |
-| **ENABLE_WRITE** | Audit | `true` | A flag that enable logging object saving log to its destination. |
+| **TRACE_ENABLE_WRITE** | Log | `false` | |
+| **AUDIT_PATH** | Log | `./audits` | |
+| **AUDIT_ENABLE_WRITE** | Log | `true` | A flag that enable logging object saving log to its destination. |
 | **MAX_PROCESS** | App | `2` | The maximum process worker number that run in scheduler app module. |
 | **MAX_SCHEDULE_PER_PROCESS** | App | `100` | A schedule per process that run parallel. |
 | **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | A time delta value that use to stop scheduler app in json string format. |
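Note: the Core/Log/Audit renames in the two tables above map one-to-one onto the `env()` key changes in `src/ddeutil/workflow/conf.py` later in this diff. A minimal sketch of setting the new names, assuming `env()` prepends the `WORKFLOW_` prefix that conf.py defines as `PREFIX` (old names in comments):

```python
import os

# Renamed in 0.0.42; the WORKFLOW_ prefix is an assumption based on
# `PREFIX: str = "WORKFLOW"` in conf.py.
os.environ["WORKFLOW_CORE_REGISTRY_CALLER"] = "."        # was ..._CORE_REGISTRY
os.environ["WORKFLOW_LOG_TRACE_PATH"] = "./logs"         # was ..._LOG_PATH
os.environ["WORKFLOW_LOG_TRACE_ENABLE_WRITE"] = "false"  # was ..._LOG_ENABLE_WRITE
os.environ["WORKFLOW_LOG_AUDIT_PATH"] = "./audits"       # was ..._AUDIT_PATH
os.environ["WORKFLOW_LOG_AUDIT_ENABLE_WRITE"] = "true"   # was ..._AUDIT_ENABLE_WRITE
```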
{ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/pyproject.toml

@@ -104,6 +104,7 @@ markers = [
     "poke: marks tests as slow by poking (deselect with '-m \"not poke\"')",
     "schedule: marks tests as schedule (deselect with '-m \"not schedule\"')",
     "api: marks tests as api (deselect with '-m \"not api\"')",
+    "asyncio: marks async testcases",
 ]
 console_output_style = "count"
 addopts = [
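Note: the new `asyncio` marker backs the async test cases added in this release (see the `a`-prefixed trace methods in logs.py below). A minimal hypothetical test showing the marker in use, assuming an asyncio-capable pytest runner such as pytest-asyncio:

```python
import asyncio

import pytest


@pytest.mark.asyncio
async def test_async_example():
    # Illustrative only: a coroutine-based test carries this marker so it can
    # be deselected with `-m "not asyncio"` like the other marks above.
    await asyncio.sleep(0)
```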
ddeutil_workflow-0.0.42/src/ddeutil/workflow/__about__.py

@@ -0,0 +1 @@
+__version__: str = "0.0.42"
{ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/api/api.py

@@ -20,7 +20,7 @@ from fastapi.middleware.gzip import GZipMiddleware
 from fastapi.responses import UJSONResponse
 
 from ..__about__ import __version__
-from ..conf import config, get_logger
+from ..conf import api_config, config, get_logger
 from ..scheduler import ReleaseThread, ReleaseThreads
 from ..workflow import ReleaseQueue, WorkflowTask
 from .repeat import repeat_at
@@ -96,24 +96,24 @@ async def health():
 
 
 # NOTE Add the jobs and logs routes by default.
-app.include_router(job, prefix=config.prefix_path)
-app.include_router(log, prefix=config.prefix_path)
+app.include_router(job, prefix=api_config.prefix_path)
+app.include_router(log, prefix=api_config.prefix_path)
 
 
 # NOTE: Enable the workflows route.
-if config.enable_route_workflow:
+if api_config.enable_route_workflow:
     from .routes import workflow
 
-    app.include_router(workflow, prefix=config.prefix_path)
+    app.include_router(workflow, prefix=api_config.prefix_path)
 
 
 # NOTE: Enable the schedules route.
-if config.enable_route_schedule:
+if api_config.enable_route_schedule:
     from ..logs import get_audit
     from ..scheduler import schedule_task
     from .routes import schedule
 
-    app.include_router(schedule, prefix=config.prefix_path)
+    app.include_router(schedule, prefix=api_config.prefix_path)
 
     @schedule.on_event("startup")
     @repeat_at(cron="* * * * *", delay=2)
{ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/api/routes/schedules.py

@@ -28,7 +28,7 @@ schedule_route = APIRouter(
 async def get_schedules(name: str):
     """Get schedule object."""
     try:
-        schedule: Schedule = Schedule.from_loader(name=name, externals={})
+        schedule: Schedule = Schedule.from_conf(name=name, extras={})
     except ValueError:
         raise HTTPException(
             status_code=st.HTTP_404_NOT_FOUND,
@@ -51,7 +51,7 @@ async def get_deploy_schedulers(request: Request):
 @schedule_route.get(path="/deploy/{name}", status_code=st.HTTP_200_OK)
 async def get_deploy_scheduler(request: Request, name: str):
     if name in request.state.scheduler:
-        schedule = Schedule.from_loader(name)
+        schedule = Schedule.from_conf(name)
         getter: list[dict[str, dict[str, list[datetime]]]] = []
         for workflow in schedule.workflows:
             getter.append(
@@ -94,7 +94,7 @@ async def add_deploy_scheduler(request: Request, name: str):
 
     # NOTE: Create a pair of workflow and on from schedule model.
     try:
-        schedule: Schedule = Schedule.from_loader(name)
+        schedule: Schedule = Schedule.from_conf(name)
     except ValueError as err:
         request.state.scheduler.remove(name)
         logger.exception(err)
@@ -107,7 +107,7 @@ async def add_deploy_scheduler(request: Request, name: str):
         schedule.tasks(
             start_date_waiting,
             queue=request.state.workflow_queue,
-            externals={},
+            extras={},
         ),
     )
     return {
@@ -124,7 +124,7 @@ async def del_deploy_scheduler(request: Request, name: str):
     # NOTE: Remove current schedule name from the state.
     request.state.scheduler.remove(name)
 
-    schedule: Schedule = Schedule.from_loader(name)
+    schedule: Schedule = Schedule.from_conf(name)
 
     for task in schedule.tasks(datetime.now(tz=config.tz), queue={}):
         if task in request.state.workflow_tasks:
{ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/api/routes/workflows.py

@@ -44,7 +44,7 @@ async def get_workflows() -> DictData:
 async def get_workflow_by_name(name: str) -> DictData:
     """Return model of workflow that passing an input workflow name."""
     try:
-        workflow: Workflow = Workflow.from_loader(name=name, externals={})
+        workflow: Workflow = Workflow.from_conf(name=name, extras={})
     except ValueError as err:
         logger.exception(err)
         raise HTTPException(
@@ -69,7 +69,7 @@ class ExecutePayload(BaseModel):
 async def workflow_execute(name: str, payload: ExecutePayload) -> DictData:
     """Return model of workflow that passing an input workflow name."""
     try:
-        workflow: Workflow = Workflow.from_loader(name=name, externals={})
+        workflow: Workflow = Workflow.from_conf(name=name, extras={})
     except ValueError:
         raise HTTPException(
             status_code=st.HTTP_404_NOT_FOUND,
{ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/conf.py

@@ -12,6 +12,7 @@ from collections.abc import Iterator
 from datetime import timedelta
 from functools import cached_property, lru_cache
 from pathlib import Path
+from typing import Optional, TypeVar
 from zoneinfo import ZoneInfo
 
 from ddeutil.core import str2bool
@@ -20,6 +21,7 @@ from ddeutil.io.paths import glob_files, is_ignored, read_ignore
 
 from .__types import DictData, TupleStr
 
+T = TypeVar("T")
 PREFIX: str = "WORKFLOW"
 
 
@@ -29,12 +31,14 @@ def env(var: str, default: str | None = None) -> str | None: # pragma: no cov
 
 
 __all__: TupleStr = (
+    "api_config",
     "env",
     "get_logger",
     "Config",
     "SimLoad",
     "Loader",
     "config",
+    "dynamic",
 )
 
 
@@ -99,7 +103,7 @@ class Config(BaseConfig): # pragma: no cov
 
         :rtype: list[str]
         """
-        regis_call_str: str = env("CORE_REGISTRY", ".")
+        regis_call_str: str = env("CORE_REGISTRY_CALLER", ".")
         return [r.strip() for r in regis_call_str.split(",")]
 
     @property
@@ -116,7 +120,7 @@ class Config(BaseConfig): # pragma: no cov
     # NOTE: Log
     @property
     def log_path(self) -> Path:
-        return Path(env("LOG_PATH", "./logs"))
+        return Path(env("LOG_TRACE_PATH", "./logs"))
 
     @property
     def debug(self) -> bool:
@@ -145,16 +149,15 @@ class Config(BaseConfig): # pragma: no cov
 
     @property
     def enable_write_log(self) -> bool:
-        return str2bool(env("LOG_ENABLE_WRITE", "false"))
+        return str2bool(env("LOG_TRACE_ENABLE_WRITE", "false"))
 
-    # NOTE: Audit Log
     @property
     def audit_path(self) -> Path:
-        return Path(env("AUDIT_PATH", "./audits"))
+        return Path(env("LOG_AUDIT_PATH", "./audits"))
 
     @property
     def enable_write_audit(self) -> bool:
-        return str2bool(env("AUDIT_ENABLE_WRITE", "false"))
+        return str2bool(env("LOG_AUDIT_ENABLE_WRITE", "false"))
 
     @property
     def log_datetime_format(self) -> str:
@@ -178,29 +181,12 @@ class Config(BaseConfig): # pragma: no cov
     def job_default_id(self) -> bool:
         return str2bool(env("CORE_JOB_DEFAULT_ID", "false"))
 
-    # NOTE: Workflow
-    @property
-    def max_job_parallel(self) -> int:
-        max_job_parallel = int(env("CORE_MAX_JOB_PARALLEL", "2"))
-
-        # VALIDATE: the MAX_JOB_PARALLEL value should not less than 0.
-        if max_job_parallel < 0:
-            raise ValueError(
-                f"``WORKFLOW_MAX_JOB_PARALLEL`` should more than 0 but got "
-                f"{max_job_parallel}."
-            )
-        return max_job_parallel
-
-    @property
-    def max_job_exec_timeout(self) -> int:
-        return int(env("CORE_MAX_JOB_EXEC_TIMEOUT", "600"))
-
-    @property
-    def max_poking_pool_worker(self) -> int:
-        return int(env("CORE_MAX_NUM_POKING", "4"))
-
     @property
     def max_on_per_workflow(self) -> int:
+        """The maximum on value that store in workflow model.
+
+        :rtype: int
+        """
         return int(env("CORE_MAX_CRON_PER_WORKFLOW", "5"))
 
     @property
@@ -229,7 +215,9 @@ class Config(BaseConfig): # pragma: no cov
                 f"timedelta with {stop_boundary_delta_str}."
             ) from err
 
-    # NOTE: API
+
+class APIConfig:
+
     @property
     def prefix_path(self) -> str:
         return env("API_PREFIX_PATH", "/api/v1")
@@ -389,6 +377,29 @@ class Loader(SimLoad):
 
 
 config: Config = Config()
+api_config: APIConfig = APIConfig()
+
+
+def dynamic(
+    key: Optional[str] = None,
+    *,
+    f: Optional[T] = None,
+    extras: Optional[DictData] = None,
+) -> Optional[T]:
+    """Dynamic get config if extra value was passed at run-time.
+
+    :param key: (str) A config key that get from Config object.
+    :param f: An inner config function scope.
+    :param extras: An extra values that pass at run-time.
+    """
+    rsx: Optional[T] = extras[key] if extras and key in extras else None
+    rs: Optional[T] = f or getattr(config, key, None)
+    if rsx is not None and not isinstance(rsx, type(rs)):
+        raise TypeError(
+            f"Type of config {key!r} from extras: {rsx!r} does not valid "
+            f"as config {type(rs)}."
+        )
+    return rsx or rs
 
 
 @lru_cache
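Note: `dynamic()` is the backbone of the run-time override pattern that job.py adopts below: a value in `extras` wins over the explicit `f` argument, which wins over the global `Config` attribute, and a type mismatch against the config default raises `TypeError`. A minimal usage sketch (observe that, as written, the `rsx or rs` return means a falsy override falls back to the config value):

```python
from ddeutil.workflow.conf import config, dynamic

# No overrides: falls through to the Config attribute.
assert dynamic("job_raise_error") == config.job_raise_error

# A run-time extras mapping takes precedence over the config value.
assert dynamic("job_raise_error", extras={"job_raise_error": True}) is True

# A wrong-typed override is rejected against the config default's type.
# dynamic("job_raise_error", extras={"job_raise_error": "yes"})  # -> TypeError
```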
{ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/cron.py

@@ -72,17 +72,16 @@ class On(BaseModel):
 
     model_config = ConfigDict(arbitrary_types_allowed=True)
 
-    # NOTE: This is fields of the base schedule.
     extras: Annotated[
         DictData,
         Field(
             default_factory=dict,
-            description="An extras mapping parameters",
+            description="An extras mapping parameters.",
         ),
     ]
     cronjob: Annotated[
         CronJob,
-        Field(description="Cron job of this schedule"),
+        Field(description="A Cronjob object of this schedule."),
     ]
     tz: Annotated[
         str,
@@ -93,12 +92,12 @@ class On(BaseModel):
     ] = "Etc/UTC"
 
     @classmethod
-    def from_value(cls, value: DictStr, externals: DictData) -> Self:
+    def from_value(cls, value: DictStr, extras: DictData) -> Self:
         """Constructor from values that will generate crontab by function.
 
         :param value: A mapping value that will generate crontab before create
             schedule model.
-        :param externals: An extras external parameter that will keep in extras.
+        :param extras: An extras parameter that will keep in extras.
         """
         passing: DictStr = {}
         if "timezone" in value:
@@ -106,22 +105,22 @@ class On(BaseModel):
             passing["cronjob"] = interval2crontab(
                 **{v: value[v] for v in value if v in ("interval", "day", "time")}
             )
-        return cls(extras=externals | passing.pop("extras", {}), **passing)
+        return cls(extras=extras | passing.pop("extras", {}), **passing)
 
     @classmethod
-    def from_loader(
+    def from_conf(
         cls,
         name: str,
-        externals: DictData | None = None,
+        extras: DictData | None = None,
     ) -> Self:
         """Constructor from the name of config that will use loader object for
         getting the data.
 
         :param name: A name of config that will get from loader.
-        :param externals: An extras external parameter that will keep in extras.
+        :param extras: An extra parameter that will keep in extras.
         """
-        externals: DictData = externals or {}
-        loader: Loader = Loader(name, externals=externals)
+        extras: DictData = extras or {}
+        loader: Loader = Loader(name, externals=extras)
 
         # NOTE: Validate the config type match with current connection model
         if loader.type != cls.__name__:
@@ -138,7 +137,7 @@ class On(BaseModel):
                             if v in ("interval", "day", "time")
                         }
                     ),
-                    extras=externals | loader_data.pop("extras", {}),
+                    extras=extras | loader_data.pop("extras", {}),
                     **loader_data,
                 )
             )
@@ -149,7 +148,7 @@ class On(BaseModel):
         return cls.model_validate(
             obj=dict(
                 cronjob=loader_data.pop("cronjob"),
-                extras=externals | loader_data.pop("extras", {}),
+                extras=extras | loader_data.pop("extras", {}),
                 **loader_data,
             )
         )
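Note: the `from_loader` → `from_conf` rename (with `externals` → `extras`) applies across `On`, `Schedule`, and `Workflow` in this release, as the API route hunks above show. A minimal sketch of the new constructor shape, assuming an `On` config named `every_5_minute_bkk` exists under `CONF_PATH` (the name is hypothetical):

```python
from ddeutil.workflow.cron import On

# 0.0.41: On.from_loader("every_5_minute_bkk", externals={})
# 0.0.42: the keyword is now `extras` and is merged into the model's extras.
schedule_on: On = On.from_conf("every_5_minute_bkk", extras={})
```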
{ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/job.py

@@ -3,6 +3,7 @@
 # Licensed under the MIT License. See LICENSE in the project root for
 # license information.
 # ------------------------------------------------------------------------------
+# [x] Use dynamic config
 """Job Model that use for keeping stages and node that running its stages.
 The job handle the lineage of stages and location of execution of stages that
 mean the job model able to define `runs-on` key that allow you to run this
@@ -32,7 +33,7 @@ from pydantic.functional_validators import field_validator, model_validator
 from typing_extensions import Self
 
 from .__types import DictData, DictStr, Matrix, TupleStr
-from .conf import config
+from .conf import dynamic
 from .exceptions import (
     JobException,
     StageException,
@@ -346,7 +347,7 @@ class Job(BaseModel):
     )
     extras: DictData = Field(
         default_factory=dict,
-        description="An extra override values.",
+        description="An extra override config values.",
     )
 
     @field_validator("desc", mode="after")
@@ -404,11 +405,14 @@ class Job(BaseModel):
             raise ValueError(f"Stage ID {stage_id} does not exists")
 
     def check_needs(
-        self, jobs: dict[str, Any]
+        self,
+        jobs: dict[str, Any],
     ) -> TriggerState: # pragma: no cov
         """Return True if job's need exists in an input list of job's ID.
 
-        :param jobs: A mapping of job model and its ID.
+        :param jobs: A mapping of job ID and result context.
+
+        :raise NotImplementedError: If the job trigger rule out of scope.
 
         :rtype: TriggerState
         """
@@ -477,7 +481,9 @@ class Job(BaseModel):
             # should use the `re` module to validate eval-string before
             # running.
             rs: bool = eval(
-                param2template(self.condition, params), globals() | params, {}
+                param2template(self.condition, params, extras=self.extras),
+                globals() | params,
+                {},
             )
             if not isinstance(rs, bool):
                 raise TypeError("Return type of condition does not be boolean")
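Note: the change above threads `extras` into the template rendering; the condition itself is still rendered to a plain Python expression and `eval()`-ed. A minimal sketch of that two-step flow (the `${{ ... }}` template syntax follows the project README; the condition string and its rendered form are illustrative):

```python
# Step 1: a condition such as "${{ params.env }} == 'prod'" is rendered by
# param2template into a plain expression string.
params = {"env": "prod"}
rendered = "'prod' == 'prod'"  # illustrative output of param2template

# Step 2: the rendered string is evaluated with params in scope; a non-bool
# result raises TypeError, mirroring the isinstance check above.
rs = eval(rendered, globals() | params, {})
assert isinstance(rs, bool) and rs
```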
@@ -519,7 +525,9 @@ class Job(BaseModel):
         if "jobs" not in to:
             to["jobs"] = {}
 
-        if self.id is None and not config.job_default_id:
+        if self.id is None and not dynamic(
+            "job_default_id", extras=self.extras
+        ):
             raise JobException(
                 "This job do not set the ID before setting execution output."
             )
@@ -706,8 +714,8 @@ def local_execute_strategy(
         )
     except (StageException, UtilException) as err:
         result.trace.error(f"[JOB]: {err.__class__.__name__}: {err}")
-        do_raise: bool = (
-            config.job_raise_error if raise_error is None else raise_error
+        do_raise: bool = dynamic(
+            "job_raise_error", f=raise_error, extras=job.extras
         )
         if do_raise:
             raise JobException(
@@ -905,8 +913,8 @@ def self_hosted_execute(
     )
 
     if resp.status_code != 200:
-        do_raise: bool = (
-            config.job_raise_error if raise_error is None else raise_error
+        do_raise: bool = dynamic(
+            "job_raise_error", f=raise_error, extras=job.extras
        )
         if do_raise:
             raise JobException(
{ddeutil_workflow-0.0.41 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/logs.py

@@ -3,6 +3,7 @@
 # Licensed under the MIT License. See LICENSE in the project root for
 # license information.
 # ------------------------------------------------------------------------------
+# [x] Use fix config
 """A Logs module contain TraceLog dataclass and AuditLog model.
 """
 from __future__ import annotations
@@ -160,7 +161,6 @@ class BaseTraceLog(ABC): # pragma: no cov
         """
         msg: str = self.make_message(message)
 
-        # NOTE: Write file if debug mode was enabled.
         if config.debug:
             self.writer(msg)
 
@@ -206,6 +206,32 @@ class BaseTraceLog(ABC): # pragma: no cov
         self.writer(msg, is_err=True)
         logger.exception(msg, stacklevel=2)
 
+    async def adebug(self, message: str) -> None: # pragma: no cov
+        msg: str = self.make_message(message)
+        if config.debug:
+            await self.awriter(msg)
+        logger.info(msg, stacklevel=2)
+
+    async def ainfo(self, message: str) -> None: # pragma: no cov
+        msg: str = self.make_message(message)
+        await self.awriter(msg)
+        logger.info(msg, stacklevel=2)
+
+    async def awarning(self, message: str) -> None: # pragma: no cov
+        msg: str = self.make_message(message)
+        await self.awriter(msg)
+        logger.warning(msg, stacklevel=2)
+
+    async def aerror(self, message: str) -> None: # pragma: no cov
+        msg: str = self.make_message(message)
+        await self.awriter(msg, is_err=True)
+        logger.error(msg, stacklevel=2)
+
+    async def aexception(self, message: str) -> None: # pragma: no cov
+        msg: str = self.make_message(message)
+        await self.awriter(msg, is_err=True)
+        logger.exception(msg, stacklevel=2)
+
 
 class FileTraceLog(BaseTraceLog): # pragma: no cov
     """Trace Log object that write file to the local storage."""
@@ -296,11 +322,13 @@ class FileTraceLog(BaseTraceLog): # pragma: no cov
     async def awriter(
         self, message: str, is_err: bool = False
     ) -> None: # pragma: no cov
-        """TODO: Use `aiofiles` for make writer method support async."""
         if not config.enable_write_log:
             return
 
-        import aiofiles
+        try:
+            import aiofiles
+        except ImportError as e:
+            raise ImportError("Async mode need aiofiles package") from e
 
         write_file: str = "stderr" if is_err else "stdout"
         trace_meta: TraceMeda = TraceMeda.make(mode=write_file, message=message)
@@ -468,9 +496,8 @@ class FileAudit(BaseAudit):
                 f"release={release:%Y%m%d%H%M%S} does not found."
             )
 
-        with max(pointer.glob("./*.log"), key=os.path.getctime).open(
-            mode="r", encoding="utf-8"
-        ) as f:
+        latest_file: Path = max(pointer.glob("./*.log"), key=os.path.getctime)
+        with latest_file.open(mode="r", encoding="utf-8") as f:
             return cls.model_validate(obj=json.load(f))
 
     @classmethod