ddeutil-workflow 0.0.40.tar.gz → 0.0.42.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. {ddeutil_workflow-0.0.40/src/ddeutil_workflow.egg-info → ddeutil_workflow-0.0.42}/PKG-INFO +12 -9
  2. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/README.md +5 -8
  3. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/pyproject.toml +8 -0
  4. ddeutil_workflow-0.0.42/src/ddeutil/workflow/__about__.py +1 -0
  5. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/__init__.py +21 -25
  6. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/api/api.py +8 -8
  7. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/api/routes/logs.py +1 -2
  8. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/api/routes/schedules.py +5 -5
  9. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/api/routes/workflows.py +3 -3
  10. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/conf.py +50 -29
  11. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/cron.py +12 -13
  12. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/job.py +68 -9
  13. ddeutil_workflow-0.0.42/src/ddeutil/workflow/logs.py +620 -0
  14. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/params.py +1 -0
  15. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/result.py +6 -15
  16. ddeutil_workflow-0.0.40/src/ddeutil/workflow/templates.py → ddeutil_workflow-0.0.42/src/ddeutil/workflow/reusables.py +199 -10
  17. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/scheduler.py +27 -29
  18. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/stages.py +423 -64
  19. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/utils.py +10 -0
  20. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/workflow.py +119 -74
  21. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42/src/ddeutil_workflow.egg-info}/PKG-INFO +12 -9
  22. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/src/ddeutil_workflow.egg-info/SOURCES.txt +6 -10
  23. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/src/ddeutil_workflow.egg-info/requires.txt +7 -0
  24. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/tests/test_conf.py +18 -4
  25. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/tests/test_cron_on.py +20 -20
  26. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/tests/test_job_exec.py +11 -23
  27. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/tests/test_job_exec_strategy.py +6 -18
  28. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/tests/test_job_strategy.py +1 -3
  29. ddeutil_workflow-0.0.40/tests/test_audit.py → ddeutil_workflow-0.0.42/tests/test_logs_audit.py +1 -1
  30. ddeutil_workflow-0.0.40/tests/test_call_tag.py → ddeutil_workflow-0.0.42/tests/test_reusables_call_tag.py +10 -3
  31. ddeutil_workflow-0.0.40/tests/test_templates.py → ddeutil_workflow-0.0.42/tests/test_reusables_template.py +1 -1
  32. ddeutil_workflow-0.0.42/tests/test_reusables_template_filter.py +108 -0
  33. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/tests/test_schedule.py +8 -8
  34. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/tests/test_schedule_pending.py +1 -1
  35. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/tests/test_schedule_tasks.py +3 -3
  36. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/tests/test_stage.py +16 -1
  37. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/tests/test_stage_handler_exec.py +30 -15
  38. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/tests/test_workflow.py +29 -9
  39. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/tests/test_workflow_exec.py +105 -83
  40. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/tests/test_workflow_exec_poke.py +5 -5
  41. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/tests/test_workflow_exec_release.py +4 -4
  42. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/tests/test_workflow_task.py +8 -10
  43. ddeutil_workflow-0.0.40/src/ddeutil/workflow/__about__.py +0 -1
  44. ddeutil_workflow-0.0.40/src/ddeutil/workflow/audit.py +0 -257
  45. ddeutil_workflow-0.0.40/src/ddeutil/workflow/caller.py +0 -179
  46. ddeutil_workflow-0.0.40/src/ddeutil/workflow/context.py +0 -61
  47. ddeutil_workflow-0.0.40/src/ddeutil/workflow/logs.py +0 -331
  48. ddeutil_workflow-0.0.40/tests/test_context.py +0 -136
  49. ddeutil_workflow-0.0.40/tests/test_templates_filter.py +0 -102
  50. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/LICENSE +0 -0
  51. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/setup.cfg +0 -0
  52. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/__cron.py +0 -0
  53. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/__types.py +0 -0
  54. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/api/__init__.py +0 -0
  55. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/api/log.py +0 -0
  56. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/api/repeat.py +0 -0
  57. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/api/routes/__init__.py +0 -0
  58. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/api/routes/job.py +0 -0
  59. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/src/ddeutil/workflow/exceptions.py +0 -0
  60. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
  61. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
  62. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/tests/test__cron.py +0 -0
  63. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/tests/test__regex.py +0 -0
  64. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/tests/test_job.py +0 -0
  65. /ddeutil_workflow-0.0.40/tests/test_logs.py → /ddeutil_workflow-0.0.42/tests/test_logs_trace.py +0 -0
  66. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/tests/test_params.py +0 -0
  67. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/tests/test_release.py +0 -0
  68. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/tests/test_release_queue.py +0 -0
  69. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/tests/test_result.py +0 -0
  70. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/tests/test_schedule_workflow.py +0 -0
  71. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/tests/test_scheduler_control.py +0 -0
  72. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/tests/test_utils.py +0 -0
  73. {ddeutil_workflow-0.0.40 → ddeutil_workflow-0.0.42}/tests/test_workflow_exec_job.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ddeutil-workflow
- Version: 0.0.40
+ Version: 0.0.42
  Summary: Lightweight workflow orchestration
  Author-email: ddeutils <korawich.anu@gmail.com>
  License: MIT
@@ -27,6 +27,12 @@ Requires-Dist: ddeutil-io[toml,yaml]>=0.2.10
  Requires-Dist: pydantic==2.11.1
  Requires-Dist: python-dotenv==1.1.0
  Requires-Dist: schedule<2.0.0,==1.2.2
+ Provides-Extra: all
+ Requires-Dist: fastapi<1.0.0,>=0.115.0; extra == "all"
+ Requires-Dist: httpx; extra == "all"
+ Requires-Dist: ujson; extra == "all"
+ Requires-Dist: aiofiles; extra == "all"
+ Requires-Dist: aiohttp; extra == "all"
  Provides-Extra: api
  Requires-Dist: fastapi<1.0.0,>=0.115.0; extra == "api"
  Requires-Dist: httpx; extra == "api"
@@ -258,7 +264,7 @@ it will use default value and do not raise any error to you.
  | Name | Component | Default | Description |
  |:-----------------------------|:---------:|:--------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------|
  | **ROOT_PATH** | Core | `.` | The root path of the workflow application. |
- | **REGISTRY** | Core | `.` | List of importable string for the call stage. |
+ | **REGISTRY_CALLER** | Core | `.` | List of importable string for the call stage. |
  | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | List of importable string for the filter template. |
  | **CONF_PATH** | Core | `conf` | The config path that keep all template `.yaml` files. |
  | **TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
@@ -266,20 +272,17 @@ it will use default value and do not raise any error to you.
  | **STAGE_RAISE_ERROR** | Core | `false` | A flag that all stage raise StageException from stage execution. |
  | **JOB_DEFAULT_ID** | Core | `false` | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
  | **JOB_RAISE_ERROR** | Core | `true` | A flag that all job raise JobException from job strategy execution. |
- | **MAX_NUM_POKING** | Core | `4` | . |
- | **MAX_JOB_PARALLEL** | Core | `2` | The maximum job number that able to run parallel in workflow executor. |
- | **MAX_JOB_EXEC_TIMEOUT** | Core | `600` | |
  | **MAX_CRON_PER_WORKFLOW** | Core | `5` | |
  | **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | |
  | **GENERATE_ID_SIMPLE_MODE** | Core | `true` | A flog that enable generating ID with `md5` algorithm. |
- | **PATH** | Log | `./logs` | The log path of the workflow saving log. |
+ | **TRACE_PATH** | Log | `./logs` | The log path of the workflow saving log. |
  | **DEBUG_MODE** | Log | `true` | A flag that enable logging with debug level mode. |
  | **FORMAT** | Log | `%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d,%(thread)-5d) [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)` | |
  | **FORMAT_FILE** | Log | `{datetime} ({process:5d}, {thread:5d}) {message:120s} ({filename}:{lineno})` | |
  | **DATETIME_FORMAT** | Log | `%Y-%m-%d %H:%M:%S` | |
- | **ENABLE_WRITE** | Log | `false` | |
- | **PATH** | Audit | `./audits` | |
- | **ENABLE_WRITE** | Audit | `true` | A flag that enable logging object saving log to its destination. |
+ | **TRACE_ENABLE_WRITE** | Log | `false` | |
+ | **AUDIT_PATH** | Log | `./audits` | |
+ | **AUDIT_ENABLE_WRITE** | Log | `true` | A flag that enable logging object saving log to its destination. |
  | **MAX_PROCESS** | App | `2` | The maximum process worker number that run in scheduler app module. |
  | **MAX_SCHEDULE_PER_PROCESS** | App | `100` | A schedule per process that run parallel. |
  | **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | A time delta value that use to stop scheduler app in json string format. |
@@ -220,7 +220,7 @@ it will use default value and do not raise any error to you.
  | Name | Component | Default | Description |
  |:-----------------------------|:---------:|:--------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------|
  | **ROOT_PATH** | Core | `.` | The root path of the workflow application. |
- | **REGISTRY** | Core | `.` | List of importable string for the call stage. |
+ | **REGISTRY_CALLER** | Core | `.` | List of importable string for the call stage. |
  | **REGISTRY_FILTER** | Core | `ddeutil.workflow.templates` | List of importable string for the filter template. |
  | **CONF_PATH** | Core | `conf` | The config path that keep all template `.yaml` files. |
  | **TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. |
@@ -228,20 +228,17 @@ it will use default value and do not raise any error to you.
  | **STAGE_RAISE_ERROR** | Core | `false` | A flag that all stage raise StageException from stage execution. |
  | **JOB_DEFAULT_ID** | Core | `false` | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. |
  | **JOB_RAISE_ERROR** | Core | `true` | A flag that all job raise JobException from job strategy execution. |
- | **MAX_NUM_POKING** | Core | `4` | . |
- | **MAX_JOB_PARALLEL** | Core | `2` | The maximum job number that able to run parallel in workflow executor. |
- | **MAX_JOB_EXEC_TIMEOUT** | Core | `600` | |
  | **MAX_CRON_PER_WORKFLOW** | Core | `5` | |
  | **MAX_QUEUE_COMPLETE_HIST** | Core | `16` | |
  | **GENERATE_ID_SIMPLE_MODE** | Core | `true` | A flog that enable generating ID with `md5` algorithm. |
- | **PATH** | Log | `./logs` | The log path of the workflow saving log. |
+ | **TRACE_PATH** | Log | `./logs` | The log path of the workflow saving log. |
  | **DEBUG_MODE** | Log | `true` | A flag that enable logging with debug level mode. |
  | **FORMAT** | Log | `%(asctime)s.%(msecs)03d (%(name)-10s, %(process)-5d,%(thread)-5d) [%(levelname)-7s] %(message)-120s (%(filename)s:%(lineno)s)` | |
  | **FORMAT_FILE** | Log | `{datetime} ({process:5d}, {thread:5d}) {message:120s} ({filename}:{lineno})` | |
  | **DATETIME_FORMAT** | Log | `%Y-%m-%d %H:%M:%S` | |
- | **ENABLE_WRITE** | Log | `false` | |
- | **PATH** | Audit | `./audits` | |
- | **ENABLE_WRITE** | Audit | `true` | A flag that enable logging object saving log to its destination. |
+ | **TRACE_ENABLE_WRITE** | Log | `false` | |
+ | **AUDIT_PATH** | Log | `./audits` | |
+ | **AUDIT_ENABLE_WRITE** | Log | `true` | A flag that enable logging object saving log to its destination. |
  | **MAX_PROCESS** | App | `2` | The maximum process worker number that run in scheduler app module. |
  | **MAX_SCHEDULE_PER_PROCESS** | App | `100` | A schedule per process that run parallel. |
  | **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | A time delta value that use to stop scheduler app in json string format. |
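
The table rows above (changed identically in PKG-INFO and README.md) rename several settings; they are read through `WORKFLOW_`-prefixed environment variables, as the `conf.py` hunk later in this diff shows. A minimal sketch of overriding the renamed settings before importing the package (the values are illustrative, not defaults):

```python
import os

# New 0.0.42 names (0.0.40 used CORE_REGISTRY, LOG_PATH, AUDIT_PATH, AUDIT_ENABLE_WRITE).
os.environ["WORKFLOW_CORE_REGISTRY_CALLER"] = "tasks"     # caller registry module(s)
os.environ["WORKFLOW_LOG_TRACE_PATH"] = "./logs"          # trace log destination
os.environ["WORKFLOW_LOG_AUDIT_PATH"] = "./audits"        # audit log destination
os.environ["WORKFLOW_LOG_AUDIT_ENABLE_WRITE"] = "true"    # toggle audit writing

from ddeutil.workflow.conf import config  # noqa: E402

print(config.regis_call, config.audit_path, config.enable_write_audit)
```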
@@ -35,6 +35,13 @@ dependencies = [
  dynamic = ["version"]
 
  [project.optional-dependencies]
+ all = [
+ "fastapi>=0.115.0,<1.0.0",
+ "httpx",
+ "ujson",
+ "aiofiles",
+ "aiohttp",
+ ]
  api = [
  "fastapi>=0.115.0,<1.0.0",
  "httpx",
@@ -97,6 +104,7 @@ markers = [
  "poke: marks tests as slow by poking (deselect with '-m \"not poke\"')",
  "schedule: marks tests as schedule (deselect with '-m \"not schedule\"')",
  "api: marks tests as api (deselect with '-m \"not api\"')",
+ "asyncio: marks async testcases",
  ]
  console_output_style = "count"
  addopts = [
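
Both the PKG-INFO and `pyproject.toml` hunks above add an `all` extra that bundles the API-related dependencies (fastapi, httpx, ujson, aiofiles, aiohttp), so something like `pip install "ddeutil-workflow[all]"` should pull them in alongside the core package; the existing `api` extra remains alongside it.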
@@ -0,0 +1 @@
+ __version__: str = "0.0.42"
@@ -5,17 +5,6 @@
  # ------------------------------------------------------------------------------
  from .__cron import CronJob, CronRunner
  from .__types import Re
- from .audit import (
- Audit,
- get_audit,
- )
- from .caller import (
- ReturnTagFunc,
- TagFunc,
- extract_call,
- make_registry,
- tag,
- )
  from .conf import (
  Config,
  Loader,
@@ -43,8 +32,10 @@ from .job import (
  local_execute_strategy,
  )
  from .logs import (
+ Audit,
  TraceData,
  TraceLog,
+ get_audit,
  get_dt_tznow,
  get_trace,
  )
@@ -58,7 +49,24 @@ from .params import (
  from .result import (
  Result,
  Status,
- default_gen_id,
+ )
+ from .reusables import (
+ FILTERS,
+ FilterFunc,
+ FilterRegistry,
+ ReturnTagFunc,
+ TagFunc,
+ custom_filter,
+ extract_call,
+ get_args_const,
+ has_template,
+ make_filter_registry,
+ make_registry,
+ map_post_filter,
+ not_in_template,
+ param2template,
+ str2template,
+ tag,
  )
  from .scheduler import (
  Schedule,
@@ -77,22 +85,10 @@ from .stages import (
  Stage,
  TriggerStage,
  )
- from .templates import (
- FILTERS,
- FilterFunc,
- FilterRegistry,
- custom_filter,
- get_args_const,
- has_template,
- make_filter_registry,
- map_post_filter,
- not_in_template,
- param2template,
- str2template,
- )
  from .utils import (
  batch,
  cross_product,
+ default_gen_id,
  delay,
  filter_func,
  gen_id,
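
Taken together, these import hunks show that 0.0.40's `audit.py`, `caller.py`, and `templates.py` modules are gone: the audit objects moved into `logs.py` and the tag/template helpers into the new `reusables.py`, while the names stay re-exported from the package root. A minimal sketch of the 0.0.42 import surface, using only names listed in the `__init__.py` hunk above:

```python
# Top-level re-exports survive the module shuffle in 0.0.42.
from ddeutil.workflow import Audit, get_audit              # was audit.py, now logs.py
from ddeutil.workflow import tag, make_registry            # was caller.py, now reusables.py
from ddeutil.workflow import param2template, has_template  # was templates.py, now reusables.py

# The new module paths can also be imported directly.
from ddeutil.workflow.logs import get_trace
from ddeutil.workflow.reusables import extract_call
```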
@@ -20,7 +20,7 @@ from fastapi.middleware.gzip import GZipMiddleware
  from fastapi.responses import UJSONResponse
 
  from ..__about__ import __version__
- from ..conf import config, get_logger
+ from ..conf import api_config, config, get_logger
  from ..scheduler import ReleaseThread, ReleaseThreads
  from ..workflow import ReleaseQueue, WorkflowTask
  from .repeat import repeat_at
@@ -96,24 +96,24 @@ async def health():
 
 
  # NOTE Add the jobs and logs routes by default.
- app.include_router(job, prefix=config.prefix_path)
- app.include_router(log, prefix=config.prefix_path)
+ app.include_router(job, prefix=api_config.prefix_path)
+ app.include_router(log, prefix=api_config.prefix_path)
 
 
  # NOTE: Enable the workflows route.
- if config.enable_route_workflow:
+ if api_config.enable_route_workflow:
  from .routes import workflow
 
- app.include_router(workflow, prefix=config.prefix_path)
+ app.include_router(workflow, prefix=api_config.prefix_path)
 
 
  # NOTE: Enable the schedules route.
- if config.enable_route_schedule:
- from ..audit import get_audit
+ if api_config.enable_route_schedule:
+ from ..logs import get_audit
  from ..scheduler import schedule_task
  from .routes import schedule
 
- app.include_router(schedule, prefix=config.prefix_path)
+ app.include_router(schedule, prefix=api_config.prefix_path)
 
  @schedule.on_event("startup")
  @repeat_at(cron="* * * * *", delay=2)
@@ -10,8 +10,7 @@ from fastapi import APIRouter, Path, Query
  from fastapi import status as st
  from fastapi.responses import UJSONResponse
 
- from ...audit import get_audit
- from ...logs import get_trace_obj
+ from ...logs import get_audit, get_trace_obj
 
  log_route = APIRouter(
  prefix="/logs",
@@ -28,7 +28,7 @@ schedule_route = APIRouter(
  async def get_schedules(name: str):
  """Get schedule object."""
  try:
- schedule: Schedule = Schedule.from_loader(name=name, externals={})
+ schedule: Schedule = Schedule.from_conf(name=name, extras={})
  except ValueError:
  raise HTTPException(
  status_code=st.HTTP_404_NOT_FOUND,
@@ -51,7 +51,7 @@ async def get_deploy_schedulers(request: Request):
  @schedule_route.get(path="/deploy/{name}", status_code=st.HTTP_200_OK)
  async def get_deploy_scheduler(request: Request, name: str):
  if name in request.state.scheduler:
- schedule = Schedule.from_loader(name)
+ schedule = Schedule.from_conf(name)
  getter: list[dict[str, dict[str, list[datetime]]]] = []
  for workflow in schedule.workflows:
  getter.append(
@@ -94,7 +94,7 @@ async def add_deploy_scheduler(request: Request, name: str):
 
  # NOTE: Create a pair of workflow and on from schedule model.
  try:
- schedule: Schedule = Schedule.from_loader(name)
+ schedule: Schedule = Schedule.from_conf(name)
  except ValueError as err:
  request.state.scheduler.remove(name)
  logger.exception(err)
@@ -107,7 +107,7 @@ async def add_deploy_scheduler(request: Request, name: str):
  schedule.tasks(
  start_date_waiting,
  queue=request.state.workflow_queue,
- externals={},
+ extras={},
  ),
  )
  return {
@@ -124,7 +124,7 @@ async def del_deploy_scheduler(request: Request, name: str):
  # NOTE: Remove current schedule name from the state.
  request.state.scheduler.remove(name)
 
- schedule: Schedule = Schedule.from_loader(name)
+ schedule: Schedule = Schedule.from_conf(name)
 
  for task in schedule.tasks(datetime.now(tz=config.tz), queue={}):
  if task in request.state.workflow_tasks:
@@ -15,8 +15,8 @@ from fastapi.responses import UJSONResponse
  from pydantic import BaseModel
 
  from ...__types import DictData
- from ...audit import Audit, get_audit
  from ...conf import Loader, get_logger
+ from ...logs import Audit, get_audit
  from ...result import Result
  from ...workflow import Workflow
 
@@ -44,7 +44,7 @@ async def get_workflows() -> DictData:
  async def get_workflow_by_name(name: str) -> DictData:
  """Return model of workflow that passing an input workflow name."""
  try:
- workflow: Workflow = Workflow.from_loader(name=name, externals={})
+ workflow: Workflow = Workflow.from_conf(name=name, extras={})
  except ValueError as err:
  logger.exception(err)
  raise HTTPException(
@@ -69,7 +69,7 @@ class ExecutePayload(BaseModel):
  async def workflow_execute(name: str, payload: ExecutePayload) -> DictData:
  """Return model of workflow that passing an input workflow name."""
  try:
- workflow: Workflow = Workflow.from_loader(name=name, externals={})
+ workflow: Workflow = Workflow.from_conf(name=name, extras={})
  except ValueError:
  raise HTTPException(
  status_code=st.HTTP_404_NOT_FOUND,
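
The route changes above follow the model-level rename from `from_loader(..., externals=...)` to `from_conf(..., extras=...)`. A hedged sketch of the new call shape; the workflow name and params below are placeholders, and the `execute(params=...)` entrypoint is assumed from the package's usual API:

```python
from ddeutil.workflow import Workflow

# 0.0.40: Workflow.from_loader(name="wf-example", externals={})
# 0.0.42: the constructor is from_conf and the keyword is extras.
workflow = Workflow.from_conf(name="wf-example", extras={})
result = workflow.execute(params={"source": "demo"})  # illustrative params
```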
@@ -12,6 +12,7 @@ from collections.abc import Iterator
  from datetime import timedelta
  from functools import cached_property, lru_cache
  from pathlib import Path
+ from typing import Optional, TypeVar
  from zoneinfo import ZoneInfo
 
  from ddeutil.core import str2bool
@@ -20,20 +21,24 @@ from ddeutil.io.paths import glob_files, is_ignored, read_ignore
 
  from .__types import DictData, TupleStr
 
+ T = TypeVar("T")
  PREFIX: str = "WORKFLOW"
 
 
  def env(var: str, default: str | None = None) -> str | None: # pragma: no cov
+ """Get environment variable with uppercase and adding prefix string."""
  return os.getenv(f"{PREFIX}_{var.upper().replace(' ', '_')}", default)
 
 
  __all__: TupleStr = (
+ "api_config",
  "env",
  "get_logger",
  "Config",
  "SimLoad",
  "Loader",
  "config",
+ "dynamic",
  )
 
 
@@ -94,11 +99,19 @@ class Config(BaseConfig): # pragma: no cov
  # NOTE: Register
  @property
  def regis_call(self) -> list[str]:
- regis_call_str: str = env("CORE_REGISTRY", ".")
+ """Register Caller module importer str.
+
+ :rtype: list[str]
+ """
+ regis_call_str: str = env("CORE_REGISTRY_CALLER", ".")
  return [r.strip() for r in regis_call_str.split(",")]
 
  @property
  def regis_filter(self) -> list[str]:
+ """Register Filter module.
+
+ :rtype: list[str]
+ """
  regis_filter_str: str = env(
  "CORE_REGISTRY_FILTER", "ddeutil.workflow.templates"
  )
@@ -107,7 +120,7 @@ class Config(BaseConfig): # pragma: no cov
  # NOTE: Log
  @property
  def log_path(self) -> Path:
- return Path(env("LOG_PATH", "./logs"))
+ return Path(env("LOG_TRACE_PATH", "./logs"))
 
  @property
  def debug(self) -> bool:
@@ -136,16 +149,15 @@ class Config(BaseConfig): # pragma: no cov
 
  @property
  def enable_write_log(self) -> bool:
- return str2bool(env("LOG_ENABLE_WRITE", "false"))
+ return str2bool(env("LOG_TRACE_ENABLE_WRITE", "false"))
 
- # NOTE: Audit Log
  @property
  def audit_path(self) -> Path:
- return Path(env("AUDIT_PATH", "./audits"))
+ return Path(env("LOG_AUDIT_PATH", "./audits"))
 
  @property
  def enable_write_audit(self) -> bool:
- return str2bool(env("AUDIT_ENABLE_WRITE", "false"))
+ return str2bool(env("LOG_AUDIT_ENABLE_WRITE", "false"))
 
  @property
  def log_datetime_format(self) -> str:
@@ -169,29 +181,12 @@ class Config(BaseConfig): # pragma: no cov
  def job_default_id(self) -> bool:
  return str2bool(env("CORE_JOB_DEFAULT_ID", "false"))
 
- # NOTE: Workflow
- @property
- def max_job_parallel(self) -> int:
- max_job_parallel = int(env("CORE_MAX_JOB_PARALLEL", "2"))
-
- # VALIDATE: the MAX_JOB_PARALLEL value should not less than 0.
- if max_job_parallel < 0:
- raise ValueError(
- f"``WORKFLOW_MAX_JOB_PARALLEL`` should more than 0 but got "
- f"{max_job_parallel}."
- )
- return max_job_parallel
-
- @property
- def max_job_exec_timeout(self) -> int:
- return int(env("CORE_MAX_JOB_EXEC_TIMEOUT", "600"))
-
- @property
- def max_poking_pool_worker(self) -> int:
- return int(env("CORE_MAX_NUM_POKING", "4"))
-
  @property
  def max_on_per_workflow(self) -> int:
+ """The maximum on value that store in workflow model.
+
+ :rtype: int
+ """
  return int(env("CORE_MAX_CRON_PER_WORKFLOW", "5"))
 
  @property
@@ -220,7 +215,9 @@ class Config(BaseConfig): # pragma: no cov
  f"timedelta with {stop_boundary_delta_str}."
  ) from err
 
- # NOTE: API
+
+ class APIConfig:
+
  @property
  def prefix_path(self) -> str:
  return env("API_PREFIX_PATH", "/api/v1")
@@ -326,7 +323,8 @@ class SimLoad:
  def filter_yaml(cls, file: Path, name: str | None = None) -> DictData:
  if any(file.suffix.endswith(s) for s in (".yml", ".yaml")):
  values: DictData = YamlFlResolve(file).read()
- return values.get(name, {}) if name else values
+ if values is not None:
+ return values.get(name, {}) if name else values
  return {}
 
  @cached_property
@@ -379,6 +377,29 @@ class Loader(SimLoad):
 
 
  config: Config = Config()
+ api_config: APIConfig = APIConfig()
+
+
+ def dynamic(
+ key: Optional[str] = None,
+ *,
+ f: Optional[T] = None,
+ extras: Optional[DictData] = None,
+ ) -> Optional[T]:
+ """Dynamic get config if extra value was passed at run-time.
+
+ :param key: (str) A config key that get from Config object.
+ :param f: An inner config function scope.
+ :param extras: An extra values that pass at run-time.
+ """
+ rsx: Optional[T] = extras[key] if extras and key in extras else None
+ rs: Optional[T] = f or getattr(config, key, None)
+ if rsx is not None and not isinstance(rsx, type(rs)):
+ raise TypeError(
+ f"Type of config {key!r} from extras: {rsx!r} does not valid "
+ f"as config {type(rs)}."
+ )
+ return rsx or rs
 
 
  @lru_cache
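
The new `dynamic` helper defined above prefers a run-time override passed via `extras` over the static `Config` attribute, and type-checks the override against the configured value. A minimal usage sketch based on that code:

```python
from ddeutil.workflow.conf import config, dynamic

# With no extras, the helper falls back to the Config property.
assert dynamic("max_on_per_workflow") == config.max_on_per_workflow

# An extras mapping passed at run-time wins over the static config value.
assert dynamic("max_on_per_workflow", extras={"max_on_per_workflow": 10}) == 10

# A value of the wrong type raises TypeError (an int is expected here).
try:
    dynamic("max_on_per_workflow", extras={"max_on_per_workflow": "ten"})
except TypeError as err:
    print(err)
```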
@@ -72,17 +72,16 @@ class On(BaseModel):
 
  model_config = ConfigDict(arbitrary_types_allowed=True)
 
- # NOTE: This is fields of the base schedule.
  extras: Annotated[
  DictData,
  Field(
  default_factory=dict,
- description="An extras mapping parameters",
+ description="An extras mapping parameters.",
  ),
  ]
  cronjob: Annotated[
  CronJob,
- Field(description="Cron job of this schedule"),
+ Field(description="A Cronjob object of this schedule."),
  ]
  tz: Annotated[
  str,
@@ -93,12 +92,12 @@ class On(BaseModel):
  ] = "Etc/UTC"
 
  @classmethod
- def from_value(cls, value: DictStr, externals: DictData) -> Self:
+ def from_value(cls, value: DictStr, extras: DictData) -> Self:
  """Constructor from values that will generate crontab by function.
 
  :param value: A mapping value that will generate crontab before create
  schedule model.
- :param externals: An extras external parameter that will keep in extras.
+ :param extras: An extras parameter that will keep in extras.
  """
  passing: DictStr = {}
  if "timezone" in value:
@@ -106,22 +105,22 @@ class On(BaseModel):
  passing["cronjob"] = interval2crontab(
  **{v: value[v] for v in value if v in ("interval", "day", "time")}
  )
- return cls(extras=externals | passing.pop("extras", {}), **passing)
+ return cls(extras=extras | passing.pop("extras", {}), **passing)
 
  @classmethod
- def from_loader(
+ def from_conf(
  cls,
  name: str,
- externals: DictData | None = None,
+ extras: DictData | None = None,
  ) -> Self:
  """Constructor from the name of config that will use loader object for
  getting the data.
 
  :param name: A name of config that will get from loader.
- :param externals: An extras external parameter that will keep in extras.
+ :param extras: An extra parameter that will keep in extras.
  """
- externals: DictData = externals or {}
- loader: Loader = Loader(name, externals=externals)
+ extras: DictData = extras or {}
+ loader: Loader = Loader(name, externals=extras)
 
  # NOTE: Validate the config type match with current connection model
  if loader.type != cls.__name__:
@@ -138,7 +137,7 @@ class On(BaseModel):
  if v in ("interval", "day", "time")
  }
  ),
- extras=externals | loader_data.pop("extras", {}),
+ extras=extras | loader_data.pop("extras", {}),
  **loader_data,
  )
  )
@@ -149,7 +148,7 @@ class On(BaseModel):
  return cls.model_validate(
  obj=dict(
  cronjob=loader_data.pop("cronjob"),
- extras=externals | loader_data.pop("extras", {}),
+ extras=extras | loader_data.pop("extras", {}),
  **loader_data,
  )
  )
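
The same `from_loader`/`externals` to `from_conf`/`extras` rename applies to the `On` cron model shown above, with the extras mapping merged into the model's `extras` field. A hedged sketch; the config name below is a placeholder that must exist under the configured `conf` path:

```python
from ddeutil.workflow.cron import On

# 0.0.40: On.from_loader("every-5-minutes", externals={})
# 0.0.42: On.from_conf("every-5-minutes", extras={})
schedule_on = On.from_conf("every-5-minutes", extras={"owner": "data-team"})
print(schedule_on.extras)  # run-time extras merged with any extras from the config file
```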