ddeutil-workflow 0.0.63__tar.gz → 0.0.64__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70)
  1. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/PKG-INFO +4 -56
  2. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/README.md +3 -55
  3. ddeutil_workflow-0.0.64/src/ddeutil/workflow/__about__.py +1 -0
  4. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/src/ddeutil/workflow/__init__.py +0 -7
  5. ddeutil_workflow-0.0.64/src/ddeutil/workflow/api/__init__.py +91 -0
  6. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/src/ddeutil/workflow/api/routes/__init__.py +0 -1
  7. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/src/ddeutil/workflow/api/routes/job.py +0 -1
  8. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/src/ddeutil/workflow/api/routes/logs.py +0 -2
  9. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/src/ddeutil/workflow/api/routes/workflows.py +0 -3
  10. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/src/ddeutil/workflow/conf.py +6 -34
  11. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/src/ddeutil/workflow/exceptions.py +0 -3
  12. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/src/ddeutil/workflow/reusables.py +39 -6
  13. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/src/ddeutil/workflow/stages.py +30 -28
  14. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/src/ddeutil/workflow/utils.py +4 -2
  15. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/src/ddeutil_workflow.egg-info/PKG-INFO +4 -56
  16. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/src/ddeutil_workflow.egg-info/SOURCES.txt +2 -12
  17. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/tests/test_conf.py +13 -65
  18. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/tests/test_reusables_call_tag.py +0 -70
  19. ddeutil_workflow-0.0.64/tests/test_reusables_func_model.py +158 -0
  20. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/tests/test_stage_handler_exec.py +20 -3
  21. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/tests/test_workflow_exec.py +1 -1
  22. ddeutil_workflow-0.0.63/src/ddeutil/workflow/__about__.py +0 -1
  23. ddeutil_workflow-0.0.63/src/ddeutil/workflow/api/__init__.py +0 -170
  24. ddeutil_workflow-0.0.63/src/ddeutil/workflow/api/routes/schedules.py +0 -141
  25. ddeutil_workflow-0.0.63/src/ddeutil/workflow/api/utils.py +0 -174
  26. ddeutil_workflow-0.0.63/src/ddeutil/workflow/scheduler.py +0 -813
  27. ddeutil_workflow-0.0.63/tests/test_schedule.py +0 -173
  28. ddeutil_workflow-0.0.63/tests/test_schedule_pending.py +0 -13
  29. ddeutil_workflow-0.0.63/tests/test_schedule_tasks.py +0 -82
  30. ddeutil_workflow-0.0.63/tests/test_schedule_workflow.py +0 -124
  31. ddeutil_workflow-0.0.63/tests/test_scheduler_control.py +0 -49
  32. ddeutil_workflow-0.0.63/tests/test_workflow_poke.py +0 -168
  33. ddeutil_workflow-0.0.63/tests/test_workflow_release.py +0 -153
  34. ddeutil_workflow-0.0.63/tests/test_workflow_task.py +0 -223
  35. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/LICENSE +0 -0
  36. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/pyproject.toml +0 -0
  37. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/setup.cfg +0 -0
  38. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/src/ddeutil/workflow/__cron.py +0 -0
  39. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/src/ddeutil/workflow/__main__.py +0 -0
  40. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/src/ddeutil/workflow/__types.py +0 -0
  41. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/src/ddeutil/workflow/api/logs.py +0 -0
  42. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/src/ddeutil/workflow/event.py +0 -0
  43. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/src/ddeutil/workflow/job.py +0 -0
  44. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/src/ddeutil/workflow/logs.py +0 -0
  45. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/src/ddeutil/workflow/params.py +0 -0
  46. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/src/ddeutil/workflow/result.py +0 -0
  47. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/src/ddeutil/workflow/workflow.py +0 -0
  48. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
  49. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/src/ddeutil_workflow.egg-info/entry_points.txt +0 -0
  50. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/src/ddeutil_workflow.egg-info/requires.txt +0 -0
  51. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
  52. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/tests/test__cron.py +0 -0
  53. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/tests/test__regex.py +0 -0
  54. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/tests/test_event.py +0 -0
  55. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/tests/test_job.py +0 -0
  56. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/tests/test_job_exec.py +0 -0
  57. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/tests/test_job_exec_strategy.py +0 -0
  58. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/tests/test_logs_audit.py +0 -0
  59. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/tests/test_logs_trace.py +0 -0
  60. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/tests/test_params.py +0 -0
  61. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/tests/test_release.py +0 -0
  62. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/tests/test_release_queue.py +0 -0
  63. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/tests/test_result.py +0 -0
  64. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/tests/test_reusables_template.py +0 -0
  65. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/tests/test_reusables_template_filter.py +0 -0
  66. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/tests/test_stage.py +0 -0
  67. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/tests/test_strategy.py +0 -0
  68. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/tests/test_utils.py +0 -0
  69. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/tests/test_workflow.py +0 -0
  70. {ddeutil_workflow-0.0.63 → ddeutil_workflow-0.0.64}/tests/test_workflow_exec_job.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ddeutil-workflow
- Version: 0.0.63
+ Version: 0.0.64
  Summary: Lightweight workflow orchestration
  Author-email: ddeutils <korawich.anu@gmail.com>
  License: MIT
@@ -221,18 +221,18 @@ value (This config can override by extra parameters with `registry_caller` key).
  > engine will auto use the `model_validate` method before run your caller function.

  ```python
- from ddeutil.workflow import Result, WorkflowSecret, tag
+ from ddeutil.workflow import Result, CallerSecret, tag
  from ddeutil.workflow.exceptions import StageException
  from pydantic import BaseModel

  class AwsCredential(BaseModel):
      path: str
      access_client_id: str
-     access_client_secret: WorkflowSecret
+     access_client_secret: CallerSecret

  class RestAuth(BaseModel):
      type: str
-     keys: WorkflowSecret
+     keys: CallerSecret

  @tag("requests", alias="get-api-with-oauth-to-s3")
  def get_api_with_oauth_to_s3(
@@ -265,45 +265,6 @@ result: Result = workflow.execute(
  )
  ```

- > [!NOTE]
- > So, this package provide the `Schedule` template for this action, and you can
- > pass the parameters dynamically for changing align with that running time by
- > the `release` prefix.
- >
- > ```yaml
- > schedule-run-local-wf:
- >
- >   # Validate model that use to parsing exists for template file
- >   type: Schedule
- >   workflows:
- >
- >     # Map existing workflow that want to deploy with scheduler application.
- >     # It allows you to pass release parameter that dynamic change depend on the
- >     # current context of this scheduler application releasing that time.
- >     - name: run-py-local
- >       params:
- >         source-extract: "USD-THB"
- >         run-date: "${{ release.logical_date }}"
- > ```
- >
- > The main method of the `Schedule` model that use to running is `pending`. If you
- > do not pass the `stop` date on this method, it will use config with
- > `WORKFLOW_APP_STOP_BOUNDARY_DELTA` key for generate this stop date.
- >
- > ```python
- > from ddeutil.workflow import Schedule
- >
- > (
- >     Schedule
- >     .from_conf("schedule-run-local-wf")
- >     .pending(stop=None)
- > )
- > ```
-
- > [!WARNING]
- > The scheduler feature is the expensive feature of this project. You should
- > avoid to use it and find a scheduler tool instead.
-
  ## :cookie: Configuration

  The main configuration that use to dynamic changing this workflow engine for your
@@ -333,19 +294,6 @@ it will use default value and do not raise any error to you.
  | **TRACE_ENABLE_WRITE** | Log | `false` | |
  | **AUDIT_PATH** | Log | `./audits` | |
  | **AUDIT_ENABLE_WRITE** | Log | `true` | A flag that enable logging object saving log to its destination. |
- | **MAX_PROCESS** | App | `2` | The maximum process worker number that run in scheduler app module. |
- | **MAX_SCHEDULE_PER_PROCESS** | App | `100` | A schedule per process that run parallel. |
- | **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | A time delta value that use to stop scheduler app in json string format. |
-
- **API Application**:
-
- This config part use for the workflow application that build from the FastAPI
- only.
-
- | Environment | Component | Default | Description |
- |:---------------------------|:-----------:|---------|------------------------------------------------------------------------------------|
- | **ENABLE_ROUTE_WORKFLOW** | API | `true` | A flag that enable workflow route to manage execute manually and workflow logging. |
- | **ENABLE_ROUTE_SCHEDULE** | API | `true` | A flag that enable run scheduler. |

  ## :rocket: Deployment

@@ -170,18 +170,18 @@ value (This config can override by extra parameters with `registry_caller` key).
  > engine will auto use the `model_validate` method before run your caller function.

  ```python
- from ddeutil.workflow import Result, WorkflowSecret, tag
+ from ddeutil.workflow import Result, CallerSecret, tag
  from ddeutil.workflow.exceptions import StageException
  from pydantic import BaseModel

  class AwsCredential(BaseModel):
      path: str
      access_client_id: str
-     access_client_secret: WorkflowSecret
+     access_client_secret: CallerSecret

  class RestAuth(BaseModel):
      type: str
-     keys: WorkflowSecret
+     keys: CallerSecret

  @tag("requests", alias="get-api-with-oauth-to-s3")
  def get_api_with_oauth_to_s3(
@@ -214,45 +214,6 @@ result: Result = workflow.execute(
  )
  ```

- > [!NOTE]
- > So, this package provide the `Schedule` template for this action, and you can
- > pass the parameters dynamically for changing align with that running time by
- > the `release` prefix.
- >
- > ```yaml
- > schedule-run-local-wf:
- >
- >   # Validate model that use to parsing exists for template file
- >   type: Schedule
- >   workflows:
- >
- >     # Map existing workflow that want to deploy with scheduler application.
- >     # It allows you to pass release parameter that dynamic change depend on the
- >     # current context of this scheduler application releasing that time.
- >     - name: run-py-local
- >       params:
- >         source-extract: "USD-THB"
- >         run-date: "${{ release.logical_date }}"
- > ```
- >
- > The main method of the `Schedule` model that use to running is `pending`. If you
- > do not pass the `stop` date on this method, it will use config with
- > `WORKFLOW_APP_STOP_BOUNDARY_DELTA` key for generate this stop date.
- >
- > ```python
- > from ddeutil.workflow import Schedule
- >
- > (
- >     Schedule
- >     .from_conf("schedule-run-local-wf")
- >     .pending(stop=None)
- > )
- > ```
-
- > [!WARNING]
- > The scheduler feature is the expensive feature of this project. You should
- > avoid to use it and find a scheduler tool instead.
-
  ## :cookie: Configuration

  The main configuration that use to dynamic changing this workflow engine for your
@@ -282,19 +243,6 @@ it will use default value and do not raise any error to you.
  | **TRACE_ENABLE_WRITE** | Log | `false` | |
  | **AUDIT_PATH** | Log | `./audits` | |
  | **AUDIT_ENABLE_WRITE** | Log | `true` | A flag that enable logging object saving log to its destination. |
- | **MAX_PROCESS** | App | `2` | The maximum process worker number that run in scheduler app module. |
- | **MAX_SCHEDULE_PER_PROCESS** | App | `100` | A schedule per process that run parallel. |
- | **STOP_BOUNDARY_DELTA** | App | `'{"minutes": 5, "seconds": 20}'` | A time delta value that use to stop scheduler app in json string format. |
-
- **API Application**:
-
- This config part use for the workflow application that build from the FastAPI
- only.
-
- | Environment | Component | Default | Description |
- |:---------------------------|:-----------:|---------|------------------------------------------------------------------------------------|
- | **ENABLE_ROUTE_WORKFLOW** | API | `true` | A flag that enable workflow route to manage execute manually and workflow logging. |
- | **ENABLE_ROUTE_SCHEDULE** | API | `true` | A flag that enable run scheduler. |

  ## :rocket: Deployment

@@ -0,0 +1 @@
+ __version__: str = "0.0.64"
@@ -33,13 +33,6 @@ from .result import (
      Status,
  )
  from .reusables import *
- from .scheduler import (
-     Schedule,
-     ScheduleWorkflow,
-     schedule_control,
-     schedule_runner,
-     schedule_task,
- )
  from .stages import *
  from .utils import *
  from .workflow import *
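With the edit above, `Schedule`, `ScheduleWorkflow`, and the `schedule_*` helpers are no longer re-exported from the top-level package in 0.0.64 (the whole `scheduler.py` module is deleted in this release). A minimal, hypothetical guard for downstream code that still imports the old API might look like this; the fallback branch is illustrative only and is not part of the package:

```python
# Hypothetical compatibility check for code written against <= 0.0.63.
try:
    from ddeutil.workflow import Schedule  # removed in 0.0.64
except ImportError:
    Schedule = None  # plan a migration to an external scheduler instead
```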
@@ -0,0 +1,91 @@
+ # ------------------------------------------------------------------------------
+ # Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+ # Licensed under the MIT License. See LICENSE in the project root for
+ # license information.
+ # ------------------------------------------------------------------------------
+ from __future__ import annotations
+
+ import contextlib
+ from collections.abc import AsyncIterator
+
+ from dotenv import load_dotenv
+ from fastapi import FastAPI, Request
+ from fastapi import status as st
+ from fastapi.encoders import jsonable_encoder
+ from fastapi.exceptions import RequestValidationError
+ from fastapi.middleware.cors import CORSMiddleware
+ from fastapi.middleware.gzip import GZipMiddleware
+ from fastapi.responses import UJSONResponse
+
+ from ..__about__ import __version__
+ from ..conf import api_config
+ from ..logs import get_logger
+ from .routes import job, log, workflow
+
+ load_dotenv()
+ logger = get_logger("uvicorn.error")
+
+
+ @contextlib.asynccontextmanager
+ async def lifespan(_: FastAPI) -> AsyncIterator[dict[str, list]]:
+     """Lifespan function for the FastAPI application."""
+     yield {}
+
+
+ app = FastAPI(
+     titile="Workflow",
+     description=(
+         "This is a workflow FastAPI application that use to manage manual "
+         "execute, logging, and schedule workflow via RestAPI."
+     ),
+     version=__version__,
+     lifespan=lifespan,
+     default_response_class=UJSONResponse,
+ )
+ app.add_middleware(GZipMiddleware, minimum_size=1000)
+ origins: list[str] = [
+     "http://localhost",
+     "http://localhost:88",
+     "http://localhost:80",
+ ]
+ app.add_middleware(
+     CORSMiddleware,
+     allow_origins=origins,
+     allow_credentials=True,
+     allow_methods=["*"],
+     allow_headers=["*"],
+ )
+
+
+ @app.get(path="/", response_class=UJSONResponse)
+ async def health():
+     """Index view that not return any template without json status."""
+     return {"message": "Workflow already start up with healthy status."}
+
+
+ # NOTE Add the jobs and logs routes by default.
+ app.include_router(job, prefix=api_config.prefix_path)
+ app.include_router(log, prefix=api_config.prefix_path)
+ app.include_router(workflow, prefix=api_config.prefix_path)
+
+
+ @app.exception_handler(RequestValidationError)
+ async def validation_exception_handler(
+     request: Request, exc: RequestValidationError
+ ):
+     _ = request
+     return UJSONResponse(
+         status_code=st.HTTP_422_UNPROCESSABLE_ENTITY,
+         content=jsonable_encoder({"detail": exc.errors(), "body": exc.body}),
+     )
+
+
+ if __name__ == "__main__":
+     import uvicorn
+
+     uvicorn.run(
+         app,
+         host="0.0.0.0",
+         port=80,
+         log_level="DEBUG",
+     )
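The rewritten `api/__init__.py` above replaces the previous 170-line module and drops the schedule route. As a rough smoke-test sketch (not shipped with the package; it assumes `httpx` is installed, which FastAPI's `TestClient` requires), the health route defined above can be exercised in-process without starting a server:

```python
# Sketch: call the new health endpoint with FastAPI's TestClient.
from fastapi.testclient import TestClient

from ddeutil.workflow.api import app

client = TestClient(app)
response = client.get("/")
assert response.status_code == 200
print(response.json())  # {"message": "Workflow already start up with healthy status."}
```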
@@ -5,5 +5,4 @@
  # ------------------------------------------------------------------------------
  from .job import job_route as job
  from .logs import log_route as log
- from .schedules import schedule_route as schedule
  from .workflows import workflow_route as workflow
@@ -69,7 +69,6 @@ async def job_execute(
  by_alias=True,
  exclude_none=False,
  exclude_unset=True,
- exclude_defaults=True,
  ),
  "params": params,
  "context": context,
@@ -44,7 +44,6 @@ async def get_traces(
  by_alias=True,
  exclude_none=True,
  exclude_unset=True,
- exclude_defaults=True,
  )
  for trace in result.trace.find_traces()
  ],
@@ -73,7 +72,6 @@ async def get_trace_with_id(run_id: str):
  by_alias=True,
  exclude_none=True,
  exclude_unset=True,
- exclude_defaults=True,
  )
  ),
  }
@@ -56,7 +56,6 @@ async def get_workflow_by_name(name: str) -> DictData:
  by_alias=True,
  exclude_none=False,
  exclude_unset=True,
- exclude_defaults=True,
  )


@@ -99,7 +98,6 @@ async def get_workflow_audits(name: str):
  by_alias=True,
  exclude_none=False,
  exclude_unset=True,
- exclude_defaults=True,
  )
  for audit in get_audit().find_audits(name=name)
  ],
@@ -133,6 +131,5 @@ async def get_workflow_release_audit(name: str, release: str):
  by_alias=True,
  exclude_none=False,
  exclude_unset=True,
- exclude_defaults=True,
  ),
  }
@@ -6,11 +6,9 @@
  from __future__ import annotations

  import copy
- import json
  import os
  from abc import ABC, abstractmethod
  from collections.abc import Iterator
- from datetime import timedelta
  from functools import cached_property
  from inspect import isclass
  from pathlib import Path
@@ -20,7 +18,7 @@ from zoneinfo import ZoneInfo
  from ddeutil.core import str2bool
  from ddeutil.io import YamlFlResolve, search_env_replace
  from ddeutil.io.paths import glob_files, is_ignored, read_ignore
- from pydantic import SecretStr
+ from pydantic import SecretStr, TypeAdapter

  from .__types import DictData

@@ -163,28 +161,6 @@ class Config: # pragma: no cov
      def max_queue_complete_hist(self) -> int:
          return int(env("CORE_MAX_QUEUE_COMPLETE_HIST", "16"))

-     # NOTE: App
-     @property
-     def max_schedule_process(self) -> int:
-         return int(env("APP_MAX_PROCESS", "2"))
-
-     @property
-     def max_schedule_per_process(self) -> int:
-         return int(env("APP_MAX_SCHEDULE_PER_PROCESS", "100"))
-
-     @property
-     def stop_boundary_delta(self) -> timedelta:
-         stop_boundary_delta_str: str = env(
-             "APP_STOP_BOUNDARY_DELTA", '{"minutes": 5, "seconds": 20}'
-         )
-         try:
-             return timedelta(**json.loads(stop_boundary_delta_str))
-         except Exception as err:
-             raise ValueError(
-                 "Config `WORKFLOW_APP_STOP_BOUNDARY_DELTA` can not parsing to"
-                 f"timedelta with {stop_boundary_delta_str}."
-             ) from err
-

  class APIConfig:
      """API Config object."""
@@ -193,14 +169,6 @@ class APIConfig:
      def prefix_path(self) -> str:
          return env("API_PREFIX_PATH", "/api/v1")

-     @property
-     def enable_route_workflow(self) -> bool:
-         return str2bool(env("API_ENABLE_ROUTE_WORKFLOW", "true"))
-
-     @property
-     def enable_route_schedule(self) -> bool:
-         return str2bool(env("API_ENABLE_ROUTE_SCHEDULE", "true"))
-

  class BaseLoad(ABC): # pragma: no cov
      """Base Load object is the abstraction object for any Load object that
@@ -496,7 +464,7 @@ def pass_env(value: T) -> T: # pragma: no cov
      return None if rs == "null" else rs


- class WorkflowSecret(SecretStr): # pragma: no cov
+ class CallerSecret(SecretStr): # pragma: no cov
      """Workflow Secret String model."""

      def get_secret_value(self) -> str:
@@ -506,3 +474,7 @@ class WorkflowSecret(SecretStr): # pragma: no cov
          :rtype: str
          """
          return pass_env(super().get_secret_value())
+
+
+ # NOTE: Define the caller secret type for use it directly in the caller func.
+ CallerSecretType = TypeAdapter(CallerSecret)
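The new `CallerSecretType` adapter lets a caller function validate a plain string into the renamed secret type without wrapping it in a model. A small sketch of that behavior (standard Pydantic v2 `TypeAdapter` usage; the class is re-declared locally so the snippet is self-contained, and the env-resolution override from `conf.py` is omitted):

```python
# Sketch: coerce a raw string into the secret type via the TypeAdapter.
from pydantic import SecretStr, TypeAdapter


class CallerSecret(SecretStr):
    """Stand-in for the class defined in conf.py (env resolution omitted here)."""


CallerSecretType = TypeAdapter(CallerSecret)

secret = CallerSecretType.validate_python("my-access-key")
print(secret)                     # ********** (masked when displayed)
print(secret.get_secret_value())  # my-access-key
```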
@@ -83,6 +83,3 @@ class WorkflowException(BaseWorkflowException): ...


  class ParamValueException(WorkflowException): ...
-
-
- class ScheduleException(BaseWorkflowException): ...
@@ -15,6 +15,7 @@ from datetime import datetime
  from functools import wraps
  from importlib import import_module
  from typing import (
+     Annotated,
      Any,
      Callable,
      Literal,
@@ -32,7 +33,8 @@ except ImportError:

  from ddeutil.core import getdot, import_string, lazy
  from ddeutil.io import search_env_replace
- from pydantic import BaseModel, create_model
+ from pydantic import BaseModel, ConfigDict, Field, create_model
+ from pydantic.alias_generators import to_pascal
  from pydantic.dataclasses import dataclass

  from .__types import DictData, Re
@@ -121,7 +123,7 @@ def make_filter_registry(
  if not (
      hasattr(func, "filter")
      and str(getattr(func, "mark", "NOT SET")) == "filter"
- ):
+ ): # pragma: no cov
      continue

  func: FilterFunc
@@ -635,12 +637,25 @@ def extract_call(
      return rgt[call.func][call.tag]


+ class BaseCallerArgs(BaseModel): # pragma: no cov
+     """Base Caller Args model."""
+
+     model_config = ConfigDict(
+         arbitrary_types_allowed=True,
+         use_enum_values=True,
+     )
+
+
  def create_model_from_caller(func: Callable) -> BaseModel: # pragma: no cov
      """Create model from the caller function. This function will use for
      validate the caller function argument typed-hint that valid with the args
      field.

-     :param func: A caller function.
+     Reference:
+         - https://github.com/lmmx/pydantic-function-models
+         - https://docs.pydantic.dev/1.10/usage/models/#dynamic-model-creation
+
+     :param func: (Callable) A caller function.

      :rtype: BaseModel
      """
@@ -649,16 +664,34 @@ def create_model_from_caller(func: Callable) -> BaseModel: # pragma: no cov
      fields: dict[str, Any] = {}
      for name in sig.parameters:
          param: inspect.Parameter = sig.parameters[name]
+
+         # NOTE: Skip all `*args` and `**kwargs` parameters.
          if param.kind in (
              inspect.Parameter.VAR_KEYWORD,
              inspect.Parameter.VAR_POSITIONAL,
          ):
              continue
+
+         if name.startswith("_"):
+             kwargs = {"serialization_alias": name}
+             rename: str = name.removeprefix("_")
+         else:
+             kwargs = {}
+             rename: str = name
+
          if param.default != inspect.Parameter.empty:
-             fields[name] = (type_hints[name], param.default)
+             fields[rename] = Annotated[
+                 type_hints[name],
+                 Field(default=param.default, **kwargs),
+             ]
          else:
-             fields[name] = (type_hints[name], ...)
+             fields[rename] = Annotated[
+                 type_hints[name],
+                 Field(..., **kwargs),
+             ]

      return create_model(
-         "".join(i.title() for i in func.__name__.split("_")), **fields
+         to_pascal(func.__name__),
+         __base__=BaseCallerArgs,
+         **fields,
      )
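The reworked `create_model_from_caller` builds a Pydantic model from a caller function's signature so that stage arguments can be validated before the call, naming the model with `to_pascal` and renaming underscore-prefixed parameters via a serialization alias. A condensed sketch of the same technique with a made-up function (`load_csv` is illustrative, not a registered caller):

```python
# Sketch: derive a validation model from a function signature.
import inspect
from typing import get_type_hints

from pydantic import create_model
from pydantic.alias_generators import to_pascal


def load_csv(path: str, limit: int = 10) -> dict:  # hypothetical caller function
    return {"path": path, "limit": limit}


sig = inspect.signature(load_csv)
hints = get_type_hints(load_csv)
fields = {
    # Required parameters get `...`, defaulted ones keep their default value.
    name: (hints[name], ... if p.default is inspect.Parameter.empty else p.default)
    for name, p in sig.parameters.items()
}
ArgsModel = create_model(to_pascal(load_csv.__name__), **fields)

print(ArgsModel.__name__)                           # LoadCsv
print(ArgsModel.model_validate({"path": "a.csv"}))  # path='a.csv' limit=10
```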
@@ -55,7 +55,7 @@ from textwrap import dedent
  from threading import Event
  from typing import Annotated, Any, Optional, TypeVar, Union, get_type_hints

- from pydantic import BaseModel, Field
+ from pydantic import BaseModel, Field, ValidationError
  from pydantic.functional_validators import model_validator
  from typing_extensions import Self

@@ -63,7 +63,13 @@ from .__types import DictData, DictStr, StrOrInt, StrOrNone, TupleStr
  from .conf import dynamic, pass_env
  from .exceptions import StageException, to_dict
  from .result import CANCEL, FAILED, SUCCESS, WAIT, Result, Status
- from .reusables import TagFunc, extract_call, not_in_template, param2template
+ from .reusables import (
+     TagFunc,
+     create_model_from_caller,
+     extract_call,
+     not_in_template,
+     param2template,
+ )
  from .utils import (
      delay,
      dump_all,
@@ -1088,8 +1094,7 @@ class CallStage(BaseAsyncStage):
  if "result" not in sig.parameters and not has_keyword:
      args.pop("result")

- args = self.parse_model_args(call_func, args, result)
-
+ args = self.validate_model_args(call_func, args, result)
  if inspect.iscoroutinefunction(call_func):
      loop = asyncio.get_event_loop()
      rs: DictData = loop.run_until_complete(
@@ -1177,7 +1182,7 @@ class CallStage(BaseAsyncStage):
  if "result" not in sig.parameters and not has_keyword:
      args.pop("result")

- args = self.parse_model_args(call_func, args, result)
+ args = self.validate_model_args(call_func, args, result)
  if inspect.iscoroutinefunction(call_func):
      rs: DictOrModel = await call_func(
          **param2template(args, params, extras=self.extras)
@@ -1200,13 +1205,12 @@ class CallStage(BaseAsyncStage):
          return result.catch(status=SUCCESS, context=dump_all(rs, by_alias=True))

      @staticmethod
-     def parse_model_args(
+     def validate_model_args(
          func: TagFunc,
          args: DictData,
          result: Result,
      ) -> DictData:
-         """Parse Pydantic model from any dict data before parsing to target
-         caller function.
+         """Validate an input arguments before passing to the caller function.

          :param func: A tag function that want to get typing.
          :param args: An arguments before passing to this tag function.
@@ -1216,7 +1220,25 @@ class CallStage(BaseAsyncStage):
          :rtype: DictData
          """
          try:
+             model_instance = create_model_from_caller(func).model_validate(args)
+             override = dict(model_instance)
+             args.update(override)
+
              type_hints: dict[str, Any] = get_type_hints(func)
+
+             for arg in type_hints:
+
+                 if arg == "return":
+                     continue
+
+                 if arg.removeprefix("_") in args:
+                     args[arg] = args.pop(arg.removeprefix("_"))
+
+             return args
+         except ValidationError as e:
+             raise StageException(
+                 "Validate argument from the caller function raise invalid type."
+             ) from e
          except TypeError as e:
              result.trace.warning(
                  f"[STAGE]: Get type hint raise TypeError: {e}, so, it skip "
@@ -1224,26 +1246,6 @@ class CallStage(BaseAsyncStage):
              )
              return args

-         for arg in type_hints:
-
-             if arg == "return":
-                 continue
-
-             if arg.removeprefix("_") in args:
-                 args[arg] = args.pop(arg.removeprefix("_"))
-
-             t: Any = type_hints[arg]
-
-             # NOTE: Check Result argument was passed to this caller function.
-             #
-             # if is_dataclass(t) and t.__name__ == "Result" and arg not in args:
-             #     args[arg] = result
-
-             if issubclass(t, BaseModel) and arg in args:
-                 args[arg] = t.model_validate(obj=args[arg])
-
-         return args
-


  class TriggerStage(BaseStage):
      """Trigger workflow executor stage that run an input trigger Workflow
@@ -293,11 +293,13 @@ def cut_id(run_id: str, *, num: int = 6) -> str:


  @overload
- def dump_all(value: BaseModel, by_alias: bool = False) -> DictData: ...
+ def dump_all(
+     value: BaseModel, by_alias: bool = False
+ ) -> DictData: ... # pragma: no cov


  @overload
- def dump_all(value: T, by_alias: bool = False) -> T: ...
+ def dump_all(value: T, by_alias: bool = False) -> T: ... # pragma: no cov


  def dump_all(