ddeutil-workflow 0.0.22__tar.gz → 0.0.24__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ddeutil_workflow-0.0.22/src/ddeutil_workflow.egg-info → ddeutil_workflow-0.0.24}/PKG-INFO +4 -3
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/README.md +2 -1
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/pyproject.toml +1 -1
- ddeutil_workflow-0.0.24/src/ddeutil/workflow/__about__.py +1 -0
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/src/ddeutil/workflow/__cron.py +6 -0
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/src/ddeutil/workflow/__init__.py +16 -14
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/src/ddeutil/workflow/api.py +2 -2
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/src/ddeutil/workflow/cli.py +2 -2
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/src/ddeutil/workflow/conf.py +18 -2
- ddeutil_workflow-0.0.22/src/ddeutil/workflow/on.py → ddeutil_workflow-0.0.24/src/ddeutil/workflow/cron.py +3 -3
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/src/ddeutil/workflow/job.py +2 -2
- ddeutil_workflow-0.0.24/src/ddeutil/workflow/params.py +176 -0
- ddeutil_workflow-0.0.24/src/ddeutil/workflow/result.py +102 -0
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/src/ddeutil/workflow/route.py +1 -1
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/src/ddeutil/workflow/scheduler.py +220 -173
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/src/ddeutil/workflow/stage.py +6 -1
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/src/ddeutil/workflow/utils.py +4 -245
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/src/ddeutil/workflow/workflow.py +145 -133
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24/src/ddeutil_workflow.egg-info}/PKG-INFO +4 -3
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/src/ddeutil_workflow.egg-info/SOURCES.txt +11 -7
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/src/ddeutil_workflow.egg-info/requires.txt +1 -1
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/tests/test_conf.py +7 -1
- ddeutil_workflow-0.0.22/tests/test_on.py → ddeutil_workflow-0.0.24/tests/test_cron_on.py +9 -9
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/tests/test_job_exec_py.py +1 -1
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/tests/test_job_exec_strategy.py +1 -1
- ddeutil_workflow-0.0.22/tests/test_utils_params.py → ddeutil_workflow-0.0.24/tests/test_params.py +5 -1
- ddeutil_workflow-0.0.22/tests/test_utils_result.py → ddeutil_workflow-0.0.24/tests/test_result.py +1 -1
- ddeutil_workflow-0.0.22/tests/test_scheduler.py → ddeutil_workflow-0.0.24/tests/test_schedule.py +30 -46
- ddeutil_workflow-0.0.24/tests/test_schedule_control.py +32 -0
- ddeutil_workflow-0.0.24/tests/test_schedule_tasks.py +72 -0
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/tests/test_stage.py +37 -15
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/tests/test_stage_exec_bash.py +1 -1
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/tests/test_stage_exec_hook.py +1 -1
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/tests/test_stage_exec_py.py +1 -1
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/tests/test_stage_exec_trigger.py +1 -1
- ddeutil_workflow-0.0.24/tests/test_workflow.py +254 -0
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/tests/test_workflow_exec.py +1 -1
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/tests/test_workflow_exec_needs.py +1 -1
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/tests/test_workflow_job_exec.py +1 -1
- ddeutil_workflow-0.0.24/tests/test_workflow_poke.py +151 -0
- ddeutil_workflow-0.0.24/tests/test_workflow_release.py +176 -0
- ddeutil_workflow-0.0.24/tests/test_workflow_schedule.py +118 -0
- ddeutil_workflow-0.0.24/tests/test_workflow_task.py +174 -0
- ddeutil_workflow-0.0.22/src/ddeutil/workflow/__about__.py +0 -1
- ddeutil_workflow-0.0.22/tests/test_params.py +0 -13
- ddeutil_workflow-0.0.22/tests/test_scheduler_tasks.py +0 -79
- ddeutil_workflow-0.0.22/tests/test_workflow.py +0 -187
- ddeutil_workflow-0.0.22/tests/test_workflow_poke.py +0 -67
- ddeutil_workflow-0.0.22/tests/test_workflow_release.py +0 -64
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/LICENSE +0 -0
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/setup.cfg +0 -0
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/src/ddeutil/workflow/__types.py +0 -0
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/src/ddeutil/workflow/exceptions.py +0 -0
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/src/ddeutil/workflow/repeat.py +0 -0
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/src/ddeutil_workflow.egg-info/entry_points.txt +0 -0
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/tests/test__cron.py +0 -0
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/tests/test__regex.py +0 -0
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/tests/test_conf_log.py +0 -0
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/tests/test_job.py +0 -0
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/tests/test_job_strategy.py +0 -0
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/tests/test_utils.py +0 -0
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/tests/test_utils_filter.py +0 -0
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/tests/test_utils_tag.py +0 -0
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/tests/test_utils_template.py +0 -0
- {ddeutil_workflow-0.0.22 → ddeutil_workflow-0.0.24}/tests/test_workflow_exec_hook.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ddeutil-workflow
-Version: 0.0.22
+Version: 0.0.24
 Summary: Lightweight workflow orchestration with less dependencies
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -24,7 +24,7 @@ Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: ddeutil>=0.4.3
 Requires-Dist: ddeutil-io[toml,yaml]>=0.2.3
-Requires-Dist: pydantic==2.10.
+Requires-Dist: pydantic==2.10.4
 Requires-Dist: python-dotenv==1.0.1
 Requires-Dist: typer==0.15.1
 Requires-Dist: schedule<2.0.0,==1.2.2
@@ -196,7 +196,8 @@ and do not raise any error to you.
 | `WORKFLOW_CORE_MAX_NUM_POKING` | Core | 4 | . | |
 | `WORKFLOW_CORE_MAX_JOB_PARALLEL` | Core | 2 | The maximum job number that able to run parallel in workflow executor. | |
 | `WORKFLOW_CORE_MAX_JOB_EXEC_TIMEOUT` | Core | 600 | | |
-| `
+| `WORKFLOW_CORE_MAX_CRON_PER_WORKFLOW` | Core | 5 | | |
+| `WORKFLOW_CORE_MAX_QUEUE_COMPLETE_HIST` | Core | 16 | | |
 | `WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE` | Core | true | A flog that enable generating ID with `md5` algorithm. | |
 | `WORKFLOW_LOG_DEBUG_MODE` | Log | true | A flag that enable logging with debug level mode. | |
 | `WORKFLOW_LOG_ENABLE_WRITE` | Log | true | A flag that enable logging object saving log to its destination. | |
README.md
@@ -163,7 +163,8 @@ and do not raise any error to you.
 | `WORKFLOW_CORE_MAX_NUM_POKING` | Core | 4 | . | |
 | `WORKFLOW_CORE_MAX_JOB_PARALLEL` | Core | 2 | The maximum job number that able to run parallel in workflow executor. | |
 | `WORKFLOW_CORE_MAX_JOB_EXEC_TIMEOUT` | Core | 600 | | |
-| `
+| `WORKFLOW_CORE_MAX_CRON_PER_WORKFLOW` | Core | 5 | | |
+| `WORKFLOW_CORE_MAX_QUEUE_COMPLETE_HIST` | Core | 16 | | |
 | `WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE` | Core | true | A flog that enable generating ID with `md5` algorithm. | |
 | `WORKFLOW_LOG_DEBUG_MODE` | Log | true | A flag that enable logging with debug level mode. | |
 | `WORKFLOW_LOG_ENABLE_WRITE` | Log | true | A flag that enable logging object saving log to its destination. | |
src/ddeutil/workflow/__about__.py
@@ -0,0 +1 @@
+__version__: str = "0.0.24"
src/ddeutil/workflow/__cron.py
@@ -736,6 +736,12 @@ class CronRunner:
         self.is_year: bool = isinstance(cron, CronJobYear)
         self.reset_flag: bool = True
 
+    def __repr__(self) -> str:
+        return (
+            f"{self.__class__.__name__}(CronJob('{self.cron}'), "
+            f"{self.date:%Y-%m-%d %H:%M:%S}, tz='{self.tz}')"
+        )
+
     def reset(self) -> None:
         """Resets the iterator to start time."""
         self.date: datetime = self.__start_date
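The `__repr__` added above renders the wrapped cron expression, the runner's current date cursor, and its timezone. The tiny stand-in class below is hypothetical (not part of the package) and only illustrates the string that this f-string format produces:

```python
from datetime import datetime


class FakeRunner:
    """Hypothetical stand-in that mimics the repr format added to CronRunner."""

    def __init__(self, cron: str, date: datetime, tz: str) -> None:
        self.cron = cron  # crontab expression, e.g. "*/5 * * * *"
        self.date = date  # current datetime cursor of the runner
        self.tz = tz      # timezone name

    def __repr__(self) -> str:
        # Same f-string shape as the CronRunner.__repr__ in the hunk above.
        return (
            f"{self.__class__.__name__}(CronJob('{self.cron}'), "
            f"{self.date:%Y-%m-%d %H:%M:%S}, tz='{self.tz}')"
        )


print(repr(FakeRunner("*/5 * * * *", datetime(2024, 1, 1), "Asia/Bangkok")))
# FakeRunner(CronJob('*/5 * * * *'), 2024-01-01 00:00:00, tz='Asia/Bangkok')
```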
src/ddeutil/workflow/__init__.py
@@ -3,11 +3,17 @@
 # Licensed under the MIT License. See LICENSE in the project root for
 # license information.
 # ------------------------------------------------------------------------------
+from .__cron import CronRunner
 from .conf import (
     Config,
     FileLog,
     Loader,
 )
+from .cron import (
+    On,
+    YearOn,
+    interval2crontab,
+)
 from .exceptions import (
     JobException,
     ParamValueException,
@@ -19,14 +25,18 @@ from .job import (
     Job,
     Strategy,
 )
-from .
-
-
-
+from .params import (
+    ChoiceParam,
+    DatetimeParam,
+    IntParam,
+    Param,
+    StrParam,
 )
+from .result import Result
 from .scheduler import (
     Schedule,
-
+    WorkflowSchedule,
+    schedule_runner,
 )
 from .stage import (
     BashStage,
@@ -39,16 +49,9 @@ from .stage import (
 )
 from .utils import (
     FILTERS,
-    ChoiceParam,
-    DatetimeParam,
-    DefaultParam,
     FilterFunc,
     FilterRegistry,
-    IntParam,
-    Param,
-    Result,
     ReturnTagFunc,
-    StrParam,
     TagFunc,
     batch,
     cross_product,
@@ -67,11 +70,10 @@ from .utils import (
     map_post_filter,
     not_in_template,
     param2template,
-    queue2str,
     str2template,
     tag,
 )
 from .workflow import (
     Workflow,
-
+    WorkflowTask,
 )
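The four `__init__.py` hunks above reshuffle the public API: `On`, `YearOn`, and `interval2crontab` now come from the renamed `cron` module, the parameter models move out of `utils` into the new `params` module, `Result` moves into the new `result` module, and `WorkflowTask` plus `schedule_runner` become top-level exports. A short import sketch against the 0.0.24 surface, using only names taken from the hunks above and assuming the package is installed:

```python
# Import sketch for the reorganized 0.0.24 top-level API.
from ddeutil.workflow import (
    On,               # re-exported from the renamed .cron module
    Param,            # parameter union, now defined in .params
    Result,           # result model, now defined in .result
    Workflow,
    WorkflowTask,     # new export from .workflow
    schedule_runner,  # new export from .scheduler
)
```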
src/ddeutil/workflow/api.py
@@ -23,7 +23,7 @@ from pydantic import BaseModel
 from .__about__ import __version__
 from .conf import config, get_logger
 from .repeat import repeat_at, repeat_every
-from .
+from .workflow import WorkflowTask
 
 load_dotenv()
 logger = get_logger("ddeutil.workflow")
@@ -34,7 +34,7 @@ class State(TypedDict):
     upper_result: dict[str, str]
     scheduler: list[str]
     workflow_threads: dict[str, Thread]
-    workflow_tasks: list[
+    workflow_tasks: list[WorkflowTask]
     workflow_queue: dict[str, list[datetime]]
     workflow_running: dict[str, list[datetime]]
 
src/ddeutil/workflow/cli.py
@@ -73,10 +73,10 @@ def schedule(
     if stop:
         stop: datetime = stop.astimezone(tz=config.tz)
 
-    from .scheduler import
+    from .scheduler import schedule_runner
 
     # NOTE: Start running workflow scheduler application.
-    workflow_rs: list[str] =
+    workflow_rs: list[str] = schedule_runner(
         stop=stop, excluded=excluded, externals=json.loads(externals)
     )
     logger.info(f"Application run success: {workflow_rs}")
src/ddeutil/workflow/conf.py
@@ -23,7 +23,7 @@ from pydantic import BaseModel, Field
 from pydantic.functional_validators import model_validator
 from typing_extensions import Self
 
-from .__types import DictData
+from .__types import DictData, TupleStr
 
 AnyModel = TypeVar("AnyModel", bound=BaseModel)
 AnyModelType = type[AnyModel]
@@ -32,6 +32,19 @@ load_dotenv()
 
 env = os.getenv
 
+__all__: TupleStr = (
+    "get_logger",
+    "Config",
+    "SimLoad",
+    "Loader",
+    "get_type",
+    "config",
+    "logger",
+    "FileLog",
+    "SQLiteLog",
+    "Log",
+)
+
 
 @lru_cache
 def get_logger(name: str):
@@ -107,7 +120,10 @@ class Config:
         os.getenv("WORKFLOW_CORE_MAX_NUM_POKING", "4")
     )
     max_on_per_workflow: int = int(
-        env("
+        env("WORKFLOW_CORE_MAX_CRON_PER_WORKFLOW", "5")
+    )
+    max_queue_complete_hist: int = int(
+        os.getenv("WORKFLOW_CORE_MAX_QUEUE_COMPLETE_HIST", "16")
     )
 
     # NOTE: Schedule App
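The `Config` hunk above wires in the two environment variables that the README table rows earlier in this diff also document: `WORKFLOW_CORE_MAX_CRON_PER_WORKFLOW` (default 5) and `WORKFLOW_CORE_MAX_QUEUE_COMPLETE_HIST` (default 16). A minimal sketch of overriding them before the package builds its `Config` object; the chosen values are arbitrary examples:

```python
import os

# Set the new 0.0.24 limits before ddeutil.workflow reads its configuration.
os.environ["WORKFLOW_CORE_MAX_CRON_PER_WORKFLOW"] = "10"
os.environ["WORKFLOW_CORE_MAX_QUEUE_COMPLETE_HIST"] = "32"

from ddeutil.workflow.conf import config  # noqa: E402

# Attribute names come from the Config hunk above.
print(config.max_on_per_workflow)      # 10
print(config.max_queue_complete_hist)  # 32
```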
src/ddeutil/workflow/cron.py (renamed from on.py)
@@ -61,7 +61,7 @@ def interval2crontab(
 
 
 class On(BaseModel):
-    """On
+    """On Pydantic model (Warped crontab object by model).
 
     See Also:
         * ``generate()`` is the main usecase of this schedule object.
@@ -197,8 +197,8 @@ class On(BaseModel):
 
 
 class YearOn(On):
-    """
-    data schedule tools like AWS Glue.
+    """On with enhance Year Pydantic model for limit year matrix that use by
+    some data schedule tools like AWS Glue.
     """
 
     model_config = ConfigDict(arbitrary_types_allowed=True)
src/ddeutil/workflow/job.py
@@ -36,9 +36,9 @@ from .exceptions import (
     StageException,
     UtilException,
 )
+from .result import Result
 from .stage import Stage
 from .utils import (
-    Result,
     cross_product,
     cut_id,
     dash2underscore,
@@ -313,7 +313,7 @@ class Job(BaseModel):
         # VALIDATE: Validate stage id should not duplicate.
         rs: list[str] = []
         for stage in value:
-            name: str = stage.
+            name: str = stage.iden
             if name in rs:
                 raise ValueError(
                     "Stage name in jobs object should not be duplicate."
src/ddeutil/workflow/params.py (new file)
@@ -0,0 +1,176 @@
+# ------------------------------------------------------------------------------
+# Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+# Licensed under the MIT License. See LICENSE in the project root for
+# license information.
+# ------------------------------------------------------------------------------
+from __future__ import annotations
+
+import logging
+from abc import ABC, abstractmethod
+from datetime import date, datetime
+from typing import Any, Literal, Optional, Union
+
+from pydantic import BaseModel, Field
+
+from .__types import TupleStr
+from .exceptions import ParamValueException
+from .utils import get_dt_now
+
+logger = logging.getLogger("ddeutil.workflow")
+
+__all__: TupleStr = (
+    "ChoiceParam",
+    "DatetimeParam",
+    "IntParam",
+    "Param",
+    "StrParam",
+)
+
+
+class BaseParam(BaseModel, ABC):
+    """Base Parameter that use to make any Params Model. The type will dynamic
+    with the type field that made from literal string."""
+
+    desc: Optional[str] = Field(
+        default=None, description="A description of parameter providing."
+    )
+    required: bool = Field(
+        default=True,
+        description="A require flag that force to pass this parameter value.",
+    )
+    type: str = Field(description="A type of parameter.")
+
+    @abstractmethod
+    def receive(self, value: Optional[Any] = None) -> Any:
+        raise NotImplementedError(
+            "Receive value and validate typing before return valid value."
+        )
+
+
+class DefaultParam(BaseParam):
+    """Default Parameter that will check default if it required. This model do
+    not implement the receive method.
+    """
+
+    required: bool = Field(
+        default=False,
+        description="A require flag for the default-able parameter value.",
+    )
+    default: Optional[str] = Field(
+        default=None,
+        description="A default value if parameter does not pass.",
+    )
+
+    @abstractmethod
+    def receive(self, value: Optional[Any] = None) -> Any:
+        raise NotImplementedError(
+            "Receive value and validate typing before return valid value."
+        )
+
+
+class DatetimeParam(DefaultParam):
+    """Datetime parameter."""
+
+    type: Literal["datetime"] = "datetime"
+    default: datetime = Field(default_factory=get_dt_now)
+
+    def receive(self, value: str | datetime | date | None = None) -> datetime:
+        """Receive value that match with datetime. If a input value pass with
+        None, it will use default value instead.
+
+        :param value: A value that want to validate with datetime parameter
+            type.
+        :rtype: datetime
+        """
+        if value is None:
+            return self.default
+
+        if isinstance(value, datetime):
+            return value
+        elif isinstance(value, date):
+            return datetime(value.year, value.month, value.day)
+        elif not isinstance(value, str):
+            raise ParamValueException(
+                f"Value that want to convert to datetime does not support for "
+                f"type: {type(value)}"
+            )
+        try:
+            return datetime.fromisoformat(value)
+        except ValueError:
+            raise ParamValueException(
+                f"Invalid isoformat string: {value!r}"
+            ) from None
+
+
+class StrParam(DefaultParam):
+    """String parameter."""
+
+    type: Literal["str"] = "str"
+
+    def receive(self, value: str | None = None) -> str | None:
+        """Receive value that match with str.
+
+        :param value: A value that want to validate with string parameter type.
+        :rtype: str | None
+        """
+        if value is None:
+            return self.default
+        return str(value)
+
+
+class IntParam(DefaultParam):
+    """Integer parameter."""
+
+    type: Literal["int"] = "int"
+    default: Optional[int] = Field(
+        default=None,
+        description="A default value if parameter does not pass.",
+    )
+
+    def receive(self, value: int | None = None) -> int | None:
+        """Receive value that match with int.
+
+        :param value: A value that want to validate with integer parameter type.
+        :rtype: int | None
+        """
+        if value is None:
+            return self.default
+        if not isinstance(value, int):
+            try:
+                return int(str(value))
+            except ValueError as err:
+                raise ParamValueException(
+                    f"Value can not convert to int, {value}, with base 10"
+                ) from err
+        return value
+
+
+class ChoiceParam(BaseParam):
+    """Choice parameter."""
+
+    type: Literal["choice"] = "choice"
+    options: list[str] = Field(description="A list of choice parameters.")
+
+    def receive(self, value: str | None = None) -> str:
+        """Receive value that match with options.
+
+        :param value: A value that want to select from the options field.
+        :rtype: str
+        """
+        # NOTE:
+        #   Return the first value in options if does not pass any input value
+        if value is None:
+            return self.options[0]
+        if value not in self.options:
+            raise ParamValueException(
+                f"{value!r} does not match any value in choice options."
+            )
+        return value
+
+
+Param = Union[
+    ChoiceParam,
+    DatetimeParam,
+    IntParam,
+    StrParam,
+]
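Every class in the new `params.py` exposes a `receive()` method that validates or coerces an incoming value, falling back to a default (or the first option) when `None` is passed. A short usage sketch built only from the definitions shown above:

```python
from datetime import datetime

from ddeutil.workflow.params import ChoiceParam, DatetimeParam, IntParam, StrParam

# ISO strings are parsed; invalid strings raise ParamValueException.
assert DatetimeParam().receive("2024-01-01") == datetime(2024, 1, 1)

# Non-int values are coerced with int(str(value)); None returns the default.
assert IntParam(default=3).receive("42") == 42
assert IntParam(default=3).receive() == 3

# With no input, a choice parameter returns its first option.
assert ChoiceParam(options=["dev", "prod"]).receive() == "dev"

# String parameters stringify whatever they are given.
assert StrParam().receive(10) == "10"
```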
src/ddeutil/workflow/result.py (new file)
@@ -0,0 +1,102 @@
+# ------------------------------------------------------------------------------
+# Copyright (c) 2022 Korawich Anuttra. All rights reserved.
+# Licensed under the MIT License. See LICENSE in the project root for
+# license information.
+# ------------------------------------------------------------------------------
+from __future__ import annotations
+
+from dataclasses import field
+from typing import Optional
+
+from pydantic.dataclasses import dataclass
+from pydantic.functional_validators import model_validator
+from typing_extensions import Self
+
+from .__types import DictData, TupleStr
+from .utils import gen_id
+
+__all__: TupleStr = ("Result",)
+
+
+@dataclass
+class Result:
+    """Result Pydantic Model for passing and receiving data context from any
+    module execution process like stage execution, job execution, or workflow
+    execution.
+
+    For comparison property, this result will use ``status``, ``context``,
+    and ``_run_id`` fields to comparing with other result instance.
+    """
+
+    status: int = field(default=2)
+    context: DictData = field(default_factory=dict)
+    run_id: Optional[str] = field(default=None)
+
+    # NOTE: Ignore this field to compare another result model with __eq__.
+    parent_run_id: Optional[str] = field(default=None, compare=False)
+
+    @model_validator(mode="after")
+    def __prepare_run_id(self) -> Self:
+        """Prepare running ID which use default ID if it initialize at the first
+        time
+
+        :rtype: Self
+        """
+        self._run_id = gen_id("manual", unique=True)
+        return self
+
+    def set_run_id(self, running_id: str) -> Self:
+        """Set a running ID.
+
+        :param running_id: A running ID that want to update on this model.
+        :rtype: Self
+        """
+        self.run_id = running_id
+        return self
+
+    def set_parent_run_id(self, running_id: str) -> Self:
+        """Set a parent running ID.
+
+        :param running_id: A running ID that want to update on this model.
+        :rtype: Self
+        """
+        self.parent_run_id: str = running_id
+        return self
+
+    def catch(self, status: int, context: DictData) -> Self:
+        """Catch the status and context to current data."""
+        self.__dict__["status"] = status
+        self.__dict__["context"].update(context)
+        return self
+
+    def receive(self, result: Result) -> Self:
+        """Receive context from another result object.
+
+        :rtype: Self
+        """
+        self.__dict__["status"] = result.status
+        self.__dict__["context"].update(result.context)
+
+        # NOTE: Update running ID from an incoming result.
+        self.parent_run_id = result.parent_run_id
+        self.run_id = result.run_id
+        return self
+
+    def receive_jobs(self, result: Result) -> Self:
+        """Receive context from another result object that use on the workflow
+        execution which create a ``jobs`` keys on the context if it do not
+        exist.
+
+        :rtype: Self
+        """
+        self.__dict__["status"] = result.status
+
+        # NOTE: Check the context has jobs key.
+        if "jobs" not in self.__dict__["context"]:
+            self.__dict__["context"]["jobs"] = {}
+        self.__dict__["context"]["jobs"].update(result.context)
+
+        # NOTE: Update running ID from an incoming result.
+        self.parent_run_id: str = result.parent_run_id
+        self.run_id: str = result.run_id
+        return self
src/ddeutil/workflow/route.py
@@ -17,8 +17,8 @@ from pydantic import BaseModel
 from . import Workflow
 from .__types import DictData
 from .conf import Loader, config, get_logger
+from .result import Result
 from .scheduler import Schedule
-from .utils import Result
 
 logger = get_logger("ddeutil.workflow")
 workflow = APIRouter(