ddeutil-workflow 0.0.67__tar.gz → 0.0.68__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/PKG-INFO +3 -3
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/pyproject.toml +4 -3
- ddeutil_workflow-0.0.68/src/ddeutil/workflow/__about__.py +1 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil/workflow/api/__init__.py +20 -19
- ddeutil_workflow-0.0.67/src/ddeutil/workflow/api/logs.py → ddeutil_workflow-0.0.68/src/ddeutil/workflow/api/log_conf.py +28 -15
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil/workflow/api/routes/__init__.py +3 -3
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil/workflow/api/routes/job.py +42 -16
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil/workflow/api/routes/logs.py +7 -7
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil/workflow/api/routes/workflows.py +10 -9
- ddeutil_workflow-0.0.68/src/ddeutil/workflow/cli.py +119 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil/workflow/conf.py +7 -3
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil/workflow/event.py +4 -3
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil/workflow/logs.py +18 -14
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil/workflow/stages.py +38 -9
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil/workflow/utils.py +1 -52
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil_workflow.egg-info/PKG-INFO +3 -3
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil_workflow.egg-info/SOURCES.txt +1 -1
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil_workflow.egg-info/requires.txt +2 -2
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/tests/test_utils.py +0 -10
- ddeutil_workflow-0.0.67/src/ddeutil/workflow/__about__.py +0 -1
- ddeutil_workflow-0.0.67/src/ddeutil/workflow/cli.py +0 -68
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/LICENSE +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/README.md +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/setup.cfg +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil/workflow/__cron.py +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil/workflow/__init__.py +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil/workflow/__main__.py +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil/workflow/__types.py +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil/workflow/errors.py +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil/workflow/job.py +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil/workflow/params.py +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil/workflow/result.py +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil/workflow/reusables.py +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil/workflow/workflow.py +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil_workflow.egg-info/dependency_links.txt +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil_workflow.egg-info/entry_points.txt +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil_workflow.egg-info/top_level.txt +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/tests/test__cron.py +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/tests/test__regex.py +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/tests/test_conf.py +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/tests/test_errors.py +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/tests/test_event.py +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/tests/test_job.py +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/tests/test_job_exec.py +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/tests/test_job_exec_strategy.py +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/tests/test_logs_audit.py +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/tests/test_logs_trace.py +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/tests/test_params.py +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/tests/test_result.py +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/tests/test_reusables_call_tag.py +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/tests/test_reusables_func_model.py +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/tests/test_reusables_template.py +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/tests/test_reusables_template_filter.py +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/tests/test_strategy.py +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/tests/test_workflow.py +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/tests/test_workflow_exec.py +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/tests/test_workflow_exec_job.py +0 -0
- {ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/tests/test_workflow_release.py +0 -0
{ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ddeutil-workflow
-Version: 0.0.67
+Version: 0.0.68
 Summary: Lightweight workflow orchestration with YAML template
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -24,10 +24,10 @@ Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: ddeutil[checksum]>=0.4.8
 Requires-Dist: ddeutil-io[toml,yaml]>=0.2.14
-Requires-Dist: pydantic==2.11.
+Requires-Dist: pydantic==2.11.5
 Requires-Dist: pydantic-extra-types==2.10.4
 Requires-Dist: python-dotenv==1.1.0
-Requires-Dist: typer
+Requires-Dist: typer>=0.16.0
 Provides-Extra: all
 Requires-Dist: fastapi<1.0.0,>=0.115.0; extra == "all"
 Requires-Dist: uvicorn; extra == "all"
{ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/pyproject.toml

@@ -27,10 +27,10 @@ requires-python = ">=3.9.13"
 dependencies = [
     "ddeutil[checksum]>=0.4.8",
     "ddeutil-io[yaml,toml]>=0.2.14",
-    "pydantic==2.11.
+    "pydantic==2.11.5",
     "pydantic-extra-types==2.10.4",
     "python-dotenv==1.1.0",
-    "typer
+    "typer>=0.16.0",
 ]
 dynamic = ["version"]

@@ -78,9 +78,10 @@ omit = [
     "src/ddeutil/workflow/__about__.py",
     "src/ddeutil/workflow/__cron.py",
     "src/ddeutil/workflow/__main__.py",
+    "src/ddeutil/workflow/__types.py",
     "src/ddeutil/workflow/cli.py",
     "src/ddeutil/workflow/api/__init__.py",
-    "src/ddeutil/workflow/api/
+    "src/ddeutil/workflow/api/log_conf.py",
     "src/ddeutil/workflow/api/routes/__init__.py",
     "src/ddeutil/workflow/api/routes/job.py",
     "src/ddeutil/workflow/api/routes/logs.py",
ddeutil_workflow-0.0.68/src/ddeutil/workflow/__about__.py

@@ -0,0 +1 @@
+__version__: str = "0.0.68"
{ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil/workflow/api/__init__.py

@@ -6,6 +6,7 @@
 from __future__ import annotations

 import contextlib
+import logging
 from collections.abc import AsyncIterator

 from dotenv import load_dotenv
@@ -19,11 +20,10 @@ from fastapi.responses import UJSONResponse

 from ..__about__ import __version__
 from ..conf import api_config
-from ..logs import get_logger
 from .routes import job, log, workflow

 load_dotenv()
-logger =
+logger = logging.getLogger("uvicorn.error")


 @contextlib.asynccontextmanager
@@ -58,12 +58,16 @@ app.add_middleware(


 @app.get(path="/", response_class=UJSONResponse)
-async def health():
+async def health() -> UJSONResponse:
     """Index view that not return any template without json status."""
-
+    logger.info("[API]: Workflow API Application already running ...")
+    return UJSONResponse(
+        content={"message": "Workflow already start up with healthy status."},
+        status_code=st.HTTP_200_OK,
+    )


-# NOTE Add the jobs and logs routes by default.
+# NOTE: Add the jobs and logs routes by default.
 app.include_router(job, prefix=api_config.prefix_path)
 app.include_router(log, prefix=api_config.prefix_path)
 app.include_router(workflow, prefix=api_config.prefix_path)
@@ -71,21 +75,18 @@ app.include_router(workflow, prefix=api_config.prefix_path)

 @app.exception_handler(RequestValidationError)
 async def validation_exception_handler(
-    request: Request,
-
+    request: Request,
+    exc: RequestValidationError,
+) -> UJSONResponse:
+    """Error Handler for model validate does not valid."""
     _ = request
     return UJSONResponse(
         status_code=st.HTTP_422_UNPROCESSABLE_ENTITY,
-        content=jsonable_encoder(
-
-
-
-
-
-
-    uvicorn.run(
-        app,
-        host="0.0.0.0",
-        port=80,
-        log_level="DEBUG",
+        content=jsonable_encoder(
+            {
+                "message": "Body does not parsing with model.",
+                "detail": exc.errors(),
+                "body": exc.body,
+            }
+        ),
     )
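
The reworked `health` endpoint and validation handler above now return explicit `UJSONResponse` payloads. A minimal sketch of how a consumer could exercise them with FastAPI's test client, assuming the package and its `all` extra (FastAPI, ujson, httpx) are installed:

    # Illustrative only: drive the reworked endpoints through FastAPI's TestClient.
    from fastapi.testclient import TestClient

    from ddeutil.workflow.api import app

    client = TestClient(app)

    # The index route now returns an explicit 200 with a JSON message.
    resp = client.get("/")
    print(resp.status_code, resp.json())
    # -> 200 {'message': 'Workflow already start up with healthy status.'}

    # A body that fails model validation comes back as a structured 422 payload
    # carrying "message", "detail", and "body" from the new exception handler.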
ddeutil_workflow-0.0.67/src/ddeutil/workflow/api/logs.py → ddeutil_workflow-0.0.68/src/ddeutil/workflow/api/log_conf.py

@@ -1,6 +1,8 @@
+from typing import Any
+
 from ..conf import config

-LOGGING_CONFIG = {  # pragma: no cov
+LOGGING_CONFIG: dict[str, Any] = {  # pragma: no cov
     "version": 1,
     "disable_existing_loggers": False,
     "formatters": {
@@ -22,38 +24,49 @@ LOGGING_CONFIG = {  # pragma: no cov
             "stream": "ext://sys.stderr",
         },
         "stream_handler": {
+            # "formatter": "standard",
             "formatter": "custom_formatter",
             "class": "logging.StreamHandler",
             "stream": "ext://sys.stdout",
         },
-        "file_handler": {
-
-
-
-
-
-        },
+        # "file_handler": {
+        #     "formatter": "custom_formatter",
+        #     "class": "logging.handlers.RotatingFileHandler",
+        #     "filename": "logs/app.log",
+        #     "maxBytes": 1024 * 1024 * 1,
+        #     "backupCount": 3,
+        # },
     },
     "loggers": {
         "uvicorn": {
-            "handlers": ["default", "file_handler"],
+            # "handlers": ["default", "file_handler"],
+            "handlers": ["default"],
             "level": "DEBUG" if config.debug else "INFO",
             "propagate": False,
         },
         "uvicorn.access": {
-            "handlers": ["stream_handler", "file_handler"],
+            # "handlers": ["stream_handler", "file_handler"],
+            "handlers": ["stream_handler"],
             "level": "DEBUG" if config.debug else "INFO",
             "propagate": False,
         },
         "uvicorn.error": {
-            "handlers": ["stream_handler", "file_handler"],
+            # "handlers": ["stream_handler", "file_handler"],
+            "handlers": ["stream_handler"],
             "level": "DEBUG" if config.debug else "INFO",
             "propagate": False,
         },
-
-
-
-
+        "uvicorn.asgi": {
+            # "handlers": ["stream_handler", "file_handler"],
+            "handlers": ["stream_handler"],
+            "level": "TRACE",
+            "propagate": False,
+        },
+        # "ddeutil.workflow": {
+        #     "handlers": ["stream_handler"],
+        #     "level": "INFO",
+        #     # "propagate": False,
+        #     "propagate": True,
         # },
     },
 }
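
The renamed `log_conf.py` keeps a standard `logging` dict-config; the new `workflow-cli api` command (see `cli.py` below) merges it into uvicorn's defaults with the `|` operator. A small sketch of that merge, with an abridged, hypothetical override in the same shape, to show that the union is shallow: top-level keys such as `"loggers"` from the right-hand side replace uvicorn's wholesale rather than being deep-merged.

    # Sketch of the dict-config merge used by the new CLI `api` command.
    import logging.config

    import uvicorn

    # Abridged override in the shape of api/log_conf.py (illustrative values).
    OVERRIDE = {
        "version": 1,
        "disable_existing_loggers": False,
        "loggers": {
            "uvicorn.error": {
                "handlers": ["default"],  # handler defined by uvicorn's default config
                "level": "INFO",
                "propagate": False,
            },
        },
    }

    merged = uvicorn.config.LOGGING_CONFIG | OVERRIDE  # shallow (top-level) union
    logging.config.dictConfig(merged)
    logging.getLogger("uvicorn.error").info("logging configured")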
{ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil/workflow/api/routes/__init__.py
RENAMED
@@ -3,6 +3,6 @@
 # Licensed under the MIT License. See LICENSE in the project root for
 # license information.
 # ------------------------------------------------------------------------------
-from .job import
-from .logs import
-from .workflows import
+from .job import router as job
+from .logs import router as log
+from .workflows import router as workflow
{ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil/workflow/api/routes/job.py

@@ -5,20 +5,21 @@
 # ------------------------------------------------------------------------------
 from __future__ import annotations

+import logging
 from typing import Any, Optional

 from fastapi import APIRouter
+from fastapi import status as st
 from fastapi.responses import UJSONResponse
 from pydantic import BaseModel, Field

 from ...__types import DictData
 from ...errors import JobError
 from ...job import Job
-from ...logs import get_logger
 from ...result import Result

-logger =
-
+logger = logging.getLogger("uvicorn.error")
+router = APIRouter(prefix="/job", tags=["job"])


 class ResultCreate(BaseModel):
@@ -32,14 +33,19 @@ class ResultCreate(BaseModel):
     )


-@
+@router.post(
+    path="/execute/",
+    response_class=UJSONResponse,
+    status_code=st.HTTP_200_OK,
+)
 async def job_execute(
     result: ResultCreate,
     job: Job,
     params: dict[str, Any],
     extras: Optional[dict[str, Any]] = None,
-):
+) -> UJSONResponse:
     """Execute job via RestAPI with execute route path."""
+    logger.info("[API]: Start execute job ...")
     rs: Result = Result(
         run_id=result.run_id,
         parent_run_id=result.parent_run_id,
@@ -61,15 +67,35 @@ async def job_execute(
         )
     except JobError as err:
         rs.trace.error(f"[JOB]: {err.__class__.__name__}: {err}")
+        return UJSONResponse(
+            content={
+                "message": str(err),
+                "result": {
+                    "run_id": rs.run_id,
+                    "parent_run_id": rs.parent_run_id,
+                },
+                "job": job.model_dump(
+                    by_alias=True,
+                    exclude_none=False,
+                    exclude_unset=True,
+                ),
+                "params": params,
+                "context": context,
+            },
+            status_code=st.HTTP_500_INTERNAL_SERVER_ERROR,
+        )

-    return
-
-
-
-
-
-
-
-
-
+    return UJSONResponse(
+        content={
+            "message": "Execute job via RestAPI successful.",
+            "result": {"run_id": rs.run_id, "parent_run_id": rs.parent_run_id},
+            "job": job.model_dump(
+                by_alias=True,
+                exclude_none=False,
+                exclude_unset=True,
+            ),
+            "params": params,
+            "context": context,
+        },
+        status_code=st.HTTP_200_OK,
+    )
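
With the route decorator restored as `@router.post(path="/execute/", ...)`, both outcomes now come back as a `UJSONResponse` (200 on success, 500 on `JobError`). A hypothetical call against a running server, assuming the default `/api/v1` prefix and FastAPI's embedding of multiple body parameters under their parameter names; the concrete `ResultCreate`/`Job` field sets beyond `run_id`/`parent_run_id` are assumptions, not taken from this diff:

    import httpx

    payload = {
        "result": {"run_id": "01JOB", "parent_run_id": "01WF"},  # ResultCreate (assumed shape)
        "job": {"stages": [{"name": "Echo"}]},                   # a Job model dump (assumed shape)
        "params": {"test": 1},
        "extras": None,
    }
    resp = httpx.post("http://localhost:80/api/v1/job/execute/", json=payload)
    print(resp.status_code)        # 200 on success, 500 when JobError is raised
    print(resp.json()["message"])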
{ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil/workflow/api/routes/logs.py

@@ -13,14 +13,14 @@ from fastapi.responses import UJSONResponse
 from ...logs import get_audit
 from ...result import Result

-
+router = APIRouter(
     prefix="/logs",
     tags=["logs"],
     default_response_class=UJSONResponse,
 )


-@
+@router.get(
     path="/traces/",
     response_class=UJSONResponse,
     status_code=st.HTTP_200_OK,
@@ -50,7 +50,7 @@ async def get_traces(
     }


-@
+@router.get(
     path="/traces/{run_id}",
     response_class=UJSONResponse,
     status_code=st.HTTP_200_OK,
@@ -77,7 +77,7 @@ async def get_trace_with_id(run_id: str):
     }


-@
+@router.get(
     path="/audits/",
     response_class=UJSONResponse,
     status_code=st.HTTP_200_OK,
@@ -94,7 +94,7 @@ async def get_audits():
     }


-@
+@router.get(
     path="/audits/{workflow}/",
     response_class=UJSONResponse,
     status_code=st.HTTP_200_OK,
@@ -113,7 +113,7 @@ async def get_audit_with_workflow(workflow: str):
     }


-@
+@router.get(
     path="/audits/{workflow}/{release}",
     response_class=UJSONResponse,
     status_code=st.HTTP_200_OK,
@@ -140,7 +140,7 @@ async def get_audit_with_workflow_release(
     }


-@
+@router.get(
     path="/audits/{workflow}/{release}/{run_id}",
     response_class=UJSONResponse,
     status_code=st.HTTP_200_OK,
{ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil/workflow/api/routes/workflows.py
RENAMED
@@ -5,6 +5,7 @@
 # ------------------------------------------------------------------------------
 from __future__ import annotations

+import logging
 from dataclasses import asdict
 from datetime import datetime
 from typing import Any
@@ -16,19 +17,19 @@ from pydantic import BaseModel

 from ...__types import DictData
 from ...conf import Loader
-from ...logs import
+from ...logs import AuditModel, get_audit
 from ...result import Result
 from ...workflow import Workflow

-logger =
-
+logger = logging.getLogger("uvicorn.error")
+router = APIRouter(
     prefix="/workflows",
     tags=["workflows"],
     default_response_class=UJSONResponse,
 )


-@
+@router.get(path="/", status_code=st.HTTP_200_OK)
 async def get_workflows() -> DictData:
     """Return all workflow workflows that exists in config path."""
     workflows: DictData = dict(Loader.finds(Workflow))
@@ -39,7 +40,7 @@ async def get_workflows() -> DictData:
     }


-@
+@router.get(path="/{name}", status_code=st.HTTP_200_OK)
 async def get_workflow_by_name(name: str) -> DictData:
     """Return model of workflow that passing an input workflow name."""
     try:
@@ -63,7 +64,7 @@ class ExecutePayload(BaseModel):
     params: dict[str, Any]


-@
+@router.post(path="/{name}/execute", status_code=st.HTTP_202_ACCEPTED)
 async def workflow_execute(name: str, payload: ExecutePayload) -> DictData:
     """Return model of workflow that passing an input workflow name."""
     try:
@@ -88,7 +89,7 @@ async def workflow_execute(name: str, payload: ExecutePayload) -> DictData:
     return asdict(result)


-@
+@router.get(path="/{name}/audits", status_code=st.HTTP_200_OK)
 async def get_workflow_audits(name: str):
     try:
         return {
@@ -109,11 +110,11 @@ async def get_workflow_audits(name: str):
         ) from None


-@
+@router.get(path="/{name}/audits/{release}", status_code=st.HTTP_200_OK)
 async def get_workflow_release_audit(name: str, release: str):
     """Get Workflow audit log with an input release value."""
     try:
-        audit:
+        audit: AuditModel = get_audit().find_audit_with_release(
             name=name,
             release=datetime.strptime(release, "%Y%m%d%H%M%S"),
         )
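
The workflow routes get the same treatment, with `@router.post(path="/{name}/execute", status_code=st.HTTP_202_ACCEPTED)` accepting an `ExecutePayload` body. A hypothetical request, assuming the default `/api/v1` prefix and a workflow named `wf-example` available from the configured YAML path:

    import httpx

    resp = httpx.post(
        "http://localhost:80/api/v1/workflows/wf-example/execute",
        json={"params": {"run-date": "2024-01-01"}},  # ExecutePayload: a free-form params mapping
    )
    print(resp.status_code)  # 202 Accepted per the new route decorator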
ddeutil_workflow-0.0.68/src/ddeutil/workflow/cli.py

@@ -0,0 +1,119 @@
+import json
+from pathlib import Path
+from platform import python_version
+from typing import Annotated, Any, Optional
+
+import typer
+import uvicorn
+
+from .__about__ import __version__
+from .__types import DictData
+from .api import app as fastapp
+from .errors import JobError
+from .job import Job
+from .result import Result
+
+app = typer.Typer(
+    pretty_exceptions_enable=True,
+)
+
+
+@app.callback()
+def callback():
+    """Manage Workflow CLI app.
+
+    Use it with the interface workflow engine.
+    """
+
+
+@app.command()
+def version():
+    """Get the ddeutil-workflow package version."""
+    typer.echo(f"ddeutil-workflow=={__version__}")
+    typer.echo(f"python-version=={python_version()}")
+
+
+@app.command()
+def job(
+    params: Annotated[str, typer.Option(help="A job execute parameters")],
+    job: Annotated[str, typer.Option(help="A job model")],
+    parent_run_id: Annotated[str, typer.Option(help="A parent running ID")],
+    run_id: Annotated[Optional[str], typer.Option(help="A running ID")] = None,
+) -> None:
+    """Job execution on the local.
+
+    Example:
+        ... workflow-cli job --params "{\"test\": 1}"
+    """
+    try:
+        params_dict: dict[str, Any] = json.loads(params)
+    except json.JSONDecodeError as e:
+        raise ValueError(f"Params does not support format: {params!r}.") from e
+
+    try:
+        job_dict: dict[str, Any] = json.loads(job)
+        _job: Job = Job.model_validate(obj=job_dict)
+    except json.JSONDecodeError as e:
+        raise ValueError(f"Params does not support format: {params!r}.") from e
+
+    typer.echo(f"Job params: {params_dict}")
+    rs: Result = Result(
+        run_id=run_id,
+        parent_run_id=parent_run_id,
+    )
+
+    context: DictData = {}
+    try:
+        _job.set_outputs(
+            _job.execute(
+                params=params_dict,
+                run_id=rs.run_id,
+                parent_run_id=rs.parent_run_id,
+            ).context,
+            to=context,
+        )
+    except JobError as err:
+        rs.trace.error(f"[JOB]: {err.__class__.__name__}: {err}")
+
+
+@app.command()
+def api(
+    host: Annotated[str, typer.Option(help="A host url.")] = "0.0.0.0",
+    port: Annotated[int, typer.Option(help="A port url.")] = 80,
+    debug: Annotated[bool, typer.Option(help="A debug mode flag")] = True,
+    workers: Annotated[int, typer.Option(help="A worker number")] = None,
+    reload: Annotated[bool, typer.Option(help="A reload flag")] = False,
+):
+    """
+    Provision API application from the FastAPI.
+    """
+    from .api.log_conf import LOGGING_CONFIG
+
+    # LOGGING_CONFIG = {}
+
+    uvicorn.run(
+        fastapp,
+        host=host,
+        port=port,
+        log_config=uvicorn.config.LOGGING_CONFIG | LOGGING_CONFIG,
+        # NOTE: Logging level of uvicorn should be lowered case.
+        log_level=("debug" if debug else "info"),
+        workers=workers,
+        reload=reload,
+    )
+
+
+@app.command()
+def make(
+    name: Annotated[Path, typer.Argument()],
+) -> None:
+    """
+    Create Workflow YAML template.
+
+    :param name:
+    """
+    typer.echo(f"Start create YAML template filename: {name.resolve()}")
+
+
+if __name__ == "__main__":
+    app()
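
The rewritten CLI adds `job`, `api`, and `make` commands on top of `version`. A sketch of driving it through Typer's bundled test runner (equivalent to the installed `workflow-cli` entry point named in the `job` docstring); the shell flags shown in comments follow Typer's generated option names and mirror the parameters above:

    from typer.testing import CliRunner

    from ddeutil.workflow.cli import app

    runner = CliRunner()

    # `version` now echoes both the package and the Python interpreter versions.
    print(runner.invoke(app, ["version"]).output)

    # Shell equivalents (assumed invocations):
    #   workflow-cli api --host 0.0.0.0 --port 8080 --no-debug --workers 2
    #   workflow-cli job --params '{"test": 1}' --job '{...}' --parent-run-id 01WF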
{ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil/workflow/conf.py

@@ -109,9 +109,9 @@ class Config:  # pragma: no cov
         return env(
             "LOG_FORMAT",
             (
-                "%(asctime)s.%(msecs)03d (%(
+                "%(asctime)s.%(msecs)03d (%(process)-5d, "
                 "%(thread)-5d) [%(levelname)-7s] %(message)-120s "
-                "(%(filename)s:%(lineno)s)"
+                "(%(filename)s:%(lineno)s) (%(name)-10s)"
             ),
         )

@@ -161,9 +161,13 @@ class Config:  # pragma: no cov
 class APIConfig:
     """API Config object."""

+    @property
+    def version(self) -> str:
+        return env("API_VERSION", "1")
+
     @property
     def prefix_path(self) -> str:
-        return env("API_PREFIX_PATH", "/api/
+        return env("API_PREFIX_PATH", f"/api/v{self.version}")


 class BaseLoad(ABC):  # pragma: no cov
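
The API prefix is no longer a fixed default but is derived from a new `API_VERSION` value. A minimal re-implementation for illustration, assuming `env()` is a thin wrapper over `os.environ` with a default (the real helper may apply a variable-name prefix):

    import os


    def env(key: str, default: str) -> str:
        return os.environ.get(key, default)


    class APIConfig:
        @property
        def version(self) -> str:
            return env("API_VERSION", "1")

        @property
        def prefix_path(self) -> str:
            return env("API_PREFIX_PATH", f"/api/v{self.version}")


    os.environ["API_VERSION"] = "2"
    print(APIConfig().prefix_path)  # -> /api/v2; an explicit API_PREFIX_PATH still wins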
{ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil/workflow/event.py

@@ -3,8 +3,9 @@
 # Licensed under the MIT License. See LICENSE in the project root for
 # license information.
 # ------------------------------------------------------------------------------
-"""Event module
-
+"""An Event module keep all triggerable object to the Workflow model. The simple
+event trigger that use to run workflow is `Crontab` model.
+Now, it has only `Crontab` and `CrontabYear` event models in this module because
 I think it is the core event for workflow orchestration.
 """
 from __future__ import annotations
@@ -95,7 +96,7 @@ class Crontab(BaseModel):
     tz: Annotated[
         TimeZoneName,
         Field(
-            description="A timezone string value",
+            description="A timezone string value.",
             alias="timezone",
         ),
     ] = "UTC"
{ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil/workflow/logs.py

@@ -37,33 +37,34 @@ METADATA: str = "metadata.json"


 @lru_cache
-def
-"""Return logger object with an input module name
+def set_logging(name: str) -> logging.Logger:
+    """Return logger object with an input module name that already implement the
+    custom handler and formatter from this package config.

     :param name: (str) A module name that want to log.
+
+    :rtype: logging.Logger
     """
-
+    _logger = logging.getLogger(name)

     # NOTE: Developers using this package can then disable all logging just for
     # this package by;
     #
     #   `logging.getLogger('ddeutil.workflow').propagate = False`
     #
-
+    _logger.addHandler(logging.NullHandler())

     formatter = logging.Formatter(
-        fmt=config.log_format,
-        datefmt=config.log_datetime_format,
+        fmt=config.log_format, datefmt=config.log_datetime_format
     )
-
-
-
-
-
-    return lg
+    stream_handler = logging.StreamHandler()
+    stream_handler.setFormatter(formatter)
+    _logger.addHandler(stream_handler)
+    _logger.setLevel(logging.DEBUG if config.debug else logging.INFO)
+    return _logger


-logger =
+logger = logging.getLogger("ddeutil.workflow")


 def get_dt_tznow() -> datetime:  # pragma: no cov
@@ -689,6 +690,9 @@ class BaseAudit(BaseModel, ABC):
         """
         if dynamic("enable_write_audit", extras=self.extras):
             self.do_before()
+
+        # NOTE: Start setting log config in this line with cache.
+        set_logging("ddeutil.workflow")
         return self

     @classmethod
@@ -732,7 +736,7 @@ class BaseAudit(BaseModel, ABC):
     @abstractmethod
     def save(self, excluded: Optional[list[str]]) -> None:  # pragma: no cov
         """Save this model logging to target logging store."""
-        raise NotImplementedError("Audit should implement
+        raise NotImplementedError("Audit should implement `save` method.")


 class FileAudit(BaseAudit):
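
`set_logging` now attaches a `NullHandler` plus a `StreamHandler` built from the package config and is called (memoized by `@lru_cache`) when an audit model runs its validator. How a consumer might interact with it; the opt-out line is quoted from the comment inside `set_logging`, the rest is a sketch:

    import logging

    from ddeutil.workflow.logs import set_logging

    set_logging("ddeutil.workflow")  # idempotent for the same name thanks to @lru_cache

    # Disable all logging just for this package, as the module comment suggests:
    logging.getLogger("ddeutil.workflow").propagate = False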
{ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil/workflow/stages.py

@@ -3,7 +3,7 @@
 # Licensed under the MIT License. See LICENSE in the project root for
 # license information.
 # ------------------------------------------------------------------------------
-"""Stages module include all stage model that implemented to be the minimum execution
+r"""Stages module include all stage model that implemented to be the minimum execution
 layer of this workflow core engine. The stage handle the minimize task that run
 in a thread (same thread at its job owner) that mean it is the lowest executor that
 you can track logs.
@@ -15,17 +15,39 @@ have a lot of use-case, and it should does not worry about it error output.
 So, I will create `handler_execute` for any exception class that raise from
 the stage execution method.

-
-
-
-
-    ╰--( handler )---> Result with `SKIP`
+    Handler --> Ok      --> Result
+                            |-status: SUCCESS
+                            ╰-context:
+                                ╰-outputs: ...

-    -->
+            --> Ok      --> Result
+                            ╰-status: CANCEL
+
+            --> Ok      --> Result
+                            ╰-status: SKIP
+
+            --> Ok      --> Result
+                            |-status: FAILED
+                            ╰-errors:
+                                |-name: ...
+                                ╰-message: ...

 On the context I/O that pass to a stage object at execute process. The
 execute method receives a `params={"params": {...}}` value for passing template
 searching.
+
+All stages model inherit from `BaseStage` or `AsyncBaseStage` models that has the
+base fields:
+
+| field     | alias | data type   | default  | description                                                            |
+|-----------|-------|-------------|:--------:|------------------------------------------------------------------------|
+| id        |       | str \| None | `None`   | A stage ID that use to keep execution output or getting by job owner. |
+| name      |       | str         |          | A stage name that want to log when start execution.                   |
+| condition | if    | str \| None | `None`   | A stage condition statement to allow stage executable.                |
+| extras    |       | dict        | `dict()` | An extra parameter that override core config values.                  |
+
+It has a special base class is `BaseRetryStage` that inherit from `AsyncBaseStage`
+that use to handle retry execution when it got any error with `retry` field.
 """
 from __future__ import annotations

@@ -450,6 +472,13 @@ class BaseStage(BaseModel, ABC):
         """
         return False

+    def docs(self) -> str:  # pragma: no cov
+        """Return generated document that will be the interface of this stage.
+
+        :rtype: str
+        """
+        return self.desc
+

 class BaseAsyncStage(BaseStage, ABC):
     """Base Async Stage model to make any stage model allow async execution for
@@ -594,7 +623,7 @@ class BaseRetryStage(BaseAsyncStage, ABC):  # pragma: no cov
         default=0,
         ge=0,
         lt=20,
-        description="
+        description="A retry number if stage execution get the error.",
     )

     def _execute(
@@ -1249,7 +1278,7 @@ class CallStage(BaseRetryStage):
     function complexly that you can for your objective to invoked by this stage
     object.

-    This stage is the most
+    This stage is the most powerful stage of this package for run every
     use-case by a custom requirement that you want by creating the Python
     function and adding it to the caller registry value by importer syntax like
     `module.caller.registry` not path style like `module/caller/registry`.
{ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil/workflow/utils.py

@@ -6,15 +6,13 @@
 """Utility function model."""
 from __future__ import annotations

-import asyncio
 import stat
 import time
 from collections.abc import Iterator
 from datetime import date, datetime, timedelta
-from functools import wraps
 from hashlib import md5
 from inspect import isfunction
-from itertools import
+from itertools import product
 from pathlib import Path
 from random import randrange
 from typing import Any, Final, Optional, TypeVar, Union, overload
@@ -258,34 +256,6 @@ def cross_product(matrix: Matrix) -> Iterator[DictData]:
     )


-def batch(iterable: Union[Iterator[Any], range], n: int) -> Iterator[Any]:
-    """Batch data into iterators of length n. The last batch may be shorter.
-
-    Example:
-        >>> for b in batch(iter('ABCDEFG'), 3):
-        ...     print(list(b))
-        ['A', 'B', 'C']
-        ['D', 'E', 'F']
-        ['G']
-
-    :param iterable:
-    :param n: (int) A number of returning batch size.
-
-    :rtype: Iterator[Any]
-    """
-    if n < 1:
-        raise ValueError("n must be at least one")
-
-    it: Iterator[Any] = iter(iterable)
-    while True:
-        chunk_it = islice(it, n)
-        try:
-            first_el = next(chunk_it)
-        except StopIteration:
-            return
-        yield chain((first_el,), chunk_it)
-
-
 def cut_id(run_id: str, *, num: int = 6) -> str:
     """Cutting running ID with length.

@@ -325,24 +295,3 @@ def dump_all(
     elif isinstance(value, BaseModel):
         return value.model_dump(by_alias=by_alias)
     return value
-
-
-def awaitable(func):
-    """Dynamic function to async or not depend on the called statement."""
-
-    @wraps(func)
-    async def async_wrapper(*args, **kwargs):
-        return func(*args, **kwargs)
-
-    @wraps(func)
-    def sync_wrapper(*args, **kwargs):
-        return func(*args, **kwargs)
-
-    def dispatch(*args, **kwargs):
-        try:
-            asyncio.get_running_loop()
-            return async_wrapper(*args, **kwargs)
-        except RuntimeError:
-            return sync_wrapper(*args, **kwargs)
-
-    return dispatch
{ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil_workflow.egg-info/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ddeutil-workflow
-Version: 0.0.67
+Version: 0.0.68
 Summary: Lightweight workflow orchestration with YAML template
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -24,10 +24,10 @@ Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: ddeutil[checksum]>=0.4.8
 Requires-Dist: ddeutil-io[toml,yaml]>=0.2.14
-Requires-Dist: pydantic==2.11.
+Requires-Dist: pydantic==2.11.5
 Requires-Dist: pydantic-extra-types==2.10.4
 Requires-Dist: python-dotenv==1.1.0
-Requires-Dist: typer
+Requires-Dist: typer>=0.16.0
 Provides-Extra: all
 Requires-Dist: fastapi<1.0.0,>=0.115.0; extra == "all"
 Requires-Dist: uvicorn; extra == "all"
{ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/src/ddeutil_workflow.egg-info/SOURCES.txt
RENAMED
@@ -19,7 +19,7 @@ src/ddeutil/workflow/stages.py
 src/ddeutil/workflow/utils.py
 src/ddeutil/workflow/workflow.py
 src/ddeutil/workflow/api/__init__.py
-src/ddeutil/workflow/api/
+src/ddeutil/workflow/api/log_conf.py
 src/ddeutil/workflow/api/routes/__init__.py
 src/ddeutil/workflow/api/routes/job.py
 src/ddeutil/workflow/api/routes/logs.py
{ddeutil_workflow-0.0.67 → ddeutil_workflow-0.0.68}/tests/test_utils.py

@@ -6,7 +6,6 @@ from zoneinfo import ZoneInfo
 import pytest
 from ddeutil.workflow.utils import (
     UTC,
-    batch,
     cut_id,
     dump_all,
     filter_func,
@@ -91,15 +90,6 @@ def test_filter_func():
     }


-def test_batch():
-    with pytest.raises(ValueError):
-        next(batch(range(10), n=-1))
-
-    assert [[0, 1], [2, 3], [4, 5], [6, 7], [8, 9]] == [
-        list(i) for i in batch(range(10), n=2)
-    ]
-
-
 def test_make_exec():
     test_file: str = "./tmp_test_make_exec.txt"

ddeutil_workflow-0.0.67/src/ddeutil/workflow/__about__.py

@@ -1 +0,0 @@
-__version__: str = "0.0.67"
ddeutil_workflow-0.0.67/src/ddeutil/workflow/cli.py

@@ -1,68 +0,0 @@
-import json
-from typing import Annotated, Any
-
-import typer
-import uvicorn
-
-from .__about__ import __version__
-from .api import app as fastapp
-from .api.logs import LOGGING_CONFIG
-
-app = typer.Typer(
-    pretty_exceptions_enable=True,
-)
-
-
-@app.callback()
-def callback():
-    """
-    Awesome Portal Gun
-    """
-    typer.echo("Start call from callback function")
-
-
-@app.command()
-def version():
-    """Get the ddeutil-workflow package version."""
-    typer.echo(__version__)
-
-
-@app.command()
-def job(
-    params: Annotated[str, typer.Option(help="A job execute parameters")],
-):
-    """Job execution on the local.
-
-    Example:
-        ... workflow-cli job --params "{\"test\": 1}"
-    """
-    try:
-        params_dict: dict[str, Any] = json.loads(params)
-    except json.JSONDecodeError as e:
-        raise ValueError(f"params does not support format: {params!r}.") from e
-    typer.echo(f"Job params: {params_dict}")
-
-
-@app.command()
-def api(
-    host: Annotated[str, typer.Option(help="A host url.")] = "0.0.0.0",
-    port: Annotated[int, typer.Option(help="A port url.")] = 80,
-    debug: Annotated[bool, typer.Option(help="A debug mode flag")] = True,
-    worker: Annotated[int, typer.Option(help="A worker number")] = None,
-):
-    """
-    Provision API application from the FastAPI.
-    """
-
-    uvicorn.run(
-        fastapp,
-        host=host,
-        port=port,
-        log_config=uvicorn.config.LOGGING_CONFIG | LOGGING_CONFIG,
-        log_level=("DEBUG" if debug else "INFO"),
-        workers=worker,
-    )
-
-
-if __name__ == "__main__":
-    app()