ddeutil-workflow 0.0.25__py3-none-any.whl → 0.0.26.post0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ddeutil/workflow/__about__.py +1 -1
- ddeutil/workflow/api/route.py +40 -3
- ddeutil/workflow/conf.py +21 -22
- {ddeutil_workflow-0.0.25.dist-info → ddeutil_workflow-0.0.26.post0.dist-info}/METADATA +34 -50
- {ddeutil_workflow-0.0.25.dist-info → ddeutil_workflow-0.0.26.post0.dist-info}/RECORD +8 -10
- {ddeutil_workflow-0.0.25.dist-info → ddeutil_workflow-0.0.26.post0.dist-info}/WHEEL +1 -1
- ddeutil/workflow/cli.py +0 -108
- ddeutil_workflow-0.0.25.dist-info/entry_points.txt +0 -2
- {ddeutil_workflow-0.0.25.dist-info → ddeutil_workflow-0.0.26.post0.dist-info}/LICENSE +0 -0
- {ddeutil_workflow-0.0.25.dist-info → ddeutil_workflow-0.0.26.post0.dist-info}/top_level.txt +0 -0
ddeutil/workflow/__about__.py
CHANGED
@@ -1 +1 @@
-__version__: str = "0.0.25"
+__version__: str = "0.0.26.post0"
ddeutil/workflow/api/route.py
CHANGED
@@ -16,7 +16,7 @@ from fastapi.responses import UJSONResponse
 from pydantic import BaseModel
 
 from ..__types import DictData
-from ..conf import Loader, config, get_logger
+from ..conf import FileLog, Loader, config, get_logger
 from ..result import Result
 from ..scheduler import Schedule
 from ..workflow import Workflow
@@ -99,12 +99,49 @@ async def execute_workflow(name: str, payload: ExecutePayload) -> DictData:
 
 @workflow_route.get(path="/{name}/logs")
 async def get_workflow_logs(name: str):
-
+    try:
+        return {
+            "message": f"Getting workflow {name!r} logs",
+            "logs": [
+                log.model_dump(
+                    by_alias=True,
+                    exclude_none=True,
+                    exclude_unset=True,
+                    exclude_defaults=True,
+                )
+                for log in FileLog.find_logs(name=name)
+            ],
+        }
+    except FileNotFoundError:
+        raise HTTPException(
+            status_code=st.HTTP_404_NOT_FOUND,
+            detail=f"Does not found log for workflow {name!r}",
+        ) from None
 
 
 @workflow_route.get(path="/{name}/logs/{release}")
 async def get_workflow_release_log(name: str, release: str):
-
+    try:
+        log: FileLog = FileLog.find_log_with_release(
+            name=name, release=datetime.strptime(release, "%Y%m%d%H%M%S")
+        )
+    except FileNotFoundError:
+        raise HTTPException(
+            status_code=st.HTTP_404_NOT_FOUND,
+            detail=(
+                f"Does not found log for workflow {name!r} "
+                f"with release {release!r}"
+            ),
+        ) from None
+    return {
+        "message": f"Getting workflow {name!r} log in release {release}",
+        "log": log.model_dump(
+            by_alias=True,
+            exclude_none=True,
+            exclude_unset=True,
+            exclude_defaults=True,
+        ),
+    }
 
 
 @workflow_route.delete(
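The two new endpoints above read persisted `FileLog` records and translate a missing log directory into an HTTP 404. A minimal client-side sketch follows; the `/workflows` mount prefix, host/port, and the workflow name are assumptions for illustration and are not taken from this diff.

```python
# Hypothetical calls against the new log routes; base URL and workflow name
# are placeholders, not values from this package diff.
import httpx

BASE = "http://localhost:8000/workflows"  # assumed mount point of workflow_route

# GET /{name}/logs -> every persisted log for the workflow, serialized with
# model_dump() on the server side.
resp = httpx.get(f"{BASE}/wf-run-python/logs")
print(resp.status_code, resp.json())

# GET /{name}/logs/{release} -> one release; the path segment must match
# %Y%m%d%H%M%S, since the handler parses it with datetime.strptime().
resp = httpx.get(f"{BASE}/wf-run-python/logs/20240101120000")
if resp.status_code == 404:
    print("no log recorded for that release")
else:
    print(resp.json()["log"])
```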
ddeutil/workflow/conf.py
CHANGED
@@ -17,7 +17,7 @@ from typing import ClassVar, Optional, TypeVar, Union
 from zoneinfo import ZoneInfo
 
 from ddeutil.core import str2bool
-from ddeutil.io import
+from ddeutil.io import YamlFlResolve
 from dotenv import load_dotenv
 from pydantic import BaseModel, Field
 from pydantic.functional_validators import model_validator
@@ -93,6 +93,7 @@ class Config:
     enable_write_log: bool = str2bool(
         os.getenv("WORKFLOW_LOG_ENABLE_WRITE", "false")
     )
+    log_path: Path = Path(os.getenv("WORKFLOW_LOG_PATH", "./logs"))
 
     # NOTE: Stage
     stage_raise_error: bool = str2bool(
@@ -161,14 +162,6 @@
                 f"timedelta with {self.stop_boundary_delta_str}."
             ) from err
 
-    def refresh_dotenv(self) -> Self:
-        """Reload environment variables from the current stage."""
-        self.tz: ZoneInfo = ZoneInfo(env("WORKFLOW_CORE_TIMEZONE", "UTC"))
-        self.stage_raise_error: bool = str2bool(
-            env("WORKFLOW_CORE_STAGE_RAISE_ERROR", "false")
-        )
-        return self
-
     @property
     def conf_path(self) -> Path:
         """Config path that use root_path class argument for this construction.
@@ -213,8 +206,14 @@ class SimLoad:
         externals: DictData | None = None,
     ) -> None:
         self.data: DictData = {}
-        for file in
-        if
+        for file in conf.conf_path.rglob("*"):
+            if not file.is_file():
+                continue
+
+            if data := self.filter_suffix(
+                file,
+                name,
+            ):
                 self.data = data
 
         # VALIDATE: check the data that reading should not empty.
@@ -246,7 +245,10 @@ class SimLoad:
         :rtype: Iterator[tuple[str, DictData]]
         """
         exclude: list[str] = excluded or []
-        for file in
+        for file in conf.conf_path.rglob("*"):
+
+            if not file.is_file():
+                continue
 
             for key, data in cls.filter_suffix(file).items():
 
@@ -362,7 +364,7 @@ class FileLog(BaseLog):
     """
 
     filename_fmt: ClassVar[str] = (
-        "
+        "workflow={name}/release={release:%Y%m%d%H%M%S}"
     )
 
     def do_before(self) -> None:
@@ -378,18 +380,16 @@ class FileLog(BaseLog):
 
         :rtype: Iterator[Self]
         """
-        pointer: Path = config.
+        pointer: Path = config.log_path / f"workflow={name}"
         if not pointer.exists():
-            raise FileNotFoundError(
-                f"Pointer: ./logs/workflow={name} does not found."
-            )
+            raise FileNotFoundError(f"Pointer: {pointer.absolute()}.")
 
         for file in pointer.glob("./release=*/*.log"):
             with file.open(mode="r", encoding="utf-8") as f:
                 yield cls.model_validate(obj=json.load(f))
 
     @classmethod
-    def
+    def find_log_with_release(
         cls,
         name: str,
         release: datetime | None = None,
@@ -410,8 +410,7 @@ class FileLog(BaseLog):
         raise NotImplementedError("Find latest log does not implement yet.")
 
         pointer: Path = (
-            config.
-            / f"./logs/workflow={name}/release={release:%Y%m%d%H%M%S}"
+            config.log_path / f"workflow={name}/release={release:%Y%m%d%H%M%S}"
        )
         if not pointer.exists():
             raise FileNotFoundError(
@@ -440,7 +439,7 @@ class FileLog(BaseLog):
             return False
 
         # NOTE: create pointer path that use the same logic of pointer method.
-        pointer: Path = config.
+        pointer: Path = config.log_path / cls.filename_fmt.format(
             name=name, release=release
         )
 
@@ -451,7 +450,7 @@ class FileLog(BaseLog):
 
         :rtype: Path
         """
-        return config.
+        return config.log_path / self.filename_fmt.format(
             name=self.name, release=self.release
         )
 
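Taken together, the new `log_path` setting and the shortened `filename_fmt` mean a release log now lives directly under `WORKFLOW_LOG_PATH` rather than under a hard-coded `./logs` prefix. A small sketch of the resulting pointer path; the path override, workflow name, and release timestamp below are made up for illustration.

```python
# Reproduces the pointer layout implied by Config.log_path and
# FileLog.filename_fmt in this diff; the concrete values are illustrative.
import os
from datetime import datetime
from pathlib import Path

os.environ.setdefault("WORKFLOW_LOG_PATH", "/var/data/workflow-logs")

log_path = Path(os.getenv("WORKFLOW_LOG_PATH", "./logs"))
filename_fmt = "workflow={name}/release={release:%Y%m%d%H%M%S}"

pointer = log_path / filename_fmt.format(
    name="wf-run-python", release=datetime(2024, 1, 1, 12, 0)
)
print(pointer)
# /var/data/workflow-logs/workflow=wf-run-python/release=20240101120000
```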
{ddeutil_workflow-0.0.25.dist-info → ddeutil_workflow-0.0.26.post0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ddeutil-workflow
-Version: 0.0.25
+Version: 0.0.26.post0
 Summary: Lightweight workflow orchestration with less dependencies
 Author-email: ddeutils <korawich.anu@gmail.com>
 License: MIT
@@ -26,7 +26,6 @@ Requires-Dist: ddeutil>=0.4.3
 Requires-Dist: ddeutil-io[toml,yaml]>=0.2.3
 Requires-Dist: pydantic==2.10.4
 Requires-Dist: python-dotenv==1.0.1
-Requires-Dist: typer==0.15.1
 Requires-Dist: schedule<2.0.0,==1.2.2
 Provides-Extra: api
 Requires-Dist: fastapi<1.0.0,>=0.115.0; extra == "api"
@@ -80,20 +79,10 @@ This project need `ddeutil` and `ddeutil-io` extension namespace packages.
 If you want to install this package with application add-ons, you should add
 `app` in installation;
 
-| Usecase
-
-| Python
-| FastAPI Server
-
-> :egg: **Docker Images** supported:
->
-> | Docker Image | Python Version | Support |
-> |-----------------------------|----------------|---------|
-> | ddeutil-workflow:latest | `3.9` | :x: |
-> | ddeutil-workflow:python3.10 | `3.10` | :x: |
-> | ddeutil-workflow:python3.11 | `3.11` | :x: |
-> | ddeutil-workflow:python3.12 | `3.12` | :x: |
-> | ddeutil-workflow:python3.12 | `3.13` | :x: |
+| Usecase        | Install Optional                    | Support            |
+|----------------|-------------------------------------|--------------------|
+| Python         | `pip install ddeutil-workflow`      | :heavy_check_mark: |
+| FastAPI Server | `pip install ddeutil-workflow[api]` | :heavy_check_mark: |
 
 ## :beers: Usage
 
@@ -110,7 +99,7 @@ use-case.
 run-py-local:
 
    # Validate model that use to parsing exists for template file
-   type:
+   type: Workflow
    on:
       # If workflow deploy to schedule, it will running every 5 minutes
      # with Asia/Bangkok timezone.
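The `type: Workflow` key above is what the loader validates the `run-py-local` template against. A rough sketch of running that template by name, mirroring the `Workflow.from_loader` and `.execute` calls that the removed CLI module used; the `params` keys are placeholders, not taken from this diff.

```python
# Sketch only: load the "run-py-local" template shown above and execute it.
# from_loader/execute mirror the deleted cli.py; params are hypothetical.
from ddeutil.workflow.workflow import Workflow

wf: Workflow = Workflow.from_loader(name="run-py-local")
result = wf.execute(params={"source": "demo", "run-date": "2024-01-01"})
print(result)
```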
@@ -161,7 +150,7 @@ So, this package provide the `Schedule` template for this action.
 schedule-run-local-wf:
 
    # Validate model that use to parsing exists for template file
-   type:
+   type: Schedule
    workflows:
 
       # Map existing workflow that want to deploy with scheduler application.
@@ -179,35 +168,36 @@ The main configuration that use to dynamic changing with your propose of this
 application. If any configuration values do not set yet, it will use default value
 and do not raise any error to you.
 
-| Environment
-|
-|
-|
-|
-|
-|
-|
-|
-|
-|
-|
-|
-|
-|
-|
-|
-|
-|
-|
-|
-|
+| Environment | Component | Default | Description | Remark |
+|:------------|:---------:|:--------|:------------|--------|
+| **WORKFLOW_ROOT_PATH** | Core | `.` | The root path of the workflow application. | |
+| **WORKFLOW_CORE_REGISTRY** | Core | `src,src.ddeutil.workflow,tests,tests.utils` | List of importable string for the hook stage. | |
+| **WORKFLOW_CORE_REGISTRY_FILTER** | Core | `src.ddeutil.workflow.utils,ddeutil.workflow.utils` | List of importable string for the filter template. | |
+| **WORKFLOW_CORE_PATH_CONF** | Core | `conf` | The config path that keep all template `.yaml` files. | |
+| **WORKFLOW_CORE_TIMEZONE** | Core | `Asia/Bangkok` | A Timezone string value that will pass to `ZoneInfo` object. | |
+| **WORKFLOW_CORE_STAGE_DEFAULT_ID** | Core | `true` | A flag that enable default stage ID that use for catch an execution output. | |
+| **WORKFLOW_CORE_STAGE_RAISE_ERROR** | Core | `false` | A flag that all stage raise StageException from stage execution. | |
+| **WORKFLOW_CORE_JOB_DEFAULT_ID** | Core | `false` | A flag that enable default job ID that use for catch an execution output. The ID that use will be sequence number. | |
+| **WORKFLOW_CORE_JOB_RAISE_ERROR** | Core | `true` | A flag that all job raise JobException from job strategy execution. | |
+| **WORKFLOW_CORE_MAX_NUM_POKING** | Core | `4` | . | |
+| **WORKFLOW_CORE_MAX_JOB_PARALLEL** | Core | `2` | The maximum job number that able to run parallel in workflow executor. | |
+| **WORKFLOW_CORE_MAX_JOB_EXEC_TIMEOUT** | Core | `600` | | |
+| **WORKFLOW_CORE_MAX_CRON_PER_WORKFLOW** | Core | `5` | | |
+| **WORKFLOW_CORE_MAX_QUEUE_COMPLETE_HIST** | Core | `16` | | |
+| **WORKFLOW_CORE_GENERATE_ID_SIMPLE_MODE** | Core | `true` | A flog that enable generating ID with `md5` algorithm. | |
+| **WORKFLOW_LOG_PATH** | Log | `./logs` | The log path of the workflow saving log. | |
+| **WORKFLOW_LOG_DEBUG_MODE** | Log | `true` | A flag that enable logging with debug level mode. | |
+| **WORKFLOW_LOG_ENABLE_WRITE** | Log | `true` | A flag that enable logging object saving log to its destination. | |
+| **WORKFLOW_APP_MAX_PROCESS** | Schedule | `2` | The maximum process worker number that run in scheduler app module. | |
+| **WORKFLOW_APP_MAX_SCHEDULE_PER_PROCESS** | Schedule | `100` | A schedule per process that run parallel. | |
+| **WORKFLOW_APP_STOP_BOUNDARY_DELTA** | Schedule | `'{"minutes": 5, "seconds": 20}'` | A time delta value that use to stop scheduler app in json string format. | |
 
 **API Application**:
 
-| Environment
-|
-|
-|
+| Environment | Component | Default | Description | Remark |
+|:------------|:---------:|---------|-------------|--------|
+| **WORKFLOW_API_ENABLE_ROUTE_WORKFLOW** | API | `true` | A flag that enable workflow route to manage execute manually and workflow logging. | |
+| **WORKFLOW_API_ENABLE_ROUTE_SCHEDULE** | API | `true` | A flag that enable run scheduler. | |
 
 ## :rocket: Deployment
 
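Several of the boolean flags in the table above are read with `str2bool`, while `WORKFLOW_APP_STOP_BOUNDARY_DELTA` is documented as a JSON string of `timedelta` keyword arguments. A plausible way that value is consumed, as a sketch only; the package's own parsing in `conf.py` may differ in detail.

```python
# Parses the stop-boundary delta the way the table's description and default
# value suggest; not the package's exact code.
import json
import os
from datetime import timedelta

raw = os.getenv(
    "WORKFLOW_APP_STOP_BOUNDARY_DELTA", '{"minutes": 5, "seconds": 20}'
)
stop_boundary_delta = timedelta(**json.loads(raw))
print(stop_boundary_delta)  # 0:05:20
```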
@@ -215,12 +205,6 @@ This package able to run as a application service for receive manual trigger
 from the master node via RestAPI or use to be Scheduler background service
 like crontab job but via Python API.
 
-### CLI
-
-```shell
-(venv) $ ddeutil-workflow schedule
-```
-
 ### API Server
 
 ```shell
{ddeutil_workflow-0.0.25.dist-info → ddeutil_workflow-0.0.26.post0.dist-info}/RECORD
CHANGED
@@ -1,9 +1,8 @@
-ddeutil/workflow/__about__.py,sha256=
+ddeutil/workflow/__about__.py,sha256=9NeOhqXsrgLKL6AmtINFl4dct9jvs8oismQYXutnmJ8,34
 ddeutil/workflow/__cron.py,sha256=uA8XcbY_GwA9rJSHaHUaXaJyGDObJN0ZeYlJSinL8y8,26880
 ddeutil/workflow/__init__.py,sha256=49eGrCuchPVZKMybRouAviNhbulK_F6VwCmLm76hIss,1478
 ddeutil/workflow/__types.py,sha256=Ia7f38kvL3NibwmRKi0wQ1ud_45Z-SojYGhNJwIqcu8,3713
-ddeutil/workflow/
-ddeutil/workflow/conf.py,sha256=YY2zZ_qv9JkTDs_73bkyrF1n1cqBINuxzMxbBjzYw-8,15361
+ddeutil/workflow/conf.py,sha256=MtjeaapHaEuW8GjZadA56KpkmbPQg6Ws_CHcmmAzaFc,15174
 ddeutil/workflow/cron.py,sha256=75A0hqevvouziKoLALncLJspVAeki9qCH3zniAJaxzY,7513
 ddeutil/workflow/exceptions.py,sha256=P56K7VD3etGm9y-k_GXrzEyqsTCaz9EJazTIshZDf9g,943
 ddeutil/workflow/job.py,sha256=cvSLMdc1sMl1MeU7so7Oe2SdRYxQwt6hm55mLV1iP-Y,24219
@@ -16,10 +15,9 @@ ddeutil/workflow/workflow.py,sha256=AD0rs1tRT2EpvUyNVAEr2bBPgF6-KOzGmLedR3o4y0Q,
 ddeutil/workflow/api/__init__.py,sha256=F53NMBWtb9IKaDWkPU5KvybGGfKAcbehgn6TLBwHuuM,21
 ddeutil/workflow/api/api.py,sha256=Md1cz3Edc7_uz63s_L_i-R3IE4mkO3aTADrX8GOGU-Y,5644
 ddeutil/workflow/api/repeat.py,sha256=zyvsrXKk-3-_N8ZRZSki0Mueshugum2jtqctEOp9QSc,4927
-ddeutil/workflow/api/route.py,sha256=
-ddeutil_workflow-0.0.
-ddeutil_workflow-0.0.
-ddeutil_workflow-0.0.
-ddeutil_workflow-0.0.
-ddeutil_workflow-0.0.
-ddeutil_workflow-0.0.25.dist-info/RECORD,,
+ddeutil/workflow/api/route.py,sha256=v96jNbgjM1cJ2MpVSRWs2kgRqF8DQElEBdRZrVFEpEw,8578
+ddeutil_workflow-0.0.26.post0.dist-info/LICENSE,sha256=nGFZ1QEhhhWeMHf9n99_fdt4vQaXS29xWKxt-OcLywk,1085
+ddeutil_workflow-0.0.26.post0.dist-info/METADATA,sha256=GRkczE6ZJ7NhFefqGI2GbzGd6Lu2FDGO6oUafA8n4nw,14364
+ddeutil_workflow-0.0.26.post0.dist-info/WHEEL,sha256=A3WOREP4zgxI0fKrHUG8DC8013e3dK3n7a6HDbcEIwE,91
+ddeutil_workflow-0.0.26.post0.dist-info/top_level.txt,sha256=m9M6XeSWDwt_yMsmH6gcOjHZVK5O0-vgtNBuncHjzW4,8
+ddeutil_workflow-0.0.26.post0.dist-info/RECORD,,
ddeutil/workflow/cli.py
DELETED
@@ -1,108 +0,0 @@
-# ------------------------------------------------------------------------------
-# Copyright (c) 2022 Korawich Anuttra. All rights reserved.
-# Licensed under the MIT License. See LICENSE in the project root for
-# license information.
-# ------------------------------------------------------------------------------
-from __future__ import annotations
-
-import json
-import sys
-from datetime import datetime
-from typing import Annotated, Optional
-
-from ddeutil.core import str2list
-from typer import Argument, Typer, echo
-
-from .conf import config
-
-cli: Typer = Typer()
-
-
-@cli.command()
-def run(
-    workflow: Annotated[
-        str,
-        Argument(help="A workflow name that want to run manually"),
-    ],
-    params: Annotated[
-        str,
-        Argument(
-            help="A json string for parameters of this workflow execution.",
-        ),
-    ],
-):
-    """Run workflow workflow manually with an input custom parameters that able
-    to receive with workflow params config.
-    """
-    echo(f"Running workflow name: ({type(workflow)}) {workflow!r}")
-    echo(f"... with Parameters: ({type(params)}) {params!r}")
-
-    from .result import Result
-    from .workflow import Workflow
-
-    try:
-        wf: Workflow = Workflow.from_loader(name=workflow)
-        rs: Result = wf.execute(params=json.loads(params))
-    except Exception as err:
-        echo(str(err))
-        sys.exit(1)
-
-    echo(f"Result: {rs}")
-    sys.exit(0)
-
-
-@cli.command()
-def schedule(
-    stop: Annotated[
-        Optional[datetime],
-        Argument(
-            formats=["%Y-%m-%d", "%Y-%m-%d %H:%M:%S"],
-            help="A stopping datetime that want to stop on schedule app.",
-        ),
-    ] = None,
-    excluded: Annotated[
-        Optional[str],
-        Argument(help="A list of exclude workflow name in str."),
-    ] = None,
-    externals: Annotated[
-        Optional[str],
-        Argument(
-            help="A json string for parameters of this workflow execution."
-        ),
-    ] = None,
-):
-    """Start workflow scheduler that will call workflow function from scheduler
-    module.
-    """
-    excluded: list[str] = str2list(excluded) if excluded else []
-    echo(f"... with Excluded Parameters: {excluded!r}")
-    externals: str = externals or "{}"
-
-    # NOTE: Convert timezone on the stop date.
-    if stop:
-        stop: datetime = stop.astimezone(tz=config.tz)
-
-    from .scheduler import schedule_runner
-
-    try:
-        # NOTE: Start running workflow scheduler application.
-        workflow_rs: list[str] = schedule_runner(
-            stop=stop, excluded=excluded, externals=json.loads(externals)
-        )
-        echo(f"Schedule with CLI run success with: {workflow_rs}")
-    except Exception as err:
-        echo(str(err))
-        sys.exit(1)
-
-    sys.exit(0)
-
-
-@cli.callback()
-def main():
-    """
-    Manage workflow with CLI.
-    """
-
-
-if __name__ == "__main__":
-    cli()
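With `cli.py` and the `entry_points.txt` console scripts removed, the `ddeutil-workflow run` and `ddeutil-workflow schedule` commands no longer exist in 0.0.26.post0. A rough programmatic stand-in for the removed `schedule` command, based on the deleted CLI body and assuming `schedule_runner` is still importable from the scheduler module:

```python
# Equivalent of the removed `ddeutil-workflow schedule` command, following the
# deleted CLI's call into the scheduler module; defaults mirror its behaviour.
from ddeutil.workflow.scheduler import schedule_runner  # assumed still exported

results: list[str] = schedule_runner(stop=None, excluded=[], externals={})
print(f"Schedule run finished with: {results}")
```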
{ddeutil_workflow-0.0.25.dist-info → ddeutil_workflow-0.0.26.post0.dist-info}/LICENSE
File without changes
{ddeutil_workflow-0.0.25.dist-info → ddeutil_workflow-0.0.26.post0.dist-info}/top_level.txt
File without changes